Column schema:

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
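A minimal sketch of how rows with this schema could be loaded and filtered programmatically, assuming the rows are stored in a local Parquet shard; the file name `code_rows.parquet` and the chosen thresholds are hypothetical, not part of the dataset itself.

```python
import pandas as pd

# Hypothetical shard name; substitute the actual file for this dataset.
df = pd.read_parquet("code_rows.parquet")

# Keep Python files that are not dominated by duplicated 5-grams and that
# contain a reasonable fraction of alphabetic characters.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)
    & (df["qsc_code_frac_chars_alphabet_quality_signal"] > 0.6)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```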
hexsha: 0d4ebd3198c07949f872169aebb1aa434e799ae1 | size: 7,161 | ext: py | lang: Python
max_stars_repo_path: iaso/tests/api/test_task_dhis2_ou_importer.py | max_stars_repo_name: BLSQ/iaso-copy | max_stars_repo_head_hexsha: 85fb17f408c15e8c2d730416d1312f58f8db39b7
max_stars_repo_licenses: ["MIT"] | max_stars_count: 29 | max_stars_repo_stars_event_min_datetime: 2020-12-26T07:22:19.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-07T13:40:09.000Z
max_issues_repo_path: iaso/tests/api/test_task_dhis2_ou_importer.py | max_issues_repo_name: BLSQ/iaso-copy | max_issues_repo_head_hexsha: 85fb17f408c15e8c2d730416d1312f58f8db39b7
max_issues_repo_licenses: ["MIT"] | max_issues_count: 150 | max_issues_repo_issues_event_min_datetime: 2020-11-09T15:03:27.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-07T15:36:07.000Z
max_forks_repo_path: iaso/tests/api/test_task_dhis2_ou_importer.py | max_forks_repo_name: BLSQ/iaso | max_forks_repo_head_hexsha: 95c8087c0182bdd576598eb8cd39c440e58e15d7
max_forks_repo_licenses: ["MIT"] | max_forks_count: 4 | max_forks_repo_forks_event_min_datetime: 2020-11-09T10:38:13.000Z | max_forks_repo_forks_event_max_datetime: 2021-10-04T09:42:47.000Z
content:
from iaso.test import APITestCase
from iaso import models as m


class ApiDhis2ouimporterTestCase(APITestCase):
    @classmethod
    def setUp(cls):
        cls.account = account = m.Account.objects.create(name="test account")
        cls.user = cls.create_user_with_profile(username="test user", account=account, permissions=["iaso_sources"])

    def test_no_perm(self):
        source = m.DataSource.objects.create(name="test source")
        user_no_perm = self.create_user_with_profile(username="test user2", account=self.account, permissions=[])
        self.client.force_authenticate(user_no_perm)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
            },
        )
        jr = self.assertJSONResponse(response, 403)
        self.assertEqual({"detail": "You do not have permission to perform this action."}, jr)

    def test_no_perm_source(self):
        source = m.DataSource.objects.create(name="test source")
        self.client.force_authenticate(self.user)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
            },
        )
        jr = self.assertJSONResponse(response, 400)
        self.assertEqual({"non_field_errors": ["Unauthorized source_id"]}, jr)

    def test_ok(self):
        project = m.Project.objects.create(name="test proj", app_id="app_id", account=self.account)
        credentials = m.ExternalCredentials.objects.create(
            url="url", login="login", password="pwd", account=self.account
        )
        source = m.DataSource.objects.create(name="test source", credentials=credentials)
        source.projects.add(project)
        self.client.force_authenticate(self.user)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
            },
        )
        jr = self.assertJSONResponse(response, 200)
        task = self.assertValidTaskAndInDB(jr)
        self.assertEqual(task.launcher, self.user)
        self.assertEqual(task.params["kwargs"]["source_id"], source.id)
        self.assertEqual(task.params["kwargs"]["url"], None)
        self.assertEqual(task.params["kwargs"]["login"], None)
        self.assertEqual(task.params["kwargs"]["password"], None)

    def test_not_ok_incomplete_credentials(self):
        project = m.Project.objects.create(name="test proj", app_id="app_id", account=self.account)
        # missing login
        credentials = m.ExternalCredentials.objects.create(url="url", login="", password="pwd", account=self.account)
        source = m.DataSource.objects.create(name="test source", credentials=credentials)
        source.projects.add(project)
        self.client.force_authenticate(self.user)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
            },
        )
        jr = self.assertJSONResponse(response, 400)
        self.assertEqual({"non_field_errors": ["No valid credentials exist for this source, please provide them"]}, jr)

    def test_pass_credentials(self):
        project = m.Project.objects.create(name="test proj", app_id="app_id", account=self.account)
        source = m.DataSource.objects.create(name="test source")
        source.projects.add(project)
        self.client.force_authenticate(self.user)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
                "dhis2_url": "overid url",
                "dhis2_login": "overid login",
                "dhis2_password": "overid pwd",
            },
        )
        jr = self.assertJSONResponse(response, 200)
        self.assertValidTaskAndInDB(jr)

    def test_override_credentials(self):
        project = m.Project.objects.create(name="test proj", app_id="app_id", account=self.account)
        credentials = m.ExternalCredentials.objects.create(
            url="url", login="login", password="pwd", account=self.account
        )
        source = m.DataSource.objects.create(name="test source", credentials=credentials)
        source.projects.add(project)
        self.client.force_authenticate(self.user)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
                "dhis2_url": "override url",
                "dhis2_login": "override login",
                "dhis2_password": "override pwd",
            },
        )
        jr = self.assertJSONResponse(response, 200)
        task = self.assertValidTaskAndInDB(jr)
        self.assertEqual(task.launcher, self.user)
        self.assertEqual(task.params["kwargs"]["source_id"], source.id)
        self.assertEqual(task.params["kwargs"]["url"], "override url")
        self.assertEqual(task.params["kwargs"]["login"], "override login")
        self.assertEqual(task.params["kwargs"]["password"], "override pwd")

    def test_override_credentials_bad(self):
        """Document bad behaviour, if we only pass some credentials we accept it but we are not going to use it"""
        project = m.Project.objects.create(name="test proj", app_id="app_id", account=self.account)
        credentials = m.ExternalCredentials.objects.create(
            url="url", login="login", password="pwd", account=self.account
        )
        source = m.DataSource.objects.create(name="test source", credentials=credentials)
        source.projects.add(project)
        self.client.force_authenticate(self.user)
        response = self.client.post(
            "/api/dhis2ouimporter/",
            format="json",
            data={
                "source_id": source.id,
                "source_version_number": 1,
                "dhis2_url": "override url",
                "dhis2_password": "override pwd",
            },
        )
        jr = self.assertJSONResponse(response, 200)
        task = self.assertValidTaskAndInDB(jr)
        self.assertEqual(task.launcher, self.user)
        self.assertEqual(task.params["kwargs"]["source_id"], source.id)
        self.assertEqual(task.params["kwargs"]["url"], "override url")
        self.assertEqual(task.params["kwargs"]["login"], None)
        self.assertEqual(task.params["kwargs"]["password"], "override pwd")

    def assertValidTaskAndInDB(self, jr, status="QUEUED", name=None):
        task_dict = jr["task"]
        self.assertEqual(task_dict["status"], status, task_dict)
        task = m.Task.objects.get(id=task_dict["id"])
        self.assertTrue(task)
        if name:
            self.assertEqual(task.name, name)
        return task
| 41.877193
| 119
| 0.608435
| 768
| 7,161
| 5.561198
| 0.147135
| 0.039335
| 0.055725
| 0.063919
| 0.773355
| 0.773355
| 0.748068
| 0.748068
| 0.736362
| 0.698197
| 0
| 0.008516
| 0.262114
| 7,161
| 170
| 120
| 42.123529
| 0.799773
| 0.016059
| 0
| 0.64
| 0
| 0
| 0.171023
| 0.041761
| 0
| 0
| 0
| 0
| 0.22
| 1
| 0.06
| false
| 0.073333
| 0.066667
| 0
| 0.14
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
effective: 0 | hits: 6
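For orientation, a rough sketch of how a few of the simple surface statistics (avg_line_length, max_line_length, alphanum_fraction) could be computed from a `content` string. The dataset's exact signal definitions are not given here, so treat this as an approximation rather than the reference implementation.

```python
def surface_stats(content: str) -> dict:
    """Approximate surface statistics; the dataset's exact definitions may
    differ (e.g. in how trailing newlines or unicode are handled)."""
    lines = content.splitlines() or [""]
    line_lengths = [len(line) for line in lines]
    alnum = sum(ch.isalnum() for ch in content)
    return {
        "avg_line_length": sum(line_lengths) / len(line_lengths),
        "max_line_length": max(line_lengths),
        "alphanum_fraction": alnum / max(len(content), 1),
    }
```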
hexsha: b4e7afc5be4d96bc6e23b04f9133da47e3f3a970 | size: 43 | ext: py | lang: Python
max_stars_repo_path: cloudberry-py/cloudberry/util/__init__.py | max_stars_repo_name: olliekrk/cloud-berry | max_stars_repo_head_hexsha: 8b39fb0b4f8772348fb50c0c1d0200c96df03cbe
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: cloudberry-py/cloudberry/util/__init__.py | max_issues_repo_name: olliekrk/cloud-berry | max_issues_repo_head_hexsha: 8b39fb0b4f8772348fb50c0c1d0200c96df03cbe
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: cloudberry-py/cloudberry/util/__init__.py | max_forks_repo_name: olliekrk/cloud-berry | max_forks_repo_head_hexsha: 8b39fb0b4f8772348fb50c0c1d0200c96df03cbe
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from .data_frame_util import DataFrameUtil
| 21.5
| 42
| 0.883721
| 6
| 43
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
hexsha: b4edb8fd937104659d19f952d69424ae9a63b232 | size: 35 | ext: py | lang: Python
max_stars_repo_path: src/oscar/apps/core/models.py | max_stars_repo_name: frmdstryr/django-oscar | max_stars_repo_head_hexsha: 32bf8618ebb688df6ba306dc7703de8e61b4e78c
max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: src/oscar/apps/core/models.py | max_issues_repo_name: frmdstryr/django-oscar | max_issues_repo_head_hexsha: 32bf8618ebb688df6ba306dc7703de8e61b4e78c
max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: src/oscar/apps/core/models.py | max_forks_repo_name: frmdstryr/django-oscar | max_forks_repo_head_hexsha: 32bf8618ebb688df6ba306dc7703de8e61b4e78c
max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from django.db.models import Model
| 17.5
| 34
| 0.828571
| 6
| 35
| 4.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
hexsha: 2ed8a7896cce103b5a83082b27d26ffbaf62c2ff | size: 43 | ext: py | lang: Python
max_stars_repo_path: pbesa/engine/__init__.py | max_stars_repo_name: scottwedge/pbesa | max_stars_repo_head_hexsha: 21b161538aa0c508088dc47a3a88413b6fd6504d
max_stars_repo_licenses: ["MIT"] | max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2020-10-22T22:23:40.000Z | max_stars_repo_stars_event_max_datetime: 2021-09-14T01:18:01.000Z
max_issues_repo_path: pbesa/engine/__init__.py | max_issues_repo_name: scottwedge/pbesa | max_issues_repo_head_hexsha: 21b161538aa0c508088dc47a3a88413b6fd6504d
max_issues_repo_licenses: ["MIT"] | max_issues_count: 2 | max_issues_repo_issues_event_min_datetime: 2020-05-27T13:59:42.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-02T14:58:12.000Z
max_forks_repo_path: pbesa/engine/__init__.py | max_forks_repo_name: scottwedge/pbesa | max_forks_repo_head_hexsha: 21b161538aa0c508088dc47a3a88413b6fd6504d
max_forks_repo_licenses: ["MIT"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2020-05-27T13:50:40.000Z | max_forks_repo_forks_event_max_datetime: 2020-05-27T13:50:40.000Z
content:
from .bdi import *
from .rational import *
| 14.333333
| 23
| 0.72093
| 6
| 43
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 24
| 21.5
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
hexsha: 258dce57be584f7ec5e525cc959425f68d4ad23d | size: 295 | ext: py | lang: Python
max_stars_repo_path: torch-two-sample-master/torch_two_sample/__init__.py | max_stars_repo_name: isaacsultan/ift6135-gan | max_stars_repo_head_hexsha: fa18f6664ecd60c43f12034ed2fbf979f3b5922a
max_stars_repo_licenses: ["MIT"] | max_stars_count: 199 | max_stars_repo_stars_event_min_datetime: 2017-10-05T08:12:41.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-22T16:14:27.000Z
max_issues_repo_path: torch-two-sample-master/torch_two_sample/__init__.py | max_issues_repo_name: isaacsultan/ift6135-gan | max_issues_repo_head_hexsha: fa18f6664ecd60c43f12034ed2fbf979f3b5922a
max_issues_repo_licenses: ["MIT"] | max_issues_count: 7 | max_issues_repo_issues_event_min_datetime: 2018-04-14T08:49:38.000Z | max_issues_repo_issues_event_max_datetime: 2021-07-27T20:52:10.000Z
max_forks_repo_path: torch-two-sample-master/torch_two_sample/__init__.py | max_forks_repo_name: isaacsultan/ift6135-gan | max_forks_repo_head_hexsha: fa18f6664ecd60c43f12034ed2fbf979f3b5922a
max_forks_repo_licenses: ["MIT"] | max_forks_count: 28 | max_forks_repo_forks_event_min_datetime: 2018-02-11T18:48:43.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-07T21:35:58.000Z
content:
from .statistics_diff import (
SmoothFRStatistic, SmoothKNNStatistic, MMDStatistic, EnergyStatistic)
from .statistics_nondiff import FRStatistic, KNNStatistic
__all__ = ['SmoothFRStatistic', 'SmoothKNNStatistic', 'MMDStatistic',
'EnergyStatistic', 'FRStatistic', 'KNNStatistic']
| 42.142857
| 73
| 0.779661
| 21
| 295
| 10.666667
| 0.571429
| 0.125
| 0.419643
| 0.553571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125424
| 295
| 6
| 74
| 49.166667
| 0.868217
| 0
| 0
| 0
| 0
| 0
| 0.288136
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
25d4b7055d85b6064c02b750f052207d6b9c710b
| 596
|
py
|
Python
|
services/customers/app/pubsub/schemas.py
|
rafaeltardivo/micro-order
|
310e4b10d0f2dee24d2e36b544c25dc1c062d6d6
|
[
"MIT"
] | 1
|
2020-05-04T23:09:06.000Z
|
2020-05-04T23:09:06.000Z
|
services/customers/app/pubsub/schemas.py
|
rafaeltardivo/micro-order
|
310e4b10d0f2dee24d2e36b544c25dc1c062d6d6
|
[
"MIT"
] | 5
|
2020-06-05T23:58:35.000Z
|
2021-06-04T23:13:46.000Z
|
services/customers/app/pubsub/schemas.py
|
rafaeltardivo/micro-order
|
310e4b10d0f2dee24d2e36b544c25dc1c062d6d6
|
[
"MIT"
] | null | null | null |
from marshmallow import Schema, fields
customer_detail_schema = Schema.from_dict(
{
'email': fields.Email(),
'address': fields.Str()
}
)
customer_shipping_schema = Schema.from_dict(
{
'id': fields.Integer(),
'customer': fields.Nested(
Schema.from_dict(
{
'email': fields.Email(),
'address': fields.Str()
}
)
)
}
)
customer_request_schema = Schema.from_dict(
{
'id': fields.Integer(),
'customer': fields.Integer()
}
)
| 19.866667
| 44
| 0.498322
| 50
| 596
| 5.74
| 0.32
| 0.139373
| 0.195122
| 0.209059
| 0.71777
| 0.71777
| 0.71777
| 0.71777
| 0.71777
| 0.376307
| 0
| 0
| 0.374161
| 596
| 29
| 45
| 20.551724
| 0.769437
| 0
| 0
| 0.230769
| 0
| 0
| 0.073826
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
25f1b832a9257f1c111e4f63d846469686ba4722
| 39
|
py
|
Python
|
index.py
|
UsairimIsani/SECC
|
447101d504b3c1e768c4f5883b33b6fa5cf6ba57
|
[
"MIT"
] | null | null | null |
index.py
|
UsairimIsani/SECC
|
447101d504b3c1e768c4f5883b33b6fa5cf6ba57
|
[
"MIT"
] | null | null | null |
index.py
|
UsairimIsani/SECC
|
447101d504b3c1e768c4f5883b33b6fa5cf6ba57
|
[
"MIT"
] | null | null | null |
print("Hello World with Usairim Isani")
| 39
| 39
| 0.794872
| 6
| 39
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
d33f3944a2c87acc0fc52170a928a178cf1d5055
| 47
|
py
|
Python
|
serverless_secrets/providers/__init__.py
|
trek10inc/serverless-secrets-python
|
503bf75a587d9b58613c9dc04df0e97d6e131391
|
[
"MIT"
] | 5
|
2017-09-24T06:21:00.000Z
|
2020-12-19T07:32:48.000Z
|
serverless_secrets/providers/__init__.py
|
imbi7py/serverless-secrets-python
|
503bf75a587d9b58613c9dc04df0e97d6e131391
|
[
"MIT"
] | 1
|
2017-10-25T14:30:31.000Z
|
2017-10-25T14:30:31.000Z
|
serverless_secrets/providers/__init__.py
|
imbi7py/serverless-secrets-python
|
503bf75a587d9b58613c9dc04df0e97d6e131391
|
[
"MIT"
] | 3
|
2017-10-19T13:46:14.000Z
|
2020-12-19T07:35:11.000Z
|
from serverless_secrets.providers.aws import *
| 23.5
| 46
| 0.851064
| 6
| 47
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d3812fbf013b69db9b926cfd0764ee9b6bd85619
| 7,182
|
py
|
Python
|
In_Prep_Manuscripts/GFC_Anxiety_2019_PBS/scripts/QC/QC_NeuroTeen_GFC_analysis_LONGLEAF.py
|
Telzer-DSNLab/Project-NeuroTeen
|
0ef67d8a902682e8f482b07b0f89dcfc21247b41
|
[
"BSD-3-Clause"
] | null | null | null |
In_Prep_Manuscripts/GFC_Anxiety_2019_PBS/scripts/QC/QC_NeuroTeen_GFC_analysis_LONGLEAF.py
|
Telzer-DSNLab/Project-NeuroTeen
|
0ef67d8a902682e8f482b07b0f89dcfc21247b41
|
[
"BSD-3-Clause"
] | null | null | null |
In_Prep_Manuscripts/GFC_Anxiety_2019_PBS/scripts/QC/QC_NeuroTeen_GFC_analysis_LONGLEAF.py
|
Telzer-DSNLab/Project-NeuroTeen
|
0ef67d8a902682e8f482b07b0f89dcfc21247b41
|
[
"BSD-3-Clause"
] | null | null | null |
#assemble functional connectomes for GFC analysis
#Written by Paul Sharp 12/19/2018
#run for each wave by changing wave variable
import os
import csv
import nipype.interfaces.fsl as fsl
import datetime
import sys
'''Set up variables and paths'''
prefix='NT' #prefix defining subjects
prefix_atlas='Power' #prefix defining atlas ROIs
path_to_subs='/pine/scr/p/s/psharp89/GFC_analysis/all_GFC_files/batch{}'.format(sys.argv[1])
subjects=[x for x in os.listdir(path_to_subs) if x.startswith(prefix)]
output_path='/pine/scr/p/s/psharp89/GFC_analysis/QC/batch{}'.format(sys.argv[1])
path_to_rois='/pine/scr/p/s/psharp89/GFC_analysis/power_atlas_ROIs'
rois=['Power_ROI_sphere_binarized_0.nii.gz', 'Power_ROI_sphere_binarized_1.nii.gz',
'Power_ROI_sphere_binarized_2.nii.gz', 'Power_ROI_sphere_binarized_3.nii.gz', 'Power_ROI_sphere_binarized_4.nii.gz',
'Power_ROI_sphere_binarized_5.nii.gz', 'Power_ROI_sphere_binarized_6.nii.gz', 'Power_ROI_sphere_binarized_7.nii.gz',
'Power_ROI_sphere_binarized_8.nii.gz', 'Power_ROI_sphere_binarized_9.nii.gz', 'Power_ROI_sphere_binarized_10.nii.gz',
'Power_ROI_sphere_binarized_11.nii.gz', 'Power_ROI_sphere_binarized_12.nii.gz', 'Power_ROI_sphere_binarized_14.nii.gz',
'Power_ROI_sphere_binarized_16.nii.gz', 'Power_ROI_sphere_binarized_17.nii.gz', 'Power_ROI_sphere_binarized_18.nii.gz', 'Power_ROI_sphere_binarized_20.nii.gz',
'Power_ROI_sphere_binarized_21.nii.gz', 'Power_ROI_sphere_binarized_22.nii.gz', 'Power_ROI_sphere_binarized_23.nii.gz', 'Power_ROI_sphere_binarized_24.nii.gz',
'Power_ROI_sphere_binarized_26.nii.gz', 'Power_ROI_sphere_binarized_27.nii.gz', 'Power_ROI_sphere_binarized_28.nii.gz', 'Power_ROI_sphere_binarized_29.nii.gz', 'Power_ROI_sphere_binarized_30.nii.gz', 'Power_ROI_sphere_binarized_31.nii.gz', 'Power_ROI_sphere_binarized_32.nii.gz', 'Power_ROI_sphere_binarized_33.nii.gz', 'Power_ROI_sphere_binarized_34.nii.gz', 'Power_ROI_sphere_binarized_35.nii.gz', 'Power_ROI_sphere_binarized_36.nii.gz', 'Power_ROI_sphere_binarized_37.nii.gz', 'Power_ROI_sphere_binarized_38.nii.gz', 'Power_ROI_sphere_binarized_39.nii.gz', 'Power_ROI_sphere_binarized_46.nii.gz', 'Power_ROI_sphere_binarized_48.nii.gz', 'Power_ROI_sphere_binarized_49.nii.gz', 'Power_ROI_sphere_binarized_52.nii.gz', 'Power_ROI_sphere_binarized_53.nii.gz', 'Power_ROI_sphere_binarized_75.nii.gz', 'Power_ROI_sphere_binarized_77.nii.gz', 'Power_ROI_sphere_binarized_80.nii.gz', 'Power_ROI_sphere_binarized_81.nii.gz', 'Power_ROI_sphere_binarized_82.nii.gz', 'Power_ROI_sphere_binarized_83.nii.gz', 'Power_ROI_sphere_binarized_84.nii.gz', 'Power_ROI_sphere_binarized_96.nii.gz', 'Power_ROI_sphere_binarized_97.nii.gz', 'Power_ROI_sphere_binarized_98.nii.gz', 'Power_ROI_sphere_binarized_99.nii.gz', 'Power_ROI_sphere_binarized_108.nii.gz', 'Power_ROI_sphere_binarized_115.nii.gz', 'Power_ROI_sphere_binarized_116.nii.gz', 'Power_ROI_sphere_binarized_118.nii.gz', 'Power_ROI_sphere_binarized_120.nii.gz', 'Power_ROI_sphere_binarized_122.nii.gz', 'Power_ROI_sphere_binarized_123.nii.gz', 'Power_ROI_sphere_binarized_124.nii.gz', 'Power_ROI_sphere_binarized_125.nii.gz', 'Power_ROI_sphere_binarized_126.nii.gz', 'Power_ROI_sphere_binarized_127.nii.gz', 'Power_ROI_sphere_binarized_128.nii.gz', 'Power_ROI_sphere_binarized_131.nii.gz', 'Power_ROI_sphere_binarized_135.nii.gz', 'Power_ROI_sphere_binarized_136.nii.gz', 'Power_ROI_sphere_binarized_137.nii.gz', 'Power_ROI_sphere_binarized_138.nii.gz', 'Power_ROI_sphere_binarized_140.nii.gz', 'Power_ROI_sphere_binarized_141.nii.gz', 'Power_ROI_sphere_binarized_142.nii.gz', 'Power_ROI_sphere_binarized_149.nii.gz', 'Power_ROI_sphere_binarized_150.nii.gz', 'Power_ROI_sphere_binarized_152.nii.gz', 'Power_ROI_sphere_binarized_153.nii.gz', 'Power_ROI_sphere_binarized_160.nii.gz', 'Power_ROI_sphere_binarized_164.nii.gz', 'Power_ROI_sphere_binarized_171.nii.gz', 'Power_ROI_sphere_binarized_173.nii.gz', 'Power_ROI_sphere_binarized_177.nii.gz', 'Power_ROI_sphere_binarized_178.nii.gz', 'Power_ROI_sphere_binarized_179.nii.gz', 'Power_ROI_sphere_binarized_180.nii.gz', 'Power_ROI_sphere_binarized_181.nii.gz', 'Power_ROI_sphere_binarized_182.nii.gz', 'Power_ROI_sphere_binarized_183.nii.gz', 'Power_ROI_sphere_binarized_184.nii.gz', 'Power_ROI_sphere_binarized_190.nii.gz', 'Power_ROI_sphere_binarized_191.nii.gz', 'Power_ROI_sphere_binarized_192.nii.gz', 'Power_ROI_sphere_binarized_202.nii.gz', 'Power_ROI_sphere_binarized_204.nii.gz', 'Power_ROI_sphere_binarized_210.nii.gz', 'Power_ROI_sphere_binarized_242.nii.gz', 'Power_ROI_sphere_binarized_243.nii.gz', 'Power_ROI_sphere_binarized_244.nii.gz', 'Power_ROI_sphere_binarized_245.nii.gz', 'Power_ROI_sphere_binarized_246.nii.gz', 'Power_ROI_sphere_binarized_247.nii.gz', 'Power_ROI_sphere_binarized_248.nii.gz', 'Power_ROI_sphere_binarized_249.nii.gz', 'Power_ROI_sphere_binarized_250.nii.gz', 'Power_ROI_sphere_binarized_252.nii.gz', 'Power_ROI_sphere_binarized_253.nii.gz', 'Power_ROI_sphere_binarized_254.nii.gz', 'Power_ROI_sphere_binarized_255.nii.gz', 'Power_ROI_sphere_binarized_257.nii.gz', 'Power_ROI_sphere_binarized_258.nii.gz', 
'Power_ROI_sphere_binarized_260.nii.gz', 'Power_ROI_sphere_binarized_261.nii.gz', 'Power_ROI_sphere_binarized_262.nii.gz', 'Power_ROI_sphere_binarized_263.nii.gz']
tasks=['BOLD_Cups_Parent',
'BOLD_Cups_Peer',
'BOLD_Cups_Self',
'BOLD_Ratings_1',
'BOLD_Ratings_2',
'BOLD_Resting_State',
'BOLD_Shapes_1',
'BOLD_Shapes_2']
Bad_ROIs=[['Subject','Task','ROI','Percent_of_Sphere']]
for subject in subjects:
os.chdir(path_to_subs)
os.chdir(subject)
for task in tasks:
current_smoothed_functional='prep.default.{}_MCcorrected_smooth6mm_MNI2mm_denoised.nii.gz'.format(task)
if os.path.exists(current_smoothed_functional):
get_first_vol=fsl.ImageMaths(in_file=current_smoothed_functional, op_string=' -roi 0 -1 0 -1 0 -1 0 1', out_file='{}/first_vol.nii.gz'.format(output_path))
get_first_vol.run()
print 'Get first vol worked'
roi_num=1
if os.path.exists(current_smoothed_functional):
for roi in rois:
get_reduced_sphere=fsl.ImageMaths(in_file='{}/first_vol.nii.gz'.format(output_path), op_string='-mul {}/{}'.format(path_to_rois,roi), out_file='{}/reduced_sphere.nii.gz'.format(output_path))
get_reduced_sphere.run()
#get volume of each ROI in native diffusion space
get_volume_ROI = fsl.ImageStats(in_file='{}/reduced_sphere.nii.gz'.format(output_path), op_string='-V > {}/ROI_volumes.txt'.format(output_path))
get_volume_ROI.run()
with open('{}/ROI_volumes.txt'.format(output_path), 'r') as f:
lines=f.readlines()
line=lines[0].split()
roi_vol=float(line[0])
percent_sphere=roi_vol/81.0 #81.0 is the size of each sphere with 10mm diameter
current_info_for_csv=[subject,task,roi_num,percent_sphere]
now = datetime.datetime.now()
g=open('{}/record.txt'.format(output_path),'a')
g.write('Another iteration at {}:{}:{}\n'.format(now.hour,now.minute,now.second))
if percent_sphere <0.80:
print current_info_for_csv
Bad_ROIs.append(current_info_for_csv)
roi_num+=1
os.chdir(output_path)
with open('QC_output_ROIs_less_than_80_percent_coverage.csv', 'a') as f:
writer=csv.writer(f)
writer.writerows(Bad_ROIs)
| 89.775
| 3,700
| 0.813144
| 1,218
| 7,182
| 4.333333
| 0.20936
| 0.111785
| 0.299735
| 0.492421
| 0.675635
| 0.668435
| 0.060629
| 0.026904
| 0
| 0
| 0
| 0.047823
| 0.056669
| 7,182
| 79
| 3,701
| 90.911392
| 0.731218
| 0.038012
| 0
| 0.032787
| 0
| 0
| 0.69345
| 0.644833
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.081967
| null | null | 0.032787
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6ca891034d853805f933acf36bd09f8e9974aae6
| 5,700
|
py
|
Python
|
experiments/ant/ddpg_process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/ant/ddpg_process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/ant/ddpg_process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(0, '../../')
from libs_common.RLStatsCompute import *
import matplotlib.pyplot as plt
result_path = "./results/"
files = []
files.append("./models/ddpg_baseline/run_0/result/result.log")
files.append("./models/ddpg_baseline/run_1/result/result.log")
files.append("./models/ddpg_baseline/run_2/result/result.log")
files.append("./models/ddpg_baseline/run_3/result/result.log")
files.append("./models/ddpg_baseline/run_4/result/result.log")
files.append("./models/ddpg_baseline/run_5/result/result.log")
files.append("./models/ddpg_baseline/run_6/result/result.log")
files.append("./models/ddpg_baseline/run_7/result/result.log")
ddpg_baseline = RLStatsCompute(files)
files = []
files.append("./models/ddpg_curiosity/run_0/result/result.log")
files.append("./models/ddpg_curiosity/run_1/result/result.log")
files.append("./models/ddpg_curiosity/run_2/result/result.log")
files.append("./models/ddpg_curiosity/run_3/result/result.log")
files.append("./models/ddpg_curiosity/run_4/result/result.log")
files.append("./models/ddpg_curiosity/run_5/result/result.log")
files.append("./models/ddpg_curiosity/run_6/result/result.log")
files.append("./models/ddpg_curiosity/run_7/result/result.log")
ddpg_curiosity_rnd = RLStatsCompute(files)
files = []
files.append("./models/ddpg_entropy/run_0/result/result.log")
files.append("./models/ddpg_entropy/run_1/result/result.log")
files.append("./models/ddpg_entropy/run_2/result/result.log")
files.append("./models/ddpg_entropy/run_3/result/result.log")
files.append("./models/ddpg_entropy/run_4/result/result.log")
files.append("./models/ddpg_entropy/run_5/result/result.log")
files.append("./models/ddpg_entropy/run_6/result/result.log")
files.append("./models/ddpg_entropy/run_7/result/result.log")
ddpg_curiosity_entropy = RLStatsCompute(files)
plt.cla()
plt.ylabel("score")
plt.xlabel("episode")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(ddpg_baseline.mean[1], ddpg_baseline.mean[4], label="ddpg baseline", color='deepskyblue')
plt.fill_between(ddpg_baseline.mean[1], ddpg_baseline.lower[4], ddpg_baseline.upper[4], color='deepskyblue', alpha=0.2)
plt.plot(ddpg_curiosity_rnd.mean[1], ddpg_curiosity_rnd.mean[4], label="ddpg curiosity RND", color='limegreen')
plt.fill_between(ddpg_curiosity_rnd.mean[1], ddpg_curiosity_rnd.lower[4], ddpg_curiosity_rnd.upper[4], color='limegreen', alpha=0.2)
plt.plot(ddpg_curiosity_entropy.mean[1], ddpg_curiosity_entropy.mean[4], label="ddpg curiosity RND + entropy", color='red')
plt.fill_between(ddpg_curiosity_entropy.mean[1], ddpg_curiosity_entropy.lower[4], ddpg_curiosity_entropy.upper[4], color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "ddpg_score_per_episode.png", dpi = 300)
plt.cla()
plt.ylabel("score")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(ddpg_baseline.mean[0], ddpg_baseline.mean[4], label="ddpg baseline", color='deepskyblue')
plt.fill_between(ddpg_baseline.mean[0], ddpg_baseline.lower[4], ddpg_baseline.upper[4], color='deepskyblue', alpha=0.2)
plt.plot(ddpg_curiosity_rnd.mean[0], ddpg_curiosity_rnd.mean[4], label="ddpg curiosity RND", color='limegreen')
plt.fill_between(ddpg_curiosity_rnd.mean[0], ddpg_curiosity_rnd.lower[4], ddpg_curiosity_rnd.upper[4], color='limegreen', alpha=0.2)
plt.plot(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.mean[4], label="ddpg curiosity RND + entropy", color='red')
plt.fill_between(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.lower[4], ddpg_curiosity_entropy.upper[4], color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "ddpg_score_per_iteration.png", dpi = 300)
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(ddpg_curiosity_rnd.mean[0], ddpg_curiosity_rnd.mean[10], label="curiosity", color='deepskyblue')
plt.fill_between(ddpg_curiosity_rnd.mean[0], ddpg_curiosity_rnd.lower[10], ddpg_curiosity_rnd.upper[10], color='deepskyblue', alpha=0.2)
plt.legend(loc='upper right', borderaxespad=0.)
plt.savefig(result_path + "ddpg_curiosity_internal_motivation.png", dpi = 300)
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.mean[10], label="curiosity", color='deepskyblue')
plt.fill_between(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.lower[10], ddpg_curiosity_entropy.upper[10], color='deepskyblue', alpha=0.2)
plt.plot(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.mean[12], label="entropy", color='red')
plt.fill_between(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.lower[12], ddpg_curiosity_entropy.upper[12], color='red', alpha=0.2)
plt.legend(loc='upper right', borderaxespad=0.)
plt.savefig(result_path + "ddpg_entropy_internal_motivation.png", dpi = 300)
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.mean[9], label="RND loss", color='deepskyblue')
plt.fill_between(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.lower[9], ddpg_curiosity_entropy.upper[9], color='deepskyblue', alpha=0.2)
plt.plot(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.mean[11], label="AE loss", color='red')
plt.fill_between(ddpg_curiosity_entropy.mean[0], ddpg_curiosity_entropy.lower[11], ddpg_curiosity_entropy.upper[11], color='red', alpha=0.2)
plt.legend(loc='upper right', borderaxespad=0.)
plt.savefig(result_path + "ddpg_entropy_loss.png", dpi = 300)
| 43.51145
| 148
| 0.775965
| 878
| 5,700
| 4.826879
| 0.082005
| 0.184049
| 0.146295
| 0.118924
| 0.919538
| 0.918358
| 0.902076
| 0.853469
| 0.845446
| 0.605474
| 0
| 0.026632
| 0.051404
| 5,700
| 130
| 149
| 43.846154
| 0.757167
| 0
| 0
| 0.310345
| 0
| 0
| 0.307018
| 0.219825
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9f4848c1560d60cfc062921ccf0974516e2a51bd
| 70
|
py
|
Python
|
ufdl-image-segmentation-app/src/ufdl/image_segmentation_app/routers/__init__.py
|
waikato-ufdl/ufdl-backend
|
776fc906c61eba6c2f2e6324758e7b8a323e30d7
|
[
"Apache-2.0"
] | null | null | null |
ufdl-image-segmentation-app/src/ufdl/image_segmentation_app/routers/__init__.py
|
waikato-ufdl/ufdl-backend
|
776fc906c61eba6c2f2e6324758e7b8a323e30d7
|
[
"Apache-2.0"
] | 85
|
2020-07-24T00:04:28.000Z
|
2022-02-10T10:35:15.000Z
|
ufdl-image-segmentation-app/src/ufdl/image_segmentation_app/routers/__init__.py
|
waikato-ufdl/ufdl-backend
|
776fc906c61eba6c2f2e6324758e7b8a323e30d7
|
[
"Apache-2.0"
] | null | null | null |
from ._UFDLImageSegmentationRouter import UFDLImageSegmentationRouter
| 35
| 69
| 0.928571
| 4
| 70
| 16
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 70
| 1
| 70
| 70
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9f53932b7e270495d0f06d5b24f7b598351eefde
| 27
|
py
|
Python
|
tensor_ops/__init__.py
|
Parskatt/tensor_ops
|
486d7acdce8aa2c962145c046295a14535375b1e
|
[
"MIT"
] | 1
|
2021-01-27T10:08:00.000Z
|
2021-01-27T10:08:00.000Z
|
tensor_ops/__init__.py
|
Parskatt/tensor_ops
|
486d7acdce8aa2c962145c046295a14535375b1e
|
[
"MIT"
] | null | null | null |
tensor_ops/__init__.py
|
Parskatt/tensor_ops
|
486d7acdce8aa2c962145c046295a14535375b1e
|
[
"MIT"
] | null | null | null |
from .tensors import Tensor
| 27
| 27
| 0.851852
| 4
| 27
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 27
| 1
| 27
| 27
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9f6fc7343aa8d3f1548dc1f730deab32f477d1ec
| 73
|
py
|
Python
|
src/feedingorcas/core/molecular/__init__.py
|
NiklasTiede/FeedingORCAs
|
6b4e7921cd56f5c921e5f1ffd7c370355479de5a
|
[
"MIT"
] | 2
|
2021-02-09T11:08:16.000Z
|
2021-06-13T10:48:19.000Z
|
src/feedingorcas/core/molecular/__init__.py
|
NiklasTiede/feedingORCAs
|
6b4e7921cd56f5c921e5f1ffd7c370355479de5a
|
[
"MIT"
] | null | null | null |
src/feedingorcas/core/molecular/__init__.py
|
NiklasTiede/feedingORCAs
|
6b4e7921cd56f5c921e5f1ffd7c370355479de5a
|
[
"MIT"
] | 1
|
2021-04-02T00:42:55.000Z
|
2021-04-02T00:42:55.000Z
|
from .atom import *
from .molecule import *
from .molecule_list import *
| 18.25
| 28
| 0.753425
| 10
| 73
| 5.4
| 0.5
| 0.37037
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164384
| 73
| 3
| 29
| 24.333333
| 0.885246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
9f8ee9d84c49603c5f778ffb93b0b13856ddfc63
| 39
|
py
|
Python
|
json_validator/__init__.py
|
sliwinski-milosz/json_validator
|
b56745d7e6415dcdf63e451b71baed6c88841e24
|
[
"MIT"
] | 1
|
2018-03-13T16:07:54.000Z
|
2018-03-13T16:07:54.000Z
|
json_validator/__init__.py
|
sliwinski-milosz/json_validator
|
b56745d7e6415dcdf63e451b71baed6c88841e24
|
[
"MIT"
] | null | null | null |
json_validator/__init__.py
|
sliwinski-milosz/json_validator
|
b56745d7e6415dcdf63e451b71baed6c88841e24
|
[
"MIT"
] | null | null | null |
from .validator import validate_params
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9faae15e08ea03dbe6d16b8f2ebb7332b4fee49d
| 52
|
py
|
Python
|
tools/PerformanceProfiler.pack/performance_profiler/util/__init__.py
|
usualoma/mt-plugin-PerformanceProfiler
|
3c37e5033f296057885e9ce2e86edb199877f3fc
|
[
"MIT"
] | 1
|
2021-03-28T02:05:53.000Z
|
2021-03-28T02:05:53.000Z
|
tools/PerformanceProfiler.pack/performance_profiler/util/__init__.py
|
usualoma/mt-plugin-PerformanceProfiler
|
3c37e5033f296057885e9ce2e86edb199877f3fc
|
[
"MIT"
] | 1
|
2021-04-15T11:47:09.000Z
|
2021-04-15T11:47:09.000Z
|
tools/PerformanceProfiler.pack/performance_profiler/util/__init__.py
|
usualoma/mt-plugin-PerformanceProfiler
|
3c37e5033f296057885e9ce2e86edb199877f3fc
|
[
"MIT"
] | null | null | null |
from .io import * # noqa
from .fs import * # noqa
| 17.333333
| 25
| 0.615385
| 8
| 52
| 4
| 0.625
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.269231
| 52
| 2
| 26
| 26
| 0.842105
| 0.173077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e2519421d8d8168c54df19ae16678f060f99c959
| 117
|
py
|
Python
|
assignment/wordCount.py
|
FireHead90544/cs-kvs-assignments
|
2eee321d26b8f42f45f4a17a5ba2c4bf7d3caf2a
|
[
"Unlicense"
] | null | null | null |
assignment/wordCount.py
|
FireHead90544/cs-kvs-assignments
|
2eee321d26b8f42f45f4a17a5ba2c4bf7d3caf2a
|
[
"Unlicense"
] | null | null | null |
assignment/wordCount.py
|
FireHead90544/cs-kvs-assignments
|
2eee321d26b8f42f45f4a17a5ba2c4bf7d3caf2a
|
[
"Unlicense"
] | null | null | null |
text = input("Enter Your Text: ").split(" ")
print(len(text))
# print(len(input("Enter Your Text: ").split(" ")))
| 29.25
| 51
| 0.598291
| 16
| 117
| 4.375
| 0.4375
| 0.285714
| 0.4
| 0.514286
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145299
| 117
| 4
| 51
| 29.25
| 0.7
| 0.418803
| 0
| 0
| 0
| 0
| 0.28125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
e29082f21b0dce515d22ddd2d7e3c2e4498c3f4a
| 34
|
py
|
Python
|
autodisc/classifier/__init__.py
|
flowersteam/holmes
|
e38fb8417ec56cfde8142eddd0f751e319e35d8c
|
[
"MIT"
] | 6
|
2020-12-19T00:16:16.000Z
|
2022-01-28T14:59:21.000Z
|
autodisc/classifier/__init__.py
|
Evolutionary-Intelligence/holmes
|
e38fb8417ec56cfde8142eddd0f751e319e35d8c
|
[
"MIT"
] | null | null | null |
autodisc/classifier/__init__.py
|
Evolutionary-Intelligence/holmes
|
e38fb8417ec56cfde8142eddd0f751e319e35d8c
|
[
"MIT"
] | 1
|
2021-05-24T14:58:26.000Z
|
2021-05-24T14:58:26.000Z
|
import autodisc.classifier.static
| 17
| 33
| 0.882353
| 4
| 34
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 34
| 1
| 34
| 34
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e294f802a02d00f82c7c400d7c02fd71ca3c1428
| 161
|
py
|
Python
|
release/scripts/presets/camera/GoPro_Hero3_Black.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 365
|
2015-02-10T15:10:55.000Z
|
2022-03-03T15:50:51.000Z
|
release/scripts/presets/camera/GoPro_Hero3_Black.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 45
|
2015-01-09T15:34:20.000Z
|
2021-10-05T14:44:23.000Z
|
release/scripts/presets/camera/GoPro_Hero3_Black.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 172
|
2015-01-25T15:16:53.000Z
|
2022-01-31T08:25:36.000Z
|
import bpy
bpy.context.camera.sensor_width = 6.16
bpy.context.camera.sensor_height = 4.62
bpy.context.camera.lens = 2.77
bpy.context.camera.sensor_fit = 'AUTO'
| 23
| 39
| 0.776398
| 28
| 161
| 4.357143
| 0.571429
| 0.327869
| 0.52459
| 0.540984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061644
| 0.093168
| 161
| 6
| 40
| 26.833333
| 0.773973
| 0
| 0
| 0
| 0
| 0
| 0.024845
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e2b088f8d78a565ca01a74aee5fb0c21d1de475e
| 1,351
|
py
|
Python
|
MicroPython_bin/modules/wget.py
|
holgerkenn/TheButtonProject
|
18cfbb37773c05a24a960b87c8677ec26d3160d6
|
[
"MIT"
] | null | null | null |
MicroPython_bin/modules/wget.py
|
holgerkenn/TheButtonProject
|
18cfbb37773c05a24a960b87c8677ec26d3160d6
|
[
"MIT"
] | null | null | null |
MicroPython_bin/modules/wget.py
|
holgerkenn/TheButtonProject
|
18cfbb37773c05a24a960b87c8677ec26d3160d6
|
[
"MIT"
] | null | null | null |
import usocket as socket
def get_file(url, file):
_, _, host, path = url.split('/', 3)
if ':' in host:
host, port = host.split(':', 1)
else:
port = 80
addr = socket.getaddrinfo(host, int(port))[0][-1]
s = socket.socket()
s.connect(addr)
s.send(bytes('GET /%s HTTP/1.0\r\nHost: %s\r\n\r\n' % (path, host), 'utf8'))
f = open(file, 'w')
while True:
datastr = s.readline()
if datastr != b'\r\n':
print(datastr)
else:
break
while True:
data = s.recv(100)
if data:
f.write(data)
else:
f.close()
break
def get(url):
_, _, host, path = url.split('/', 3)
if ':' in host:
host, port = host.split(':', 1)
else:
port = 80
addr = socket.getaddrinfo(host, int(port))[0][-1]
s = socket.socket()
s.connect(addr)
s.send(bytes('GET /%s HTTP/1.0\r\nHost: %s\r\n\r\n' % (path, host), 'utf8'))
while True:
datastr = s.readline()
if datastr != b'\r\n':
print(datastr)
else:
break
buffer = b''
while True:
data = s.recv(100)
if data:
buffer += data
else:
return buffer
break
| 23.293103
| 81
| 0.447816
| 171
| 1,351
| 3.508772
| 0.280702
| 0.02
| 0.036667
| 0.053333
| 0.8
| 0.8
| 0.8
| 0.8
| 0.71
| 0.71
| 0
| 0.029448
| 0.396743
| 1,351
| 57
| 82
| 23.701754
| 0.706748
| 0
| 0
| 0.816327
| 0
| 0.040816
| 0.073416
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040816
| false
| 0
| 0.020408
| 0
| 0.081633
| 0.040816
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e2b1e99151d4b72316d6bde3db06ac8c5b7aee96
| 174
|
py
|
Python
|
python/opscore/RO/Comm/__init__.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | null | null | null |
python/opscore/RO/Comm/__init__.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | 1
|
2021-08-17T21:08:14.000Z
|
2021-08-17T21:08:14.000Z
|
python/opscore/RO/Comm/__init__.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | null | null | null |
"""Event-based communication and timing
Generic contains some common classes that can work with either the Tk event loop or Twisted Framework.
"""
from .isAvailable import *
| 34.8
| 102
| 0.793103
| 25
| 174
| 5.52
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 174
| 5
| 103
| 34.8
| 0.932432
| 0.804598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2c3b10c8ee6e902275d9ea7701421b2bb1e33355
| 21
|
py
|
Python
|
casing/__init__.py
|
vincentBenet/casing
|
c9ab6216aab4c7411648b678be51d124255f9a21
|
[
"MIT"
] | 6
|
2021-03-14T13:00:27.000Z
|
2021-03-15T21:22:30.000Z
|
casing/__init__.py
|
vincentBenet/casing
|
c9ab6216aab4c7411648b678be51d124255f9a21
|
[
"MIT"
] | null | null | null |
casing/__init__.py
|
vincentBenet/casing
|
c9ab6216aab4c7411648b678be51d124255f9a21
|
[
"MIT"
] | null | null | null |
from .casing import *
| 21
| 21
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2c9618b1b3ff5d3133ae33e6cf3305b8f6db1296
| 3,622
|
py
|
Python
|
split.py
|
HieuNT1998/BraTS-DMFNet
|
c12806a7ae4cf886f379d712fc85cc6fc37bd9a7
|
[
"Apache-2.0"
] | null | null | null |
split.py
|
HieuNT1998/BraTS-DMFNet
|
c12806a7ae4cf886f379d712fc85cc6fc37bd9a7
|
[
"Apache-2.0"
] | null | null | null |
split.py
|
HieuNT1998/BraTS-DMFNet
|
c12806a7ae4cf886f379d712fc85cc6fc37bd9a7
|
[
"Apache-2.0"
] | null | null | null |
# """
# The code will split the training set into k-fold for cross-validation
# """
# import os
# import numpy as np
# from sklearn.model_selection import StratifiedKFold
# root = './data/2018/MICCAI_BraTS_2018_Data_Training'
# valid_data_dir = './data/2018/MICCAI_BraTS_2018_Data_Validation'
# def write(data, fname, root=root):
# fname = os.path.join(root, fname)
# with open(fname, 'w') as f:
# f.write('\n'.join(data))
# hgg = os.listdir(os.path.join(root, 'HGG'))
# hgg = [os.path.join('HGG', f) for f in hgg]
# lgg = os.listdir(os.path.join(root, 'LGG'))
# lgg = [os.path.join('LGG', f) for f in lgg]
# X = hgg + lgg
# Y = [1] * len(hgg) + [0] * len(lgg)
# write(X, 'all.txt')
# X, Y = np.array(X), np.array(Y)
# skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=2018)
# for k, (train_index, valid_index) in enumerate(skf.split(Y, Y)):
# train_list = list(X[train_index])
# valid_list = list(X[valid_index])
# write(train_list, 'train_{}.txt'.format(k))
# write(valid_list, 'valid_{}.txt'.format(k))
# valid = os.listdir(os.path.join(valid_data_dir))
# valid = [f for f in valid if not (f.endswith('.csv') or f.endswith('.txt'))]
# write(valid, 'valid.txt', root=valid_data_dir)
"""
The code will split the training set into k-fold for cross-validation
"""
import os
import sys
import numpy as np
from sklearn.model_selection import StratifiedKFold
import shutil
root = './data/2018/MICCAI_BraTS_2018_Data_Training'
valid_data_dir = './data/2018/MICCAI_BraTS_2018_Data_Validation'
backup = './2018/datasets'
backup_files = os.listdir(backup)
if len(backup_files) != 0:
print("Copy from backup")
for file in backup_files:
shutil.copy(os.path.join(backup, file), os.path.join(root, file))
count=0
with open(os.path.join(root, file), 'r') as f:
for line in f:
count += 1
print("File {} has {} lines.".format(file, count))
sys.exit()
def write(data, fname, root=root):
fname = os.path.join(root, fname)
with open(fname, 'w') as f:
f.write('\n'.join(data))
limit = float(sys.argv[1])
hgg = os.listdir(os.path.join(root, 'HGG'))
hgg = [os.path.join('HGG', f) for f in hgg]
lgg = os.listdir(os.path.join(root, 'LGG'))
lgg = [os.path.join('LGG', f) for f in lgg]
print("Original size: HGG:{}, LGG:{}, Total:{}".format(len(hgg), len(lgg), len(hgg) + len(lgg)))
hgg = hgg[:int(limit*len(hgg))]
lgg = lgg[:int(limit*len(lgg))]
print("Limited size: HGG:{}, LGG:{}, Total:{}".format(len(hgg), len(lgg), len(hgg) + len(lgg)))
X = hgg + lgg
Y = [1] * len(hgg) + [0] * len(lgg)
write(X, 'all.txt')
shutil.copy(os.path.join(root,'all.txt'), os.path.join(backup, 'all.txt'))
X, Y = np.array(X), np.array(Y)
skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=2018)
for k, (train_index, valid_index) in enumerate(skf.split(Y, Y)):
train_list = list(X[train_index])
valid_list = list(X[valid_index])
write(train_list, 'train_{}.txt'.format(k))
write(valid_list, 'valid_{}.txt'.format(k))
shutil.copy(os.path.join(root,'train_{}.txt'.format(k)),
os.path.join(backup, 'train_{}.txt'.format(k)))
shutil.copy(os.path.join(root,'valid_{}.txt'.format(k)),
os.path.join(backup, 'valid_{}.txt'.format(k)))
valid = os.listdir(os.path.join(valid_data_dir))
valid = [f for f in valid if not (f.endswith('.csv') or f.endswith('.txt'))]
write(valid, 'valid.txt', root=valid_data_dir)
| 32.339286
| 97
| 0.618995
| 570
| 3,622
| 3.826316
| 0.157895
| 0.057772
| 0.096286
| 0.07061
| 0.854195
| 0.828519
| 0.817515
| 0.793673
| 0.793673
| 0.767079
| 0
| 0.018557
| 0.196576
| 3,622
| 112
| 98
| 32.339286
| 0.730928
| 0.342352
| 0
| 0
| 0
| 0
| 0.153399
| 0.039356
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.1
| 0
| 0.12
| 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2ca84fdb3018a582d50c2f7e629c75366083e5de
| 9,101
|
py
|
Python
|
cpdb/trr/tests/views/test_trr_viewset.py
|
invinst/CPDBv2_backend
|
b4e96d620ff7a437500f525f7e911651e4a18ef9
|
[
"Apache-2.0"
] | 25
|
2018-07-20T22:31:40.000Z
|
2021-07-15T16:58:41.000Z
|
cpdb/trr/tests/views/test_trr_viewset.py
|
invinst/CPDBv2_backend
|
b4e96d620ff7a437500f525f7e911651e4a18ef9
|
[
"Apache-2.0"
] | 13
|
2018-06-18T23:08:47.000Z
|
2022-02-10T07:38:25.000Z
|
cpdb/trr/tests/views/test_trr_viewset.py
|
invinst/CPDBv2_backend
|
b4e96d620ff7a437500f525f7e911651e4a18ef9
|
[
"Apache-2.0"
] | 6
|
2018-05-17T21:59:43.000Z
|
2020-11-17T00:30:26.000Z
|
from datetime import datetime, date
from django.urls import reverse
from django.contrib.gis.geos import Point
from rest_framework.test import APITestCase
from rest_framework import status
from robber import expect
import pytz
from data.factories import PoliceUnitFactory, OfficerFactory, OfficerHistoryFactory, OfficerAllegationFactory
from email_service.constants import TRR_ATTACHMENT_REQUEST
from email_service.factories import EmailTemplateFactory
from trr.factories import TRRFactory, ActionResponseFactory
from trr.tests.mixins import TRRTestCaseMixin
class TRRViewSetTestCase(TRRTestCaseMixin, APITestCase):
def test_retrieve(self):
unit = PoliceUnitFactory(unit_name='001', description='Unit 001')
officer = OfficerFactory(
first_name='Vinh',
last_name='Vu',
rank='Detective',
race='White',
gender='M',
appointed_date=date(2000, 1, 1),
birth_year=1980,
complaint_percentile=44.4444,
civilian_allegation_percentile=11.1111,
internal_allegation_percentile=22.2222,
trr_percentile=33.3333,
last_unit=unit
)
OfficerHistoryFactory(officer=officer, unit=unit)
trr = TRRFactory(
taser=False,
firearm_used=False,
officer_assigned_beat='Beat 1',
officer_in_uniform=True,
officer_on_duty=False,
trr_datetime=datetime(2001, 1, 1, tzinfo=pytz.utc),
subject_gender='M',
subject_age=37,
officer=officer,
location_recode='Factory',
block='34XX',
street='Douglas Blvd',
beat=1021,
point=Point(1.0, 1.0)
)
OfficerAllegationFactory(
officer=officer,
allegation__incident_date=datetime(2003, 1, 1, tzinfo=pytz.utc),
start_date=date(2004, 1, 1),
end_date=date(2005, 1, 1),
final_finding='SU'
)
ActionResponseFactory(trr=trr, force_type='Verbal Commands', action_sub_category='1')
self.refresh_index()
response = self.client.get(reverse('api-v2:trr-detail', kwargs={'pk': trr.id}))
expect(response.status_code).to.eq(status.HTTP_200_OK)
expect(response.data).to.eq({
'id': trr.id,
'officer_assigned_beat': 'Beat 1',
'officer_in_uniform': True,
'officer_on_duty': False,
'officer': {
'id': officer.id,
'rank': 'Detective',
'gender': 'Male',
'race': 'White',
'full_name': 'Vinh Vu',
'appointed_date': '2000-01-01',
'unit': {'unit_name': '001', 'description': 'Unit 001'},
'birth_year': 1980,
'percentile_trr': '33.3333',
'percentile_allegation_internal': '22.2222',
'percentile_allegation_civilian': '11.1111',
'percentile_allegation': '44.4444',
},
'subject_race': 'White',
'subject_gender': 'Male',
'subject_age': 37,
'force_category': 'Other',
'force_types': ['Verbal Commands'],
'date_of_incident': '2001-01-01',
'location_type': 'Factory',
'address': '34XX Douglas Blvd',
'beat': 1021,
'point': {
'lng': 1.0,
'lat': 1.0,
},
})
def test_retrieve_no_point(self):
unit = PoliceUnitFactory(unit_name='001', description='Unit 001')
officer = OfficerFactory(
first_name='Vinh',
last_name='Vu',
race='White',
gender='M',
rank='Detective',
appointed_date=date(2000, 1, 1),
birth_year=1980,
complaint_percentile=44.4444,
civilian_allegation_percentile=11.1111,
internal_allegation_percentile=22.2222,
trr_percentile=33.3333,
last_unit=unit
)
OfficerHistoryFactory(officer=officer, unit=unit)
trr = TRRFactory(
taser=False,
firearm_used=False,
officer_assigned_beat='Beat 1',
officer_in_uniform=True,
officer_on_duty=False,
trr_datetime=datetime(2001, 1, 1, tzinfo=pytz.utc),
subject_gender='M',
subject_age=37,
officer=officer,
location_recode='Factory',
block='34XX',
street='Douglas Blvd',
beat=1021,
)
OfficerAllegationFactory(
officer=officer,
allegation__incident_date=datetime(2003, 1, 1, tzinfo=pytz.utc),
start_date=date(2004, 1, 1),
end_date=date(2005, 1, 1), final_finding='SU')
ActionResponseFactory(trr=trr, force_type='Verbal Commands', action_sub_category=1)
self.refresh_index()
response = self.client.get(reverse('api-v2:trr-detail', kwargs={'pk': trr.id}))
expect(response.status_code).to.eq(status.HTTP_200_OK)
expect(response.data).to.eq({
'id': trr.id,
'officer_assigned_beat': 'Beat 1',
'officer_in_uniform': True,
'officer_on_duty': False,
'officer': {
'id': officer.id,
'rank': 'Detective',
'gender': 'Male',
'race': 'White',
'full_name': 'Vinh Vu',
'appointed_date': '2000-01-01',
'unit': {'unit_name': '001', 'description': 'Unit 001'},
'birth_year': 1980,
'percentile_trr': '33.3333',
'percentile_allegation_internal': '22.2222',
'percentile_allegation_civilian': '11.1111',
'percentile_allegation': '44.4444',
},
'subject_race': 'White',
'subject_gender': 'Male',
'subject_age': 37,
'force_category': 'Other',
'force_types': ['Verbal Commands'],
'date_of_incident': '2001-01-01',
'location_type': 'Factory',
'address': '34XX Douglas Blvd',
'beat': 1021,
})
def test_retrieve_not_found(self):
response = self.client.get(reverse('api-v2:trr-detail', kwargs={'pk': 123}))
expect(response.status_code).to.eq(status.HTTP_404_NOT_FOUND)
def test_retrieve_missing_percentile(self):
officer = OfficerFactory(
civilian_allegation_percentile=None,
internal_allegation_percentile=None,
trr_percentile=None
)
trr = TRRFactory(officer=officer)
self.refresh_index()
response = self.client.get(reverse('api-v2:trr-detail', kwargs={'pk': trr.id}))
expect(response.status_code).to.eq(status.HTTP_200_OK)
def test_request_document(self):
EmailTemplateFactory(type=TRR_ATTACHMENT_REQUEST)
TRRFactory(pk=112233)
response = self.client.post(
reverse('api-v2:trr-request-document', kwargs={'pk': 112233}),
{'email': 'valid_email@example.com'}
)
expect(response.status_code).to.eq(status.HTTP_200_OK)
expect(response.data).to.eq({
'message': 'Thanks for subscribing',
'trr_id': 112233
})
def test_request_same_document_twice(self):
EmailTemplateFactory(type=TRR_ATTACHMENT_REQUEST)
trr = TRRFactory(pk=112233)
self.client.post(
reverse('api-v2:trr-request-document', kwargs={'pk': trr.id}),
{'email': 'valid_email@example.com'}
)
response2 = self.client.post(
reverse('api-v2:trr-request-document', kwargs={'pk': trr.id}),
{'email': 'valid_email@example.com'}
)
expect(response2.status_code).to.eq(status.HTTP_200_OK)
expect(response2.data).to.eq({
'message': 'Email already added',
'trr_id': 112233
})
def test_request_document_without_email(self):
TRRFactory(pk=321)
response = self.client.post(reverse('api-v2:trr-request-document', kwargs={'pk': 321}))
expect(response.status_code).to.eq(status.HTTP_400_BAD_REQUEST)
expect(response.data).to.eq({
'message': 'Please enter a valid email'
})
def test_request_document_with_invalid_email(self):
TRRFactory(pk=321)
response = self.client.post(reverse('api-v2:trr-request-document', kwargs={'pk': 321}),
{'email': 'invalid@email'})
expect(response.status_code).to.eq(status.HTTP_400_BAD_REQUEST)
expect(response.data).to.eq({
'message': 'Please enter a valid email'
})
def test_request_document_with_invalid_trr(self):
response = self.client.post(reverse('api-v2:trr-request-document', kwargs={'pk': 321}))
expect(response.status_code).to.eq(status.HTTP_404_NOT_FOUND)
avg_line_length: 37.607438 | max_line_length: 109 | alphanum_fraction: 0.573893
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: e2e4548dab2a879d64062e71c55d52010a2db898 | size: 49 | ext: py | lang: Python
repo_path: examples/applications/notebook/nl4dv/visgenie/__init__.py
repo_name: bopan3/nl4dv_modified | head: 4c2669e199608ad44d79e149af982a9f14ec3f36 | licenses: ["MIT"]
max_stars_count: 58 (2020-07-10T01:40:35.000Z to 2022-03-09T20:31:26.000Z)
max_issues_count: 11 (2020-07-20T14:15:41.000Z to 2022-03-10T10:47:20.000Z)
max_forks_count: 15 (2020-08-11T23:20:16.000Z to 2022-03-30T02:55:05.000Z)
content:
from .visgenie import *
from .vis_recos import *
avg_line_length: 16.333333 | max_line_length: 24 | alphanum_fraction: 0.755102
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: e2efe71d9dfec9231bd4b2ac61e975cccead0e4e | size: 1,003 | ext: py | lang: Python
repo_path: test/common_ip_test.py
repo_name: zhd785576549/py_re_collect | head: a79cd18b5c9def0cbd95b8d1b809e8c0c56cb302 | licenses: ["MIT"]
max_stars_count: 1 (2019-10-10T07:00:55.000Z to 2019-10-10T07:00:55.000Z)
max_issues_count: null
max_forks_count: 1 (2019-10-10T07:00:56.000Z to 2019-10-10T07:00:56.000Z)
content:
from py_utils.validator.ip_address import common
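# Smoke tests for the shared IPv4/IPv6 validators: each fixture list below is
# fed through common.ipv4_valid / common.ipv6_valid and every entry must pass.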
ipv4_list = [
"113.194.30.119",
"120.83.105.162",
"49.70.89.67",
"1.197.204.72",
"0.0.0.0",
"255.255.255.255",
"127.0.0.1"
]
def test_ipv4():
for i in ipv4_list:
valid = common.ipv4_valid(i)
if valid is False:
print("IPv4 address : {} unpass".format(i))
assert valid
ipv6_list = [
"0000:0000:0000:0000:0000:0000:0000:0000",
"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
"1050:0000:0000:0000:0005:0600:300c:326b",
"1050:0:0:0:5:600:300c:326b",
"2001:0db8:85a3:08d3:1319:8a2e:0370:7344",
"2001:0db8:85a3:0000:1319:8a2e:0370:7344",
"2001:0DB8:0000:0000:0000:0000:1428:57ab",
"2001:0DB8:0:0:0:0:1428:57ab",
"0000:0000:0000:0000:0000:0000:874B:2B34",
]
def test_ipv6():
for i in ipv6_list:
valid = common.ipv6_valid(i)
if valid is False:
print("IPv6 address : {} unpass".format(i))
assert valid
avg_line_length: 24.463415 | max_line_length: 55 | alphanum_fraction: 0.597208
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: 39072d19a8409558470697f9f2f6d980ba0c7bad | size: 220 | ext: py | lang: Python
repo_path: sites/users/tables.py | head: faeb00a37f3993c93ea9cf45d43258705d7afc22 | licenses: ["MIT"]
max_stars_repo_name: bluebirdio/improbable-sites | max_stars_count: null
max_issues_repo_name: bluebirdio/improbable-sites | max_issues_count: 1 (2020-07-29T19:07:25.000Z to 2020-07-29T19:07:25.000Z)
max_forks_repo_name: bluebirdio/samey-sites | max_forks_count: 1 (2020-03-25T22:36:48.000Z to 2020-03-25T22:36:48.000Z)
content:
from samey.tables import *
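# Minimal user table: a text-identified row with name and email columns
# (Column/String come from the samey.tables star import above).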
class User(TextIdentified, SameyTable):
first_name = Column(String(30), nullable=False)
last_name = Column(String(30), nullable=False)
email = Column(String(254), nullable=False)
avg_line_length: 27.5 | max_line_length: 51 | alphanum_fraction: 0.727273
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: 392b3d437a6c840356504e3a1db8c91b2eb32cce | size: 793 | ext: py | lang: Python
repo_path: src/keras/keras/applications/densenet.py
repo_name: lu791019/iii_HA_Image_Recognition_DL | head: d5f56d62af6d3aac1c216ca4ff309db08a8c9072 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import densenet
from . import keras_modules_injection
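# Each wrapper below simply forwards to keras_applications.densenet; the
# keras_modules_injection decorator injects the Keras backend, layers, models
# and utils modules that keras_applications expects.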
@keras_modules_injection
def DenseNet121(*args, **kwargs):
return densenet.DenseNet121(*args, **kwargs)
@keras_modules_injection
def DenseNet169(*args, **kwargs):
return densenet.DenseNet169(*args, **kwargs)
@keras_modules_injection
def DenseNet201(*args, **kwargs):
return densenet.DenseNet201(*args, **kwargs)
@keras_modules_injection
def decode_predictions(*args, **kwargs):
return densenet.decode_predictions(*args, **kwargs)
@keras_modules_injection
def preprocess_input(*args, **kwargs):
return densenet.preprocess_input(*args, **kwargs)
avg_line_length: 24.78125 | max_line_length: 56 | alphanum_fraction: 0.75662
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: 1a572b05ba443e1e9f5b7fc494b43cb4aadd93f7 | size: 12,537 | ext: py | lang: Python
repo_path: Tests/PythonTests/test_vertex_centered_scalar_grid.py
repo_name: ADMTec/CubbyFlow | head: c71457fd04ccfaf3ef22772bab9bcec4a0a3b611 | licenses: ["MIT"]
max_stars_count: 216 (2017-01-25T04:34:30.000Z to 2021-07-15T12:36:06.000Z)
max_issues_count: 323 (2017-01-26T13:53:13.000Z to 2021-07-14T16:03:38.000Z)
max_forks_count: 33 (2017-01-25T05:05:49.000Z to 2021-06-17T17:30:56.000Z)
content:
import numpy as np
import pyCubbyFlow
from pytest import approx
from pytest_utils import *
cnt = 0
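# Module-level counter shared with the ForEach* callback helpers defined
# inside the tests below.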
def test_grid2():
global cnt
a = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(3, 4),
gridSpacing=(1, 2),
gridOrigin=(7, 5))
assert a.resolution == (3, 4)
assert_vector_similar(a.gridOrigin, (7, 5))
assert_vector_similar(a.gridSpacing, (1, 2))
assert_bounding_box_similar(
a.boundingBox, pyCubbyFlow.BoundingBox2D((7, 5), (10, 13)))
f = a.cellCenterPosition
assert_vector_similar(f(0, 0), (7.5, 6))
b = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(3, 4),
gridSpacing=(1, 2),
gridOrigin=(7, 5))
assert a.HasSameShape(b)
def func(idx):
global cnt
assert idx[0] >= 0 and idx[0] < 3
assert idx[1] >= 0 and idx[1] < 4
cnt += 1
cnt = 0
a.ForEachCellIndex(func)
assert cnt == 12
def test_scalar_grid2():
global cnt
a = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(3, 4),
gridSpacing=(1, 2),
gridOrigin=(7, 5))
a.Resize(resolution=(12, 7),
gridSpacing=(3, 4),
gridOrigin=(9, 2))
assert a.resolution == (12, 7)
assert_vector_similar(a.gridOrigin, (9, 2))
assert_vector_similar(a.gridSpacing, (3, 4))
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j] == 0.0
a[5, 6] = 17.0
assert a[5, 6] == 17.0
a.Fill(42.0)
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j] == 42.0
def func(pt):
return pt.x ** 2 + pt.y ** 2
a.Fill(func)
pos = a.DataPosition()
acc = np.array(a.DataView(), copy=False)
for j in range(a.resolution.y):
for i in range(a.resolution.x):
pt = pos(i, j)
assert func(pt) == a[i, j]
assert func(pt) == approx(a.Sample(pt))
assert acc[j, i] == a[i, j]
# Can't compare to analytic solution because FDM with such a coarse
# grid will return inaccurate results by design.
assert_vector_similar(a.GradientAtDataPoint((i, j)), a.Gradient(pt))
assert a.LaplacianAtDataPoint((i, j)) == a.Laplacian(pt)
def func(idx):
global cnt
assert idx[0] >= 0 and idx[0] < a.resolution.x + 1
assert idx[1] >= 0 and idx[1] < a.resolution.y + 1
cnt += 1
cnt = 0
a.ForEachDataPointIndex(func)
assert cnt == (a.resolution.x + 1) * (a.resolution.y + 1)
blob = a.Serialize()
b = pyCubbyFlow.VertexCenteredScalarGrid2()
b.Deserialize(blob)
assert b.resolution == (12, 7)
assert_vector_similar(b.gridOrigin, (9, 2))
assert_vector_similar(b.gridSpacing, (3, 4))
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j] == b[i, j]
def test_cell_centered_scalar_grid2():
# CTOR
a = pyCubbyFlow.VertexCenteredScalarGrid2()
assert a.resolution == (1, 1)
assert_vector_similar(a.gridOrigin, (0.0, 0.0))
assert_vector_similar(a.gridSpacing, (1.0, 1.0))
a = pyCubbyFlow.VertexCenteredScalarGrid2((3, 4), (1, 2), (7, 5))
assert a.resolution == (3, 4)
assert_vector_similar(a.gridOrigin, (7, 5))
assert_vector_similar(a.gridSpacing, (1, 2))
a = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(3, 4),
gridSpacing=(1, 2),
gridOrigin=(7, 5))
assert a.resolution == (3, 4)
assert_vector_similar(a.gridOrigin, (7, 5))
assert_vector_similar(a.gridSpacing, (1, 2))
a = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(3, 4),
domainSizeX=12.0,
gridOrigin=(7, 5))
assert a.resolution == (3, 4)
assert_vector_similar(a.gridOrigin, (7, 5))
assert_vector_similar(a.gridSpacing, (4, 4))
# Properties
a = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(3, 4),
gridSpacing=(1, 2),
gridOrigin=(7, 5))
assert_vector_similar(a.dataSize, (4, 5))
assert_vector_similar(a.dataOrigin, (7, 5))
# Modifiers
b = pyCubbyFlow.VertexCenteredScalarGrid2(resolution=(6, 3),
gridSpacing=(5, 9),
gridOrigin=(1, 2))
a.Fill(42.0)
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j] == 42.0
a.Swap(b)
assert a.resolution == (6, 3)
assert_vector_similar(a.gridOrigin, (1, 2))
assert_vector_similar(a.gridSpacing, (5, 9))
assert b.resolution == (3, 4)
assert_vector_similar(b.gridOrigin, (7, 5))
assert_vector_similar(b.gridSpacing, (1, 2))
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j] == 0.0
for j in range(b.resolution.y):
for i in range(b.resolution.x):
assert b[i, j] == 42.0
a.Set(b)
assert a.resolution == (3, 4)
assert_vector_similar(a.gridOrigin, (7, 5))
assert_vector_similar(a.gridSpacing, (1, 2))
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j] == 42.0
c = a.Clone()
assert c.resolution == (3, 4)
assert_vector_similar(c.gridOrigin, (7, 5))
assert_vector_similar(c.gridSpacing, (1, 2))
for j in range(c.resolution.y):
for i in range(c.resolution.x):
assert c[i, j] == 42.0
# ------------------------------------------------------------------------------
def test_grid3():
global cnt
a = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(3, 4, 5),
gridSpacing=(1, 2, 3),
gridOrigin=(7, 5, 3))
assert a.resolution == (3, 4, 5)
assert_vector_similar(a.gridOrigin, (7, 5, 3))
assert_vector_similar(a.gridSpacing, (1, 2, 3))
assert_bounding_box_similar(
a.boundingBox, pyCubbyFlow.BoundingBox3D((7, 5, 3), (10, 13, 18)))
f = a.cellCenterPosition
assert_vector_similar(f(0, 0, 0), (7.5, 6, 4.5))
b = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(3, 4, 5),
gridSpacing=(1, 2, 3),
gridOrigin=(7, 5, 3))
assert a.HasSameShape(b)
def func(idx):
global cnt
assert idx[0] >= 0 and idx[0] < 3
assert idx[1] >= 0 and idx[1] < 4
assert idx[2] >= 0 and idx[2] < 5
cnt += 1
cnt = 0
a.ForEachCellIndex(func)
assert cnt == 60
def test_scalar_grid3():
global cnt
a = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(3, 4, 5),
gridSpacing=(1, 2, 3),
gridOrigin=(7, 5, 3))
a.Resize(resolution=(12, 7, 2),
gridSpacing=(3, 4, 5),
gridOrigin=(9, 2, 5))
assert a.resolution == (12, 7, 2)
assert_vector_similar(a.gridOrigin, (9, 2, 5))
assert_vector_similar(a.gridSpacing, (3, 4, 5))
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j, k] == 0.0
a[5, 6, 1] = 17.0
assert a[5, 6, 1] == 17.0
a.Fill(42.0)
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j, k] == 42.0
def func(pt):
return pt.x ** 2 + pt.y ** 2 + pt.z ** 2
a.Fill(func)
pos = a.DataPosition()
acc = np.array(a.DataView(), copy=False)
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
pt = pos(i, j, k)
assert func(pt) == a[i, j, k]
assert func(pt) == approx(a.Sample(pt))
assert acc[k, j, i] == a[i, j, k]
# Can't compare to analytic solution because FDM with such a
# coarse grid will return inaccurate results by design.
assert_vector_similar(
a.GradientAtDataPoint((i, j, k)), a.Gradient(pt))
assert a.LaplacianAtDataPoint((i, j, k)) == a.Laplacian(pt)
def func(idx):
global cnt
assert idx[0] >= 0 and idx[0] < a.resolution.x
assert idx[1] >= 0 and idx[1] < a.resolution.y
assert idx[2] >= 0 and idx[2] < a.resolution.z
cnt += 1
cnt = 0
a.ForEachDataPointIndex(func)
assert cnt == a.resolution.x * a.resolution.y * a.resolution.z
blob = a.Serialize()
b = pyCubbyFlow.CellCenteredScalarGrid3()
b.Deserialize(blob)
assert b.resolution == (12, 7, 2)
assert_vector_similar(b.gridOrigin, (9, 2, 5))
assert_vector_similar(b.gridSpacing, (3, 4, 5))
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j, k] == b[i, j, k]
def test_cell_centered_scalar_grid3():
# CTOR
a = pyCubbyFlow.CellCenteredScalarGrid3()
assert a.resolution == (1, 1, 1)
assert_vector_similar(a.gridOrigin, (0.0, 0.0, 0.0))
assert_vector_similar(a.gridSpacing, (1.0, 1.0, 1.0))
a = pyCubbyFlow.CellCenteredScalarGrid3((3, 4, 5), (1, 2, 3), (7, 5, 2))
assert a.resolution == (3, 4, 5)
assert_vector_similar(a.gridOrigin, (7, 5, 2))
assert_vector_similar(a.gridSpacing, (1, 2, 3))
a = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(3, 4, 5),
gridSpacing=(1, 2, 3),
gridOrigin=(7, 5, 2))
assert a.resolution == (3, 4, 5)
assert_vector_similar(a.gridOrigin, (7, 5, 2))
assert_vector_similar(a.gridSpacing, (1, 2, 3))
a = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(3, 4, 5),
domainSizeX=12.0,
gridOrigin=(7, 5, 2))
assert a.resolution == (3, 4, 5)
assert_vector_similar(a.gridOrigin, (7, 5, 2))
assert_vector_similar(a.gridSpacing, (4, 4, 4))
# Properties
a = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(3, 4, 5),
gridSpacing=(1, 2, 3),
gridOrigin=(7, 5, 2))
assert_vector_similar(a.dataSize, (3, 4, 5))
assert_vector_similar(a.dataOrigin, (7.5, 6, 3.5))
# Modifiers
b = pyCubbyFlow.CellCenteredScalarGrid3(resolution=(6, 3, 7),
gridSpacing=(5, 9, 3),
gridOrigin=(1, 2, 8))
a.Fill(42.0)
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j, k] == 42.0
a.Swap(b)
assert a.resolution == (6, 3, 7)
assert_vector_similar(a.gridOrigin, (1, 2, 8))
assert_vector_similar(a.gridSpacing, (5, 9, 3))
assert b.resolution == (3, 4, 5)
assert_vector_similar(b.gridOrigin, (7, 5, 2))
assert_vector_similar(b.gridSpacing, (1, 2, 3))
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j, k] == 0.0
for k in range(b.resolution.z):
for j in range(b.resolution.y):
for i in range(b.resolution.x):
assert b[i, j, k] == 42.0
a.Set(b)
assert a.resolution == (3, 4, 5)
assert_vector_similar(a.gridOrigin, (7, 5, 2))
assert_vector_similar(a.gridSpacing, (1, 2, 3))
for k in range(a.resolution.z):
for j in range(a.resolution.y):
for i in range(a.resolution.x):
assert a[i, j, k] == 42.0
c = a.Clone()
assert c.resolution == (3, 4, 5)
assert_vector_similar(c.gridOrigin, (7, 5, 2))
assert_vector_similar(c.gridSpacing, (1, 2, 3))
for k in range(c.resolution.z):
for j in range(c.resolution.y):
for i in range(c.resolution.x):
assert c[i, j, k] == 42.0
avg_line_length: 35.616477 | max_line_length: 80 | alphanum_fraction: 0.532344
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: 46bfd6aef0edb879ce2119136396ba62e4c911ee | size: 1,620 | ext: py | lang: Python
repo_path: iati/using_data/migrations/0003_auto_20180828_1740.py
repo_name: andylolz/IATI-Standard-Website | head: b781b9fe6b6430f93826e530e9560183bf8fd310 | licenses: ["MIT"]
max_stars_count: 4 (2019-03-28T06:42:17.000Z to 2021-06-06T13:10:51.000Z)
max_issues_count: 177 (2018-09-28T14:21:56.000Z to 2022-03-30T21:45:26.000Z)
max_forks_count: 8 (2018-10-25T20:43:10.000Z to 2022-03-17T14:19:27.000Z)
content:
# Generated by Django 2.0.5 on 2018-08-28 17:40
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
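# Auto-generated migration: adds the tool_box_editor StreamField to
# ToolsIndexPage along with its per-language copies (_en, _es, _fr, _pt).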
class Migration(migrations.Migration):
dependencies = [
('using_data', '0002_toolsindexpage_toolspage'),
]
operations = [
migrations.AddField(
model_name='toolsindexpage',
name='tool_box_editor',
field=wagtail.core.fields.StreamField((('tool_box_text', wagtail.core.blocks.RichTextBlock(required=False)),), blank=True, null=True),
),
migrations.AddField(
model_name='toolsindexpage',
name='tool_box_editor_en',
field=wagtail.core.fields.StreamField((('tool_box_text', wagtail.core.blocks.RichTextBlock(required=False)),), blank=True, null=True),
),
migrations.AddField(
model_name='toolsindexpage',
name='tool_box_editor_es',
field=wagtail.core.fields.StreamField((('tool_box_text', wagtail.core.blocks.RichTextBlock(required=False)),), blank=True, null=True),
),
migrations.AddField(
model_name='toolsindexpage',
name='tool_box_editor_fr',
field=wagtail.core.fields.StreamField((('tool_box_text', wagtail.core.blocks.RichTextBlock(required=False)),), blank=True, null=True),
),
migrations.AddField(
model_name='toolsindexpage',
name='tool_box_editor_pt',
field=wagtail.core.fields.StreamField((('tool_box_text', wagtail.core.blocks.RichTextBlock(required=False)),), blank=True, null=True),
),
]
avg_line_length: 39.512195 | max_line_length: 146 | alphanum_fraction: 0.647531
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: 46c49651eb6871465ba385d32daa1ed9d0116177 | size: 109 | ext: py | lang: Python
repo_path: amcp_pylib/response/types/client_error_response.py
repo_name: dolejska-daniel/amcp-pylib | head: 3cccfcf263fce938de942a80e07375b17c5f82f9 | licenses: ["MIT"]
max_stars_count: 5 (2020-05-14T18:35:35.000Z to 2021-08-10T12:29:36.000Z)
max_issues_count: 3 (2020-10-14T16:59:38.000Z to 2021-04-30T15:33:24.000Z)
max_forks_count: null
content:
from ..response_base import ResponseBase as ResponseBase
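# Marker subclass used to distinguish client-error responses; all behaviour
# is inherited from ResponseBase.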
class ClientErrorResponse(ResponseBase):
pass
avg_line_length: 18.166667 | max_line_length: 56 | alphanum_fraction: 0.816514
[remaining qsc_* quality-signal columns for this record omitted]

hexsha: 201a59b9e6be80741b72889dbd59eec1368585e9 | size: 87,170 | ext: py | lang: Python
repo_path: sim.py
repo_name: banerjeeutsav/sapphire_sim | head: 85b96ef353a6135c96835841bf539de7df086f43 | licenses: ["MIT"]
max_stars_count: 4 (2020-03-09T06:05:27.000Z to 2021-09-17T06:49:06.000Z)
max_issues_count: null | max_forks_count: null
content:
#! /usr/bin/python
###################################################################################################
#
# Python Simulator for Sapphire Lattice Crypto-Processor
#
# Author: Utsav Banerjee
# Last Modified: 25-Nov-2019
#
# Inputs: Parameters (n,q), Operating Conditions, Program, Simulation Options
# Outputs: Instruction Count, Cycle Count, Total Time, Average Power, Total Energy
#
###################################################################################################
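#
# Usage sketch (assumed, not part of the original header): the simulator loads
# a program file and steps through it with instr_exec(); supplying a CDT file
# via the "--cdt" command-line option is required before the cdt_sample
# instructions can be used (see the sys.argv checks below).
#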
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import math, sys, os, re, random
from sha3 import *
from core import *
from encoding import *
# Read / Write Cycle Counts
READ_CYCLES = 2 # read data from the crypto core
WRITE_CYCLES = 2 # write data to the crypto core
# Supported Parameters
valid_n = [64, 128, 256, 512, 1024, 2048]
valid_q = [3329, 7681, 12289, 40961, 65537, 120833, 133121, 184321, 4205569, 4206593, 8058881, 8380417, 8404993]
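# The supported moduli are NTT-friendly primes; several match published
# lattice schemes (e.g. 3329 is Kyber's q, 12289 NewHope's, 8380417 Dilithium's).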
# Power Consumption Table (Current in uA at 1.1 V and 72 MHz)
idd_dict = {
"ctrl" : 1815,
"reg_alu" : 3271,
"reg_poly" : 2795,
"sha3" : 6115,
"poly_read_write" : 6145,
"poly_init" : 6120,
"poly_bitrev" : 6212,
"poly_copy" : 6183,
"poly_eq_check" : 5523,
"poly_norm_check" : 3019,
"poly_shift" : 6201,
"poly_hash" : 7503,
"poly_sum_elems" : 3630,
"poly_max_elems" : 3184,
"poly_mult_psi" : { 3329: 7546, 7681: 7335, 12289: 8067, 40961: 9032, 65537: 7455, 120833: 8890, 133121: 8055, 184321: 8740, 4205569: 10418, 4206593: 9352, 8058881: 11726, 8380417: 8441, 8404993: 9156 },
"poly_ntt" : { 3329: 8591, 7681: 8483, 12289: 9589, 40961: 10783, 65537: 8619, 120833: 10764, 133121: 9958, 184321: 10585, 4205569: 13455, 4206593: 12657, 8058881: 14365, 8380417: 10366, 8404993: 10922 },
"poly_poly_addsub" : { 3329: 5022, 7681: 5290, 12289: 5523, 40961: 5717, 65537: 5464, 120833: 5950, 133121: 5688, 184321: 6125, 4205569: 6422, 4206593: 6498, 8058881: 6862, 8380417: 5921, 8404993: 6071 },
"poly_poly_mul" : { 3329: 7557, 7681: 7347, 12289: 8075, 40961: 9046, 65537: 7464, 120833: 8900, 133121: 8066, 184321: 8753, 4205569: 10433, 4206593: 9367, 8058881: 11734, 8380417: 8454, 8404993: 9173 },
"poly_const_addsub" : { 3329: 3558, 7681: 3581, 12289: 3640, 40961: 3640, 65537: 3630, 120833: 3630, 133121: 3611, 184321: 3644, 4205569: 3653, 4206593: 3655, 8058881: 3620, 8380417: 3611, 8404993: 3628 },
"poly_const_mul" : { 3329: 5946, 7681: 5736, 12289: 6134, 40961: 6940, 65537: 5794, 120833: 7144, 133121: 6396, 184321: 7142, 4205569: 8822, 4206593: 7756, 8058881: 9939, 8380417: 7046, 8404993: 7562 },
"poly_const_and" : 3504,
"poly_const_or" : 3552,
"poly_const_xor" : 3514,
"poly_const_shift" : 3484,
"sample_rej" : 6755,
"sample_bin" : 7545,
"sample_cdt" : 2764,
"sample_uni" : 7573,
"sample_tri_1" : 3645,
"sample_tri_2" : 3627,
"sample_tri_3" : 6791,
}
# Instruction decode and execute
def instr_exec(instr, iter_count):
global keccak_buf
global proc_regs
global poly_mem
global poly_tmp
global param_n
global param_q
global ticks
global pc
global power
instr_t = instr.replace(" ", "")
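# All spaces are stripped so the instruction regexes below only have to match
# the compact, whitespace-free form of each mnemonic.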
# INSTRUCTION - Parameter Configuration
matchObj = re.match(r'config\(n=(\d+),q=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
param_n = int(matchObj.group(1))
param_q = int(matchObj.group(2))
#print("config: n = %d, q = %d" % (param_n, param_q))
if param_n not in valid_n:
print("\n[Line %4d] %s\nERROR: Unsupported parameter \"n = %d\" (Valid \"n\": %s)\n" % (lines[pc], instr, param_n, valid_n))
exit()
if param_q not in valid_q:
print("\n[Line %4d] %s\nERROR: Unsupported parameter \"q = %d\" (Valid prime \"q\": %s)\n" % (lines[pc], instr, param_q, valid_q))
exit()
# Initialize polynomial memory
poly_mem = [[0 for i in range(param_n)] for j in range(int(8192/param_n))]
poly_tmp = [0 for i in range(param_n)]
#poly_mem = np.zeros((int(8192/param_n), param_n))
#poly_tmp = np.zeros((param_n))
#poly_mem = np.array(poly_mem, dtype=np.int64).tolist()
#poly_tmp = np.array(poly_mem, dtype=np.int64).tolist()
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 0
# INSTRUCTION - Register Write Operation
matchObj = re.match(r'c(\d)=(\d+)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
val = int(matchObj.group(2))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg))
exit()
if val >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %s too big for 16-bit register \"c%d\"\n" % (lines[pc], instr, val, reg))
exit()
# Update register value
proc_regs["c%s" % reg] = val
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 1
matchObj = re.match(r'c(\d)=c(\d)([\+\-])(\d+)', instr_t, re.M|re.I)
if matchObj:
reg_dst = int(matchObj.group(1))
reg_src = int(matchObj.group(2))
val = int(matchObj.group(4))
if reg_dst > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg_dst))
exit()
if reg_src > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg_src))
exit()
if reg_dst != reg_src:
print("\n[Line %4d] %s\nERROR: Must use \"c0 = c0 +/- <val>\" or \"c1 = c1 +/- <val>\"\n" % (lines[pc], instr))
exit()
if val >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c%d\"\n" % (lines[pc], instr, val, reg_dst))
exit()
# Update register value
if matchObj.group(3) == "+":
proc_regs["c%d" % reg_dst] = (proc_regs["c%d" % reg_dst] + val) % 2**16
if matchObj.group(3) == "-":
proc_regs["c%d" % reg_dst] = (proc_regs["c%d" % reg_dst] - val) % 2**16
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["reg_alu"]]*2)
return 1
matchObj = re.match(r'reg=(\d+)', instr_t, re.M|re.I)
if matchObj:
val = int(matchObj.group(1))
if val >= 2**24:
print("\n[Line %4d] %s\nERROR: Value %d too big for 24-bit register \"reg\"\n" % (lines[pc], instr, val))
exit()
# Update register value
proc_regs["reg"] = val
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 1
matchObj = re.match(r'tmp=(\d+)', instr_t, re.M|re.I)
if matchObj:
val = int(matchObj.group(1))
if val >= 2**24:
print("\n[Line %4d] %s\nERROR: Value %d too big for 24-bit register \"tmp\"\n" % (lines[pc], instr, val))
exit()
# Update register value
proc_regs["tmp"] = val
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 1
matchObj = re.match(r'reg=tmp', instr_t, re.M|re.I)
if matchObj:
# Update register value
proc_regs["reg"] = proc_regs["tmp"]
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 1
# INSTRUCTION - Register ALU Operation
matchObj = re.match(r'tmp=tmp([\+\-\*&\|\^><][><]*)reg', instr_t, re.M|re.I)
if matchObj:
op = matchObj.group(1)
#print("op: %s" % op)
if op == "+":
# Update register value
proc_regs["tmp"] = (proc_regs["tmp"] + proc_regs["reg"]) % param_q
elif op == "-":
# Update register value
proc_regs["tmp"] = (proc_regs["tmp"] - proc_regs["reg"]) % param_q
elif op == "*":
# Update register value
proc_regs["tmp"] = (proc_regs["tmp"] * proc_regs["reg"]) % param_q
elif op == "&":
# Update register value
proc_regs["tmp"] = proc_regs["tmp"] & proc_regs["reg"]
elif op == "|":
# Update register value
proc_regs["tmp"] = proc_regs["tmp"] | proc_regs["reg"]
elif op == "^":
# Update register value
proc_regs["tmp"] = proc_regs["tmp"] ^ proc_regs["reg"]
elif op == ">>":
# Update register value
if proc_regs["reg"] < 24:
proc_regs["tmp"] = (proc_regs["tmp"] >> proc_regs["reg"]) % 2**24
else:
proc_regs["tmp"] = 0
elif op == "<<":
# Update register value
if proc_regs["reg"] < 24:
proc_regs["tmp"] = (proc_regs["tmp"] << proc_regs["reg"]) % 2**24
else:
proc_regs["tmp"] = 0
else:
print("\n[Line %4d] %s\nERROR: Unsupported operation \"%s\", allowed operators are {+, -, *, &, |, ^, >>, <<}\n" % (lines[pc], instr, op))
exit()
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["reg_alu"]]*2)
return 1
# INSTRUCTION - Register Polynomial Operation
matchObj = re.match(r'reg=\(poly=(\d+)\)\[(\d+)\]', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
index = int(matchObj.group(2))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if index >= param_n:
print("\n[Line %4d] %s\nERROR: Index \"%d\" out of range, allowed indices for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, param_n))
exit()
# Read polynomial coefficient and update register value
proc_regs["reg"] = poly_mem[poly][index]
cycles = 2 + 1 + 2
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["reg_poly"]]*cycles)
return 2
matchObj = re.match(r'reg=\(poly=(\d+)\)\[c(\d)\]', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
reg = int(matchObj.group(2))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg))
exit()
# Read polynomial coefficient and update register value
proc_regs["reg"] = poly_mem[poly][proc_regs["c%d" % reg] % param_n]
cycles = 2 + 1 + 2
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["reg_poly"]]*cycles)
return 2
matchObj = re.match(r'\(poly=(\d+)\)\[(\d+)\]=reg', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
index = int(matchObj.group(2))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if index >= param_n:
print("\n[Line %4d] %s\nERROR: Index \"%d\" out of range, allowed indices for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, param_n))
exit()
# Read register value and update polynomial coefficient
poly_mem[poly][index] = proc_regs["reg"]
cycles = 2 + 1 + 1
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["reg_poly"]]*cycles)
return 2
matchObj = re.match(r'\(poly=(\d+)\)\[c(\d)\]=reg', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
reg = int(matchObj.group(2))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg))
exit()
# Read register value and update polynomial coefficient
poly_mem[poly][proc_regs["c%d" % reg] % param_n] = proc_regs["reg"]
cycles = 2 + 1 + 1
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["reg_poly"]]*cycles)
return 2
# INSTRUCTION - Polynomial Absolute Maximum in range [-q/2, + q/2]
matchObj = re.match(r'reg=max\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Compute maximum of coefficients and update register value
proc_regs["reg"] = 0
for i in range(param_n):
if poly_mem[poly][i] < int(param_q/2) and poly_mem[poly][i] > proc_regs["reg"]:
proc_regs["reg"] = poly_mem[poly][i]
if poly_mem[poly][i] >= int(param_q/2) and (param_q - poly_mem[poly][i]) > proc_regs["reg"]:
proc_regs["reg"] = (param_q - poly_mem[poly][i])
cycles = 2 + 1 + 1 + param_n
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_max_elems"]]*cycles)
return 2
# INSTRUCTION - Polynomial Sum of Coefficients in range [-q/2, + q/2]
matchObj = re.match(r'reg=sum\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Compute sum of coefficients and update register value
proc_regs["reg"] = 0
for i in range(param_n):
if poly_mem[poly][i] < int(param_q/2):
proc_regs["reg"] = proc_regs["reg"] + poly_mem[poly][i]
if poly_mem[poly][i] >= int(param_q/2):
proc_regs["reg"] = proc_regs["reg"] + (poly_mem[poly][i] - param_q)
proc_regs["reg"] = abs(proc_regs["reg"])
#print("sum = %d" % proc_regs["reg"])
cycles = 2 + 1 + 1 + param_n
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_sum_elems"]]*cycles)
return 2
# INSTRUCTION - Polynomial Number Theoretic Transform
matchObj = re.match(r'transform\(mode=(DI[FT]_I{0,1}NTT),poly_dst=(\d+),poly_src=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = matchObj.group(1)
poly_dst = int(matchObj.group(2))
poly_src = int(matchObj.group(3))
if poly_dst >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_dst = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_dst, param_n, int(8192/param_n)))
exit()
if poly_src >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_src = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_src, param_n, int(8192/param_n)))
exit()
if not ((poly_src < int(4096/param_n) and poly_dst >= int(4096/param_n)) or (poly_dst < int(4096/param_n) and poly_src >= int(4096/param_n))):
print("\n[Line %4d] %s\nERROR: Polynomial pair \"poly_dst = %d, poly_src = %d\" is not allowed for n = %d, ensure \"poly_dst < %d, poly_src >= %d\" or \"poly_src < %d, poly_dst >= %d\"\n" % (lines[pc], instr, poly_dst, poly_src, param_n, int(4096/param_n), int(4096/param_n), int(4096/param_n), int(4096/param_n)))
exit()
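# (The src/dst constraint above reflects the 8192-coefficient polynomial memory
# being split into two 4096-coefficient halves; the transform presumably reads
# from one half and writes into the other.)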
# Compute transform and update polynomial coefficients
if mode == "DIF_NTT":
# assume standard input, bit-reversed output
cycles = dif_ntt(param_n, param_q, poly_mem[poly_src], lines[pc], instr)
poly_mem[poly_dst] = poly_mem[poly_src].copy()
poly_mem[poly_src] = [(random.getrandbits(24) % param_q) for i in range(param_n)] # Source polynomial gets clobbered
if mode == "DIT_NTT":
# assume bit-reversed input, standard output
cycles = dit_ntt(param_n, param_q, poly_mem[poly_src], lines[pc], instr)
poly_mem[poly_dst] = poly_mem[poly_src].copy()
poly_mem[poly_src] = [(random.getrandbits(24) % param_q) for i in range(param_n)] # Source polynomial gets clobbered
if mode == "DIF_INTT":
# assume standard input, bit-reversed output
cycles = dif_intt(param_n, param_q, poly_mem[poly_src], lines[pc], instr)
poly_mem[poly_dst] = poly_mem[poly_src].copy()
poly_mem[poly_src] = [(random.getrandbits(24) % param_q) for i in range(param_n)] # Source polynomial gets clobbered
if mode == "DIT_INTT":
# assume bit-reversed input, standard output
cycles = dit_intt(param_n, param_q, poly_mem[poly_src], lines[pc], instr)
poly_mem[poly_dst] = poly_mem[poly_src].copy()
poly_mem[poly_src] = [(random.getrandbits(24) % param_q) for i in range(param_n)] # Source polynomial gets clobbered
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_ntt"][param_q]]*cycles)
# Need to copy polynomial when n is an even power of 2
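# (presumably because an even number of butterfly stages leaves the result in
# the scratch half of memory, so an extra copy pass is charged here)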
if int(math.log(param_n,2)) % 2 == 0:
cycles = 2 + 1 + 1 + int(param_n/4)
ticks = ticks + cycles
power = power + ([idd_dict["poly_copy"]]*cycles)
return 3
# INSTRUCTION - Pre- and Post- Processing for Negative-Wrapped Convolution
matchObj = re.match(r'mult_psi\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Pre-process polynomial coefficients
cycles = mult_psi(param_n, param_q, poly_mem[poly], lines[pc], instr)
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_mult_psi"][param_q]]*cycles)
return 3
matchObj = re.match(r'mult_psi_inv\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Pre-process polynomial coefficients
cycles = mult_psi_inv(param_n, param_q, poly_mem[poly], lines[pc], instr)
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_mult_psi"][param_q]]*cycles)
return 3
# PSEUDO-INSTRUCTION - Rejection Sampling
matchObj = re.match(r'rej_sample\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
poly = int(matchObj.group(5))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
cycles = 2 + 2
# Sample polynomial coefficients
cycles = cycles + rejection_sample(param_n, param_q, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_rej"]]*cycles)
return 4
# PSEUDO-INSTRUCTION - Binomial Sampling
matchObj = re.match(r'bin_sample\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),k=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
param_k = int(matchObj.group(5))
poly = int(matchObj.group(6))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if param_k < 1 or param_k > 32:
print("\n[Line %4d] %s\nERROR: Value of \"k\" must be in the range 1 to 32\n" % (lines[pc], instr))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
cycles = 2 + 2
# Sample polynomial coefficients
cycles = cycles + binomial_sample(param_n, param_q, param_k, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_bin"]]*cycles)
return 4
# PSEUDO-INSTRUCTION - Cumulative Distribution Table Sampling
matchObj = re.match(r'cdt_sample\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),r=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
param_r = int(matchObj.group(5))
poly = int(matchObj.group(6))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if param_r < 1 or param_r > 32:
print("\n[Line %4d] %s\nERROR: Value of \"r\" must be in the range 1 to 32\n" % (lines[pc], instr))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if "--cdt" not in sys.argv:
print("\n[Line %4d] %s\nERROR: CDT not provided, please provide a valid CDT file to use CDT-based sampling\n" % (lines[pc], instr))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
cycles = 2 + 2
# Sample polynomial coefficients
cycles = cycles + cdt_sample(param_n, param_q, param_r, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), cdt_mem, poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_cdt"]]*cycles)
return 4
# PSEUDO-INSTRUCTION - Uniform Sampling
matchObj = re.match(r'uni_sample\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),eta=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
param_eta = int(matchObj.group(5))
poly = int(matchObj.group(6))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if param_eta >= param_q:
print("\n[Line %4d] %s\nERROR: Value of \"eta\" too large, must be less than %d\n" % (lines[pc], instr, param_q))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
proc_regs["reg"] = param_eta
cycles = 2 + 2 + 2
# Sample polynomial coefficients
cycles = cycles + uniform_sample(param_n, param_q, param_eta, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_uni"]]*cycles)
return 4
# PSEUDO-INSTRUCTION - Trinary Sampling #1
matchObj = re.match(r'tri_sample_1\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),m=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
param_m = int(matchObj.group(5))
poly = int(matchObj.group(6))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if param_m >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
cycles = 2 + 2
# Sample polynomial coefficients
cycles = cycles + trinary_sample_1(param_n, param_q, param_m, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_tri_1"]]*cycles)
return 4
# PSEUDO-INSTRUCTION - Trinary Sampling #2
matchObj = re.match(r'tri_sample_2\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),m0=(\d+),m1=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
param_m0 = int(matchObj.group(5))
param_m1 = int(matchObj.group(6))
poly = int(matchObj.group(7))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if param_m0 >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m0\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if param_m1 >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m1\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if (param_m0 + param_m1) >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m0 + m1\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
proc_regs["reg"] = param_m0 + (param_m1 * 2**12)
cycles = 2 + 2 + 2
# Sample polynomial coefficients
cycles = cycles + trinary_sample_2(param_n, param_q, param_m0, param_m1, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_tri_2"]]*cycles)
return 4
# PSEUDO-INSTRUCTION - Trinary Sampling #3
matchObj = re.match(r'tri_sample_3\(prng=SHAKE-(\d+),seed=r(\d),c0=(\d+),c1=(\d+),rho=1/(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
val_c0 = int(matchObj.group(3))
val_c1 = int(matchObj.group(4))
param_rho = int(matchObj.group(5))
poly = int(matchObj.group(6))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if val_c0 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c0\"\n" % (lines[pc], instr, val_c0))
exit()
if val_c1 >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %d too big for 16-bit register \"c1\"\n" % (lines[pc], instr, val_c1))
exit()
if param_rho != 2 and param_rho != 4 and param_rho != 8 and param_rho != 16 and param_rho != 32 and param_rho != 64 and param_rho != 128:
print("\n[Line %4d] %s\nERROR: Unsupported parameter \"rho = 1/%d\" (Valid \"rho\": [1/2, 1/4, 1/8, 1/16, 1/32, 1/64, 1/128])\n" % (lines[pc], instr, param_rho))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["c0"] = val_c0
proc_regs["c1"] = val_c1
cycles = 2 + 2
# Sample polynomial coefficients
cycles = cycles + trinary_sample_3(param_n, param_q, param_rho, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_tri_3"]]*cycles)
return 4
# INSTRUCTION - Rejection Sampling
matchObj = re.match(r'rej_sample\(prng=SHAKE-(\d+),seed=r(\d),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
poly = int(matchObj.group(3))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Sample polynomial coefficients
cycles = rejection_sample(param_n, param_q, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_rej"]]*cycles)
return 4
# INSTRUCTION - Binomial Sampling
matchObj = re.match(r'bin_sample\(prng=SHAKE-(\d+),seed=r(\d),k=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
param_k = int(matchObj.group(3))
poly = int(matchObj.group(4))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if param_k < 1 or param_k > 32:
print("\n[Line %4d] %s\nERROR: Value of \"k\" must be in the range 1 to 32\n" % (lines[pc], instr))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Sample polynomial coefficients
cycles = binomial_sample(param_n, param_q, param_k, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_bin"]]*cycles)
return 4
# INSTRUCTION - Cumulative Distribution Table Sampling
matchObj = re.match(r'cdt_sample\(prng=SHAKE-(\d+),seed=r(\d),r=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
param_r = int(matchObj.group(3))
poly = int(matchObj.group(4))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if param_r < 1 or param_r > 32:
print("\n[Line %4d] %s\nERROR: Value of \"r\" must be in the range 1 to 32\n" % (lines[pc], instr))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if "--cdt" not in sys.argv:
print("\n[Line %4d] %s\nERROR: CDT not provided, please provide a valid CDT file to use CDT-based sampling\n" % (lines[pc], instr))
exit()
# Sample polynomial coefficients
cycles = cdt_sample(param_n, param_q, param_r, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), cdt_mem, poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_cdt"]]*cycles)
return 4
# INSTRUCTION - Uniform Sampling
matchObj = re.match(r'uni_sample\(prng=SHAKE-(\d+),seed=r(\d),eta=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
param_eta = int(matchObj.group(3))
poly = int(matchObj.group(4))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if param_eta >= param_q:
print("\n[Line %4d] %s\nERROR: Value of \"eta\" too large, must be less than %d\n" % (lines[pc], instr, param_q))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["reg"] = param_eta
cycles = 2
# Sample polynomial coefficients
cycles = cycles + uniform_sample(param_n, param_q, param_eta, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_uni"]]*cycles)
return 4
# INSTRUCTION - Trinary Sampling #1
matchObj = re.match(r'tri_sample_1\(prng=SHAKE-(\d+),seed=r(\d),m=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
param_m = int(matchObj.group(3))
poly = int(matchObj.group(4))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if param_m >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Sample polynomial coefficients
cycles = trinary_sample_1(param_n, param_q, param_m, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_tri_1"]]*cycles)
return 4
# INSTRUCTION - Trinary Sampling #2
matchObj = re.match(r'tri_sample_2\(prng=SHAKE-(\d+),seed=r(\d),m0=(\d+),m1=(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
param_m0 = int(matchObj.group(3))
param_m1 = int(matchObj.group(4))
poly = int(matchObj.group(5))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if param_m0 >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m0\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if param_m1 >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m1\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if (param_m0 + param_m1) >= param_n:
print("\n[Line %4d] %s\nERROR: Value of \"m0 + m1\" too large, must be less than %d\n" % (lines[pc], instr, param_n))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Update register values
proc_regs["reg"] = param_m0 + (param_m1 * 2**12)
cycles = 2
# Sample polynomial coefficients
cycles = cycles + trinary_sample_2(param_n, param_q, param_m0, param_m1, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_tri_2"]]*cycles)
return 4
# INSTRUCTION - Trinary Sampling #3
matchObj = re.match(r'tri_sample_3\(prng=SHAKE-(\d+),seed=r(\d),rho=1/(\d+),poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
param_rho = int(matchObj.group(3))
poly = int(matchObj.group(4))
if mode != 128 and mode != 256:
print("\n[Line %4d] %s\nERROR: Only SHAKE-128 and SHAKE-256 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
if param_rho not in (2, 4, 8, 16, 32, 64, 128):
print("\n[Line %4d] %s\nERROR: Unsupported parameter \"rho = 1/%d\" (Valid \"rho\": [1/2, 1/4, 1/8, 1/16, 1/32, 1/64, 1/128])\n" % (lines[pc], instr, param_rho))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Sample polynomial coefficients
cycles = trinary_sample_3(param_n, param_q, param_rho, mode, hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0') + hex(proc_regs["c0"])[2:].rstrip("L").rjust(4,'0') + hex(proc_regs["c1"])[2:].rstrip("L").rjust(4,'0'), poly_mem[poly])
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sample_tri_3"]]*cycles)
return 4
# INSTRUCTION - Polynomial Initialization
matchObj = re.match(r'init\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Set all polynomial coefficients to zero
poly_mem[poly] = [0 for i in range(param_n)]
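# Cycle count: 2 fetch/decode + 2 setup + n/4 memory cycles, assuming the
# polynomial memory can clear four coefficients per cycle.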
cycles = 2 + 1 + 1 + int(param_n/4)
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_init"]]*cycles)
return 5
# INSTRUCTION - Polynomial Copy
matchObj = re.match(r'poly_copy\(poly_dst=(\d+),poly_src=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly_dst = int(matchObj.group(1))
poly_src = int(matchObj.group(2))
if poly_dst >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_dst = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_dst, param_n, int(8192/param_n)))
exit()
if poly_src >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_src = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_src, param_n, int(8192/param_n)))
exit()
# Copy polynomial coefficients (handle both fast and slow cases in cycle count)
poly_mem[poly_dst] = poly_mem[poly_src].copy()
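# Copies between the two halves of polynomial memory (presumably separate banks)
# take about n/4 cycles, while copies within the same half go through the "tmp"
# register at roughly 3 cycles per coefficient.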
if ((poly_src < int(4096/param_n) and poly_dst >= int(4096/param_n)) or (poly_dst < int(4096/param_n) and poly_src >= int(4096/param_n))):
cycles = 2 + 1 + 1 + int(param_n/4)
else:
cycles = 2 + 1 + 1 + (3*param_n)
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_copy"]]*cycles)
return 5
supported_poly_ops = ["ADD", "SUB", "MUL", "BITREV", "CONST_ADD", "CONST_SUB", "CONST_MUL", "CONST_AND", "CONST_OR", "CONST_XOR", "CONST_RSHIFT", "CONST_LSHIFT"]
# INSTRUCTION - Polynomial ALU Operations
matchObj = re.match(r'poly_op\(op=([\w_]+),poly_dst=(\d+),poly_src=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
op = matchObj.group(1)
poly_dst = int(matchObj.group(2))
poly_src = int(matchObj.group(3))
if poly_dst >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_dst = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_dst, param_n, int(8192/param_n)))
exit()
if poly_src >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_src = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_src, param_n, int(8192/param_n)))
exit()
if not ((poly_src < int(4096/param_n) and poly_dst >= int(4096/param_n)) or (poly_dst < int(4096/param_n) and poly_src >= int(4096/param_n))):
print("\n[Line %4d] %s\nERROR: Polynomial pair \"poly_dst = %d, poly_src = %d\" is not allowed for n = %d, ensure \"poly_dst < %d, poly_src >= %d\" or \"poly_src < %d, poly_dst >= %d\"\n" % (lines[pc], instr, poly_dst, poly_src, param_n, int(4096/param_n), int(4096/param_n), int(4096/param_n), int(4096/param_n)))
exit()
#print("op: %s" % op)
if op == "ADD":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (int(poly_mem[poly_src][i]) + int(poly_mem[poly_dst][i])) % param_q
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_poly_addsub"][param_q]]*cycles)
elif op == "SUB":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (int(poly_mem[poly_src][i]) - int(poly_mem[poly_dst][i]) + param_q) % param_q
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_poly_addsub"][param_q]]*cycles)
elif op == "MUL":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (int(poly_mem[poly_src][i]) * int(poly_mem[poly_dst][i])) % param_q
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_poly_mul"][param_q]]*cycles)
elif op == "BITREV":
# Update polynomial coefficients
for i in range(param_n):
i_rev = int(('{:0{w}b}'.format(i, w=int(math.log(param_n,2))))[::-1], 2)
poly_mem[poly_dst][i_rev] = poly_mem[poly_src][i]
cycles = 2 + 1 + (1+int(param_n/4))
power = power + ([idd_dict["poly_bitrev"]]*cycles)
elif op == "CONST_ADD":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (int(poly_mem[poly_src][i]) + proc_regs["reg"]) % param_q
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_addsub"][param_q]]*cycles)
elif op == "CONST_SUB":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (int(poly_mem[poly_src][i]) - proc_regs["reg"] + param_q) % param_q
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_addsub"][param_q]]*cycles)
elif op == "CONST_MUL":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (int(poly_mem[poly_src][i]) * proc_regs["reg"]) % param_q
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_mul"][param_q]]*cycles)
elif op == "CONST_AND":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (poly_mem[poly_src][i] & proc_regs["reg"])
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_and"]]*cycles)
elif op == "CONST_OR":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (poly_mem[poly_src][i] | proc_regs["reg"])
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_or"]]*cycles)
elif op == "CONST_XOR":
# Update polynomial coefficients
for i in range(param_n):
poly_mem[poly_dst][i] = (poly_mem[poly_src][i] ^ proc_regs["reg"])
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_xor"]]*cycles)
elif op == "CONST_RSHIFT":
# Update polynomial coefficients
for i in range(param_n):
if proc_regs["reg"] < 24:
poly_mem[poly_dst][i] = (poly_mem[poly_src][i] >> proc_regs["reg"]) % 2**24
else:
poly_mem[poly_dst][i] = 0
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_shift"]]*cycles)
elif op == "CONST_LSHIFT":
# Update polynomial coefficients
for i in range(param_n):
if proc_regs["reg"] < 24:
poly_mem[poly_dst][i] = (poly_mem[poly_src][i] << proc_regs["reg"]) % 2**24
else:
poly_mem[poly_dst][i] = 0
cycles = 2 + 1 + 1 + param_n
power = power + ([idd_dict["poly_const_shift"]]*cycles)
else:
print("\n[Line %4d] %s\nERROR: Unsupported operation \"%s\", allowed operations are %s\n" % (lines[pc], instr, op, supported_poly_ops))
exit()
pc = pc + 1
ticks = ticks + cycles
return 5
# INSTRUCTION - Polynomial Circular Left Shift (Multiplication by x modulo x^N+1 and x^N-1)
matchObj = re.match(r'shift_poly\(ring=x\^N([\+\-])1,poly_dst=(\d+),poly_src=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
ring = matchObj.group(1)
poly_dst = int(matchObj.group(2))
poly_src = int(matchObj.group(3))
if poly_dst >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_dst = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_dst, param_n, int(8192/param_n)))
exit()
if poly_src >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly_src = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly_src, param_n, int(8192/param_n)))
exit()
if not ((poly_src < int(4096/param_n) and poly_dst >= int(4096/param_n)) or (poly_dst < int(4096/param_n) and poly_src >= int(4096/param_n))):
print("\n[Line %4d] %s\nERROR: Polynomial pair \"poly_dst = %d, poly_src = %d\" is not allowed for n = %d, ensure \"poly_dst < %d, poly_src >= %d\" or \"poly_src < %d, poly_dst >= %d\"\n" % (lines[pc], instr, poly_dst, poly_src, param_n, int(4096/param_n), int(4096/param_n), int(4096/param_n), int(4096/param_n)))
exit()
# Update polynomial coefficients
for i in range(1, param_n):
poly_mem[poly_dst][i] = poly_mem[poly_src][i-1]
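# Wrap-around term: in the ring x^N + 1 the shifted-out top coefficient re-enters
# negated modulo q, whereas in x^N - 1 it re-enters unchanged (a plain cyclic shift).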
if ring == "+":
poly_mem[poly_dst][0] = param_q - poly_mem[poly_src][param_n-1]
if ring == "-":
poly_mem[poly_dst][0] = poly_mem[poly_src][param_n-1]
cycles = 2 + 1 + 1 + int(param_n/4)
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_shift"]]*cycles)
return 5
# INSTRUCTION - Polynomial Equality Check
matchObj = re.match(r'flag=eq_check\(poly0=(\d+),poly1=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly0 = int(matchObj.group(1))
poly1 = int(matchObj.group(2))
if poly0 >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly0 = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly0, param_n, int(8192/param_n)))
exit()
if poly1 >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly1 = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly1, param_n, int(8192/param_n)))
exit()
if not ((poly1 < int(4096/param_n) and poly0 >= int(4096/param_n)) or (poly0 < int(4096/param_n) and poly1 >= int(4096/param_n))):
print("\n[Line %4d] %s\nERROR: Polynomial pair \"poly0 = %d, poly1 = %d\" is not allowed for n = %d, ensure \"poly0 < %d, poly1 >= %d\" or \"poly1 < %d, poly0 >= %d\"\n" % (lines[pc], instr, poly0, poly1, param_n, int(4096/param_n), int(4096/param_n), int(4096/param_n), int(4096/param_n)))
exit()
# Compare polynomial coefficients and update flag
if poly_mem[poly0] == poly_mem[poly1]:
proc_regs["flag"] = 1
else:
proc_regs["flag"] = 0
proc_regs["tmp"] = random.getrandbits(24) # "tmp" register gets clobbered
cycles = 2 + 1 + 2 + param_n
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_eq_check"]]*cycles)
return 6
# INSTRUCTION - Polynomial Infinity Norm Check
matchObj = re.match(r'flag=inf_norm_check\(poly=(\d+),bound=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
bound = int(matchObj.group(2))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if bound >= 2**24:
print("\n[Line %4d] %s\nERROR: Parameter \"bound = %d\" too large, must be less than 2**24\n" % (lines[pc], instr, bound))
exit()
# Update register value
proc_regs["reg"] = bound
cycles = 2
# Compare infinity norm of polynomial with specified bound and update flag
count = 0
for i in range(param_n):
if poly_mem[poly][i] > bound and poly_mem[poly][i] < (param_q - bound):
count = count + 1
if count == 0:
proc_regs["flag"] = 1
else:
proc_regs["flag"] = 0
cycles = cycles + 2 + 1 + 1 + param_n
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_inf_norm_check"]]*cycles)
return 6
# INSTRUCTION - Register Comparison
matchObj = re.match(r'flag=compare\(c(\d),(\d+)\)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
val = int(matchObj.group(2))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg))
exit()
if val >= 2**16:
print("\n[Line %4d] %s\nERROR: Value %s too big for 16-bit register \"c%d\"\n" % (lines[pc], instr, val, reg))
exit()
# Compare register value and update flag
if proc_regs["c%s" % reg] < val:
proc_regs["flag"] = -1
elif proc_regs["c%s" % reg] > val:
proc_regs["flag"] = 1
else:
proc_regs["flag"] = 0
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 6
matchObj = re.match(r'flag=compare\(reg,(\d+)\)', instr_t, re.M|re.I)
if matchObj:
val = int(matchObj.group(1))
if val >= 2**24:
print("\n[Line %4d] %s\nERROR: Value %s too big for 24-bit register \"reg\"\n" % (lines[pc], instr, val))
exit()
# Compare register value and update flag
if proc_regs["reg"] < val:
proc_regs["flag"] = -1
elif proc_regs["reg"] > val:
proc_regs["flag"] = 1
else:
proc_regs["flag"] = 0
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 6
matchObj = re.match(r'flag=compare\(tmp,(\d+)\)', instr_t, re.M|re.I)
if matchObj:
val = int(matchObj.group(1))
if val >= 2**24:
print("\n[Line %4d] %s\nERROR: Value %s too big for 24-bit register \"tmp\"\n" % (lines[pc], instr, val))
exit()
# Compare register value and update flag
if proc_regs["tmp"] < val:
proc_regs["flag"] = -1
elif proc_regs["tmp"] > val:
proc_regs["flag"] = 1
else:
proc_regs["flag"] = 0
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 6
# INSTRUCTION - Check Flag and Jump
matchObj = re.match(r'if\(flag([!=]=)([\-\+]{0,1})([01])\)goto([\w\d_]+)', instr_t, re.M|re.I)
if matchObj:
op = matchObj.group(1)
sign = matchObj.group(2)
val = int(matchObj.group(3))
label = matchObj.group(4)
if label not in labels:
print("\n[Line %4d] %s\nERROR: Label \"%s\" not found\n" % (lines[pc], instr, label))
exit()
# Check flag value and jump
if op == "==":
if val == 0:
if proc_regs["flag"] == 0:
pc = labels[label]
else:
pc = pc + 1
if val == 1:
if sign == "+" or sign == "":
if proc_regs["flag"] == 1:
pc = labels[label]
else:
pc = pc + 1
if sign == "-":
if proc_regs["flag"] == -1:
pc = labels[label]
else:
pc = pc + 1
if op == "!=":
if val == 0:
if proc_regs["flag"] != 0:
pc = labels[label]
else:
pc = pc + 1
if val == 1:
if sign == "+" or sign == "":
if proc_regs["flag"] != 1:
pc = labels[label]
else:
pc = pc + 1
if sign == "-":
if proc_regs["flag"] != -1:
pc = labels[label]
else:
pc = pc + 1
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 6
# INSTRUCTION - SHA3 Operations
matchObj = re.match(r'sha3_init', instr_t, re.M|re.I)
if matchObj:
keccak_buf = ""
cycles = 2 + 1 + 25
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sha3"]]*cycles)
return 7
matchObj = re.match(r'sha3_(\d+)_absorb\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
poly = int(matchObj.group(2))
if mode != 256 and mode != 512:
print("\n[Line %4d] %s\nERROR: Only SHA3-256 and SHA3-512 are supported\n" % (lines[pc], instr))
exit()
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
# Push zero-padded polynomial coefficients into Keccak buffer
for i in range(param_n):
keccak_buf = keccak_buf + hex(poly_mem[poly][i])[2:].rstrip("L").rjust(8,'0')
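# Each coefficient is absorbed as 4 zero-padded bytes: the SHA3-256 rate of 136
# bytes holds 34 coefficients per permutation (17 cycles to load the 64-bit lanes
# + 25 cycles for the Keccak permutation), and the SHA3-512 rate of 72 bytes
# holds 18 coefficients (9 + 25 cycles).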
if mode == 256:
cycles = 2 + 1 + 1 + param_n + math.ceil(param_n/34)*(17+25)
if mode == 512:
cycles = 2 + 1 + 1 + param_n + math.ceil(param_n/18)*(9+25)
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["poly_hash"]]*cycles)
return 7
matchObj = re.match(r'sha3_(\d+)_absorb\(r(\d)\)', instr_t, re.M|re.I)
if matchObj:
mode = int(matchObj.group(1))
reg = int(matchObj.group(2))
if mode != 256 and mode != 512:
print("\n[Line %4d] %s\nERROR: Only SHA3-256 and SHA3-512 are supported\n" % (lines[pc], instr))
exit()
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
# Push seed register contents into Keccak buffer
keccak_buf = keccak_buf + hex(proc_regs["r%d" % reg])[2:].rstrip("L").rjust(64,'0')
if mode == 256:
cycles = 2 + 1 + (17+25)
if mode == 512:
cycles = 2 + 1 + (9+25)
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sha3"]]*cycles)
return 7
matchObj = re.match(r'r(\d)=sha3_256_digest', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
if reg != 0 and reg != 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", allowed registers are r0 and r1\n" % (lines[pc], instr, reg))
exit()
# Generate SHA3-256 digest
digest = sha3_256(keccak_buf)
proc_regs["r%d" % reg] = int(digest, 16)
keccak_buf = ""
cycles = 2 + 1 + (25+25+2)
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sha3"]]*cycles)
return 7
matchObj = re.match(r'r0\|\|r1=sha3_512_digest', instr_t, re.M|re.I)
if matchObj:
# Generate SHA3-512 digest
digest = sha3_512(keccak_buf)
proc_regs["r0"] = int(digest, 16) >> 256
proc_regs["r1"] = int(digest, 16) % 2**256
keccak_buf = ""
cycles = 2 + 1 + (25+25+3)
pc = pc + 1
ticks = ticks + cycles
power = power + ([idd_dict["sha3"]]*cycles)
return 7
# INSTRUCTION - End of Program
matchObj = re.match(r'end', instr_t, re.M|re.I)
if matchObj:
#print("end-of-program")
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return 99
# INSTRUCTION - NOP
matchObj = re.match(r'nop', instr_t, re.M|re.I)
if matchObj:
#print("no-operation")
ticks = ticks + 2
power = power + ([idd_dict["ctrl"]]*2)
return -98
# DEBUG-INSTRUCTION - Compare Encoded Polynomials (Debug Only)
# Append "iter_<iter_count>_" to all filenames in case of multiple iterations
if num_iters > 1:
f_prefix = "iter_%d_" % iter_count
else:
f_prefix = ""
matchObj = re.match(r'encode_compare\("(.*)","(.*)",encoding=([\w_]+)\)', instr_t, re.M|re.I)
if matchObj:
f1 = matchObj.group(1)
f2 = matchObj.group(2)
if not f1.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f1))
f1 = f1 + ".npy"
if not f2.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f2))
f2 = f2 + ".npy"
f1 = f1.replace(os.path.basename(f1), f_prefix + os.path.basename(f1))
f2 = f2.replace(os.path.basename(f2), f_prefix + os.path.basename(f2))
encoding = matchObj.group(3)
if not os.path.exists(f1):
print("\n[Line %4d] %s\nERROR: Input file %s for \"encode_compare\" does not exist" % (lines[pc], instr, f1))
exit()
if not os.path.exists(f2):
print("\n[Line %4d] %s\nERROR: Input file %s for \"encode_compare\" does not exist" % (lines[pc], instr, f2))
exit()
b1 = encode_to_bytearray(param_n, param_q, list(np.load(f1, allow_pickle = True)), encoding, lines[pc], instr)
b2 = encode_to_bytearray(param_n, param_q, list(np.load(f2, allow_pickle = True)), encoding, lines[pc], instr)
print("poly_1 = %s" % list(np.load(f1, allow_pickle = True)))
print("poly_2 = %s" % list(np.load(f2, allow_pickle = True)))
print("byte_array_1 = %s" % b1)
print("byte_array_2 = %s" % b2)
if b1 == b2:
print("\n--- MATCH ---\n")
else:
print("\n--- NO MATCH ---\n")
pc = pc + 1
return -98
# DEBUG-INSTRUCTION - Print Encoded Polynomial (Debug Only)
matchObj = re.match(r'encode_print\(poly=(\d+),encoding=([\w_]+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
encoding = matchObj.group(2)
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if "--verbose" in sys.argv:
b = encode_to_bytearray(param_n, param_q, poly_mem[poly], encoding, lines[pc], instr)
print("byte_array = %s" % b)
pc = pc + 1
return -98
# DEBUG-INSTRUCTION - Register / Polynomial Random-Init / Load / Store
# These instructions are not really available in the crypto core, but act as
# substitutes (in the simulator) for the actual 32-bit load / store interface
# Append "iter_<iter_count>_" to all filenames in case of multiple iterations
if num_iters > 1:
f_prefix = "iter_%d_" % iter_count
else:
f_prefix = ""
matchObj = re.match(r'random\(r(\d)\)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", please use \"r0\" or \"r1\"\n" % (lines[pc], instr, reg))
exit()
proc_regs["r%d" % reg] = random.getrandbits(256)
cycles = WRITE_CYCLES*8
pc = pc + 1
if "--free_rw" not in sys.argv:
ticks = ticks + cycles
power = power + ([idd_dict["ctrl"]]*cycles)
return -98
matchObj = re.match(r'random\(poly=(\d+),encoding=([\w\d_]+),"(.*)"\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
encoding = matchObj.group(2)
f = matchObj.group(3)
if not f.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f))
f = f + ".npy"
f = f.replace(os.path.basename(f), f_prefix + os.path.basename(f))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if os.path.exists(f):
print("\n[Line %4d] %s\nWARNING: Output file %s for \"random\" already exists" % (lines[pc], instr, f))
random_poly_encode(param_n, param_q, poly_mem[poly], encoding, lines[pc], instr)
np.save(f, np.asarray(poly_mem[poly]))
cycles = WRITE_CYCLES*param_n
pc = pc + 1
if "--free_rw" not in sys.argv:
ticks = ticks + cycles
power = power + ([idd_dict["poly_read_write"]]*cycles)
return -98
matchObj = re.match(r'load\(r(\d),"(.*)"\)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
f = matchObj.group(2)
if not f.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f))
f = f + ".npy"
f = f.replace(os.path.basename(f), f_prefix + os.path.basename(f))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", please use \"r0\" or \"r1\"\n" % (lines[pc], instr, reg))
exit()
if not os.path.exists(f):
print("\n[Line %4d] %s\nERROR: Input file %s for \"load\" does not exist" % (lines[pc], instr, f))
exit()
proc_regs["r%d" % reg] = list(np.load(f, allow_pickle = True))[0]
cycles = WRITE_CYCLES*8
pc = pc + 1
if "--free_rw" not in sys.argv:
ticks = ticks + cycles
power = power + ([idd_dict["ctrl"]]*cycles)
return -98
matchObj = re.match(r'save\(r(\d),"(.*)"\)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
f = matchObj.group(2)
if not f.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f))
f = f + ".npy"
f = f.replace(os.path.basename(f), f_prefix + os.path.basename(f))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", please use \"r0\" or \"r1\"\n" % (lines[pc], instr, reg))
exit()
if os.path.exists(f):
print("\n[Line %4d] %s\nWARNING: Output file %s for \"save\" already exists" % (lines[pc], instr, f))
np.save(f, np.asarray([proc_regs["r%d" % reg]]))
cycles = READ_CYCLES*8
pc = pc + 1
if "--free_rw" not in sys.argv:
ticks = ticks + cycles
power = power + ([idd_dict["ctrl"]]*cycles)
return -98
matchObj = re.match(r'load\(poly=(\d+),"(.*)"\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
f = matchObj.group(2)
if not f.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f))
f = f + ".npy"
f = f.replace(os.path.basename(f), f_prefix + os.path.basename(f))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if not os.path.exists(f):
print("\n[Line %4d] %s\nERROR: Input file %s for \"load\" does not exist" % (lines[pc], instr, f))
exit()
poly_mem[poly] = list(np.load(f, allow_pickle = True)).copy()
cycles = WRITE_CYCLES*param_n
pc = pc + 1
if "--free_rw" not in sys.argv:
ticks = ticks + cycles
power = power + ([idd_dict["poly_read_write"]]*cycles)
return -98
matchObj = re.match(r'save\(poly=(\d+),"(.*)"\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
f = matchObj.group(2)
if not f.endswith(".npy"):
print("\n[Line %4d] %s\nWARNING: Adding .npy extension to filename \"%s\"\n" % (lines[pc], instr, f))
f = f + ".npy"
f = f.replace(os.path.basename(f), f_prefix + os.path.basename(f))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if os.path.exists(f):
print("\n[Line %4d] %s\nWARNING: Output file %s for \"save\" already exists" % (lines[pc], instr, f))
np.save(f, np.asarray(poly_mem[poly]))
cycles = READ_CYCLES*param_n
pc = pc + 1
if "--free_rw" not in sys.argv:
ticks = ticks + cycles
power = power + ([idd_dict["poly_read_write"]]*cycles)
return -98
# DEBUG-INSTRUCTION - Print (Debug Only)
matchObj = re.match(r'print\(r(\d)\)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"r%d\", please use \"r0\" or \"r1\"\n" % (lines[pc], instr, reg))
exit()
if "--verbose" in sys.argv:
print("\nr%d = 0x%s\n" % (reg, hex(proc_regs["r%d" % reg])[2:].upper().rstrip("L").rjust(64,'0')))
pc = pc + 1
return -99
matchObj = re.match(r'print\(reg\)', instr_t, re.M|re.I)
if matchObj:
if "--verbose" in sys.argv:
print("\nreg = %d\n" % proc_regs["reg"])
pc = pc + 1
return -99
matchObj = re.match(r'print\(tmp\)', instr_t, re.M|re.I)
if matchObj:
if "--verbose" in sys.argv:
print("\ntmp = %d\n" % proc_regs["tmp"])
pc = pc + 1
return -99
matchObj = re.match(r'print\(flag\)', instr_t, re.M|re.I)
if matchObj:
if "--verbose" in sys.argv:
print("\nflag = %d\n" % proc_regs["flag"])
pc = pc + 1
return -99
matchObj = re.match(r'print\(c(\d)\)', instr_t, re.M|re.I)
if matchObj:
reg = int(matchObj.group(1))
if reg > 1:
print("\n[Line %4d] %s\nERROR: No such register \"c%d\", please use \"c0\" or \"c1\"\n" % (lines[pc], instr, reg))
exit()
if "--verbose" in sys.argv:
print("\nc%d = %d\n" % (reg, proc_regs["c%d" % reg]))
pc = pc + 1
return -99
matchObj = re.match(r'print\(poly=(\d+)\)', instr_t, re.M|re.I)
if matchObj:
poly = int(matchObj.group(1))
if poly >= int(8192/param_n):
print("\n[Line %4d] %s\nERROR: No such polynomial \"poly = %d\", allowed polynomials for n = %d are 0 to %d\n" % (lines[pc], instr, poly, param_n, int(8192/param_n)))
exit()
if "--verbose" in sys.argv:
print("\npoly[%d] = %s\n" % (poly, poly_mem[poly]))
pc = pc + 1
return -99
# INVALID INSTRUCTION
return -1
#====================================
# SAPPHIRE-SIM
#====================================
# Check arguments
if len(sys.argv) < 7 or ("--prog" not in sys.argv) or ("--vdd" not in sys.argv) or ("--fmhz" not in sys.argv):
print("\nERROR: Incorrect arguments provided for simulator script")
print("Usage: python sim.py --prog <program_file_path>")
print(" --vdd <voltage>")
print(" --fmhz <frequency_mhz>")
print(" [ --verbose ]")
print(" [ --free_rw ]")
print(" [ --plot_power ]")
print(" [ --cdt <cdt_file_path> ]")
print(" [ --iter <num_iterations> ]")
exit()
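# Example invocation (file names are illustrative only):
#   python sim.py --prog programs/test.prog --vdd 1.1 --fmhz 72 --verbose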
# Check that program file exists
if not os.path.exists(sys.argv[sys.argv.index("--prog") + 1]):
print("\nERROR: Program file %s does not exist" % sys.argv[sys.argv.index("--prog") + 1])
exit()
# Check supply voltage
vdd = float(sys.argv[sys.argv.index("--vdd") + 1])
if vdd < 0.68 or vdd > 1.21:
print("\nERROR: Supply voltage outside acceptable range of 0.68-1.21 V\n")
exit()
# Check operating frequency
# fmax = 12 MHz at 0.68 V and 72 MHz at 1.1 V
# Model fmax as a linear function of vdd (not exactly accurate but good enough for our simulator)
fmhz = int(sys.argv[sys.argv.index("--fmhz") + 1])
fmax = int(12 + (72-12)*(vdd - 0.68)/(1.1-0.68))
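# e.g. at vdd = 0.90 V: fmax = int(12 + 60*(0.90 - 0.68)/0.42) = 43 MHz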
if fmhz > fmax:
print("\nERROR: Operating frequency above maximum %d MHz at %0.2f V\n" % (fmax, vdd))
exit()
defines = ["main"]
ifdefs = []
active_ifdef = "main"
labels = {}
# Read program file
imem_f = open(sys.argv[sys.argv.index("--prog") + 1])
imem = []
# Process ifdefs
for (i, instr) in enumerate(imem_f):
# Identify `define flags
matchObj = re.match(r'`define\s*(.+)', instr.strip(), re.M|re.I)
if matchObj:
defines.append(matchObj.group(1))
imem.append("")
continue
# Identify `ifdef flags
matchObj = re.match(r'`ifdef\s*(.+)', instr.strip(), re.M|re.I)
if matchObj:
ifdefs.append(active_ifdef)
active_ifdef = matchObj.group(1)
imem.append("")
continue
# Identify `endif flags
matchObj = re.match(r'`endif', instr.strip(), re.M|re.I)
if matchObj:
active_ifdef = ifdefs[-1]
ifdefs = ifdefs[:-1]
imem.append("")
continue
# Ignore instructions inside undeclared `ifdef blocks
if active_ifdef not in defines:
imem.append("")
continue
imem.append(instr)
imem_f.close()
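# Illustrative use of the preprocessor directives parsed above:
#   `define DEBUG
#   `ifdef DEBUG
#   print(poly=0)
#   `endif
# Instructions inside an `ifdef block whose flag was never `define-d are skipped.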
# Remove comments
imem = [re.sub(r'#.*$', "", instr) for instr in imem]
# Remove empty lines and leading / trailing spaces
lines = [i+1 for i in range(len(imem)) if imem[i].strip()]
imem = [instr.strip() for instr in imem if instr.strip()]
# Parse labels (labels must be followed by an instruction in the same line)
for (i, instr) in enumerate(imem):
matchObj = re.match(r'([\w\d_]+)\s*:\s*(.+)', instr.strip(), re.M|re.I)
if matchObj:
label = matchObj.group(1)
labels[label] = i
imem[i] = matchObj.group(2)
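# e.g. a line of the form "my_loop: nop" registers the label "my_loop" at that
# instruction's index, so flag-checked goto instructions can jump to it.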
# Check if first instruction is "config"
if not re.match(r'config.*', imem[0], re.M|re.I):
print("\nERROR: First instruction of program must be \"config\"\n")
exit()
# Check if last instruction is "end"
if not re.match(r'end', imem[len(imem)-1], re.M|re.I):
print("\nWARNING: Last instruction of program must be \"end\", appending \"end\" at the end of program\n")
imem.append("end")
keccak_buf = ""
proc_regs = {
"r0" : 0,
"r1" : 0,
"reg" : 0,
"tmp" : 0,
"c0" : 0,
"c1" : 0,
"flag" : 0,
}
poly_mem = []
poly_tmp = []
param_n = 0
param_q = 0
ticks = 0
pc = 0
power = []
# Read CDT file, if provided
if "--cdt" in sys.argv:
if not os.path.exists(sys.argv[sys.argv.index("--cdt") + 1]):
print("\nERROR: CDT file %s does not exist" % sys.argv[sys.argv.index("--cdt") + 1])
exit()
cdt_mem = open(sys.argv[sys.argv.index("--cdt") + 1])
cdt_mem = [cdval.strip() for cdval in cdt_mem if cdval.strip()]
cdt_mem = [int(cdval) for cdval in cdt_mem]
if len(cdt_mem) > 64:
print("\nERROR: CDT is longer than 64 entries")
exit()
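# The CDT file is expected to be plain text with one integer threshold per line
# (blank lines ignored, at most 64 entries).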
num_iters = 1
# Read number of iterations, if provided
if "--iter" in sys.argv:
num_iters = int(sys.argv[sys.argv.index("--iter") + 1])
ticks_arr = []
power_arr = []
energy_arr = []
for i in range(num_iters):
keccak_buf = ""
proc_regs["r0"] = 0
proc_regs["r1"] = 0
proc_regs["reg"] = 0
proc_regs["tmp"] = 0
proc_regs["c0"] = 0
proc_regs["c1"] = 0
proc_regs["flag"] = 0
ticks = 0
pc = 0
power = []
# The lattice-crypto core is not pipelined
# Requires 1 cycle to fetch and >= 1 cycles to decode and execute instruction
instr_count = 0
while (1):
if "--verbose" in sys.argv:
if pc in labels.values():
for (label, label_pc) in labels.items():
if label_pc == pc:
break
print("[%3d] %s : %s" %(pc, label, imem[pc]))
else:
print("[%3d] %s" %(pc, imem[pc]))
ret = instr_exec(imem[pc], i)
# Invalid instruction
if ret == -1:
print("\n[Line %4d] %s\nERROR: Instruction not supported\n" % (lines[pc], imem[pc]))
exit()
if ret >= 0:
instr_count = instr_count + 1
# End of program
if ret == 99:
break
# Convert current to power at specified operating condition
# Take into account the fact that leakage power and dynamic power scale differently
# Leakage current is assumed independent of processor state and operating frequency
# i_leak = 102.6 uA at 0.70 V
# i_leak = 121.0 uA at 0.75 V
# i_leak = 139.5 uA at 0.80 V
# i_leak = 159.7 uA at 0.85 V
# i_leak = 188.8 uA at 0.90 V
# i_leak = 220.0 uA at 0.95 V
# i_leak = 257.4 uA at 1.00 V
# i_leak = 303.8 uA at 1.05 V
# i_leak = 355.7 uA at 1.10 V
# Model leakage current as an exponential function of vdd (pretty accurate, curve-fitted from measurements)
# Model active current as proportional to vdd and fmhz (again, not exactly accurate but good enough for our simulator)
i_leak = 11.728*math.exp(3.0933*vdd)
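# Sanity check of the fit: at vdd = 1.10 V, 11.728*exp(3.0933*1.1) is about 352 uA,
# within roughly 1% of the measured 355.7 uA.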
power = [(i_leak + ((idd - 355.7)*(fmhz/72)*(vdd/1.1))) for idd in power]
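# The idd_dict values are presumably referenced to 1.10 V / 72 MHz, so the
# dynamic part (idd minus the 355.7 uA leakage at 1.10 V) is scaled linearly
# with frequency and voltage before the leakage at the target vdd is added back.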
# Add some tiny random noise (+/-1%) to current values
power = [idd + (random.randrange(-int(idd/100), int(idd/100)) if int(idd/100) > 0 else 0) for idd in power]
# Finally, convert current to power
power = [idd*vdd for idd in power]
if num_iters > 1:
print("\n[iter = %d]" % (i+1))
else:
print("\n")
print("------------------------------------------------------")
print("Program Execution Summary (at %0.2f V and %d MHz)" % (vdd, fmhz))
print("------------------------------------------------------")
print("* Instructions: %d" % instr_count)
print("* Total Cycles: %s" % format(ticks, ',d'))
ticks_arr.append(ticks)
time_us = ticks/fmhz
if time_us < 1e3:
print("* Total Time: %0.2f us" % (time_us))
elif time_us < 1e6:
print("* Total Time: %0.2f ms" % (time_us/1e3))
elif time_us < 1e9:
print("* Total Time: %0.2f s" % (time_us/1e6))
avg_power_uw = sum(power)/ticks
if avg_power_uw < 1e3:
print("* Average Power: %0.2f uW" % (avg_power_uw))
elif avg_power_uw < 1e6:
print("* Average Power: %0.2f mW" % (avg_power_uw/1e3))
power_arr.append(avg_power_uw)
energy_pj = sum(power)/fmhz
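# Energy per cycle = power (uW) times cycle period (1/fmhz us), and 1 uW x 1 us = 1 pJ,
# so summing over all cycles and dividing by fmhz yields the total energy in pJ.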
if energy_pj < 1e3:
print("* Total Energy: %0.2f pJ" % (energy_pj))
elif energy_pj < 1e6:
print("* Total Energy: %0.2f nJ" % (energy_pj/1e3))
elif energy_pj < 1e9:
print("* Total Energy: %0.2f uJ" % (energy_pj/1e6))
energy_arr.append(energy_pj)
print("------------------------------------------------------")
print("\n")
# Print average cycles and energy, only in case of multiple iterations
if num_iters > 1:
print("Over %d Iterations:" % (num_iters))
avg_ticks = math.ceil(sum(ticks_arr)/len(ticks_arr))
print(" Average Cycles: %s" % (format(avg_ticks, ',d')))
avg_avg_power_uw = sum(power_arr)/len(power_arr)
if avg_avg_power_uw < 1e3:
print(" Average Power: %0.2f uW" % (avg_avg_power_uw))
elif avg_avg_power_uw < 1e6:
print(" Average Power: %0.2f mW" % (avg_avg_power_uw/1e3))
avg_energy_pj = sum(energy_arr)/len(energy_arr)
if avg_energy_pj < 1e3:
print(" Average Energy: %0.2f pJ" % (avg_energy_pj))
elif avg_energy_pj < 1e6:
print(" Average Energy: %0.2f nJ" % (avg_energy_pj/1e3))
elif avg_energy_pj < 1e9:
print(" Average Energy: %0.2f uJ" % (avg_energy_pj/1e6))
# Plot power profile, only in case of single iteration
if "--plot_power" in sys.argv and num_iters == 1:
power = [i_leak] + power
mpl.rcParams['xtick.major.pad'] = 5
mpl.rcParams['ytick.major.pad'] = 5
plt.figure(figsize=(15,5))
plt.plot(power, linewidth=1.5)
plt.xticks(fontsize=14)
plt.yticks(fontsize=14)
plt.xlabel("Cycles", fontsize=16, fontweight='bold')
plt.ylabel("Power (uW)", fontsize=16, fontweight='bold')
plt.tight_layout()
plt.show()
| 48.133628
| 326
| 0.547929
| 12,902
| 87,170
| 3.589288
| 0.051697
| 0.035501
| 0.041201
| 0.038869
| 0.810901
| 0.775292
| 0.753266
| 0.735127
| 0.721091
| 0.707228
| 0
| 0.055365
| 0.279133
| 87,170
| 1,810
| 327
| 48.160221
| 0.681594
| 0.086349
| 0
| 0.662549
| 0
| 0.035761
| 0.217923
| 0.027537
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00065
| false
| 0
| 0.004551
| 0
| 0.045514
| 0.137841
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2020bea896e542e5297a012977545669237cca90
| 3,467
|
py
|
Python
|
test/python/test_biasadd.py
|
slyalin/openvino_tensorflow
|
37a2e5b6ff1e60217d31340ad3975b41faa39da0
|
[
"Apache-2.0"
] | null | null | null |
test/python/test_biasadd.py
|
slyalin/openvino_tensorflow
|
37a2e5b6ff1e60217d31340ad3975b41faa39da0
|
[
"Apache-2.0"
] | null | null | null |
test/python/test_biasadd.py
|
slyalin/openvino_tensorflow
|
37a2e5b6ff1e60217d31340ad3975b41faa39da0
|
[
"Apache-2.0"
] | 1
|
2021-05-12T07:35:34.000Z
|
2021-05-12T07:35:34.000Z
|
# ==============================================================================
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# ==============================================================================
"""Openvino Tensorflow BiasAdd operation test
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pytest
import numpy as np
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from common import NgraphTest
np.random.seed(8)
class TestBiasAddOperations(NgraphTest):
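# Each test builds a small bias_add graph (NHWC or NCHW data format) and checks
# that the nGraph/OpenVINO-backed run matches the plain TensorFlow run element-wise.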
def test_BiasAdd1(self):
input_data = (0, 1, 0, 1, 2, 1, 1, 0, 3, 1, 1, 0, 4, 4, 5, 4)
input_data = np.reshape(input_data, (2, 2, 2, 2))
input_var = tf.compat.v1.placeholder(tf.float32, shape=(2, 2, 2, 2))
bias_data = (100., -100.)
bias_var = tf.compat.v1.placeholder(tf.float32, shape=(2))
out = tf.nn.bias_add(input_var, bias_var, 'NHWC')
def run_test(sess):
return sess.run(
out, feed_dict={
input_var: input_data,
bias_var: bias_data
})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_BiasAdd2(self):
input_data = (0, 1, 0, 1, 2, 1, 1, 0, 3, 1, 1, 0, 4, 4, 5, 4)
input_data = np.reshape(input_data, (2, 2, 2, 2))
input_var = tf.compat.v1.placeholder(tf.float32, shape=(2, 2, 2, 2))
bias_data = (100., -100.)
bias_var = tf.compat.v1.placeholder(tf.float32, shape=(2))
out = tf.nn.bias_add(input_var, bias_var, 'NCHW')
def run_test(sess):
return sess.run(
out, feed_dict={
input_var: input_data,
bias_var: bias_data
})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_BiasAdd3(self):
input_data = (0, 1, 0, 1, 2, 1, 1, 0, 3, 1, 1, 0, 4, 4, 5, 4, 3, 5, 1,
2, 0, 4, 0, 1)
input_data = np.reshape(input_data, (2, 3, 2, 2))
input_var = tf.compat.v1.placeholder(tf.float32, shape=(2, 3, 2, 2))
bias_data = (100., -100., 50) # channels = 3
bias_var = tf.compat.v1.placeholder(tf.float32, shape=(3))
out = tf.nn.bias_add(input_var, bias_var, 'NCHW')
def run_test(sess):
return sess.run(
out, feed_dict={
input_var: input_data,
bias_var: bias_data
})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_BiasAdd4(self):
input_data = (0, 1, 0, 1, 2, 1, 1, 0, 3, 1, 1, 0, 4, 4, 5, 4, 3, 5, 1,
2, 0, 4, 0, 1)
input_data = np.reshape(input_data, (2, 2, 2, 3))
input_var = tf.compat.v1.placeholder(tf.float32, shape=(2, 2, 2, 3))
bias_data = (100., -100., 50) # channels = 3
bias_var = tf.compat.v1.placeholder(tf.float32, shape=(3))
out = tf.nn.bias_add(input_var, bias_var, 'NHWC')
def run_test(sess):
return sess.run(
out, feed_dict={
input_var: input_data,
bias_var: bias_data
})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
| 32.401869
| 80
| 0.522354
| 475
| 3,467
| 3.610526
| 0.16
| 0.020991
| 0.017493
| 0.060641
| 0.780758
| 0.780758
| 0.779592
| 0.779592
| 0.779592
| 0.778426
| 0
| 0.075519
| 0.304875
| 3,467
| 106
| 81
| 32.707547
| 0.6361
| 0.08653
| 0
| 0.75
| 0
| 0
| 0.005073
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.111111
| false
| 0
| 0.097222
| 0.055556
| 0.277778
| 0.013889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
204143c9b99d8c2589fc7c630597840034cf1fa6
| 157
|
py
|
Python
|
src/lib/db/src/create_all.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | 1
|
2018-09-11T19:32:25.000Z
|
2018-09-11T19:32:25.000Z
|
src/lib/db/src/create_all.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | null | null | null |
src/lib/db/src/create_all.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | null | null | null |
from .. import engine
from .. import Model
def create_all():
"""
Creates the schema for the models
"""
Model.metadata.create_all(engine)
| 13.083333
| 37
| 0.643312
| 20
| 157
| 4.95
| 0.65
| 0.20202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.248408
| 157
| 11
| 38
| 14.272727
| 0.838983
| 0.210191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
204f53d4fd1b20828d49f634b30d897ab2f08e54
| 38
|
py
|
Python
|
learnml/tree/__init__.py
|
spyridon97/Learn-Machine-Learning
|
4678430b40a45f25fe9d9dc4400450b974d0b6fb
|
[
"MIT"
] | null | null | null |
learnml/tree/__init__.py
|
spyridon97/Learn-Machine-Learning
|
4678430b40a45f25fe9d9dc4400450b974d0b6fb
|
[
"MIT"
] | null | null | null |
learnml/tree/__init__.py
|
spyridon97/Learn-Machine-Learning
|
4678430b40a45f25fe9d9dc4400450b974d0b6fb
|
[
"MIT"
] | null | null | null |
from .DecisionTree import DecisionTree
| 38
| 38
| 0.894737
| 4
| 38
| 8.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
645a8881193a53add83adceac66523a7ad2296e6
| 3,005
|
py
|
Python
|
model/CRNN/train.py
|
manojakm/sanskrit-ocr-1
|
4a7b58dd68ef30e8a849acde3fff1595b4c607c9
|
[
"MIT"
] | 1
|
2021-08-06T15:29:07.000Z
|
2021-08-06T15:29:07.000Z
|
model/CRNN/train.py
|
Sanskrit-Club/sanskrit-ocr
|
de908c1a62df8539b22a2458b2dfd4bd07462009
|
[
"MIT"
] | null | null | null |
model/CRNN/train.py
|
Sanskrit-Club/sanskrit-ocr
|
de908c1a62df8539b22a2458b2dfd4bd07462009
|
[
"MIT"
] | 1
|
2020-11-07T08:37:52.000Z
|
2020-11-07T08:37:52.000Z
|
import os
import sys
import shutil
import tensorflow as tf
if len(sys.argv)<5:
sys.exit("Format python model/CRNN/train.py <training tfrecord filename> <no.of epochs> <weights_path> <steps_per_checkpoint>")
fil = sys.argv[1]
epochs=int(sys.argv[2])
weights_path = sys.argv[3]
steps_per_checkpoint = sys.argv[4]
c=0
for fn in ["model/CRNN/data/tfReal/"+fil]:
for record in tf.python_io.tf_record_iterator(fn):
c += 1
print(c)
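# c now holds the number of records in the tfrecord file; int(c/32) is passed as
# --train_epochs below, which presumably corresponds to one pass over the data
# at a batch size of 32.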
if weights_path=="0":
if steps_per_checkpoint=="0":
print("Started Training")
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32)))
print("1 Epoch completed")
for i in range(epochs-1):
ckpt = tf.train.get_checkpoint_state("model/CRNN/model/shadownet")
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --weights_path "+ckpt.model_checkpoint_path)
print(str(i+2)+" Epochs completed")
else:
print("Started Training")
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --steps_per_checkpoint "+steps_per_checkpoint)
print("1 Epoch completed")
for i in range(epochs-1):
ckpt = tf.train.get_checkpoint_state("model/CRNN/model/shadownet")
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --weights_path "+ckpt.model_checkpoint_path + " --steps_per_checkpoint "+steps_per_checkpoint)
print(str(i+2)+" Epochs completed")
else:
print("Started Training")
if steps_per_checkpoint=="0":
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --weights_path "+weights_path)
print("1 Epoch completed")
for i in range(epochs-1):
ckpt = tf.train.get_checkpoint_state("model/CRNN/model/shadownet")
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --weights_path "+ckpt.model_checkpoint_path)
print(str(i+2)+" Epochs completed")
else:
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --weights_path "+weights_path+ " --steps_per_checkpoint "+steps_per_checkpoint)
print("1 Epoch completed")
for i in range(epochs-1):
ckpt = tf.train.get_checkpoint_state("model/CRNN/model/shadownet")
os.system("CUDA_VISIBLE_DEVICES=0 python model/CRNN/tools/train_shadownet.py --filename "+fil+" --train_epochs "+str(int(c/32))+ " --weights_path "+ckpt.model_checkpoint_path+ " --steps_per_checkpoint "+steps_per_checkpoint)
print(str(i+2)+" Epochs completed")
| 57.788462
| 237
| 0.685857
| 432
| 3,005
| 4.571759
| 0.145833
| 0.063797
| 0.109367
| 0.076962
| 0.850127
| 0.814177
| 0.814177
| 0.814177
| 0.814177
| 0.814177
| 0
| 0.018319
| 0.164393
| 3,005
| 51
| 238
| 58.921569
| 0.76822
| 0
| 0
| 0.583333
| 0
| 0.020833
| 0.454243
| 0.230616
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
648f6fe54cc3b30d371d72b0b7a91ccdc493e375
| 4,288
|
py
|
Python
|
pizza_cutter/des_pizza_cutter/tests/test_se_image_ccd_bnds.py
|
beckermr/pizza-cutter
|
04eefd2d4b2a63975fe809c60b5c8e7e3fcf26c6
|
[
"BSD-3-Clause"
] | null | null | null |
pizza_cutter/des_pizza_cutter/tests/test_se_image_ccd_bnds.py
|
beckermr/pizza-cutter
|
04eefd2d4b2a63975fe809c60b5c8e7e3fcf26c6
|
[
"BSD-3-Clause"
] | 194
|
2018-10-24T23:40:47.000Z
|
2021-11-17T16:02:35.000Z
|
pizza_cutter/des_pizza_cutter/tests/test_se_image_ccd_bnds.py
|
beckermr/pizza-cutter
|
04eefd2d4b2a63975fe809c60b5c8e7e3fcf26c6
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import pytest
from meds.bounds import Bounds
from .._se_image import SEImageSlice
@pytest.mark.skipif(
os.environ.get('TEST_DESDATA', None) is None,
reason=(
'SEImageSlice can only be tested if '
'test data is at TEST_DESDATA'))
def test_se_image_ccd_bnds_in(se_image_data):
se_im = SEImageSlice(
source_info=se_image_data['source_info'],
psf_model=None,
wcs=se_image_data['eu_wcs'],
wcs_position_offset=1,
wcs_color=0,
psf_kwargs=None,
noise_seeds=[10],
mask_tape_bumps=False,
)
in_bnds = Bounds(10, 20, 50, 60)
assert se_im.ccd_contains_bounds(in_bnds)
@pytest.mark.skipif(
os.environ.get('TEST_DESDATA', None) is None,
reason=(
'SEImageSlice can only be tested if '
'test data is at TEST_DESDATA'))
@pytest.mark.parametrize('in_bnds', [
Bounds(0, 20, 50, 60),
Bounds(10, 4095, 50, 60),
Bounds(10, 20, 0, 60),
Bounds(10, 20, 50, 2047)])
def test_se_image_ccd_bnds_in_edge(se_image_data, in_bnds):
se_im = SEImageSlice(
source_info=se_image_data['source_info'],
psf_model=None,
wcs=se_image_data['eu_wcs'],
wcs_position_offset=1,
wcs_color=0,
psf_kwargs=None,
noise_seeds=[10],
mask_tape_bumps=False,
)
assert se_im.ccd_contains_bounds(in_bnds)
@pytest.mark.skipif(
os.environ.get('TEST_DESDATA', None) is None,
reason=(
'SEImageSlice can only be tested if '
'test data is at TEST_DESDATA'))
@pytest.mark.parametrize('over_bnds', [
Bounds(-10, 20, 50, 60),
Bounds(10, 8000, 50, 60),
Bounds(10, 20, -50, 60),
Bounds(10, 20, 50, 6000),
Bounds(-10, 8000, -50, 6000)])
def test_se_image_ccd_bnds_over(se_image_data, over_bnds):
se_im = SEImageSlice(
source_info=se_image_data['source_info'],
psf_model=None,
wcs=se_image_data['eu_wcs'],
wcs_position_offset=1,
wcs_color=0,
psf_kwargs=None,
noise_seeds=[10],
mask_tape_bumps=False,
)
assert not se_im.ccd_contains_bounds(over_bnds)
@pytest.mark.skipif(
os.environ.get('TEST_DESDATA', None) is None,
reason=(
'SEImageSlice can only be tested if '
'test data is at TEST_DESDATA'))
@pytest.mark.parametrize('out_bnds', [
Bounds(-20, -10, -60, -50),
Bounds(-20, -10, 50000, 60000),
Bounds(10000, 20000, -60, -50),
Bounds(10000, 20000, 50000, 60000)])
def test_se_image_ccd_bnds_out(se_image_data, out_bnds):
se_im = SEImageSlice(
source_info=se_image_data['source_info'],
psf_model=None,
wcs=se_image_data['eu_wcs'],
wcs_position_offset=1,
wcs_color=0,
psf_kwargs=None,
noise_seeds=[10],
mask_tape_bumps=False,
)
assert not se_im.ccd_contains_bounds(out_bnds)
@pytest.mark.skipif(
os.environ.get('TEST_DESDATA', None) is None,
reason=(
'SEImageSlice can only be tested if '
'test data is at TEST_DESDATA'))
@pytest.mark.parametrize('buffer', [0, 5, 10])
def test_se_image_ccd_bnds_buffer_in(se_image_data, buffer):
se_im = SEImageSlice(
source_info=se_image_data['source_info'],
psf_model=None,
wcs=se_image_data['eu_wcs'],
wcs_position_offset=1,
wcs_color=0,
psf_kwargs=None,
noise_seeds=[10],
mask_tape_bumps=False,
)
in_bnds = Bounds(20, 4075, 20, 2027)
assert se_im.ccd_contains_bounds(in_bnds, buffer=buffer)
@pytest.mark.skipif(
os.environ.get('TEST_DESDATA', None) is None,
reason=(
'SEImageSlice can only be tested if '
'test data is at TEST_DESDATA'))
@pytest.mark.parametrize('out_bnds', [
Bounds(10, 4075, 20, 2027),
Bounds(20, 4085, 20, 2027),
Bounds(20, 4075, 10, 2027),
Bounds(20, 4075, 20, 2037)])
def test_se_image_ccd_bnds_buffer_out(se_image_data, out_bnds):
se_im = SEImageSlice(
source_info=se_image_data['source_info'],
psf_model=None,
wcs=se_image_data['eu_wcs'],
wcs_position_offset=1,
wcs_color=0,
psf_kwargs=None,
noise_seeds=[10],
mask_tape_bumps=False,
)
assert not se_im.ccd_contains_bounds(out_bnds, buffer=15)
| 28.397351
| 63
| 0.64319
| 627
| 4,288
| 4.097289
| 0.118022
| 0.06812
| 0.077073
| 0.04204
| 0.852472
| 0.836123
| 0.811989
| 0.755936
| 0.743091
| 0.743091
| 0
| 0.072083
| 0.236474
| 4,288
| 150
| 64
| 28.586667
| 0.712584
| 0
| 0
| 0.676923
| 0
| 0
| 0.137593
| 0
| 0
| 0
| 0
| 0
| 0.046154
| 1
| 0.046154
| false
| 0
| 0.030769
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
64afbc0c3fb80ce36f09d95ea77bbc5097c6cee4
| 6,021
|
py
|
Python
|
api/migrations/versions/ea937f8dea77_v1.py
|
jimbunny/wedding-invitation
|
a3648454e1105d9362f95d9f6e69055a7522e15b
|
[
"MIT"
] | null | null | null |
api/migrations/versions/ea937f8dea77_v1.py
|
jimbunny/wedding-invitation
|
a3648454e1105d9362f95d9f6e69055a7522e15b
|
[
"MIT"
] | null | null | null |
api/migrations/versions/ea937f8dea77_v1.py
|
jimbunny/wedding-invitation
|
a3648454e1105d9362f95d9f6e69055a7522e15b
|
[
"MIT"
] | null | null | null |
"""v1
Revision ID: ea937f8dea77
Revises:
Create Date: 2022-02-08 14:54:03.738035
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ea937f8dea77'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('admin_roles',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('description', sa.String(length=50), nullable=False),
sa.Column('permission', sa.String(length=10), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('admin_users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('email', sa.String(length=50), nullable=False),
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('_password', sa.String(length=250), nullable=False),
sa.Column('permission', sa.String(length=50), nullable=False),
sa.Column('avatar', sa.String(length=250), nullable=False),
sa.Column('login_time', sa.DATETIME(timezone=6), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
op.create_table('classifications',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('rank', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('logs',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('model', sa.String(length=20), nullable=False),
sa.Column('action', sa.String(length=10), nullable=False),
sa.Column('content', sa.String(length=500), nullable=False),
sa.Column('update_time', sa.DateTime(timezone=6), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('packages',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('name', sa.String(length=25), nullable=False),
sa.Column('no', sa.String(length=50), nullable=False),
sa.Column('_type', sa.String(length=50), nullable=False),
sa.Column('cover_img', sa.String(length=100), nullable=False),
sa.Column('detail_img', sa.String(length=100), nullable=False),
sa.Column('status', sa.BOOLEAN(), nullable=False),
sa.Column('price', sa.Integer(), nullable=False),
sa.Column('rank', sa.Integer(), nullable=False),
sa.Column('product_list', sa.String(length=100), nullable=False),
sa.Column('remark', sa.String(length=500), nullable=True),
sa.Column('update_user_id', sa.Integer(), nullable=False),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('products',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('no', sa.String(length=25), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('cover_img', sa.String(length=100), nullable=False),
sa.Column('detail_img', sa.String(length=100), nullable=False),
sa.Column('description_img', sa.String(length=100), nullable=False),
sa.Column('classification', sa.String(length=50), nullable=False),
sa.Column('status', sa.BOOLEAN(), nullable=False),
sa.Column('in_price', sa.Integer(), nullable=False),
sa.Column('out_price', sa.Integer(), nullable=False),
sa.Column('count', sa.Integer(), nullable=False),
sa.Column('rank', sa.Integer(), nullable=False),
sa.Column('level', sa.String(length=10), nullable=False),
sa.Column('position', sa.String(length=100), nullable=False),
sa.Column('package_list', sa.String(length=100), nullable=False),
sa.Column('remark', sa.String(length=500), nullable=True),
sa.Column('update_user_id', sa.Integer(), nullable=False),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'),
sa.UniqueConstraint('no')
)
op.create_table('tests',
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=250), nullable=False),
sa.Column('login_time', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tests')
op.drop_table('products')
op.drop_table('packages')
op.drop_table('logs')
op.drop_table('classifications')
op.drop_table('admin_users')
op.drop_table('admin_roles')
# ### end Alembic commands ###
| 45.270677
| 72
| 0.684272
| 799
| 6,021
| 5.082603
| 0.130163
| 0.139867
| 0.173603
| 0.22753
| 0.828367
| 0.824181
| 0.80916
| 0.756956
| 0.631618
| 0.604285
| 0
| 0.023265
| 0.129048
| 6,021
| 132
| 73
| 45.613636
| 0.751144
| 0.045175
| 0
| 0.473684
| 0
| 0
| 0.130427
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017544
| false
| 0.008772
| 0.017544
| 0
| 0.035088
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b3971245ad4433a75ed9239c1f914a72379a52d2
| 114
|
py
|
Python
|
telethon/__init__.py
|
madcat1991/Telethon
|
afcddfd7c155cd88e73075c7faf2aaa39b065e99
|
[
"MIT"
] | null | null | null |
telethon/__init__.py
|
madcat1991/Telethon
|
afcddfd7c155cd88e73075c7faf2aaa39b065e99
|
[
"MIT"
] | null | null | null |
telethon/__init__.py
|
madcat1991/Telethon
|
afcddfd7c155cd88e73075c7faf2aaa39b065e99
|
[
"MIT"
] | 1
|
2019-07-29T03:50:02.000Z
|
2019-07-29T03:50:02.000Z
|
from .telegram_bare_client import TelegramBareClient
from .telegram_client import TelegramClient
from . import tl
| 28.5
| 52
| 0.868421
| 14
| 114
| 6.857143
| 0.571429
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 114
| 3
| 53
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b3f87c2ec5c66ef86a076ab5ead3a0a02a84583b
| 49
|
py
|
Python
|
gym_tetris/envs/__init__.py
|
michielcx/Tetris-DQN
|
66ee640aaa2068f3db4798ddf51f1518c3779cc6
|
[
"MIT"
] | 13
|
2019-10-23T09:32:33.000Z
|
2021-08-01T10:39:57.000Z
|
gym_tetris/envs/__init__.py
|
michielcx/Tetris-DQN
|
66ee640aaa2068f3db4798ddf51f1518c3779cc6
|
[
"MIT"
] | 1
|
2022-02-10T00:29:36.000Z
|
2022-02-10T00:29:36.000Z
|
gym_tetris/envs/__init__.py
|
michielcx/Tetris-DQN
|
66ee640aaa2068f3db4798ddf51f1518c3779cc6
|
[
"MIT"
] | 6
|
2020-10-16T01:43:35.000Z
|
2022-01-28T12:05:32.000Z
|
from gym_tetris.envs.tetris_env import TetrisEnv
| 24.5
| 48
| 0.877551
| 8
| 49
| 5.125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
373f9362c5c11db0defb122a3f1b5ba744ed13e2
| 2,079
|
py
|
Python
|
usaspending_api/search/tests/test_spending_over_time.py
|
COEJKnight/five
|
0777303f4bee60a10afaccbd0f5c56d880ad0059
|
[
"CC0-1.0"
] | null | null | null |
usaspending_api/search/tests/test_spending_over_time.py
|
COEJKnight/five
|
0777303f4bee60a10afaccbd0f5c56d880ad0059
|
[
"CC0-1.0"
] | 3
|
2020-02-12T01:16:46.000Z
|
2021-06-10T20:36:57.000Z
|
usaspending_api/search/tests/test_spending_over_time.py
|
COEJKnight/five
|
0777303f4bee60a10afaccbd0f5c56d880ad0059
|
[
"CC0-1.0"
] | null | null | null |
import json

import pytest

from rest_framework import status
from usaspending_api.search.tests.test_mock_data_search import all_filters


@pytest.mark.skip
@pytest.mark.django_db
def test_spending_over_time_success(client):
    # test for needed filters
    resp = client.post(
        '/api/v2/search/spending_over_time',
        content_type='application/json',
        data=json.dumps({
            "group": "fiscal_year",
            "filters": {
                "keyword": "test"
            }
        }))
    assert resp.status_code == status.HTTP_200_OK

    # test all filters
    resp = client.post(
        '/api/v2/search/spending_over_time',
        content_type='application/json',
        data=json.dumps({
            "group": "quarter",
            "filters": all_filters()
        }))
    assert resp.status_code == status.HTTP_200_OK


@pytest.mark.skip
@pytest.mark.django_db
def test_spending_over_time_failure(client):
    """Verify error on bad autocomplete request for budget function."""
    resp = client.post(
        '/api/v2/search/spending_over_time/',
        content_type='application/json',
        data=json.dumps({'group': 'fiscal_year'}))
    assert resp.status_code == status.HTTP_400_BAD_REQUEST


@pytest.mark.django_db
def test_spending_over_time_subawards_success(client):
    resp = client.post(
        '/api/v2/search/spending_over_time',
        content_type='application/json',
        data=json.dumps({
            "group": "quarter",
            "filters": all_filters(),
            "subawards": True
        }))
    assert resp.status_code == status.HTTP_200_OK


@pytest.mark.django_db
def test_spending_over_time_subawards_failure(client):
    """Verify error on bad autocomplete request for budget function."""
    resp = client.post(
        '/api/v2/search/spending_over_time',
        content_type='application/json',
        data=json.dumps({
            "group": "quarter",
            "filters": all_filters(),
            "subawards": "string"
        }))
    assert resp.status_code == status.HTTP_400_BAD_REQUEST
| 27.72
| 74
| 0.641174
| 248
| 2,079
| 5.112903
| 0.233871
| 0.085174
| 0.113565
| 0.067035
| 0.851735
| 0.851735
| 0.851735
| 0.851735
| 0.824132
| 0.756309
| 0
| 0.012682
| 0.241462
| 2,079
| 74
| 75
| 28.094595
| 0.791376
| 0.079365
| 0
| 0.690909
| 0
| 0
| 0.198212
| 0.087277
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.072727
| false
| 0
| 0.072727
| 0
| 0.145455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
377a9608b41b3c81eb1b8b23aaddbca00d411687
| 168
|
py
|
Python
|
spydrnet_physical/ir/tests/test_pin.py
|
talashilkarraj/spydrnet-physical
|
d13bcbb0feef7d5c93aa60af4a916f837128a5ad
|
[
"BSD-3-Clause"
] | 3
|
2021-11-05T18:25:21.000Z
|
2022-03-02T22:03:02.000Z
|
spydrnet_physical/ir/tests/test_pin.py
|
talashilkarraj/spydrnet-physical
|
d13bcbb0feef7d5c93aa60af4a916f837128a5ad
|
[
"BSD-3-Clause"
] | null | null | null |
spydrnet_physical/ir/tests/test_pin.py
|
talashilkarraj/spydrnet-physical
|
d13bcbb0feef7d5c93aa60af4a916f837128a5ad
|
[
"BSD-3-Clause"
] | 2
|
2022-01-10T14:27:59.000Z
|
2022-03-13T08:21:33.000Z
|
import unittest

from spydrnet.ir import FirstClassElement
from spydrnet.ir import Pin


class TestPin(unittest.TestCase):
    def setUp(self):
        self.pin = Pin()
| 18.666667
| 41
| 0.738095
| 22
| 168
| 5.636364
| 0.590909
| 0.193548
| 0.225806
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184524
| 168
| 9
| 42
| 18.666667
| 0.905109
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
378853ca353968e0328e2efced38b948aecff575
| 45
|
py
|
Python
|
cfgdir/__init__.py
|
rstms/cfgdir
|
10ec25363e049807b4b68a935c81718893a6f0fe
|
[
"MIT"
] | null | null | null |
cfgdir/__init__.py
|
rstms/cfgdir
|
10ec25363e049807b4b68a935c81718893a6f0fe
|
[
"MIT"
] | 1
|
2020-02-14T15:39:23.000Z
|
2020-02-14T15:39:23.000Z
|
cfgdir/__init__.py
|
rstms/cfgdir
|
10ec25363e049807b4b68a935c81718893a6f0fe
|
[
"MIT"
] | null | null | null |
from .cfgdir import *
from .version import *
| 15
| 22
| 0.733333
| 6
| 45
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 23
| 22.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
37a717d2eb9eff7a7e90bbf36628f38935b02c7e
| 116
|
py
|
Python
|
plugin/src/test/resources/inspections/ReplaceNotEqOperator.py
|
consulo/consulo-python
|
586c3eaee3f9c2cc87fb088dc81fb12ffa4b3a9d
|
[
"Apache-2.0"
] | null | null | null |
plugin/src/test/resources/inspections/ReplaceNotEqOperator.py
|
consulo/consulo-python
|
586c3eaee3f9c2cc87fb088dc81fb12ffa4b3a9d
|
[
"Apache-2.0"
] | 11
|
2017-02-27T22:35:32.000Z
|
2021-12-24T08:07:40.000Z
|
plugin/src/test/resources/inspections/ReplaceNotEqOperator.py
|
consulo/consulo-python
|
586c3eaee3f9c2cc87fb088dc81fb12ffa4b3a9d
|
[
"Apache-2.0"
] | null | null | null |
print(<warning descr="Python version 3.0, 3.1, 3.2, 3.3 do not support <>, use != instead.">a <> <caret>b</warning>)
| 116
| 116
| 0.637931
| 22
| 116
| 3.363636
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079208
| 0.12931
| 116
| 1
| 116
| 116
| 0.653465
| 0
| 0
| 0
| 0
| 1
| 0.581197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
806615937cfcc3f95ae6966325648ef6a32d1791
| 40
|
py
|
Python
|
nested_cv/__init__.py
|
yasutakakuniyoshi/Nested-Cross-Validation
|
d8fc062e48182b4b4d2d67887830c93d366e3826
|
[
"MIT"
] | 51
|
2019-08-06T11:14:34.000Z
|
2022-03-01T18:27:06.000Z
|
nested_cv/__init__.py
|
yasutakakuniyoshi/Nested-Cross-Validation
|
d8fc062e48182b4b4d2d67887830c93d366e3826
|
[
"MIT"
] | 14
|
2019-05-30T17:22:22.000Z
|
2021-07-06T14:21:43.000Z
|
nested_cv/__init__.py
|
yasutakakuniyoshi/Nested-Cross-Validation
|
d8fc062e48182b4b4d2d67887830c93d366e3826
|
[
"MIT"
] | 20
|
2019-05-17T19:30:40.000Z
|
2022-03-12T14:20:15.000Z
|
from nested_cv.nested_cv import NestedCV
| 40
| 40
| 0.9
| 7
| 40
| 4.857143
| 0.714286
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
80d0b36a12d7047a728abc2ca8d5c1e4d9627a8f
| 10,774
|
py
|
Python
|
tests/utils/test_exporter.py
|
mgasner/poetry
|
44221689e05feb0cc93c231096334f8eefbf86fc
|
[
"MIT"
] | null | null | null |
tests/utils/test_exporter.py
|
mgasner/poetry
|
44221689e05feb0cc93c231096334f8eefbf86fc
|
[
"MIT"
] | null | null | null |
tests/utils/test_exporter.py
|
mgasner/poetry
|
44221689e05feb0cc93c231096334f8eefbf86fc
|
[
"MIT"
] | null | null | null |
import pytest

from poetry.packages import Locker as BaseLocker
from poetry.utils._compat import Path
from poetry.utils.exporter import Exporter


class Locker(BaseLocker):
    def __init__(self):
        self._locked = True
        self._content_hash = self._get_content_hash()

    def locked(self, is_locked=True):
        self._locked = is_locked
        return self

    def mock_lock_data(self, data):
        self._lock_data = data

    def is_locked(self):
        return self._locked

    def is_fresh(self):
        return True

    def _get_content_hash(self):
        return "123456789"


@pytest.fixture()
def locker():
    return Locker()


def test_exporter_can_export_requirements_txt_with_standard_packages(tmp_dir, locker):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": [], "bar": []},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir))

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
bar==4.5.6
foo==1.2.3
"""

    assert expected == content


def test_exporter_can_export_requirements_txt_with_standard_packages_and_hashes(
    tmp_dir, locker
):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": ["12345"], "bar": ["67890"]},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir))

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
bar==4.5.6 \\
--hash=sha256:67890
foo==1.2.3 \\
--hash=sha256:12345
"""

    assert expected == content


def test_exporter_can_export_requirements_txt_with_standard_packages_and_hashes_disabled(
    tmp_dir, locker
):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": ["12345"], "bar": ["67890"]},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir), with_hashes=False)

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
bar==4.5.6
foo==1.2.3
"""

    assert expected == content


def test_exporter_exports_requirements_txt_without_dev_packages_by_default(
    tmp_dir, locker
):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "dev",
                    "optional": False,
                    "python-versions": "*",
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": ["12345"], "bar": ["67890"]},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir))

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
foo==1.2.3 \\
--hash=sha256:12345
"""

    assert expected == content


def test_exporter_exports_requirements_txt_with_dev_packages_if_opted_in(
    tmp_dir, locker
):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "dev",
                    "optional": False,
                    "python-versions": "*",
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": ["12345"], "bar": ["67890"]},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir), dev=True)

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
bar==4.5.6 \\
--hash=sha256:67890
foo==1.2.3 \\
--hash=sha256:12345
"""

    assert expected == content


def test_exporter_can_export_requirements_txt_with_git_packages(tmp_dir, locker):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                    "source": {
                        "type": "git",
                        "url": "https://github.com/foo/foo.git",
                        "reference": "123456",
                    },
                }
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": []},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir))

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
-e git+https://github.com/foo/foo.git@123456#egg=foo
"""

    assert expected == content


def test_exporter_can_export_requirements_txt_with_directory_packages(tmp_dir, locker):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                    "source": {"type": "directory", "url": "../foo", "reference": ""},
                }
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": []},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir))

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
-e ../foo
"""

    assert expected == content


def test_exporter_can_export_requirements_txt_with_file_packages(tmp_dir, locker):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                    "source": {"type": "file", "url": "../foo.tar.gz", "reference": ""},
                }
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": []},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir))

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
-e ../foo.tar.gz
"""

    assert expected == content


def test_exporter_exports_requirements_txt_with_legacy_packages(tmp_dir, locker):
    locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "dev",
                    "optional": False,
                    "python-versions": "*",
                    "source": {
                        "type": "legacy",
                        "url": "https://example.com/simple/",
                        "reference": "",
                    },
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": ["12345"], "bar": ["67890"]},
            },
        }
    )
    exporter = Exporter(locker)

    exporter.export("requirements.txt", Path(tmp_dir), dev=True)

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
bar==4.5.6 \\
--index-url https://example.com/simple/ \\
--hash=sha256:67890
foo==1.2.3 \\
--hash=sha256:12345
"""

    assert expected == content
| 26.536946
| 89
| 0.428624
| 913
| 10,774
| 4.894852
| 0.106243
| 0.090624
| 0.040277
| 0.090624
| 0.868427
| 0.857686
| 0.845156
| 0.845156
| 0.845156
| 0.838219
| 0
| 0.046907
| 0.41628
| 10,774
| 405
| 90
| 26.602469
| 0.663699
| 0
| 0
| 0.667647
| 0
| 0.002941
| 0.225357
| 0
| 0
| 0
| 0
| 0
| 0.026471
| 1
| 0.047059
| false
| 0
| 0.011765
| 0.011765
| 0.076471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
03a94311a2a0fc1a0b8ed9b85098e0540e48abf0
| 97
|
py
|
Python
|
lib/solutions/hello.py
|
CX-Checkout/beli01
|
344e52da22806a0989ea1b8ab446f87c37d7bfb5
|
[
"Apache-2.0"
] | null | null | null |
lib/solutions/hello.py
|
CX-Checkout/beli01
|
344e52da22806a0989ea1b8ab446f87c37d7bfb5
|
[
"Apache-2.0"
] | null | null | null |
lib/solutions/hello.py
|
CX-Checkout/beli01
|
344e52da22806a0989ea1b8ab446f87c37d7bfb5
|
[
"Apache-2.0"
] | null | null | null |
# noinspection PyUnusedLocal
def hello(friend_name):
    return 'Hello, {}!'.format(friend_name)
| 24.25
| 43
| 0.742268
| 11
| 97
| 6.363636
| 0.727273
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 97
| 3
| 44
| 32.333333
| 0.823529
| 0.268041
| 0
| 0
| 0
| 0
| 0.144928
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
03e8856e780d1a692d5d83333e9f8fb0361c3b4a
| 3,989
|
py
|
Python
|
data/operator/bbox/utility/image.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 143
|
2021-12-03T02:33:36.000Z
|
2022-03-29T00:01:48.000Z
|
data/operator/bbox/utility/image.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 33
|
2021-12-03T10:32:05.000Z
|
2022-03-31T02:13:55.000Z
|
data/operator/bbox/utility/image.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 24
|
2021-12-04T06:46:42.000Z
|
2022-03-30T07:57:47.000Z
|
from data.types.pixel_coordinate_system import PixelCoordinateSystem
from data.types.bounding_box_format import BoundingBoxFormat
from data.types.bounding_box_coordinate_system import BoundingBoxCoordinateSystem
from data.types.pixel_definition import PixelDefinition


def _common_routine(bounding_box, image_size, bounding_box_format: BoundingBoxFormat,
                    pixel_coordinate_system: PixelCoordinateSystem,
                    bounding_box_coordinate_system: BoundingBoxCoordinateSystem,
                    pixel_definition, rasterized_xyxy_func, rasterized_polygon_func,
                    spatial_xyxy_func, spatial_polygon_func):
    if bounding_box_coordinate_system == BoundingBoxCoordinateSystem.Rasterized:
        if bounding_box_format == BoundingBoxFormat.XYWH or bounding_box_format == BoundingBoxFormat.XYXY:
            if bounding_box_format == BoundingBoxFormat.XYWH:
                from data.operator.bbox.rasterized.xywh2xyxy import bbox_xywh2xyxy
                bounding_box = bbox_xywh2xyxy(bounding_box)
            return rasterized_xyxy_func(bounding_box, image_size)
        else:
            return rasterized_polygon_func(bounding_box, image_size)
    else:
        if bounding_box_format == BoundingBoxFormat.XYWH or bounding_box_format == BoundingBoxFormat.XYXY:
            if bounding_box_format == BoundingBoxFormat.XYWH:
                from data.operator.bbox.rasterized.xywh2xyxy import bbox_xywh2xyxy
                bounding_box = bbox_xywh2xyxy(bounding_box)
            return spatial_xyxy_func(bounding_box, image_size, pixel_coordinate_system, pixel_definition)
        else:
            return spatial_polygon_func(bounding_box, image_size, pixel_coordinate_system, pixel_definition)


def bounding_box_is_intersect_with_image(bounding_box, image_size, bounding_box_format: BoundingBoxFormat,
                                         pixel_coordinate_system: PixelCoordinateSystem,
                                         bounding_box_coordinate_system: BoundingBoxCoordinateSystem,
                                         pixel_definition: PixelDefinition = PixelDefinition.Point):
    import data.operator.bbox.rasterized.utility.image
    import data.operator.bbox.spatial.utility.image
    return _common_routine(bounding_box, image_size, bounding_box_format, pixel_coordinate_system,
                           bounding_box_coordinate_system, pixel_definition,
                           data.operator.bbox.rasterized.utility.image.bounding_box_is_intersect_with_image,
                           data.operator.bbox.rasterized.utility.image.bounding_box_is_intersect_with_image_polygon,
                           data.operator.bbox.spatial.utility.image.bounding_box_is_intersect_with_image,
                           data.operator.bbox.spatial.utility.image.bounding_box_is_intersect_with_image_polygon)


def bounding_box_fit_in_image_boundary(bounding_box, image_size, bounding_box_format: BoundingBoxFormat,
                                       pixel_coordinate_system: PixelCoordinateSystem,
                                       bounding_box_coordinate_system: BoundingBoxCoordinateSystem,
                                       pixel_definition: PixelDefinition = PixelDefinition.Point):
    import data.operator.bbox.rasterized.utility.image
    import data.operator.bbox.spatial.utility.image
    return _common_routine(bounding_box, image_size, bounding_box_format, pixel_coordinate_system,
                           bounding_box_coordinate_system, pixel_definition,
                           data.operator.bbox.rasterized.utility.image.bounding_box_fit_in_image_boundary,
                           data.operator.bbox.rasterized.utility.image.bounding_box_fit_in_image_boundary_polygon,
                           data.operator.bbox.spatial.utility.image.bounding_box_fit_in_image_boundary,
                           data.operator.bbox.spatial.utility.image.bounding_box_fit_in_image_boundary_polygon)
| 71.232143
| 116
| 0.713462
| 408
| 3,989
| 6.556373
| 0.10049
| 0.17271
| 0.083738
| 0.06729
| 0.876262
| 0.838131
| 0.786542
| 0.786542
| 0.786542
| 0.771215
| 0
| 0.00197
| 0.2364
| 3,989
| 55
| 117
| 72.527273
| 0.876231
| 0
| 0
| 0.55102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061224
| false
| 0
| 0.204082
| 0
| 0.387755
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2070be2a7c05388f722d2e645f5b719b52990674
| 75
|
py
|
Python
|
{{cookiecutter.project_name}}/configs/dev_config.py
|
Colaplusice/cookiecutter-flask-devops
|
66eacf25ca679525d9f7ac351027cb708d003618
|
[
"MIT"
] | 5
|
2019-04-24T14:09:06.000Z
|
2020-05-03T22:44:59.000Z
|
{{cookiecutter.project_name}}/configs/dev_config.py
|
Colaplusice/cookiecutter-flask-devops
|
66eacf25ca679525d9f7ac351027cb708d003618
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_name}}/configs/dev_config.py
|
Colaplusice/cookiecutter-flask-devops
|
66eacf25ca679525d9f7ac351027cb708d003618
|
[
"MIT"
] | null | null | null |
from configs.base_config import Config


class DevConfig(Config):
    pass
| 12.5
| 38
| 0.773333
| 10
| 75
| 5.7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 75
| 5
| 39
| 15
| 0.919355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
2094791ecdc609a12b4e639dcde735c4fea2be22
| 131
|
py
|
Python
|
pandas/tseries/api.py
|
CJL89/pandas
|
6210077d32a9e9675526ea896e6d1f9189629d4a
|
[
"BSD-3-Clause"
] | 28,899
|
2016-10-13T03:32:12.000Z
|
2022-03-31T21:39:05.000Z
|
venv/lib/python3.7/site-packages/pandas/tseries/api.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 31,004
|
2016-10-12T23:22:27.000Z
|
2022-03-31T23:17:38.000Z
|
venv/lib/python3.7/site-packages/pandas/tseries/api.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 15,149
|
2016-10-13T03:21:31.000Z
|
2022-03-31T18:46:47.000Z
|
"""
Timeseries API
"""
# flake8: noqa
from pandas.tseries.frequencies import infer_freq
import pandas.tseries.offsets as offsets
| 14.555556
| 49
| 0.778626
| 17
| 131
| 5.941176
| 0.764706
| 0.257426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008772
| 0.129771
| 131
| 8
| 50
| 16.375
| 0.877193
| 0.21374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
209ef9c71a6358600e8c4bb616f3a3b7cc68a38c
| 29
|
py
|
Python
|
ffprobe/__init__.py
|
ayoy/ffprobe-python
|
a5bcb94198b22d966f0829fdf635a94bdba94621
|
[
"MIT"
] | 40
|
2019-11-13T20:58:44.000Z
|
2022-03-09T01:08:23.000Z
|
ffprobe/__init__.py
|
ayoy/ffprobe-python
|
a5bcb94198b22d966f0829fdf635a94bdba94621
|
[
"MIT"
] | 18
|
2019-12-03T06:50:53.000Z
|
2021-11-28T23:24:32.000Z
|
ffprobe/__init__.py
|
ayoy/ffprobe-python
|
a5bcb94198b22d966f0829fdf635a94bdba94621
|
[
"MIT"
] | 39
|
2019-11-13T20:58:49.000Z
|
2022-03-15T01:10:55.000Z
|
from .ffprobe import FFProbe
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
20bd9e85eb3a90e4bc40bde3f8650e10e062f3a5
| 181
|
py
|
Python
|
test/test_wordpress_stats.py
|
JulienLeonard/socialstats
|
944e3e4ceba2d977537934299e0c91abd5375d53
|
[
"MIT"
] | null | null | null |
test/test_wordpress_stats.py
|
JulienLeonard/socialstats
|
944e3e4ceba2d977537934299e0c91abd5375d53
|
[
"MIT"
] | null | null | null |
test/test_wordpress_stats.py
|
JulienLeonard/socialstats
|
944e3e4ceba2d977537934299e0c91abd5375d53
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(0, './../lib')
import wordpress_stats
from mysocialids import *
wordpress_stats.wordpress_dump(wordpress_blogid(),"wordpress_stats.xml")
| 15.083333
| 73
| 0.723757
| 22
| 181
| 5.727273
| 0.590909
| 0.333333
| 0.31746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006536
| 0.154696
| 181
| 11
| 74
| 16.454545
| 0.816993
| 0
| 0
| 0
| 0
| 0
| 0.162651
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b31a1b920b7d2ae286d87e618add23ad0f9280c1
| 117
|
py
|
Python
|
builder_engine/custom_components/__init__.py
|
DiablosWhisper/machine_learning_toolpack
|
3f4b82b549a3d70b95fc7a2c01959cd99d2b88b9
|
[
"Apache-2.0"
] | null | null | null |
builder_engine/custom_components/__init__.py
|
DiablosWhisper/machine_learning_toolpack
|
3f4b82b549a3d70b95fc7a2c01959cd99d2b88b9
|
[
"Apache-2.0"
] | null | null | null |
builder_engine/custom_components/__init__.py
|
DiablosWhisper/machine_learning_toolpack
|
3f4b82b549a3d70b95fc7a2c01959cd99d2b88b9
|
[
"Apache-2.0"
] | null | null | null |
from .optimizers import *
from .callbacks import *
from .metrics import *
from .losses import *
from .layers import *
| 23.4
| 25
| 0.752137
| 15
| 117
| 5.866667
| 0.466667
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162393
| 117
| 5
| 26
| 23.4
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b334c7e194d68ee212ba86fea643181e81f4009f
| 87
|
py
|
Python
|
molecool/io/__init__.py
|
aatishpr/molecool
|
73a52479b41ae2847b32707b2c32ca4e23ca83c4
|
[
"BSD-3-Clause"
] | null | null | null |
molecool/io/__init__.py
|
aatishpr/molecool
|
73a52479b41ae2847b32707b2c32ca4e23ca83c4
|
[
"BSD-3-Clause"
] | null | null | null |
molecool/io/__init__.py
|
aatishpr/molecool
|
73a52479b41ae2847b32707b2c32ca4e23ca83c4
|
[
"BSD-3-Clause"
] | null | null | null |
"""
IO sub-package
"""
from .pdb import open_pdb
from .xyz import open_xyz, write_xyz
| 12.428571
| 36
| 0.724138
| 15
| 87
| 4
| 0.6
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16092
| 87
| 6
| 37
| 14.5
| 0.821918
| 0.16092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b35ed5bea16814e787e446a9403854649961f2e5
| 36
|
py
|
Python
|
addons14/project_timeline/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/project_timeline/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/project_timeline/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import test_project_timeline
| 18
| 35
| 0.861111
| 5
| 36
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b36fe4046b8f9fa34681d3615fd085e25d55dae4
| 71
|
py
|
Python
|
viterbi_trellis/__init__.py
|
jesusfbes/viterbi_trellis
|
1743128116674c949c8f0ddf6f76713ed76a52bc
|
[
"MIT"
] | 6
|
2018-08-04T05:05:42.000Z
|
2021-06-15T11:59:41.000Z
|
viterbi_trellis/__init__.py
|
jesusfbes/viterbi_trellis
|
1743128116674c949c8f0ddf6f76713ed76a52bc
|
[
"MIT"
] | null | null | null |
viterbi_trellis/__init__.py
|
jesusfbes/viterbi_trellis
|
1743128116674c949c8f0ddf6f76713ed76a52bc
|
[
"MIT"
] | 2
|
2018-10-22T13:08:57.000Z
|
2019-07-12T09:12:21.000Z
|
from .viterbi import ViterbiTrellis
from .viterbi_exceptions import *
| 17.75
| 35
| 0.830986
| 8
| 71
| 7.25
| 0.625
| 0.37931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 71
| 3
| 36
| 23.666667
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2febac12ea78752566e578a3e8ef8e4b192bd2e9
| 66
|
py
|
Python
|
network_serializer/__init__.py
|
bubblemans/network-serializer
|
329fb725d3cc4c6a7ee3ef7b6ad4696379cd7349
|
[
"MIT"
] | null | null | null |
network_serializer/__init__.py
|
bubblemans/network-serializer
|
329fb725d3cc4c6a7ee3ef7b6ad4696379cd7349
|
[
"MIT"
] | null | null | null |
network_serializer/__init__.py
|
bubblemans/network-serializer
|
329fb725d3cc4c6a7ee3ef7b6ad4696379cd7349
|
[
"MIT"
] | null | null | null |
from network_serializer.network_serializer import Encoder, Decoder
| 66
| 66
| 0.909091
| 8
| 66
| 7.25
| 0.75
| 0.586207
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 66
| 1
| 66
| 66
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2ffde592ff53d8bee57d45572316b20d33df1388
| 1,809
|
py
|
Python
|
EfficientDet/helper_function.py
|
shenghh2015/cell_detection
|
872ea94bf00714b36e617b214e8b13596b2fe61a
|
[
"Apache-2.0"
] | 1
|
2021-02-20T05:44:27.000Z
|
2021-02-20T05:44:27.000Z
|
EfficientDet/helper_function.py
|
shenghh2015/cell_detection
|
872ea94bf00714b36e617b214e8b13596b2fe61a
|
[
"Apache-2.0"
] | null | null | null |
EfficientDet/helper_function.py
|
shenghh2015/cell_detection
|
872ea94bf00714b36e617b214e8b13596b2fe61a
|
[
"Apache-2.0"
] | null | null | null |
import cv2


def draw_pr_boxes(image, boxes, scores, labels, colors, classes):
    for b, l, s in zip(boxes, labels, scores):
        class_id = int(l)
        class_name = classes[class_id]
        xmin, ymin, xmax, ymax = list(map(int, b))
        score = '{:.2f}'.format(s)
        color = colors[class_id]
        label = ':'.join([class_name, score])
        ret, baseline = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 1, 2)
        cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 2)
        # cv2.rectangle(image, (xmin, ymax - ret[1] - baseline), (xmin + ret[0], ymax), color, -1)
        # cv2.putText(image, label, (xmin, ymax - baseline), cv2.FONT_HERSHEY_PLAIN, 1, (0, 0, 0), 2)
        cv2.rectangle(image, (xmin, ymax), (xmin + baseline + ret[0], ymax + baseline + ret[1]), color, -1)
        cv2.putText(image, label, (xmin, ymax + baseline//2 + ret[1]), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)


def draw_gt_boxes(image, boxes, labels, colors, classes):
    for b, l in zip(boxes, labels):
        class_id = int(l)
        class_name = classes[class_id]
        xmin, ymin, xmax, ymax = list(map(int, b))
        color = colors[class_id]
        label = ':'.join([class_name])
        ret, baseline = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 1, 2)
        cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 2)
        # cv2.rectangle(image, (xmin, ymax - ret[1] - baseline), (xmin + ret[0], ymax), color, -1)
        # cv2.putText(image, label, (xmin, ymax - baseline), cv2.FONT_HERSHEY_PLAIN, 1, (0, 0, 0), 2)
        cv2.rectangle(image, (xmin, ymax), (xmin + baseline + ret[0], ymax + baseline + ret[1]), color, -1)
        cv2.putText(image, label, (xmin, ymax + baseline//2 + ret[1]), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
| 50.25
| 119
| 0.594251
| 260
| 1,809
| 4.034615
| 0.180769
| 0.06101
| 0.080076
| 0.102955
| 0.884652
| 0.884652
| 0.838894
| 0.838894
| 0.770257
| 0.770257
| 0
| 0.053507
| 0.235489
| 1,809
| 35
| 120
| 51.685714
| 0.704989
| 0.199558
| 0
| 0.666667
| 0
| 0
| 0.005544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6414d6640ecd5c21667a4c1447f370b168c32d83
| 232
|
py
|
Python
|
Server/Python/src/dbs/dao/MySQL/Site/ListBlockSite.py
|
vkuznet/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 8
|
2015-08-14T04:01:32.000Z
|
2021-06-03T00:56:42.000Z
|
Server/Python/src/dbs/dao/MySQL/Site/ListBlockSite.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 162
|
2015-01-07T21:34:47.000Z
|
2021-10-13T09:42:41.000Z
|
Server/Python/src/dbs/dao/MySQL/Site/ListBlockSite.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 16
|
2015-01-22T15:27:29.000Z
|
2021-04-28T09:23:28.000Z
|
#!/usr/bin/env python
"""
This module provides Site.ListBlockSite data access object.
"""
from dbs.dao.Oracle.Site.ListBlockSite import ListBlockSite as OraSiteListBlockSite
class ListBlockSite(OraSiteListBlockSite):
    pass
| 23.2
| 83
| 0.784483
| 26
| 232
| 7
| 0.807692
| 0.186813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12931
| 232
| 9
| 84
| 25.777778
| 0.90099
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
928102213c48d797f1368cbd20affe68096ed0e5
| 23
|
py
|
Python
|
PcapViz/pcapviz/__init__.py
|
Jasobeczek/RPi-packet-analyser
|
5ae688a5a172cd993a5227fc6177fb8110cb031a
|
[
"MIT"
] | null | null | null |
PcapViz/pcapviz/__init__.py
|
Jasobeczek/RPi-packet-analyser
|
5ae688a5a172cd993a5227fc6177fb8110cb031a
|
[
"MIT"
] | null | null | null |
PcapViz/pcapviz/__init__.py
|
Jasobeczek/RPi-packet-analyser
|
5ae688a5a172cd993a5227fc6177fb8110cb031a
|
[
"MIT"
] | null | null | null |
from scapy.all import *
| 23
| 23
| 0.782609
| 4
| 23
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2bad28b1b5e6a27200b2c63f2b1bcb7504a7fc44
| 108
|
py
|
Python
|
office365/teams/chatMessageAttachment.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | 544
|
2016-08-04T17:10:16.000Z
|
2022-03-31T07:17:20.000Z
|
office365/teams/chatMessageAttachment.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | 438
|
2016-10-11T12:24:22.000Z
|
2022-03-31T19:30:35.000Z
|
office365/teams/chatMessageAttachment.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | 202
|
2016-08-22T19:29:40.000Z
|
2022-03-30T20:26:15.000Z
|
from office365.runtime.client_value import ClientValue
class ChatMessageAttachment(ClientValue):
    pass
| 18
| 54
| 0.833333
| 11
| 108
| 8.090909
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031579
| 0.12037
| 108
| 5
| 55
| 21.6
| 0.905263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
2bc49c5c063352a5e4a530d121e88d4f4da9b301
| 82,088
|
py
|
Python
|
executions/atis/lambda_calculus/query.py
|
JasperGuo/MeaningRepresentationBenchmark
|
b61e8ed68fdbd934c195fa968445540bfa897f2f
|
[
"MIT"
] | 9
|
2020-11-11T08:54:05.000Z
|
2022-03-22T11:16:03.000Z
|
executions/atis/lambda_calculus/query.py
|
JasperGuo/MeaningRepresentationBenchmark
|
b61e8ed68fdbd934c195fa968445540bfa897f2f
|
[
"MIT"
] | null | null | null |
executions/atis/lambda_calculus/query.py
|
JasperGuo/MeaningRepresentationBenchmark
|
b61e8ed68fdbd934c195fa968445540bfa897f2f
|
[
"MIT"
] | 2
|
2021-01-14T08:25:25.000Z
|
2021-06-08T21:41:32.000Z
|
# coding=utf8
import sys
sys.path += ['..']
import re
import mysql.connector
from pprint import pprint
db = None
def normalize(sql):
s = re.sub(' +', ' ', sql)
s = s.replace('MAX (', 'MAX(')
s = s.replace('MIN (', 'MIN(')
s = s.replace('AVG (', 'AVG(')
s = s.replace('COUNT (', 'COUNT(')
s = s.replace('count (', 'count(')
s = s.replace('SUM (', 'SUM(')
s = s.replace('< =', '<=')
s = s.replace('> =', '>=')
return s
def format_headers(header):
s = header.replace("( ", "(").replace(" )", ")").strip().lower()
return s
def get_connection():
global db
if db and db.is_connected():
return db
else:
db = mysql.connector.connect(
host="localhost",
user="root",
passwd="123456",
database="atis",
auth_plugin='mysql_native_password'
)
return db
def close_connection():
if db is not None and db.is_connected():
db.close()
def get_result(sql):
db = get_connection()
_sql = normalize(sql)
cursor = db.cursor()
cursor.execute(_sql)
# print(cursor.description)
headers = cursor.description
results = cursor.fetchall()
formatted_results = list()
for x in results:
r = dict()
for value, header in zip(x, headers):
r[format_headers(header[0])] = value
formatted_results.append(r)
# pprint(formatted_results)
return formatted_results
ENTITY_TYPE_MAP = {
"ac": "aircraft_code",
"al": "airline_code",
"ci": "city_name",
"ap": "airport_code",
"fn": "flight_number",
"cl": "class_description",
"ti": "time",
"pd": "day_period",
"mf": "manufacturer",
"mn": "month",
"da": "day",
"i": "integer",
"yr": "year",
"dn": "day_number",
"do": "dollar",
"hr": "hour",
"rc": "meal_code",
"st": "state_name",
"fb": "fare_basis_code",
"me": "meal_description",
"bat": "basis_type"
}
# Entity Set
def get_all_flight_ids():
sql = "SELECT distinct flight_id FROM flight"
return get_result(sql)
def get_all_city_names():
sql = "SELECT distinct city_name FROM city"
return get_result(sql)
def get_all_airline_codes():
sql = "SELECT distinct airline_code FROM airline"
return get_result(sql)
def get_all_aircraft_codes():
sql = "SELECT distinct aircraft_code FROM aircraft"
return get_result(sql)
def get_all_airport_codes():
sql = "SELECT distinct airport_code FROM airport"
return get_result(sql)
def get_all_booking_class_descriptions():
sql = "SELECT distinct class_description FROM class_of_service"
return get_result(sql)
def get_all_transport_types():
sql = "SELECT distinct transport_type FROM ground_service"
return get_result(sql)
def get_all_meal_codes():
sql = "SELECT distinct meal_code FROM food_service"
return get_result(sql)
def get_all_meal_descriptions():
sql = "SELECT distinct meal_description FROM food_service"
return get_result(sql)
def get_all_fare_basis_codes():
sql = "SELECT distinct fare_basis_code FROM fare_basis"
return get_result(sql)
def get_all_time_zone_codes():
sql = "SELECT distinct time_zone_code FROM time_zone"
return get_result(sql)
def get_all_one_direction_cost():
sql = "SELECT distinct one_direction_cost FROM fare"
return get_result(sql)
def get_all_capacity():
sql = "SELECT distinct capacity FROM aircraft"
return get_result(sql)
def get_all_flight_number():
sql = "SELECT distinct flight_number FROM flight"
return get_result(sql)
def get_all_departure_time():
sql = "SELECT distinct departure_time FROM flight"
return get_result(sql)
def get_all_stop_arrival_time():
sql = "SELECT distinct arrival_time FROM flight_stop"
return get_result(sql)
def process_entity_string(entity, default=""):
if isinstance(entity, str):
if ":_" in entity:
splits = entity.split(":_")
entity_name = splits[0]
entity_type = ENTITY_TYPE_MAP[splits[1]]
else:
entity_type = default
entity_name = entity
if '_' in entity_name:
entity_name = entity_name.replace("_", " ")
elif isinstance(entity, dict):
key = list(entity.keys())[0]
entity_type = key
entity_name = entity[key]
elif isinstance(entity, list) and len(entity) > 0:
# TODO: simply take the first one
key = list(entity[0].keys())[0]
entity_type = key
entity_name = entity[0][key]
else:
raise Exception("Invalid Entity Type %s" % str(entity))
if entity_type == 'city_name':
if entity_name == 'st louis':
entity_name = 'st. louis'
elif entity_name == 'st petersburg':
entity_name = 'st. petersburg'
elif entity_name == 'st paul':
entity_name = 'st. paul'
return entity_name, entity_type
# Entity
def fb(entity):
"""
fare basis
"""
sql = "SELECT DISTINCT fare_basis_1.fare_basis_code FROM fare_basis fare_basis_1 WHERE fare_basis_1.fare_basis_code = '%s'" % (entity)
return get_result(sql)
def rc(entity):
"""
Meal code
"""
sql = "SELECT DISTINCT food_service_1.meal_description FROM food_service food_service_1 WHERE food_service_1.meal_code = '%s'" % (entity)
return get_result(sql)
def dc(entity):
"""
day name
"""
sql = "SELECT DISTINCT days_1.day_name FROM days days_1 WHERE days_1.days_code = '%s'" % (entity)
return get_result(sql)
def al(entity):
"""
airline code
"""
sql = "SELECT DISTINCT airline_1.airline_code FROM airline airline_1 WHERE airline_1.airline_code = '%s'" % (
entity)
return get_result(sql)
def ap(entity):
"""
airport code
"""
sql = "SELECT DISTINCT airport_1.airport_code FROM airport airport_1 WHERE airport_1.airport_code = '%s'" % (entity)
return get_result(sql)
def ac(entity):
"""
aircraft code
"""
sql = "SELECT DISTINCT aircraft_1.aircraft_code FROM aircraft aircraft_1 WHERE aircraft_1.aircraft_code = '%s'" % (entity)
return get_result(sql)
def ci(city_name):
"""
city_name
return city_code
"""
entity_name, _ = process_entity_string(city_name)
sql = "SELECT DISTINCT city_code FROM city WHERE city_name = '%s'" % (
entity_name)
return get_result(sql)
def abbrev(entity):
"""
abbrev of airline_code
"""
entity_name, entity_type = process_entity_string(entity)
sql = "SELECT DISTINCT airline_1.airline_code FROM airline airline_1 WHERE airline_1.airline_name like '%" + entity_name + "%'"
results = get_result(sql)
print(results)
if len(results) == 1:
return results[0]
return results
def capacity(argument):
"""
return airline
"""
if isinstance(argument, str):
entities = [argument]
elif isinstance(argument, list):
entities = argument
else:
assert isinstance(argument, dict)
entities = [argument]
results = list()
flight_number_template = "SELECT aircraft_1.capacity FROM aircraft as aircraft_1 JOIN flight as flight_1 on aircraft_1.aircraft_code = flight_1.aircraft_code_sequence WHERE flight_1.flight_number = %s;"
flight_id_template = "SELECT aircraft_1.capacity FROM aircraft as aircraft_1 JOIN flight as flight_1 on aircraft_1.aircraft_code = flight_1.aircraft_code_sequence WHERE flight_1.flight_id = %s;"
aircraft_code_template = "SELECT DISTINCT aircraft_1.capacity FROM aircraft aircraft_1 WHERE aircraft_1.aircraft_code = '%s'"
for e in entities:
entity_name, entity_type = process_entity_string(e, "aircraft_code")
if entity_type == 'aircraft_code':
sql = aircraft_code_template % entity_name
elif entity_type == 'flight_id':
# flight id
sql = flight_id_template % entity_name
else:
# entity_type == 'flight_number':
sql = flight_number_template % entity_name
results += get_result(sql)
return results
def flight_number(argument):
"""
Return flight number
_flight_number(_argmin((lambda x: _and(_flight(x),_from(x,"boston:_ci"),_to(x,"washington:_ci"))),(lambda x: _departure_time(x))))
"""
if isinstance(argument, str):
entities = [argument]
elif isinstance(argument, list):
entities = argument
else:
assert isinstance(argument, dict)
entities = [argument]
results = list()
sql_template = "SELECT flight_number FROM flight WHERE flight_id = %s"
for e in entities:
entity_name, _ = process_entity_string(e, "flight_id")
sql = sql_template % entity_name
results += get_result(sql)
return results
def get_flight_destination(flight_id):
"""
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT to_airport FROM flight WHERE flight_id = %s" % processed_flight_id
results = get_result(sql)
return results
def get_flight_fare(flight_id):
"""
_fare $1
:entity_type: flight_id
"""
if flight_id is None or (isinstance(flight_id, list) and len(flight_id) == 0):
return None
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = "SELECT fare.one_direction_cost FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON fare.fare_id = flight_fare.fare_id WHERE flight.flight_id = %s" % (processed_flight_id)
results = get_result(sql)
return results
def get_flight_cost(flight_id):
"""
_cost $1
:entity_type: flight_id
"""
if flight_id is None or (isinstance(flight_id, list) and len(flight_id) == 0):
return None
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = "SELECT fare.round_trip_cost FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON fare.fare_id = flight_fare.fare_id WHERE flight.flight_id = %s" % (processed_flight_id)
results = get_result(sql)
return results
def get_booking_class_fare(class_description):
"""
_fare $1
:entity_type: flight_id
"""
processed_class_description, entity_type = process_entity_string(
class_description, "class_description")
sql = "SELECT fare.one_direction_cost FROM fare JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN class_of_service ON fare_basis.booking_class = class_of_service.booking_class WHERE class_of_service.class_description = '%s'" % (
processed_class_description)
results = get_result(sql)
return results
def airline_name(argument):
"""
_airline_name
"""
if isinstance(argument, str):
entities = [argument]
elif isinstance(argument, list):
entities = argument
else:
assert isinstance(argument, dict)
entities = [argument]
sql_tempalte = "SELECT airline_name FROM flight JOIN airline ON flight.airline_code = airline.airline_code WHERE flight.flight_id = %s"
results = list()
for e in entities:
entity_name, entity_type = process_entity_string(e, "aircraft_code")
sql = sql_tempalte % entity_name
results += get_result(sql)
return results
def departure_time(argument):
"""
_departure_time
"""
if argument is None:
return None
if isinstance(argument, str):
entities = [argument]
elif isinstance(argument, list):
entities = argument
else:
assert isinstance(argument, dict)
entities = [argument]
sql_tempalte = "SELECT departure_time FROM flight WHERE flight_id = %s"
results = list()
for e in entities:
entity_name, entity_type = process_entity_string(e, "flight_id")
sql = sql_tempalte % entity_name
results += get_result(sql)
return results
def arrival_time(argument):
"""
_arrival_time
"""
if isinstance(argument, str):
entities = [argument]
elif isinstance(argument, list):
entities = argument
else:
assert isinstance(argument, dict)
entities = [argument]
sql_tempalte = "SELECT arrival_time FROM flight WHERE flight_id = %s"
results = list()
for e in entities:
entity_name, entity_type = process_entity_string(e, "flight_id")
sql = sql_tempalte % entity_name
results += get_result(sql)
return results
def miles_distant(airport_code, city_name):
"""
_miles_distant
:entity_type: (airport_code, city_name)
"""
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
processed_city_name, _ = process_entity_string(city_name, "city_name")
sql = "SELECT airport_service.miles_distant FROM airport_service JOIN city ON city.city_code = airport_service.city_code WHERE city.city_name = '%s' AND airport_service.airport_code = '%s'" % (
processed_city_name, processed_airport_code)
return get_result(sql)
def miles_distant_between_city(city_name_1, city_name_2):
"""
_miles_distant
:entity_type: (city_name, city_name)
"""
processed_city_name_1, _ = process_entity_string(
city_name_1, "city_name")
processed_city_name_2, _ = process_entity_string(
city_name_2, "city_name_2")
sql = "SELECT distinct airport_service.miles_distant FROM airport_service JOIN city ON airport_service.city_code = city.city_code WHERE city.city_name = '%s' AND airport_service.airport_code IN (SELECT T1.airport_code FROM airport_service AS T1 JOIN city AS T2 ON T1.city_code = T2.city_code WHERE T2.city_name = '%s');" % (
processed_city_name_1, processed_city_name_2)
return get_result(sql)
def minimum_connection_time(airport_code):
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT DISTINCT airport_1.minimum_connect_time FROM airport airport_1 WHERE airport_1.airport_code = '%s'" % (processed_airport_code)
return get_result(sql)
def get_number_of_stops(flight_id):
"""
_stops(x)
:entity_type flight_id
"""
if isinstance(flight_id, list) and len(flight_id) == 0:
return list()
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = "SELECT stops FROM flight WHERE flight.flight_id = %s" % (
processed_flight_id)
return get_result(sql)
def time_elapsed(flight_id):
"""
_time_elapsed(x)
:entity_type flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT time_elapsed FROM flight WHERE flight_id = %s" % processed_flight_id
return get_result(sql)
def get_flight_aircraft_code(flight_id):
"""
_aircraft_code $1
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = "SELECT aircraft_code FROM flight JOIN equipment_sequence AS T ON flight.aircraft_code_sequence = T.aircraft_code_sequence WHERE flight.flight_id = %s" % processed_flight_id
return get_result(sql)
def get_flight_airline_code(flight_id):
"""
_airline:_e $1
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = "SELECT airline_code FROM flight WHERE flight.flight_id = %s" % processed_flight_id
return get_result(sql)
def get_flight_booking_class(flight_id):
"""
_booking_class $1
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT class_of_service.class_description FROM flight_fare JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN class_of_service ON fare_basis.booking_class = class_of_service.booking_class WHERE flight_fare.flight_id = %s" % processed_flight_id
return get_result(sql)
def get_flight_meal(flight_id):
"""
_meal $1
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT food_service.meal_description FROM flight JOIN food_service ON flight.meal_code = food_service.meal_code WHERE flight_id = %s" % (
processed_flight_id)
return get_result(sql)
def get_flight_stop_airport(flight_id):
"""
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight_stop.stop_airport FROM flight_stop WHERE flight_stop.flight_id = %s" % (
processed_flight_id)
return get_result(sql)
def get_ground_fare(transport_type):
"""
_ground_fare $1
:entity_type (transport_type)
"""
processed_transport_type, _ = process_entity_string(
transport_type, "transport_type")
sql = "SELECT ground_fare FROM ground_service WHERE transport_type = '%s'" % (
processed_transport_type)
return get_result(sql)
def get_ground_fare_1(city_name, transport_type):
"""
_ground_fare $1
:entity_type (city_name, transport_type)
"""
processed_transport_type, _ = process_entity_string(
transport_type, "transport_type")
processed_city_name, _ = process_entity_string(city_name, "city_name")
sql = "SELECT ground_fare FROM ground_service JOIN city ON ground_service.city_code = city.city_code WHERE city.city_name = '%s' AND transport_type = '%s'" % (
processed_city_name, processed_transport_type)
return get_result(sql)
def get_ground_fare_2(airport_code, transport_type):
"""
_ground_fare $1
:entity_type (airport_code, transport_type)
"""
processed_transport_type, _ = process_entity_string(
transport_type, "transport_type")
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT ground_fare FROM ground_service WHERE airport_code = '%s' AND transport_type = '%s'" % (
processed_airport_code, processed_transport_type)
return get_result(sql)
def get_ground_fare_3(city_name, airport_code, transport_type):
"""
_ground_fare $1
:entity_type (city_name, airport_code, transport_type)
"""
processed_transport_type, _ = process_entity_string(
transport_type, "transport_type")
processed_city_name, _ = process_entity_string(city_name, "city_name")
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT ground_fare FROM ground_service JOIN city ON ground_service.city_code = city.city_code WHERE city.city_name = '%s' AND airport_code = '%s' AND transport_type = '%s'" % (
processed_city_name, processed_airport_code, processed_transport_type)
return get_result(sql)
def get_minutes_distant_1(city_name):
"""
:entity_type (city_name)
"""
processed_city_name, _ = process_entity_string(city_name, "city_name")
sql = "SELECT minutes_distant FROM airport_service JOIN city ON airport_service.city_code = city.city_code WHERE city.city_name = '%s'" % (
processed_city_name)
return get_result(sql)
def get_minutes_distant_2(airport_code):
"""
:entity_type (airport_code)
"""
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT minutes_distant FROM airport_service WHERE airport_code = '%s'" % (
processed_airport_code)
return get_result(sql)
def get_minutes_distant_3(city_name, airport_code):
"""
:entity_type (city_name, airport_code)
"""
processed_city_name, _ = process_entity_string(city_name, "city_name")
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT minutes_distant FROM airport_service JOIN city ON airport_service.city_code = city.city_code WHERE city.city_name = '%s' AND airport_code = '%s'" % (
processed_city_name, processed_airport_code)
return get_result(sql)
def get_flight_stop_arrival_time(flight_id):
"""
_stop_arrival_time $0
:entity_type flight_id
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
sql = "SELECT flight_stop.arrival_time, city.city_name FROM flight_stop JOIN airport_service ON flight_stop.stop_airport = airport_service.airport_code JOIN city ON city.city_code = airport_service.city_code WHERE flight_stop.flight_id = %s" % (processed_flight_id)
return get_result(sql)
def get_flight_restriction_code(flight_id):
"""
_restriction_code $0
:entity_type flight_id
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
sql = "SELECT restriction.restriction_code FROM flight_fare JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN restriction ON fare.restriction_code = restriction.restriction_code WHERE flight_fare.flight_id = %s" % (processed_flight_id)
return get_result(sql)
# Binary Predicate
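# Binary predicates: each normalizes its two entity strings with process_entity_string and
# returns True when a matching row exists in the database (a few are unimplemented stubs
# that always return True).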
def is_mf(entity, manufacturer):
"""
:_mf
mf(x,"boeing:_mf")
"""
return True
def is_flight_manufacturer(flight_id, manufacturer):
"""
_manufacturer(x,"boeing:_mf")
:entity_type (flight_id, manufacturer)
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_manufacturer, entity_type = process_entity_string(
manufacturer, "manufacturer")
sql = "SELECT flight.flight_id FROM flight JOIN aircraft ON flight.aircraft_code_sequence = aircraft.aircraft_code WHERE aircraft.manufacturer = '%s' AND flight.flight_id = %s" % (processed_manufacturer, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_services(airline_code, city_name):
"""
_services(x,y)
"""
processed_airline_code, _ = process_entity_string(airline_code, "airline_code")
processed_city_name, _ = process_entity_string(city_name, "city_name")
sql = "SELECT flight_id FROM flight JOIN airport_service ON flight.to_airport = airport_service.airport_code JOIN city ON city.city_code = airport_service.city_code WHERE city.city_name = '%s' AND flight.airline_code = '%s'" % (
processed_city_name, processed_airline_code)
results = get_result(sql)
return len(results) > 0
def is_airline_services(airline_code, airport_code):
"""
_services ff:_al $x
:entity_type: (airline_code, airport_code)
"""
processed_airline_code, _ = process_entity_string(airline_code, "airline_code")
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT DISTINCT flight.to_airport FROM flight WHERE flight.to_airport = '%s' AND flight.airline_code = '%s'" % (
processed_airport_code, processed_airline_code)
results = get_result(sql)
return len(results) > 0
def is_to(flight_id, entity):
"""
_to(x,"mke:_ap"/"indianapolis:_ci")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
entity, entity_type = process_entity_string(entity, "airport_code")
if entity_type == 'airport_code':
sql = "SELECT flight_id FROM flight WHERE flight.flight_id = %s AND flight.to_airport = '%s'" % (
processed_flight_id, entity)
elif entity_type == 'city_name':
sql = "SELECT DISTINCT flight_1.flight_id FROM flight AS flight_1 JOIN airport_service AS airport_service_1 ON flight_1.to_airport = airport_service_1.airport_code JOIN city AS city_1 ON airport_service_1.city_code = city_1.city_code WHERE city_1.city_name = '%s' AND flight_1.flight_id = %s" % (
entity, processed_flight_id)
else:
# entity_type == 'state_name':
sql = "SELECT DISTINCT flight_1.flight_id FROM flight AS flight_1 JOIN airport_service AS airport_service_1 ON flight_1.to_airport = airport_service_1.airport_code JOIN city AS city_1 ON airport_service_1.city_code = city_1.city_code JOIN state ON city_1.state_code = state.state_code WHERE state.state_name = '%s' AND flight_1.flight_id = %s" % (
entity, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_from(flight_id, entity):
"""
_from(x,"mke:_ap"/"indianapolis:_ci")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
entity, entity_type = process_entity_string(entity, "airport_code")
if entity_type == 'airport_code':
sql = "SELECT flight_id FROM flight WHERE flight.flight_id = %s AND flight.from_airport = '%s'" % (
processed_flight_id, entity)
else:
# entity_type == 'city_name'
sql = "SELECT DISTINCT flight_1.flight_id FROM flight AS flight_1 JOIN airport_service AS airport_service_1 ON flight_1.from_airport = airport_service_1.airport_code JOIN city AS city_1 ON airport_service_1.city_code = city_1.city_code WHERE city_1.city_name = '%s' AND flight_1.flight_id = %s" % (
entity, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_loc_t(airport_code, city_name):
"""
_loc:_t(airport_code,city_name)
:entity_type (airport_code, city_name)
"""
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
processed_city_name, _ = process_entity_string(
city_name, "city_name")
sql = "SELECT * FROM airport_service AS T JOIN city ON T.city_code = city.city_code WHERE city.city_name = '%s' AND T.airport_code = '%s';" % (
processed_city_name, processed_airport_code)
results = get_result(sql)
return len(results) > 0
def is_loc_t_state(airport_code, state_name):
"""
_loc:_t(airport_code,state_name)
:entity_type (airport_code, state_name)
"""
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
processed_state_name, _ = process_entity_string(
state_name, "state_name")
sql = "SELECT * FROM airport_service AS T JOIN city ON T.city_code = city.city_code JOIN state ON city.state_code = state.state_code WHERE state.state_name = '%s' AND T.airport_code = '%s';" % (
processed_state_name, processed_airport_code)
results = get_result(sql)
return len(results) > 0
def is_loc_t_city_time_zone(city_name, time_zone_code):
"""
_loc:_t(city_name,time_zone_code)
:entity_type (city_name, time_zone_code)
"""
processed_city_name, _ = process_entity_string(city_name, "city_name")
processed_time_zone_code, _ = process_entity_string(
time_zone_code, "time_zone_code")
sql = "SELECT city_name FROM city WHERE city_name = '%s' AND time_zone_code = '%s'" % (
processed_city_name, processed_time_zone_code)
results = get_result(sql)
return len(results) > 0
def is_from_airport(transport_way, entity):
"""
Transport Type
_from_airport(x,"toronto:_ci"/"pit:_ap")
"""
processed_transport_way, _ = process_entity_string(transport_way, "transport_type")
entity_name, entity_type = process_entity_string(entity)
airport_code_template = "SELECT DISTINCT ground_service_1.transport_type FROM ground_service ground_service_1 WHERE ground_service_1.airport_code = '%s' AND ground_service_1.transport_type = '%s'"
if entity_type == 'city_name':
sql = city_name_template % (entity_name, processed_transport_way)
else:
# entity_type == 'airport_code'
sql = airport_code_template % (entity_name, processed_transport_way)
results = get_result(sql)
return len(results) > 0
def is_from_airports_of_city(transport_way, city_name):
"""
Transport Type
_from_airport(x,"toronto:_ci"/"pit:_ap")
"""
processed_transport_way, _ = process_entity_string(transport_way, "transport_type")
processed_city_name, _ = process_entity_string(city_name, "city_name")
sql = "SELECT DISTINCT T3.transport_type FROM airport_service AS T1 JOIN city AS T2 ON T1.city_code = T2.city_code JOIN ground_service AS T3 ON T1.airport_code = T3.airport_code WHERE T2.city_name = '%s' AND T3.transport_type = '%s'" % (
processed_city_name, processed_transport_way
)
results = get_result(sql)
return len(results) > 0
def is_to_city(transport_way, city_name):
"""
Transport Type
_to_city(x,"boston:_ci")
"""
processed_transport_way, _ = process_entity_string(
transport_way, "transport_type")
entity_name, entity_type = process_entity_string(city_name)
assert entity_type == 'city_name'
sql = "SELECT DISTINCT ground_service_1.transport_type FROM ground_service AS ground_service_1 JOIN city AS city_1 ON ground_service_1.city_code = city_1.city_code WHERE city_1.city_name = '%s' AND ground_service_1.transport_type = '%s'" % (
entity_name, processed_transport_way)
results = get_result(sql)
return len(results) > 0
def is_flight_airline(flight_id, airline_code):
"""
_airline(x,"dl:_al")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_airline_code, _ = process_entity_string(airline_code, "airline_code")
sql = "SELECT flight_id FROM flight WHERE flight_id = %s AND airline_code = '%s'" % (
processed_flight_id, processed_airline_code)
results = get_result(sql)
return len(results) > 0
def is_aircraft_airline(aircraft_code, airline_code):
"""
_airline(x,"dl:_al")
"""
processed_aircraft_code, _ = process_entity_string(
aircraft_code, "aircraft_code")
processed_airline_code, _ = process_entity_string(
airline_code, "airline_code")
sql = "SELECT aircraft_code_sequence FROM flight WHERE aircraft_code_sequence = '%s' AND airline_code = '%s'" % (
processed_aircraft_code, processed_airline_code)
results = get_result(sql)
return len(results) > 0
def is_aircraft_basis_type(aircraft_code, basis_type):
"""
_basis_type(x,"737:_bat")
:entity_type: (aircraft_code, basis_type)
"""
processed_aircraft_code, _ = process_entity_string(
aircraft_code, "aircraft_code")
processed_basis_type, _ = process_entity_string(
basis_type, "basis_type")
sql = "SELECT aircraft_code FROM aircraft WHERE aircraft_code = '%s' AND basic_type = '%s'" % (
processed_aircraft_code, processed_basis_type)
results = get_result(sql)
return len(results) > 0
def is_flight_number(flight_id, flight_number):
"""
_flight_number(x,"201:_fn")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_flight_number, _ = process_entity_string(
flight_number, "flight_number")
sql = "SELECT flight_id FROM flight WHERE flight_id = %s AND flight_number = '%s'" % (
processed_flight_id, processed_flight_number)
results = get_result(sql)
return len(results) > 0
def is_flight_stop_at_city(flight_id, city_name):
"""
_stop(x,"denver:_ci")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_city_name, _ = process_entity_string(
city_name, "city_name")
sql = "SELECT flight.flight_id FROM flight JOIN flight_stop ON flight.flight_id = flight_stop.flight_id JOIN airport_service ON flight_stop.stop_airport = airport_service.airport_code JOIN city ON city.city_code = airport_service.city_code WHERE city.city_name = '%s' AND flight.flight_id = %s" % (
processed_city_name, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_stop_at_airport(flight_id, airport_code):
"""
_stop(x,"denver:_ci")
:entity_type (flight_id, airport_code)
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_airport_code, _ = process_entity_string(
airport_code, "airport_code")
sql = "SELECT flight_stop.flight_id FROM flight_stop WHERE flight_stop.stop_airport = '%s' AND flight_stop.flight_id = %s" % (
processed_airport_code, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_stops_specify_number_of_times(flight_id, integer):
"""
_stops(x,"a:_i")
:entity_type: (flight_id, integer)
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_integer, _ = process_entity_string(
integer, "integer")
sql = "SELECT flight_id FROM flight WHERE flight_id = %s AND stops = %s" % (processed_flight_id, processed_integer)
results = get_result(sql)
return len(results) > 0
def is_flight_has_class_type(flight_id, class_description):
"""
_class_type(x,"first:_cl")
:entity_type: (flight_id, class_type)
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_class_description, _ = process_entity_string(
class_description, "class_description")
sql = "SELECT flight_fare.flight_id FROM flight_fare JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code WHERE flight_fare.flight_id = %s AND fare_basis.class_type = '%s'" % (
processed_flight_id, processed_class_description)
results = get_result(sql)
return len(results) > 0
def is_fare_basis_code_class_type(fare_basis_code, class_description):
"""
_class_type(x,"first:_cl")
:entity_type: (fare_basis_code, class_type)
"""
processed_fare_basis_code, _ = process_entity_string(
fare_basis_code, "fare_basis_code")
processed_class_description, _ = process_entity_string(
class_description, "class_description")
sql = "SELECT fare_basis_code FROM fare_basis JOIN class_of_service ON fare_basis.booking_class = class_of_service.booking_class WHERE fare_basis_code = '%s' AND class_description = '%s'" % (
processed_fare_basis_code, processed_class_description)
results = get_result(sql)
return len(results) > 0
def is_flight_after_day(flight_id, day):
"""
_after_day(x,"wednesday:_da")
"""
return True
def is_flight_before_day(flight_id, day):
"""
_before_day(x,"wednesday:_da")
"""
return True
def is_flight_approx_arrival_time(flight_id, arrival_time):
"""
_approx_arrival_time()
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_arrival_time, _ = process_entity_string(
arrival_time, "time")
if len(processed_arrival_time) == 4:
if processed_arrival_time[2:] == '00':
start_time = "%d%d" % (int(processed_arrival_time[:2]) - 1, 30)
end_time = "%d%d" % (int(processed_arrival_time[:2]), 30)
elif processed_arrival_time[2:] == '15':
start_time = "%d%d" % (int(processed_arrival_time[:2]) - 1, 45)
end_time = "%d%d" % (int(processed_arrival_time[:2]), 45)
elif processed_arrival_time[2:] == '30':
start_time = "%d%d" % (int(processed_arrival_time[:2]), 00)
end_time = "%d%d" % (int(processed_arrival_time[:2]) + 1, 00)
else:
assert processed_arrival_time[2:] == '45'
start_time = "%d%d" % (int(processed_arrival_time[:2]), 15)
end_time = "%d%d" % (int(processed_arrival_time[:2]) + 1, 15)
else:
if processed_arrival_time[1:] == '00':
start_time = "%d%d" % (int(processed_arrival_time[:1]) - 1, 30)
end_time = "%d%d" % (int(processed_arrival_time[:1]), 30)
elif processed_arrival_time[1:] == '15':
start_time = "%d%d" % (int(processed_arrival_time[:1]) - 1, 45)
end_time = "%d%d" % (int(processed_arrival_time[:1]), 45)
elif processed_arrival_time[1:] == '30':
start_time = "%d%d" % (int(processed_arrival_time[:1]), 00)
end_time = "%d%d" % (int(processed_arrival_time[:1]) + 1, 00)
else:
assert processed_arrival_time[1:] == '45'
start_time = "%d%d" % (int(processed_arrival_time[:1]), 15)
end_time = "%d%d" % (int(processed_arrival_time[:1]) + 1, 15)
sql = "SELECT flight_1.flight_id FROM flight flight_1 WHERE flight_1.arrival_time >= %s AND flight_1.arrival_time <= %s AND flight_1.flight_id = %s" % (
start_time, end_time, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_arrival_time(flight_id, arrival_time):
"""
_arrival_time(x,"1700:_ti")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_arrival_time, _ = process_entity_string(
arrival_time, "time")
sql = "SELECT flight_1.flight_id FROM flight flight_1 WHERE flight_1.arrival_time = %s AND flight_1.flight_id = %s" % (
processed_arrival_time, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_departure_time(flight_id, departure_time):
"""
_departure_time()
:entity_type: (flight_id, time)
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_departure_time, _ = process_entity_string(
departure_time, "time")
sql = "SELECT flight_1.flight_id FROM flight flight_1 WHERE flight_1.departure_time = %s AND flight_1.flight_id = %s" % (
processed_departure_time, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_approx_departure_time(flight_id, departure_time):
"""
_approx_departure_time()
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_departure_time, _ = process_entity_string(
departure_time, "time")
if len(processed_departure_time) == 4:
if processed_departure_time[2:] == '00':
start_time = "%d%d" % (int(processed_departure_time[:2]) - 1, 30)
end_time = "%d%d" % (int(processed_departure_time[:2]), 30)
elif processed_departure_time[2:] == '15':
start_time = "%d%d" % (int(processed_departure_time[:2]) - 1, 45)
end_time = "%d%d" % (int(processed_departure_time[:2]), 45)
elif processed_departure_time[2:] == '30':
start_time = "%d%d" % (int(processed_departure_time[:2]), 00)
end_time = "%d%d" % (int(processed_departure_time[:2]) + 1, 00)
else:
assert processed_departure_time[2:] == '45'
start_time = "%d%d" % (int(processed_departure_time[:2]), 15)
end_time = "%d%d" % (int(processed_departure_time[:2]) + 1, 15)
sql = "SELECT flight_1.flight_id FROM flight flight_1 WHERE flight_1.departure_time >= %s AND flight_1.departure_time <= %s AND flight_1.flight_id = %s" % (
start_time, end_time, processed_flight_id)
elif len(processed_departure_time) == 3:
if processed_departure_time[1:] == '00':
start_time = "%d%d" % (int(processed_departure_time[:1]) - 1, 30)
end_time = "%d%d" % (int(processed_departure_time[:1]), 30)
elif processed_departure_time[1:] == '15':
start_time = "%d%d" % (int(processed_departure_time[:1]) - 1, 45)
end_time = "%d%d" % (int(processed_departure_time[:1]), 45)
elif processed_departure_time[1:] == '30':
start_time = "%d%d" % (int(processed_departure_time[:1]), 00)
end_time = "%d%d" % (int(processed_departure_time[:1]) + 1, 00)
else:
assert processed_departure_time[1:] == '45'
start_time = "%d%d" % (int(processed_departure_time[:1]), 15)
end_time = "%d%d" % (int(processed_departure_time[:1]) + 1, 15)
sql = "SELECT flight_1.flight_id FROM flight flight_1 WHERE flight_1.departure_time >= %s AND flight_1.departure_time <= %s AND flight_1.flight_id = %s" % (
start_time, end_time, processed_flight_id)
elif processed_departure_time == "0":
start_time = "2330"
end_time = "30"
sql = "SELECT flight_1.flight_id FROM flight flight_1 WHERE (flight_1.departure_time >= %s OR flight_1.departure_time <= %s) AND flight_1.flight_id = %s" % (
start_time, end_time, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_approx_return_time(flight_id, return_time):
"""
_approx_return_time(x,"1900:_ti")
"""
return is_flight_approx_arrival_time(flight_id, return_time)
def is_flight_during_day(flight_id, day_period):
"""
_during_day(x,"evening:_pd")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_day_period, _ = process_entity_string(
day_period, "day_period")
period_map = {
"morning": [0, 1200],
"afternoon": [1200, 1800],
"early": [0, 800],
"evening": [1800, 2200],
"pm": [1200, 2400],
"late": [601, 1759],
"breakfast": [600, 900],
"late evening": [2000, 2400],
"late night": [2159, 301],
"daytime": [600,1800]
}
if processed_day_period == 'late night':
sql = "SELECT DISTINCT flight.flight_id FROM flight JOIN days ON flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE flight.flight_id = %s AND date_day.year = 1991 AND date_day.month_number = 3 AND ( (date_day.day_number = 21 AND flight.departure_time > 2159) OR (date_day.day_number = 22 AND flight.departure_time < 301))" % (processed_flight_id)
else:
start, end = period_map[processed_day_period]
sql = "SELECT DISTINCT flight_1.flight_id FROM flight flight_1 WHERE flight_1.flight_id = %s AND flight_1.departure_time BETWEEN %d AND %d" % (
processed_flight_id, start, end)
results = get_result(sql)
return len(results) > 0
def is_flight_during_day_arrival(flight_id, day_period):
"""
_during_day(x,"evening:_pd")
"""
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_day_period, _ = process_entity_string(
day_period, "day_period")
period_map = {
"morning": [0, 1200],
"afternoon": [1200, 1800],
"early": [0, 800],
"evening": [1800, 2200],
"pm": [1200, 2400],
"late": [601, 1759],
"breakfast": [600, 900],
"late evening": [2000, 2400],
"daytime": [600, 1800],
"late night": [2159, 301],
'mealtime': [1700,2000]
}
if processed_day_period == 'late night':
sql = "SELECT DISTINCT flight.flight_id FROM flight JOIN days ON flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE flight.flight_id = %s AND date_day.year = 1991 AND date_day.month_number = 3 AND ( (date_day.day_number = 21 AND flight.arrival_time > 2159) OR (date_day.day_number = 22 AND flight.arrival_time < 301))" % (
processed_flight_id)
else:
start, end = period_map[processed_day_period]
sql = "SELECT DISTINCT flight_1.flight_id FROM flight flight_1 WHERE flight_1.flight_id = %s AND flight_1.arrival_time BETWEEN %d AND %d" % (
processed_flight_id, start, end)
results = get_result(sql)
return len(results) > 0
def is_flight_on_day_number(flight_id, day_number):
"""
_day_number(x,"26:_dn")
:entity_type (flight_id, day_number)
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_day_number, _ = process_entity_string(day_number, "day_number")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.day_number = %s AND flight.flight_id = %s" % (
processed_day_number, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_on_day(flight_id, day):
"""
_day $0 monday:_da
:entity_type: (flight_id, day)
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_day, _ = process_entity_string(day, "day")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code WHERE days.day_name = '%s' AND flight.flight_id = %s" % (
processed_day, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_day_arrival(flight_id, day):
"""
_day_arrival(x, "sunday:_da")
:entity_type (flight_id, day)
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
processed_day, _ = process_entity_string(day, "day")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code WHERE days.day_name = '%s' AND flight.flight_id = %s" % (
processed_day, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_day_return(flight_id, day):
"""
_day_return(x, "tuesday:_da")
:entity_type (flight_id, day)
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
processed_day, _ = process_entity_string(day, "day")
sql = "SELECT flight.flight_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN days ON fare_basis.basis_days = days.days_code WHERE flight.flight_id = %s AND days.day_name = '%s'" % (
processed_flight_id, processed_day)
results = get_result(sql)
return len(results) > 0
def is_flight_day_number_arrival(flight_id, day_number):
"""
_day_number_arrival(x, "14:_dn")
:entity_type (flight_id, day_number)
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_day_number, _ = process_entity_string(day_number, "day_number")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE flight.flight_id = %s AND date_day.year = 1991 AND ((date_day.day_number = %s AND flight.arrival_time < flight.departure_time) OR (date_day.day_number = %s))" % (
processed_flight_id, str(int(processed_day_number) - 1), processed_day_number)
results = get_result(sql)
return len(results) > 0
def is_flight_day_number_return(flight_id, day_number):
"""
_day_number_return(x, "14:_dn")
:entity_type (flight_id, day_number)
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_day_number, _ = process_entity_string(day_number, "day_number")
sql = "SELECT flight.flight_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN days ON fare_basis.basis_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE flight.flight_id = %s AND date_day.day_number = %s" % (
processed_flight_id, processed_day_number)
results = get_result(sql)
return len(results) > 0
def is_flight_month_arrival(flight_id, month):
"""
_month_arrival(x, "june:_mn")
:entity_type (flight_id, month)
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
processed_month, _ = process_entity_string(month, "month")
month_map = {
"january": 1,
"february": 2,
"march": 3,
"april": 4,
"may": 5,
"june": 6,
"july": 7,
"august": 8,
"september": 9,
"october": 10,
"november": 11,
"december": 12
}
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = %s AND flight.flight_id = %s" % (
month_map[processed_month], processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_on_month(flight_id, month):
"""
_month(x, "june:_mn")
:entity_type (flight_id, month)
"""
return is_flight_month_arrival(flight_id, month)
def is_flight_month_return(flight_id, month):
"""
_month_return(x, "june:_mn")
:entity_type (flight_id, month)
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_month, _ = process_entity_string(month, "month")
month_map = {
"january": 1,
"february": 2,
"march": 3,
"april": 4,
"may": 5,
"june": 6,
"july": 7,
"august": 8,
"september": 9,
"october": 10,
"november": 11,
"december": 12
}
sql = "SELECT flight.flight_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN days ON fare_basis.basis_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = %s AND flight.flight_id = %s" % (
month_map[processed_month], processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_next_days_flight(flight_id, integer):
"""
_next_days $0 2:_i
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_integer, _ = process_entity_string(integer, "integer")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = 1 AND date_day.day_number BETWEEN 20 and %s AND flight.flight_id = %s" % (
int(processed_integer) + 20, processed_flight_id, )
results = get_result(sql)
return len(results) > 0
def is_overnight_flight(flight_id):
"""
TODO implementation
_overnight $0
:entity_type flight_id
"""
return True
def is_flight_days_from_today(flight_id, integer):
"""
    _days_from_today $0
    :entity_type (flight_id, integer)
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
processed_integer, _ = process_entity_string(integer, "integer")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = 5 AND date_day.day_number = %s AND flight.flight_id = %s" % (
int(processed_integer) + 27, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_tomorrow_flight(flight_id):
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = 1 AND date_day.day_number = 20 AND flight.flight_id = %s" % (processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_tomorrow_arrival_flight(flight_id):
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = 1 AND date_day.day_number = 20 AND flight.departure_time > flight.arrival_time AND flight.flight_id = %s" % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_today_flight(flight_id):
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = 6 AND date_day.day_number = 22 AND flight.flight_id = %s" % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_day_after_tomorrow_flight(flight_id):
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight.flight_id FROM flight JOIN days on flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE date_day.year = 1991 AND date_day.month_number = 1 AND date_day.day_number = 21 AND flight.flight_id = %s" % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_airport_of_city(city_name, airport_code):
"""
_airport(washington:_ci,x)
:entity_type city_name, airport_code
"""
processed_city_name, _ = process_entity_string(city_name, "city_name")
processed_airport_code, entity_type = process_entity_string(airport_code, "airport_code")
sql = 'SELECT airport_code FROM airport_service JOIN city ON city.city_code = airport_service.city_code WHERE city.city_name = "%s" AND airport_service.airport_code = "%s"' % (processed_city_name, processed_airport_code)
results = get_result(sql)
return len(results) > 0
def is_specific_fare_basis_code(entity, fare_basis_code):
"""
_fare_basis_code $0 415:_do
:entity_type: (fare_basis_code, fare_basis_code)
"""
processed_entity, _ = process_entity_string(entity, "fare_basis_code")
processed_fare_basis_code, _ = process_entity_string(fare_basis_code, "fare_basis_code")
return processed_entity.lower() == processed_fare_basis_code.lower()
def is_flight_has_specific_fare_basis_code(flight_id, fare_basis_code):
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_fare_basis_code, _ = process_entity_string(
fare_basis_code, "fare_basis_code")
sql = "SELECT flight_id FROM flight_fare JOIN fare ON flight_fare.fare_id = fare.fare_id WHERE flight_id = %s AND fare.fare_basis_code = '%s'" % (processed_flight_id, processed_fare_basis_code)
results = get_result(sql)
return len(results) > 0
def is_flight_cost_fare(flight_id, dollar):
"""
_fare $0 415:_do
:entity_type: (flight_id, dollar)
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_dollar, _ = process_entity_string(dollar, "dollar")
sql = "SELECT fare.one_direction_cost FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON fare.fare_id = flight_fare.fare_id WHERE flight.flight_id = %s AND fare.one_direction_cost = %s" % (processed_flight_id, processed_dollar)
results = get_result(sql)
return len(results) > 0
def is_time_elapsed(flight_id, hour):
"""
_time_elapsed $0 9:_hr
:entity_type: (flight_id, hour)
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_hour, _ = process_entity_string(hour, "hour")
minutes = (int(processed_hour) * 60)
sql = "SELECT flight_id FROM flight WHERE flight_id = %s AND time_elapsed = %s" % (processed_flight_id, minutes)
results = get_result(sql)
return len(results) > 0
def is_flight_meal_code(flight_id, meal_code):
"""
_meal_code $0 b:_rc
:entity_type: (flight_id, meal_code)
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_meal_code, _ = process_entity_string(meal_code, "meal_code")
sql = "SELECT flight_id FROM flight WHERE flight_id = %s AND meal_code = '%s'" % (processed_flight_id, processed_meal_code)
results = get_result(sql)
return len(results) > 0
def is_flight_has_specific_meal(flight_id, meal_description):
"""
_meal $0 dinner:_me
:entity_type: (flight_id, meal_description)
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_meal_description, _ = process_entity_string(
meal_description, "meal_description")
sql = "SELECT flight_id FROM flight JOIN food_service ON flight.meal_code = food_service.meal_code WHERE flight_id = %s AND food_service.meal_description = '%s'" % (
processed_flight_id, processed_meal_description)
results = get_result(sql)
return len(results) > 0
def is_flight_aircraft(flight_id, aircraft_code):
"""
    _aircraft $0
    :entity_type: (flight_id, aircraft_code)
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_aircraft_code, _ = process_entity_string(aircraft_code, "aircraft_code")
sql = "SELECT flight_id FROM flight WHERE flight_id = %s AND aircraft_code_sequence = '%s'" % (processed_flight_id, processed_aircraft_code)
results = get_result(sql)
return len(results) > 0
def is_airline_has_booking_class(class_description, airline_code):
"""
_airline(x, us:_al)
:entity_type: (class_description, airline_code)
"""
processed_class_description, _ = process_entity_string(
class_description, "class_description")
processed_airline_code, _ = process_entity_string(
airline_code, "airline_code")
sql = "SELECT class_description FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN class_of_service ON fare_basis.booking_class = class_of_service.booking_class WHERE class_of_service.class_description = '%s' AND flight.airline_code = '%s'" % (
processed_class_description, processed_airline_code)
results = get_result(sql)
return len(results) > 0
def is_airline_provide_meal(meal_code, airline_code):
processed_meal_code, _ = process_entity_string(
meal_code, "meal_code")
processed_airline_code, _ = process_entity_string(
airline_code, "airline_code")
sql = "SELECT meal_code FROM flight WHERE airline_code = '%s' AND meal_code = '%s'" % (processed_airline_code, processed_meal_code)
results = get_result(sql)
return len(results) > 0
def is_flight_has_booking_class(flight_id, class_description):
"""
_booking_class(x, us:_al)
:entity_type: (flight_id, class_description)
"""
processed_flight_id, _ = process_entity_string(flight_id, "flight_id")
processed_class_description, _ = process_entity_string(
class_description, "class_description")
sql = "SELECT flight_fare.flight_id FROM flight_fare JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code JOIN class_of_service ON fare_basis.booking_class = class_of_service.booking_class WHERE class_of_service.class_description = '%s' AND flight_fare.flight_id = %s" % (
processed_class_description, processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_with_specific_aircraft(flight_id, aircraft_code):
processed_flight_id, _ = process_entity_string(
flight_id, "flight_id")
processed_aircraft_code, _ = process_entity_string(
aircraft_code, "aircraft_code")
sql = "SELECT flight_id FROM flight JOIN equipment_sequence ON flight.aircraft_code_sequence = equipment_sequence.aircraft_code_sequence WHERE flight.flight_id = %s AND equipment_sequence.aircraft_code = '%s'" % (processed_flight_id, processed_aircraft_code)
results = get_result(sql)
return len(results) > 0
# Unit Predicate
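# Unary predicates: each checks that a single entity of the stated type exists (or has the
# stated property), either by querying the database or, for a few transport types, by a
# plain string comparison.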
def is_aircraft(aircraft_code):
"""
_aircraft(x)
:entity_type: aircraft_code
"""
processed_aircraft_code, _ = process_entity_string(aircraft_code, "aircraft_code")
sql = "SELECT aircraft_code FROM aircraft WHERE aircraft_code = '%s'" % (
processed_aircraft_code)
results = get_result(sql)
return len(results) > 0
def aircraft_code(aircraft_code):
"""
_aircraft_code:_t $0
:entity_type: aircraft_code
"""
return is_aircraft(aircraft_code)
def is_city(city_name):
"""
_city(x)
:entity_type: city_name
"""
processed_city_name, _ = process_entity_string(city_name, "city_name")
sql = "SELECT city_name FROM city WHERE city_name = '%s'" % (
processed_city_name)
results = get_result(sql)
return len(results) > 0
def is_airline(entity):
"""
_airline(x)
:entity_type airline_code
"""
# assert isinstance(entity, str)
entity_name, entity_type = process_entity_string(entity, "airline_code")
sql = 'SELECT airline_code FROM airline WHERE airline_code = "%s"' % entity_name
results = get_result(sql)
return len(results) > 0
def is_airport(entity):
"""
airport(x)
:entity_type airport_code
"""
# assert isinstance(entity, str)
entity_name, entity_type = process_entity_string(entity, "airport_code")
sql = 'SELECT airport_code FROM airport WHERE airport_code = "%s"' % entity_name
results = get_result(sql)
return len(results) > 0
def is_flight(entity):
"""
flight(x)
:entity_type flight_id
"""
# assert isinstance(entity, str)
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = 'SELECT flight_id FROM flight WHERE flight_id = %s' % entity_name
results = get_result(sql)
return len(results) > 0
def is_daily_flight(entity):
"""
_daily(x)
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = "SELECT flight_id FROM flight WHERE flight_days = 'daily' AND flight_id = %s" % entity_name
results = get_result(sql)
return len(results) > 0
def is_discounted_flight(entity):
"""
_discounted(x)
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = "SELECT flight.flight_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON fare.fare_id = flight_fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code WHERE fare_basis.discounted = 'YES' AND flight.flight_id = %s" % entity_name
results = get_result(sql)
return len(results) > 0
def is_connecting_flight(entity):
"""
_connecting(x)
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = 'SELECT flight_id FROM flight WHERE flight_id = %s AND connections > 0' % entity_name
results = get_result(sql)
return len(results) > 0
def is_oneway(entity):
"""
oneway(x)
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = 'SELECT flight.flight_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON flight_fare.fare_id = fare.fare_id WHERE fare.round_trip_required = "NO" AND flight.flight_id = %s' % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_flight_has_stop(entity):
"""
_has_stops(x)
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = 'SELECT T1.flight_id FROM flight AS T1 JOIN flight_stop AS T2 ON T1.flight_id = T2.flight_id WHERE T1.flight_id = %s' % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_non_stop_flight(flight_id):
"""
_nonstop(x)
:entity_type flight_id
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = 'SELECT flight.flight_id FROM flight WHERE flight.stops = 0 AND flight.flight_id = %s' % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_meal(entity):
"""
TODO: not sure
_meal:_t(x)
:entity_type meal_code
"""
entity_name, entity_type = process_entity_string(entity, "meal_code")
sql = "SELECT meal_code FROM food_service WHERE food_service.meal_code = '%s'" % (entity_name)
results = get_result(sql)
return len(results) > 0
def is_meal_code(entity):
"""
_meal_code(x)
:entity_type meal_code
"""
entity_name, entity_type = process_entity_string(entity, "meal_code")
sql = "SELECT meal_code FROM food_service WHERE food_service.meal_code = '%s'" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_flight_has_meal(entity):
"""
_has_meal(x):
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = "SELECT flight_id FROM flight WHERE meal_code is not NULL AND flight_id = %s" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_flight_tonight(entity):
"""
_tonight(x)
:entity_type flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = "SELECT flight_id FROM flight WHERE departure_time BETWEEN %d AND %d AND flight_id = %s" % (
1800, 2359, entity_name)
results = get_result(sql)
return len(results) > 0
def is_booking_class_t(entity):
"""
_booking_class:_t(x)
:entity_type: class_description
"""
entity_name, entity_type = process_entity_string(
entity, "class_description")
sql = "SELECT DISTINCT class_description FROM class_of_service WHERE class_description = '%s';" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_class_of_service(entity):
"""
_class_of_service(x)
:entity_type: booking_class
"""
return is_booking_class_t(entity)
def is_fare_basis_code(entity):
"""
_fare_basis_code(x)
:entity_type: fare_basis_code
"""
entity_name, entity_type = process_entity_string(entity, "fare_basis_code")
sql = "SELECT DISTINCT fare_basis_1.fare_basis_code FROM fare_basis fare_basis_1 WHERE fare_basis_code = '%s'" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_flight_economy(flight_id):
"""
_economy(x)
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(flight_id, "flight_id")
sql = "SELECT flight_fare.flight_id FROM flight_fare JOIN fare ON flight_fare.fare_id = fare.fare_id JOIN fare_basis ON fare.fare_basis_code = fare_basis.fare_basis_code WHERE fare_basis.economy = 'YES' AND flight_fare.flight_id = %s" % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_economy(entity):
"""
_economy(x)
:entity_type: fare_basis_code
"""
entity_name, entity_type = process_entity_string(entity, "fare_basis_code")
sql = "SELECT DISTINCT fare_basis_code FROM fare_basis fare_basis_1 WHERE fare_basis_1.economy = 'YES' AND fare_basis_1.fare_basis_code = '%s'" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_fare(entity):
"""
_fare(x)
:entity_type: fare_id
"""
entity_name, entity_type = process_entity_string(entity, "fare_id")
sql = "SELECT DISTINCT fare_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id WHERE fare_id = %s" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_aircraft_code_t(entity):
"""
_aircraft_code:t(x)
:entity_type: aircraft_code
"""
entity_name, entity_type = process_entity_string(entity, "aircraft_code")
sql = "SELECT aircraft_code FROM aircraft WHERE aircraft_code = '%s'" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_ground_transport(transport_type):
"""
_ground_transport(x)
:entity_type: transport_type
"""
entity_name, entity_type = process_entity_string(
transport_type, "transport_type")
sql = "SELECT DISTINCT ground_service_1.transport_type FROM ground_service ground_service_1 WHERE ground_service_1.transport_type = '%s'" % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_round_trip(entity):
"""
_round_trip(x)
:entity_type: flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = 'SELECT flight.flight_id FROM flight JOIN flight_fare ON flight.flight_id = flight_fare.flight_id JOIN fare ON flight_fare.fare_id = fare.fare_id WHERE fare.round_trip_required IS NOT NULL AND flight.flight_id = %s' % (
entity_name)
results = get_result(sql)
return len(results) > 0
def is_rental_car(entity):
"""
_rental_car(x)
:entity_type: transport_type
"""
entity_name, entity_type = process_entity_string(entity, "transport_type")
return entity_name.lower() == "rental car"
def is_limousine(entity):
"""
_limousine(x)
:entity_type: transport_type
"""
entity_name, entity_type = process_entity_string(entity, "transport_type")
return entity_name.upper() == "LIMOUSINE"
def is_rapid_transit(entity):
"""
_rapid_transit(x)
:entity_type: transport_type
"""
entity_name, entity_type = process_entity_string(entity, "transport_type")
return entity_name.upper() == "RAPID TRANSIT"
def is_taxi(entity):
"""
_taxi(x)
:entity_type: transport_type
"""
entity_name, entity_type = process_entity_string(entity, "transport_type")
return entity_name.upper() == "TAXI"
def is_air_taxi_operation(entity):
"""
_air_taxi_operation(x)
:entity_type: transport_type
"""
entity_name, entity_type = process_entity_string(entity, "transport_type")
return entity_name.upper() == "AIR TAXI OPERATION"
def is_ground_transport_on_weekday(entity):
"""
_weekday(x)
:entity_type: transport_type
"""
return True
def is_flight_on_year(entity, year):
"""
_year(x,"1991:_yr")
:entity_type: flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
processed_year, _ = process_entity_string(year, "year")
sql = "SELECT flight_id FROM flight JOIN days ON flight.flight_days = days.days_code JOIN date_day ON days.day_name = date_day.day_name WHERE flight_id = %s AND date_day.year = %s" % (entity_name, processed_year)
results = get_result(sql)
return len(results) > 0
def is_flight_on_weekday(entity):
"""
_weekday(x)
:entity_type: flight_id
"""
entity_name, entity_type = process_entity_string(entity, "flight_id")
sql = "SELECT distinct day_name FROM flight JOIN days ON flight.flight_days = days.days_code WHERE flight_id = %s AND day_name IN ('MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY', 'FRIDAY')" % entity_name
results = get_result(sql)
return len(results) == 5
def is_time_zone_code(entity):
"""
_time_zone_code(x)
:entity_type: time_zone_code
"""
entity_name, entity_type = process_entity_string(entity, "time_zone_code")
return entity_name.upper() in {"CST", "EST", "MST", "PST"}
def is_turboprop(aircraft_code):
"""
_turboprop(x)
:entity_type: aircraft_code
"""
processed_aircraft_code, entity_type = process_entity_string(
aircraft_code, "aircraft_code")
sql = "SELECT aircraft_code FROM aircraft WHERE aircraft_code = '%s' AND propulsion = 'TURBOPROP'" % (
processed_aircraft_code)
results = get_result(sql)
return len(results) > 0
def is_flight_turboprop(flight_id):
"""
_turboprop(x)
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight_id FROM flight JOIN aircraft ON flight.aircraft_code_sequence = aircraft.aircraft_code WHERE propulsion = 'TURBOPROP' AND flight_id = %s" % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
def is_flight_jet(flight_id):
"""
_jet(x)
:entity_type: flight_id
"""
processed_flight_id, entity_type = process_entity_string(
flight_id, "flight_id")
sql = "SELECT flight_id FROM flight JOIN aircraft ON flight.aircraft_code_sequence = aircraft.aircraft_code WHERE propulsion = 'JET' AND flight_id = %s" % (
processed_flight_id)
results = get_result(sql)
return len(results) > 0
# Meta Predicate
# TODO implement meta-predicates
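# These meta predicates are higher-order helpers: each takes a predicate, optionally one or
# more target functions, and an entity-set generator (defaulting to get_all_flight_ids when
# None is passed) and folds over every candidate entity.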
def equals(entity_1, entity_2):
if entity_1 is None or entity_2 is None:
return False
processed_entity_1, _ = process_entity_string(entity_1)
processed_entity_2, _ = process_entity_string(entity_2)
return str(processed_entity_1).lower() == str(processed_entity_2).lower()
def count(function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
count = 0
for entity in entity_set_function():
if function(entity):
count += 1
return count
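# e.g. count(is_non_stop_flight, get_all_flight_ids) would tally the nonstop flights in the database.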
def exists(function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
for e in entity_set_function():
if function(e):
return True
return False
def the(function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
return [entity for entity in entity_set_function() if function(entity)]
def argmax(predicate, target_function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
v = target_function(e)
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
values.append((e, _v[list(_v.keys())[0]],))
elif isinstance(v, dict):
values.append((e, v[list(v.keys())[0]],))
else:
assert isinstance(v, int) or isinstance(v, float)
values.append((e, v,))
max_value, max_indices = 0, list()
for idx, (e, v) in enumerate(values):
if v is None:
continue
if v > max_value:
max_value = v
max_indices = [idx]
elif v == max_value:
max_indices.append(idx)
if len(max_indices) > 0:
return [values[idx][0] for idx in max_indices]
return None
def argmin(predicate, target_function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
v = target_function(e)
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
values.append((e, _v[list(_v.keys())[0]],))
elif isinstance(v, dict):
values.append((e, v[list(v.keys())[0]],))
else:
assert isinstance(v, int) or isinstance(v, float)
values.append((e, v,))
min_value, min_indices = 10000000, list()
for idx, (e, v) in enumerate(values):
if v is None:
continue
if v < min_value:
min_value = v
min_indices = [idx]
elif v == min_value:
min_indices.append(idx)
if len(min_indices) > 0:
return [values[idx][0] for idx in min_indices]
return None
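# e.g. argmin(is_non_stop_flight, time_elapsed, get_all_flight_ids) would return the nonstop
# flight id(s) with the smallest elapsed time, assuming time_elapsed yields one numeric column per flight.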
def sum_predicate(predicate, target_function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
v = target_function(e)
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
values.append((e, _v[list(_v.keys())[0]],))
elif isinstance(v, dict):
values.append((e, v[list(v.keys())[0]],))
else:
assert isinstance(v, int) or isinstance(v, float)
values.append((e, v,))
print(values)
total = 0
for e, v in values:
total += v
return total
def max_predicate(predicate, target_function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
v = target_function(e)
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
values.append((e, _v[list(_v.keys())[0]],))
elif isinstance(v, dict):
values.append((e, v[list(v.keys())[0]],))
else:
assert isinstance(v, int) or isinstance(v, float)
values.append((e, v,))
if len(values) == 0:
return None
max_value = 0
for e, v in values:
if v > max_value:
max_value = v
return max_value
def min_predicate(predicate, target_function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
v = target_function(e)
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
values.append((e, _v[list(_v.keys())[0]],))
elif isinstance(v, dict):
values.append((e, v[list(v.keys())[0]],))
else:
assert isinstance(v, int) or isinstance(v, float)
values.append((e, v,))
if len(values) == 0:
return None
min_value = 100000000
for e, v in values:
if v < min_value:
min_value = v
return min_value
def get_target_value(predicate, target_function, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
v = target_function(e)
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
v_dict = dict()
v_dict.update(e)
v_dict[target_function.__name__ + '_0'] = _v[list(_v.keys())[0]]
values.append(v_dict)
elif isinstance(v, dict):
v_dict = dict()
v_dict.update(e)
v_dict[target_function.__name__ + '_0'] = v[list(v.keys())[0]]
values.append(v_dict)
else:
v_dict = dict()
v_dict.update(e)
v_dict[target_function.__name__ + '_0'] = v
values.append(v_dict)
return values
def get_target_values(predicate, target_functions, entity_set_function):
if entity_set_function is None:
entity_set_function = get_all_flight_ids
values = list()
for e in entity_set_function():
if predicate(e):
_values = list()
v_dict = dict()
v_dict.update(e)
for tf_idx, tf in enumerate(target_functions):
v = tf(e)
suffix = "_%d" % tf_idx
if isinstance(v, list):
for _v in v:
assert isinstance(_v, dict)
v_dict[tf.__name__ + suffix] = _v[list(_v.keys())[0]]
# _values.append(_v[list(_v.keys())[0]])
elif isinstance(v, dict):
v_dict[tf.__name__ + suffix] = v[list(v.keys())[0]]
# values.append(v[list(v.keys())[0]])
else:
v_dict[tf.__name__ + suffix] = v
# values.append(v)
values.append(v_dict)
return values
def process_numerical_value(value):
if isinstance(value, list):
assert isinstance(value[0], dict)
_value = float(value[0][list(value[0].keys())[0]])
elif isinstance(value, dict):
_value = float(value[list(value.keys())[0]])
elif isinstance(value, str):
_value, _ = process_entity_string(value)
_value = float(_value)
else:
_value = float(value)
return _value
def process_value(value):
if isinstance(value, list):
if len(value) == 0:
return ""
assert isinstance(value[0], dict)
_value = value[0][list(value[0].keys())[0]]
elif isinstance(value, dict):
_value = value[list(value.keys())[0]]
elif isinstance(value, str):
        _value, _ = process_entity_string(value)
else:
_value = value
return _value
def less_than(value_1, value_2):
"""
_<
"""
_value_1 = process_numerical_value(value_1)
_value_2 = process_numerical_value(value_2)
return _value_1 <= _value_2
def larger_than(value_1, value_2):
"""
_>
"""
_value_1 = process_numerical_value(value_1)
_value_2 = process_numerical_value(value_2)
return _value_1 >= _value_2
def numerical_equals(value_1, value_2):
"""
_=
"""
_value_1 = process_value(value_1)
_value_2 = process_value(value_2)
return _value_1 == _value_2
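# The block below sketches how generated programs compose these helpers: it pairs every
# flight id with every aircraft code and keeps the Delta (dl) flights from Seattle to
# Salt Lake City flown on that aircraft.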
if __name__ == '__main__':
result = [(xe, ye) for xe in get_all_flight_ids() for ye in get_all_aircraft_codes() if (lambda x,y: (is_flight_with_specific_aircraft(x,y) and is_flight_airline(x,"dl:_al") and is_flight(x) and is_from(x,"seattle:_ci") and is_to(x,"salt_lake_city:_ci")))(xe, ye)]
pprint(result)
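    # A second minimal sketch (commented out), assuming the helpers above behave as defined:
    # count how many nonstop Delta flights leave Boston.
    # print(count(lambda x: is_non_stop_flight(x) and is_flight_airline(x, "dl:_al")
    #             and is_from(x, "boston:_ci"), get_all_flight_ids))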
2bf69ab8a64322be4396c7ad2ef64184194d9d24
| 172
|
py
|
Python
|
obot.py
|
NNTin/Reply-Dota-2-Reddit
|
e47d20f3d5bfacc9f39625324e57781e8d3f029a
|
[
"MIT"
] | 22
|
2016-03-27T21:28:18.000Z
|
2021-09-13T14:48:43.000Z
|
obot.py
|
NNTin/Reply-Dota-2-Reddit
|
e47d20f3d5bfacc9f39625324e57781e8d3f029a
|
[
"MIT"
] | 24
|
2016-03-28T09:39:41.000Z
|
2018-11-06T20:42:30.000Z
|
obot.py
|
NNTin/Reply-Dota-2-Reddit
|
e47d20f3d5bfacc9f39625324e57781e8d3f029a
|
[
"MIT"
] | null | null | null |
client_id="You need to fill this"
client_secret="You need to fill this"
user_agent="You need to fill this"
username="You need to fill this"
password="You need to fill this"
| 34.4
| 37
| 0.773256
| 33
| 172
| 3.939394
| 0.363636
| 0.269231
| 0.346154
| 0.5
| 0.653846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 172
| 5
| 38
| 34.4
| 0.878378
| 0
| 0
| 0
| 0
| 0
| 0.606936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
a630e56e61aedf6ad37589cc589db548c00267d0
| 655
|
py
|
Python
|
ex009.py
|
erikamaylim/Python-CursoemVideo
|
5a6809818c4c55a02ec52379d95f3d20c833df2e
|
[
"MIT"
] | null | null | null |
ex009.py
|
erikamaylim/Python-CursoemVideo
|
5a6809818c4c55a02ec52379d95f3d20c833df2e
|
[
"MIT"
] | null | null | null |
ex009.py
|
erikamaylim/Python-CursoemVideo
|
5a6809818c4c55a02ec52379d95f3d20c833df2e
|
[
"MIT"
] | null | null | null |
"""Faça um programa que leia um número Inteiro qualquer e mostre na tela a sua tabuada."""
n = int(input('Digite um número inteiro para ver sua tabuada: '))
#print(' {} \n {} \n {} \n {} \n {} \n {} \n {} \n {} \n {} \n {}'.format(n * 1, n * 2, n * 3, n * 4, n * 5, n * 6, n * 7, n * 8, n * 9, n * 10))
print('-'*12)
print(f'{n} x {1:2} = {n*1:3}')
print(f'{n} x {2:2} = {n*2:3}')
print(f'{n} x {3:2} = {n*3:3}')
print(f'{n} x {4:2} = {n*4:3}')
print(f'{n} x {5:2} = {n*5:3}')
print(f'{n} x {6:2} = {n*6:3}')
print(f'{n} x {7:2} = {n*7:3}')
print(f'{n} x {8:2} = {n*8:3}')
print(f'{n} x {9:2} = {n*9:3}')
print(f'{n} x {10:2} = {n*10:3}')
print('-'*12)
| 36.388889
| 145
| 0.461069
| 152
| 655
| 1.986842
| 0.236842
| 0.072848
| 0.231788
| 0.264901
| 0.298013
| 0.029801
| 0.029801
| 0.029801
| 0
| 0
| 0
| 0.108571
| 0.198473
| 655
| 17
| 146
| 38.529412
| 0.466667
| 0.349618
| 0
| 0.153846
| 0
| 0
| 0.622912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.923077
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
a655dd1a912baf0508a7fea94b9eb78e4766023f
| 133
|
py
|
Python
|
AutoPreProcessing/__init__.py
|
bhagatsajan0073/AutoPreProcessing
|
918e094c5887108a83a536b4d571f6e3505aeb95
|
[
"MIT"
] | null | null | null |
AutoPreProcessing/__init__.py
|
bhagatsajan0073/AutoPreProcessing
|
918e094c5887108a83a536b4d571f6e3505aeb95
|
[
"MIT"
] | null | null | null |
AutoPreProcessing/__init__.py
|
bhagatsajan0073/AutoPreProcessing
|
918e094c5887108a83a536b4d571f6e3505aeb95
|
[
"MIT"
] | null | null | null |
from AutoPreProcessing.EDA import EDA
from AutoPreProcessing.WOE_IV import WOE
from AutoPreProcessing.FeatureType import FeatureType
| 33.25
| 53
| 0.887218
| 16
| 133
| 7.3125
| 0.4375
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090226
| 133
| 3
| 54
| 44.333333
| 0.966942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a67230e2e4efde3a7789e02fb3ce8bad462f7e49
| 16,797
|
py
|
Python
|
Heaps.py
|
ivanbgd/Heap-Data-Structure
|
f759f30f0f81f4e52fae7b79062f66520e8088d0
|
[
"MIT"
] | null | null | null |
Heaps.py
|
ivanbgd/Heap-Data-Structure
|
f759f30f0f81f4e52fae7b79062f66520e8088d0
|
[
"MIT"
] | null | null | null |
Heaps.py
|
ivanbgd/Heap-Data-Structure
|
f759f30f0f81f4e52fae7b79062f66520e8088d0
|
[
"MIT"
] | null | null | null |
"""
Heaps are Complete Binary Trees.
The Complete Binary Max-Heap is implemented as an array, which is both fast and memory efficient. It's also easy to code.
The code uses 1-based indexing, not 0-based.
That's why it will ignore the element at position i == 0, and will have actual size larger by one.
Priority Queues are usually implemented as Heaps.
"""
class Heap(object):
"""The Complete Binary Heap"""
def __init__(self, maxSize):
"""The Complete Binary Heap
Instantiates an array of size maxSize that holds the elements of the heap.
Input: maxSize
Contains property named size that holds the current size of the heap, which is at most equal to maxSize.
"""
self.H = [None] * (maxSize + 1) # H is the array (a Python list) that stores the heap. Its first element, at index 0, is ignored.
# H[1 . . . maxSize] is an array of length maxSize where the heap occupies the first size elements.
self.size = 0 # size is the current size of the heap. Its first element, at index 0, is ignored.
self.maxSize = maxSize # maxSize is the maximum number of elements in the heap. Its first element, at index 0, is ignored.
def parent(self, i):
return i // 2
def leftChild(self, i):
return 2*i
def rightChild(self, i):
return 2*i + 1
def insert(self, val):
"""Inserts a new element with value val into the heap, if there is space.
Input: val
O(log n)
"""
if self.size == self.maxSize:
raise Exception("The heap is full! Cannot insert the element with value {}.".format(val))
self.size += 1
self.H[self.size] = val
self.siftUp(self.size)
def getHeap(self):
"""Returns the array H[1, size] that represents the heap."""
return self.H[1 : self.size + 1]
def getSize(self):
"""Returns the current size of the heap."""
return self.size
def getMaxSize(self):
"""Returns the maximum size of the heap."""
return self.maxSize
def buildHeap(self, A, n):
"""Creates a heap from the array A with length n.
Inputs: A, n
Its time complexity is actually O(2n), which is O(n), but it uses additional space for storing the heap, O(n).
"""
if n > self.maxSize:
raise Exception("Length of array A ({}) must not exceed maxSize of the heap ({})!".format(n, self.maxSize))
self.size = n # n = len(A)
self.H = [None] + A
#self.H[1:] = A # It's the same.
for i in range(n//2, 0, -1):
self.siftDown(i)
def printHeap(self):
"""Prints the heap as a complete binary tree.
Prints all nodes followed by their children.
"""
result = ""
for i in range(1, self.size + 1):
result += str(self.H[i]) + ": "
if self.leftChild(i) <= self.size:
result += str(self.H[self.leftChild(i)]) + " "
else:
result += "* "
if self.rightChild(i) <= self.size:
result += str(self.H[self.rightChild(i)])
else:
result += "*"
result += "\n"
print(result)
def __str__(self):
"""Prints the heap as a complete binary tree.
Uses Breadth First Search.
"""
from collections import deque
import math
result = ""
if self.size > 0:
maxLevel = int(math.log(self.size, 2))
queue = deque([1]) # Index of root node. That's the element with the highest value(priority) in case of MaxHeap, or with the lowest value(priority) in case of MinHeap.
else:
maxLevel = 1
queue = []
bounds = [2**j - 1 for j in range(1, maxLevel + 1)]
while len(queue):
i = queue.popleft()
result += str(self.H[i]) + " "
if self.leftChild(i) <= self.size:
queue.append(self.leftChild(i))
if self.rightChild(i) <= self.size:
queue.append(self.rightChild(i))
if i % 2:
result += "\t" # This is a complete binary tree!
if i in bounds:
result += "\n"
return result
class MaxHeap(Heap):
"""The Complete Binary Max-Heap"""
def __init__(self, maxSize):
"""The Complete Binary Max-Heap
Instantiates an array of size maxSize that holds the elements of the heap.
Input: maxSize
Contains property named size that holds the current size of the heap, which is at most equal to maxSize.
"""
return super(MaxHeap, self).__init__(maxSize)
def siftUp(self, i):
"""Sifts the element with index i up until it finds its place.
Used internally (privately) in the class.
O(log n)
"""
while i > 1 and self.H[self.parent(i)] < self.H[i]: # Root is at i == 1.
self.H[self.parent(i)], self.H[i] = self.H[i], self.H[self.parent(i)]
i = self.parent(i)
def siftDown(self, i):
"""Sifts the element with index i down until it finds its place.
Used internally (privately) in the class.
O(log n)
"""
while i <= self.size:
maxIndex = i
l = self.leftChild(i)
if l <= self.size and self.H[l] > self.H[maxIndex]:
maxIndex = l
r = self.rightChild(i)
if r <= self.size and self.H[r] > self.H[maxIndex]:
maxIndex = r
if i != maxIndex:
self.H[i], self.H[maxIndex] = self.H[maxIndex], self.H[i]
i = maxIndex
else:
break
def siftDownRecursive(self, i):
"""Sifts the element with index i down until it finds its place.
Recursive implementation.
Used internally (privately) in the class.
O(log n)
"""
maxIndex = i
l = self.leftChild(i)
if l <= self.size and self.H[l] > self.H[maxIndex]:
maxIndex = l
r = self.rightChild(i)
if r <= self.size and self.H[r] > self.H[maxIndex]:
maxIndex = r
if i != maxIndex:
self.H[i], self.H[maxIndex] = self.H[maxIndex], self.H[i]
self.siftDownRecursive(maxIndex)
def extractMax(self):
"""Returns the element with the highest value (priority).
O(log n)"""
if self.size >= 1:
result = self.H[1]
self.H[1] = self.H[self.size]
self.size -= 1
self.siftDown(1)
return result
else:
raise Exception("The heap is empty! Cannot return the element with the highest value.")
def remove(self, i):
"""Removes the element with index i.
Input: i
O(log n)"""
if i <= self.size:
self.H[i] = float("inf")
self.siftUp(i)
self.extractMax()
else:
raise Exception("No element with index {}!".format(i))
def changeValue(self, i, val):
"""Changes the value of the element with index i.
Inputs: i, val
O(log n)"""
oldVal = self.H[i]
self.H[i] = val
if val > oldVal:
self.siftUp(i)
else:
self.siftDown(i)
def heapSortNonDescending(self, A, n):
"""Creates a heap from the array A with length n.
Then it sorts the array A in non-descending order in-place.
Inputs: A, n
It doesn't return A, because it sorts it in-place.
Its time complexity is O(n log n), but it uses additional space for storing the heap, O(n).
"""
self.buildHeap(A, n)
for _ in range(n):
self.H[1], self.H[self.size] = self.H[self.size], self.H[1]
A[self.size-1] = self.H[self.size]
self.size -= 1
self.siftDown(1)
def heapSortNonAscending(self, A, n):
"""Creates a heap from the array A with length n.
Then it sorts the array A in non-ascending order in-place.
Inputs: A, n
It doesn't return A, because it sorts it in-place.
Its time complexity is O(n log n), but it uses additional space for storing the heap, O(n).
"""
self.buildHeap(A, n)
for i in range(n):
self.H[1], self.H[self.size] = self.H[self.size], self.H[1]
A[i] = self.H[self.size]
self.size -= 1
self.siftDown(1)
def partialSortingMax(self, A, n, k):
"""Inputs: Array A[1, n]; n, which is len(A); Integer k, such that 1 <= k <= n
Output: The last (maximal) k elements of a sorted version of A.
O(n), if k <= O(n/log n)
"""
assert 1 <= k <= n
self.buildHeap(A, n)
result = []
for i in range(k):
result.append(self.extractMax())
return result
class MinHeap(Heap):
"""The Complete Binary Min-Heap"""
def __init__(self, maxSize):
"""The Complete Binary Min-Heap
Instantiates an array of size maxSize that holds the elements of the heap.
Input: maxSize
Contains property named size that holds the current size of the heap, which is at most equal to maxSize.
"""
return super(MinHeap, self).__init__(maxSize)
def siftUp(self, i):
"""Sifts the element with index i up until it finds its place.
Used internally (privately) in the class.
O(log n)
"""
while i > 1 and self.H[self.parent(i)] > self.H[i]: # Root is at i == 1.
self.H[self.parent(i)], self.H[i] = self.H[i], self.H[self.parent(i)]
i = self.parent(i)
def siftDown(self, i):
"""Sifts the element with index i down until it finds its place.
Used internally (privately) in the class.
O(log n)
"""
while i <= self.size:
maxIndex = i
l = self.leftChild(i)
if l <= self.size and self.H[l] < self.H[maxIndex]:
maxIndex = l
r = self.rightChild(i)
if r <= self.size and self.H[r] < self.H[maxIndex]:
maxIndex = r
if i != maxIndex:
self.H[i], self.H[maxIndex] = self.H[maxIndex], self.H[i]
i = maxIndex
else:
break
def siftDownRecursive(self, i):
"""Sifts the element with index i down until it finds its place.
Recursive implementation.
Used internally (privately) in the class.
O(log n)
"""
maxIndex = i
l = self.leftChild(i)
if l <= self.size and self.H[l] < self.H[maxIndex]:
maxIndex = l
r = self.rightChild(i)
if r <= self.size and self.H[r] < self.H[maxIndex]:
maxIndex = r
if i != maxIndex:
self.H[i], self.H[maxIndex] = self.H[maxIndex], self.H[i]
self.siftDownRecursive(maxIndex)
def extractMin(self):
"""Returns the element with the lowest value (priority).
O(log n)"""
if self.size >= 1:
result = self.H[1]
self.H[1] = self.H[self.size]
self.size -= 1
self.siftDown(1)
return result
else:
raise Exception("The heap is empty! Cannot return the element with the lowest value.")
def remove(self, i):
"""Removes the element with index i.
Input: i
O(log n)"""
if i <= self.size:
self.H[i] = float("-inf")
self.siftUp(i)
self.extractMin()
else:
raise Exception("No element with index {}!".format(i))
def changeValue(self, i, val):
"""Changes the value of the element with index i.
Inputs: i, val
O(log n)"""
oldVal = self.H[i]
self.H[i] = val
if val > oldVal:
self.siftDown(i)
else:
self.siftUp(i)
def heapSortNonDescending(self, A, n):
"""Creates a heap from the array A with length n.
Then it sorts the array A in non-descending order in-place.
Inputs: A, n
It doesn't return A, because it sorts it in-place.
Its time complexity is O(n log n), but it uses additional space for storing the heap, O(n).
"""
self.buildHeap(A, n)
for i in range(n):
self.H[1], self.H[self.size] = self.H[self.size], self.H[1]
A[i] = self.H[self.size]
self.size -= 1
self.siftDown(1)
def heapSortNonAscending(self, A, n):
"""Creates a heap from the array A with length n.
Then it sorts the array A in non-ascending order in-place.
Inputs: A, n
It doesn't return A, because it sorts it in-place.
Its time complexity is O(n log n), but it uses additional space for storing the heap, O(n).
"""
self.buildHeap(A, n)
for _ in range(n):
self.H[1], self.H[self.size] = self.H[self.size], self.H[1]
A[self.size-1] = self.H[self.size]
self.size -= 1
self.siftDown(1)
def partialSortingMin(self, A, n, k):
"""Inputs: Array A[1, n]; n, which is len(A); Integer k, such that 1 <= k <= n
Output: The last (minimal) k elements of a sorted version of A.
O(n), if k <= O(n/log n)
"""
assert 1 <= k <= n
self.buildHeap(A, n)
result = []
for i in range(k):
result.append(self.extractMin())
return result
class MaxPriorityQueue(MaxHeap):
"""Priority Queue implemented as The Complete Binary Max-Heap"""
def __init__(self, maxSize):
"""Priority Queue implemented as The Complete Binary Max-Heap
Instantiates an array of size maxSize that holds the elements of the priority queue.
Inherits from class MaxHeap, and is practically identical to it.
Input: maxSize
Contains property named size that holds the current size of the priority queue, which is at most equal to maxSize.
"""
return super(MaxPriorityQueue, self).__init__(maxSize)
def insert(self, p): # p stands for priority. That's the value of an element.
"""Inserts a new element with priority p into the heap, if there is space.
Input: p
O(log n)"""
return super(MaxPriorityQueue, self).insert(p)
def changePriority(self, i, p):
"""Changes the priority p of the element with index i.
Inputs: i, p
O(log n)"""
return super(MaxPriorityQueue, self).changeValue(i, p)
def getPriorityQueue(self):
"""Returns the array H[1, size] that represents the priority queue."""
return super(MaxPriorityQueue, self).getHeap()
class MinPriorityQueue(MinHeap):
"""Priority Queue implemented as The Complete Binary Min-Heap"""
def __init__(self, maxSize):
"""Priority Queue implemented as The Complete Binary Min-Heap
Instantiates an array of size maxSize that holds the elements of the priority queue.
Inherits from class MinHeap, and is practically identical to it.
Input: maxSize
Contains property named size that holds the current size of the priority queue, which is at most equal to maxSize.
"""
return super(MinPriorityQueue, self).__init__(maxSize)
def insert(self, p): # p stands for priority. That's the value of an element.
"""Inserts a new element with priority p into the heap, if there is space.
Input: p
O(log n)"""
return super(MinPriorityQueue, self).insert(p)
def changePriority(self, i, p):
"""Changes the priority p of the element with index i.
Inputs: i, p
O(log n)"""
return super(MinPriorityQueue, self).changeValue(i, p)
def getPriorityQueue(self):
"""Returns the array H[1, size] that represents the priority queue."""
return super(MinPriorityQueue, self).getHeap()
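# Minimal usage sketch appended for illustration (not part of the original file):
if __name__ == "__main__":
    h = MaxHeap(10)
    for x in [4, 1, 7, 3, 9]:
        h.insert(x)
    print(h.getHeap())       # [9, 7, 4, 1, 3] -- array layout of the max-heap
    print(h.extractMax())    # 9
    data = [5, 2, 8, 6, 1]
    MaxHeap(len(data)).heapSortNonDescending(data, len(data))
    print(data)              # [1, 2, 5, 6, 8]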
| 38.349315
| 183
| 0.538429
| 2,262
| 16,797
| 3.979664
| 0.099027
| 0.047212
| 0.022995
| 0.021662
| 0.806376
| 0.787936
| 0.763941
| 0.754721
| 0.734948
| 0.706954
| 0
| 0.006769
| 0.357981
| 16,797
| 437
| 184
| 38.437071
| 0.827986
| 0.365065
| 0
| 0.666667
| 0
| 0
| 0.036193
| 0
| 0
| 0
| 0
| 0
| 0.008439
| 1
| 0.164557
| false
| 0
| 0.008439
| 0.012658
| 0.2827
| 0.008439
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a6a6cbd6f1c08a91938301977480c4093cc48a23
| 191
|
py
|
Python
|
irisreader/data/__init__.py
|
chuwyler/IRISreader
|
aee39321751ba273c5f0d172b3b653656872f605
|
[
"MIT"
] | null | null | null |
irisreader/data/__init__.py
|
chuwyler/IRISreader
|
aee39321751ba273c5f0d172b3b653656872f605
|
[
"MIT"
] | 1
|
2019-07-31T14:35:28.000Z
|
2019-12-06T10:54:49.000Z
|
irisreader/data/__init__.py
|
chuwyler/IRISreader
|
aee39321751ba273c5f0d172b3b653656872f605
|
[
"MIT"
] | 1
|
2019-02-13T13:49:13.000Z
|
2019-02-13T13:49:13.000Z
|
from irisreader.data.sample import sample_sji, sample_raster, sample_observation
from irisreader.data.mg2k_centroids import get_mg2k_centroids, assign_mg2k_centroids, get_mg2k_centroid_table
| 63.666667
| 109
| 0.895288
| 27
| 191
| 5.925926
| 0.518519
| 0.24375
| 0.225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022346
| 0.062827
| 191
| 2
| 110
| 95.5
| 0.871508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a6da382f7d70721f4d6076ff81c27fb9ad059d57
| 181
|
py
|
Python
|
convokit/forecaster/__init__.py
|
sophieball/Cornell-Conversational-Analysis-Toolkit
|
da65358baffc34a518114be2d94f1748f8e01240
|
[
"MIT"
] | 371
|
2016-07-19T22:10:13.000Z
|
2022-03-28T08:04:32.000Z
|
convokit/forecaster/__init__.py
|
sophieball/Cornell-Conversational-Analysis-Toolkit
|
da65358baffc34a518114be2d94f1748f8e01240
|
[
"MIT"
] | 92
|
2017-07-25T22:04:11.000Z
|
2022-03-29T13:46:07.000Z
|
convokit/forecaster/__init__.py
|
sophieball/Cornell-Conversational-Analysis-Toolkit
|
da65358baffc34a518114be2d94f1748f8e01240
|
[
"MIT"
] | 105
|
2016-07-04T15:04:53.000Z
|
2022-03-30T01:36:38.000Z
|
from .forecaster import *
from .forecasterModel import *
from .cumulativeBoW import *
import sys
if 'torch' in sys.modules:
from .CRAFTModel import *
from .CRAFT import *
| 18.1
| 30
| 0.723757
| 22
| 181
| 5.954545
| 0.545455
| 0.229008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198895
| 181
| 9
| 31
| 20.111111
| 0.903448
| 0
| 0
| 0
| 0
| 0
| 0.027933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5b3d88365d278dbedfab13247fd25da0c5e7c969
| 30
|
py
|
Python
|
foo_bar/foo.py
|
JPBeukes/foo_bar_pkg
|
7127b954b3f6ce66dacb4bf7221a7998f151728a
|
[
"MIT"
] | null | null | null |
foo_bar/foo.py
|
JPBeukes/foo_bar_pkg
|
7127b954b3f6ce66dacb4bf7221a7998f151728a
|
[
"MIT"
] | null | null | null |
foo_bar/foo.py
|
JPBeukes/foo_bar_pkg
|
7127b954b3f6ce66dacb4bf7221a7998f151728a
|
[
"MIT"
] | null | null | null |
def foo_print():
print('foo')
| 15
| 16
| 0.666667
| 5
| 30
| 3.8
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 2
| 17
| 15
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
5b5b5d054432cf056f1c118887ec9dd5223aa555
| 30
|
py
|
Python
|
hashdist/__init__.py
|
jcftang/hashdist
|
231d3ade086c9f176e0bf19a9877d31703a50295
|
[
"BSD-3-Clause"
] | null | null | null |
hashdist/__init__.py
|
jcftang/hashdist
|
231d3ade086c9f176e0bf19a9877d31703a50295
|
[
"BSD-3-Clause"
] | null | null | null |
hashdist/__init__.py
|
jcftang/hashdist
|
231d3ade086c9f176e0bf19a9877d31703a50295
|
[
"BSD-3-Clause"
] | null | null | null |
from .spec.hook_api import *
| 10
| 28
| 0.733333
| 5
| 30
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 2
| 29
| 15
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5b685933b90e0d9b6443b7c6ccfd646d4eb5b858
| 25,154
|
py
|
Python
|
src/text_processing/topic_modeling_visualizations.py
|
MariyaIvanina/usaid_data_processing
|
a57d51f83f4ffbe36368589c6ccb1238d6390699
|
[
"Apache-2.0"
] | 3
|
2021-09-20T10:07:07.000Z
|
2021-12-11T13:33:40.000Z
|
src/text_processing/topic_modeling_visualizations.py
|
MariyaIvanina/usaid_data_processing
|
a57d51f83f4ffbe36368589c6ccb1238d6390699
|
[
"Apache-2.0"
] | null | null | null |
src/text_processing/topic_modeling_visualizations.py
|
MariyaIvanina/usaid_data_processing
|
a57d51f83f4ffbe36368589c6ccb1238d6390699
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import seaborn as sns
import matplotlib.pylab as plt
import math
import os
import pandas as pd
import re
def search_year(year, years):
for idx, _year in enumerate(years):
if idx == len(years) -1:
continue
if year >= _year and year < years[idx + 1]:
return idx
return -1
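# Illustrative note (not in the original file): search_year bins a year into
# half-open intervals, e.g. search_year(2010, [2008, 2012, 2016, 2020]) -> 0
# (the [2008, 2012) bucket); years outside the covered range return -1.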
def save_plots_topics(folder, articles_df, column_name, topic_modeler, with_sorted = False,
vmax = 800, relative_number = False, years = list(range(2008,2021)), cnt_per_plot = 25):
if not os.path.exists(folder):
os.makedirs(folder)
topic_year = np.zeros((topic_modeler.n_components, len(years)-1), dtype = int if not relative_number else float)
topic_map = {}
topic_map_by_id = {}
topic_id = 0
all_articles_by_year = np.zeros(len(years)-1, dtype=int)
for i in range(len(articles_df)):
year_ind = search_year(articles_df["year"].values[i], years)
if year_ind >= 0:
for topic in articles_df[column_name].values[i]:
if topic not in topic_map:
topic_map[topic] = topic_id
topic_map_by_id[topic_id] = topic
topic_id += 1
topic_year[topic_map[topic]][year_ind] += 1
all_articles_by_year[year_ind] += 1
if with_sorted:
result = sorted([(idx, topic_val) for idx,topic_val in enumerate(np.sum(topic_year, axis = 1))],key=lambda x: x[1], reverse = True)
else:
result = [(idx, topic_val) for idx,topic_val in enumerate(np.sum(topic_year, axis = 1))]
if relative_number:
topic_year /= all_articles_by_year
topic_year *= 100
for ind in range(math.ceil(topic_modeler.n_components/cnt_per_plot)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame(topic_year[[i for i, cnt in result[ind*cnt_per_plot:(ind+1)*cnt_per_plot]],:])
topic_year_df.index = [ topic_map_by_id[i] for i, cnt in result[ind*cnt_per_plot:(ind+1)*cnt_per_plot]]
topic_year_df.columns = [ "%d-%d"%(years[idx], years[idx+1]) for idx, year in enumerate(years) if idx != len(years) -1]
if relative_number:
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax=vmax, annot=True, fmt=".1f")
else:
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax=vmax, annot=True, fmt="d")
plt.tight_layout()
plt.savefig(os.path.join(folder,'%d-%dtopics.png'%(ind*cnt_per_plot+1, (ind+1)*cnt_per_plot)))
def save_plots_districts(folder,
big_dataset,countries = ["Nigeria/", "Malawi/", "Kenya/", "Tanzania/", "Mali/", "Zambia/", "Burkina Faso/", "Philippines/", "Bangladesh/"], with_sorted = False, image_format="eps"):
for country in countries:
country_folder = os.path.join(folder, country)
if not os.path.exists(country_folder):
os.makedirs(country_folder)
districts_dict = {}
districts_dict_interv = {}
for i in range(len(big_dataset)):
for district in big_dataset["districts"].values[i]:
if country in district:
if district not in districts_dict:
districts_dict[district] = 0
districts_dict[district] += 1
if district not in districts_dict_interv:
districts_dict_interv[district] = {"technology intervention": 0, "socioeconomic intervention": 0, "ecosystem intervention": 0}
for column in ["technology intervention", "socioeconomic intervention", "ecosystem intervention"]:
if len(big_dataset[column].values[i]) > 0:
districts_dict_interv[district][column] += 1
if with_sorted:
result = sorted([(name, (interv_val["technology intervention"], interv_val["socioeconomic intervention"], interv_val["ecosystem intervention"]),\
sum(interv_val.values())) for name,interv_val in districts_dict_interv.items()],key=lambda x: x[2], reverse = True)
else:
result = sorted([(name, (districts_dict_interv[name]["technology intervention"], districts_dict_interv[name]["socioeconomic intervention"], districts_dict_interv[name]["ecosystem intervention"]),\
cnt) for name, cnt in districts_dict.items()], key = lambda x: x[2], reverse= True)
for ind in range(math.ceil(len(districts_dict)/30)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame([val[1] for val in result[ind*30:(ind+1)*30]])
topic_year_df.index = [val[0] for val in result[ind*30:(ind+1)*30]]
topic_year_df.columns = ["Technology intervention", "Socioeconomic intervention", "Ecosystem intervention"]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax = 50, annot=True, fmt = "d")
plt.tight_layout()
plt.savefig(os.path.join(country_folder,'%d-%dinterventions.%s'%(ind*30+1, (ind+1)*30, image_format)), format=image_format)
def save_plots_districts_unique(folder, big_dataset,countries = ["Nigeria/", "Malawi/", "Kenya/", "Tanzania/", "Mali/", "Zambia/", "Burkina Faso/", "Philippines/", "Bangladesh/"], with_sorted = False):
for country in countries:
country_folder = os.path.join(folder, country)
if not os.path.exists(country_folder):
os.makedirs(country_folder)
districts_dict = {}
districts_dict_interv = {}
for i in range(len(big_dataset)):
for district in big_dataset["districts"].values[i]:
if country in district:
if district not in districts_dict:
districts_dict[district] = 0
districts_dict[district] += 1
if district not in districts_dict_interv:
districts_dict_interv[district] = {"technology intervention": set(), "socioeconomic intervention":set(), "ecosystem intervention": set()}
for column in ["technology intervention", "socioeconomic intervention", "ecosystem intervention"]:
for val in big_dataset[column].values[i]:
districts_dict_interv[district][column].add(val)
if with_sorted:
result = sorted([(name, (len(interv_val["technology intervention"]), len(interv_val["socioeconomic intervention"]), len(interv_val["ecosystem intervention"])),\
sum([len(interv_val[v]) for v in interv_val])) for name,interv_val in districts_dict_interv.items()],key=lambda x: x[2], reverse = True)
else:
result = sorted([(name, (len(districts_dict_interv[name]["technology intervention"]), len(districts_dict_interv[name]["socioeconomic intervention"]), len(districts_dict_interv[name]["ecosystem intervention"])),\
cnt) for name, cnt in districts_dict.items()], key = lambda x: x[2], reverse= True)
for ind in range(math.ceil(len(districts_dict)/30)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame([val[1] for val in result[ind*30:(ind+1)*30]])
topic_year_df.index = [val[0] for val in result[ind*30:(ind+1)*30]]
topic_year_df.columns = ["Technology intervention", "Socioeconomic intervention", "Ecosystem intervention"]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax = 50, annot=True, fmt = "d")
plt.tight_layout()
plt.savefig(os.path.join(country_folder,'%d-%dinterventions.png'%(ind*30+1, (ind+1)*30)))
def code_sequence(values):
return 4*int("Technology intervention" in values) + 2*int("Socioeconomic intervention" in values) + int("Ecosystem intervention" in values)
def decode_sequence(num):
values = []
for idx, col in enumerate(["Eco", "Socio", "Tech"]):
if num & 2**idx:
values.append(col)
return list(sorted(values))
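# Illustrative note (not in the original file): the intervention set is packed
# into three bits (Technology=4, Socioeconomic=2, Ecosystem=1), so for example
#   code_sequence(["Technology intervention", "Ecosystem intervention"]) == 5
#   decode_sequence(5) == ["Eco", "Tech"]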
def save_plots_districts_with_overlapping(folder, big_dataset,
countries = ["Nigeria/", "Malawi/", "Kenya/", "Tanzania/", "Mali/", "Zambia/", "Burkina Faso/", "Philippines/", "Bangladesh/"], with_sorted = False, image_format="eps"):
for country in countries:
country_folder = os.path.join(folder, country)
if not os.path.exists(country_folder):
os.makedirs(country_folder)
districts_dict = {}
districts_dict_interv = {}
for i in range(len(big_dataset)):
for district in big_dataset["districts"].values[i]:
if country in district:
if district not in districts_dict:
districts_dict[district] = 0
districts_dict[district] += 1
if district not in districts_dict_interv:
districts_dict_interv[district] = {}
for i in range(1,8):
districts_dict_interv[district][i] = 0
if code_sequence(big_dataset["intervention_labels"].values[i]) > 0:
districts_dict_interv[district][code_sequence(big_dataset["intervention_labels"].values[i])] += 1
if with_sorted:
result = sorted([(name, tuple([interv_val[w] for w in [4,2,1,6,3,5,7] ]),\
sum(interv_val.values())) for name,interv_val in districts_dict_interv.items()],key=lambda x: x[2], reverse = True)
else:
result = sorted([(name, tuple([interv_val[w] for w in [4,2,1,6,3,5,7] ]),\
cnt) for name, cnt in districts_dict.items()], key = lambda x: x[2], reverse= True)
for ind in range(math.ceil(len(districts_dict)/30)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame([val[1] for val in result[ind*30:(ind+1)*30]])
topic_year_df.index = [val[0] for val in result[ind*30:(ind+1)*30]]
topic_year_df.columns = ["; ".join(decode_sequence(w))for w in [4,2,1,6,3,5,7]]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax = 50, annot=True, fmt = "d")
plt.tight_layout()
plt.savefig(os.path.join(country_folder,'%d-%dinterventions.%s'%(ind*30+1, (ind+1)*30, image_format)), format=image_format)
def save_plots_topics_interv(folder, articles_df, column_name, with_sorted = True, topic_numbers=125, image_format="eps"):
if not os.path.exists(folder):
os.makedirs(folder)
topic_year_names = {}
topic_year = np.zeros(topic_numbers, dtype = int)
topics_per_page = int(topic_numbers/5)
for i in range(len(articles_df)):
for topic in articles_df[column_name].values[i]:
topic_num = int(re.search("#(\d+)", topic).group(1)) -1
topic_year_names[topic_num] = topic
topic_year[topic_num] += 1
if with_sorted:
result = sorted([(idx, topic_val) for idx,topic_val in enumerate(topic_year)],key=lambda x: x[1], reverse = True)
else:
result = [(idx, topic_val) for idx,topic_val in enumerate(topic_year)]
for ind in range(5):
plt.figure(figsize=(6, 6), dpi=150)
topic_year_df = pd.DataFrame(topic_year[[i for i,cnt in result[ind*topics_per_page:(ind+1)*topics_per_page]]])
topic_year_df.index = [ topic_year_names[i] for i,cnt in result[ind*topics_per_page:(ind+1)*topics_per_page]]
topic_year_df.columns = ["All"]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", annot=True, fmt = "d", vmax = 50)
plt.tight_layout()
plt.savefig(os.path.join(folder,'%d-%dinterventions.%s'%(ind*topics_per_page+1, (ind+1)*topics_per_page, image_format)), format=image_format)
def save_plots_topics_cooccur(folder, articles_df, topic_num, column_name = "topics", with_sorted = True):
if not os.path.exists(folder):
os.makedirs(folder)
topic_year = np.zeros(150, dtype = int)
for i in range(len(articles_df)):
should_be_used = False
for topic in articles_df[column_name].values[i]:
_topic_num = int(re.search("#(\d+)", topic).group(1))
if _topic_num == topic_num:
should_be_used = True
if should_be_used:
for topic in articles_df[column_name].values[i]:
_topic_num = int(re.search("#(\d+)", topic).group(1)) -1
topic_year[_topic_num] += 1
if with_sorted:
result = sorted([(idx, topic_val) for idx,topic_val in enumerate(topic_year)],key=lambda x: x[1], reverse = True)
else:
result = [(idx, topic_val) for idx,topic_val in enumerate(topic_year)]
for ind in range(5):
plt.figure(figsize=(6, 6), dpi=150)
topic_year_df = pd.DataFrame(topic_year[[i for i,cnt in result[ind*30:(ind+1)*30]]])
topic_year_df.index = [ topic_year_names[i] for i,cnt in result[ind*30:(ind+1)*30]]
topic_year_df.columns = ["All"]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", annot=True, fmt = "d", vmax = 50)
plt.title("Coocurance of topics with the topic " + topic_year_names[topic_num-1])
plt.tight_layout()
plt.savefig(os.path.join(folder,'%d-%dtopics.png'%(ind*30+1, (ind+1)*30)))
def save_plots_cooccur_interv(folder, big_dataset, topic_num, with_sorted = False, save_as_eps=False):
if not os.path.exists(folder):
os.makedirs(folder)
districts_dict = {}
districts_dict_interv = {}
for i in range(len(big_dataset)):
should_be_used = False
for topic in big_dataset["topics"].values[i]:
_topic_num = int(re.search("#(\d+)", topic).group(1))
if _topic_num == topic_num:
should_be_used = True
if should_be_used:
for topic in big_dataset["topics"].values[i]:
if topic not in districts_dict:
districts_dict[topic] = 0
districts_dict[topic] += 1
if topic not in districts_dict_interv:
districts_dict_interv[topic] = {"all":0, "technology intervention": 0, "socioeconomic intervention": 0, "ecosystem intervention": 0}
for interv in big_dataset["Intervention labels"].values[i].split(";"):
districts_dict_interv[topic][interv] += 1
districts_dict_interv[topic]["all"] += 1
if with_sorted:
result = sorted([(name, (interv_val["all"], interv_val["technology intervention"], interv_val["socioeconomic intervention"], interv_val["ecosystem intervention"]),\
interv_val["all"]) for name,interv_val in districts_dict_interv.items()],key=lambda x: x[2], reverse = True)
else:
result = sorted([(name, (districts_dict_interv[name]["all"],districts_dict_interv[name]["technology intervention"], districts_dict_interv[name]["socioeconomic intervention"], districts_dict_interv[name]["ecosystem intervention"]),\
cnt) for name, cnt in districts_dict.items()], key = lambda x: x[2], reverse= True)
for ind in range(math.ceil(len(districts_dict)/30)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame([val[1] for val in result[ind*30:(ind+1)*30]])
topic_year_df.index = [val[0] for val in result[ind*30:(ind+1)*30]]
topic_year_df.columns = ["All","Technology interv.", "Socioeconomic interv.", "Ecosystem interv."]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax = 50, annot=True, fmt = "d")
plt.title("Coocurance of topics with the topic " + topic_year_names[topic_num-1])
plt.tight_layout()
plt.savefig(os.path.join(folder,'%d-%dinterventions.png'%(ind*30+1, (ind+1)*30)))
def save_plots_cooccur_interv_relative(folder, big_dataset, topic_num, with_sorted = False):
if not os.path.exists(folder):
os.makedirs(folder)
districts_dict = {}
districts_dict_interv = {}
for i in range(len(big_dataset)):
should_be_used = False
for topic in big_dataset["topics"].values[i]:
_topic_num = int(re.search("#(\d+)", topic).group(1))
if _topic_num == topic_num:
should_be_used = True
if should_be_used:
for topic in big_dataset["topics"].values[i]:
if topic not in districts_dict:
districts_dict[topic] = 0
districts_dict[topic] += 1
if topic not in districts_dict_interv:
districts_dict_interv[topic] = {"all":0, "technology intervention": 0, "socioeconomic intervention": 0, "ecosystem intervention": 0}
for interv in big_dataset["Intervention labels"].values[i].split(";"):
districts_dict_interv[topic][interv] += 1
districts_dict_interv[topic]["all"] += 1
if with_sorted:
result = sorted([(name, (interv_val["technology intervention"]*100/interv_val["all"], interv_val["socioeconomic intervention"]*100/interv_val["all"], interv_val["ecosystem intervention"]*100/interv_val["all"]),\
interv_val["all"]) for name,interv_val in districts_dict_interv.items()],key=lambda x: x[2], reverse = True)
else:
result = sorted([(name, (districts_dict_interv[name]["technology intervention"]*100/districts_dict_interv[name]["all"], districts_dict_interv[name]["socioeconomic intervention"]*100/districts_dict_interv[name]["all"], districts_dict_interv[name]["ecosystem intervention"]*100/districts_dict_interv[name]["all"]),\
cnt) for name, cnt in districts_dict.items()], key = lambda x: x[2], reverse= True)
for ind in range(math.ceil(len(districts_dict)/30)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame([val[1] for val in result[ind*30:(ind+1)*30]])
topic_year_df.index = [val[0] for val in result[ind*30:(ind+1)*30]]
topic_year_df.columns = ["Technology interv.", "Socioeconomic interv.", "Ecosystem interv."]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax = 80, annot=True, fmt = "0.1f")
plt.title("Coocurance of topics with the topic " + topic_year_names[topic_num-1])
plt.tight_layout()
plt.savefig(os.path.join(folder,'%d-%dinterventions.png'%(ind*30+1, (ind+1)*30)))
def save_plots_interventions_districts(folder, big_dataset, column_name= "Intervention labels", with_sorted = False, image_format="eps"):
if not os.path.exists(folder):
os.makedirs(folder)
districts_dict = {}
districts_dict_interv = {}
for i in range(len(big_dataset)):
for topic in big_dataset["topics"].values[i]:
if topic not in districts_dict:
districts_dict[topic] = 0
districts_dict[topic] += 1
if topic not in districts_dict_interv:
districts_dict_interv[topic] = {"technology intervention": 0, "socioeconomic intervention": 0, "ecosystem intervention": 0}
for interv in big_dataset[column_name].values[i]:
interv = interv.lower()
if interv in districts_dict_interv[topic]:
districts_dict_interv[topic][interv] += 1
if with_sorted:
result = sorted([(name, (interv_val["technology intervention"], interv_val["socioeconomic intervention"], interv_val["ecosystem intervention"]),\
sum(interv_val.values())) for name,interv_val in districts_dict_interv.items()],key=lambda x: x[2], reverse = True)
else:
result = sorted([(name, (districts_dict_interv[name]["technology intervention"], districts_dict_interv[name]["socioeconomic intervention"], districts_dict_interv[name]["ecosystem intervention"]),\
cnt) for name, cnt in districts_dict.items()], key = lambda x: x[2], reverse= True)
for ind in range(math.ceil(len(districts_dict)/25)):
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame([val[1] for val in result[ind*25:(ind+1)*25]])
topic_year_df.index = [val[0] for val in result[ind*25:(ind+1)*25]]
topic_year_df.columns = ["Technology intervention", "Socioeconomic intervention", "Ecosystem intervention"]
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax = 50000, annot=True, fmt = "d")
plt.tight_layout()
plt.savefig(os.path.join(folder,'%d-%dinterventions.%s'%(ind*25+1, (ind+1)*25, image_format)), format=image_format)
def save_population_vs_geo_regions(folder, articles_df, vmax = 800, relative_number = False):
if not os.path.exists(folder):
os.makedirs(folder)
geo_regions = list(set([geo_reg for geo_region in articles_df["geo_regions"] for geo_reg in geo_region]))
geo_regions_vs_population = np.zeros((len(geo_regions), 3), dtype = int if not relative_number else float)
all_articles_by_type = np.zeros(len(geo_regions), dtype=int)
for i in range(len(articles_df)):
for geo_region in articles_df["geo_regions"].values[i]:
if geo_region in geo_regions:
geo_ind = geo_regions.index(geo_region)
if "Small scale farmers" in articles_df["population tags"].values[i]:
geo_regions_vs_population[geo_ind][0] += 1
elif "Farmers" in articles_df["population tags"].values[i]:
geo_regions_vs_population[geo_ind][1] += 1
else:
geo_regions_vs_population[geo_ind][2] += 1
all_articles_by_type[geo_ind] += 1
result = [(idx, cnt_val) for idx,cnt_val in enumerate(np.sum(geo_regions_vs_population, axis = 1))]
if relative_number:
geo_regions_vs_population = geo_regions_vs_population.T
geo_regions_vs_population /= all_articles_by_type
geo_regions_vs_population *= 100
geo_regions_vs_population = geo_regions_vs_population.T
plt.figure(figsize=(15, 6), dpi=150)
topic_year_df = pd.DataFrame(geo_regions_vs_population[[i for i,cnt in result],:])
topic_year_df.index = geo_regions
topic_year_df.columns = ["Small scale farmers", "Farmers", "Undefined"]
if relative_number:
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax=vmax, annot=True, fmt=".1f")
else:
ax = sns.heatmap(topic_year_df, linewidth=0.5, cmap="YlGnBu", vmin = 0, vmax=vmax, annot=True, fmt="d")
plt.tight_layout()
plt.savefig(os.path.join(folder,'plot.png'))
def run_plots():
save_plots_topics("topics_climate_relative_rearranged", subset_df, "topics", topic_modeler, with_sorted = True, vmax = 20, relative_number=True)
save_plots_topics("topics_climate_relative", subset_df, "topics", topic_modeler, with_sorted = False, vmax = 20, relative_number=True)
save_plots_topics("topics_up_to_date_125", big_dataset, "topics", topic_modeler, with_sorted = False, vmax = 3000)
save_plots_topics("topics_up_to_date_rearranged_125", big_dataset, "topics", topic_modeler, with_sorted = True, vmax = 3000)
save_plots_topics("topics_up_to_date_125_relative_number", big_dataset, "topics", topic_modeler, with_sorted = False, vmax = 15,relative_number=True)
save_plots_topics("topics_up_to_date_rearranged_125_relative_number", big_dataset, "topics", topic_modeler, with_sorted = True, vmax = 15,relative_number=True)
save_plots_topics("topics_climate_subset", subset_df, "topics_new", topic_modeler, with_sorted = False)
save_plots_topics("topics_climate_relative_subset_rearranged", subset_df, "topics_new", topic_modeler, with_sorted = True,vmax = 20, relative_number = True)
save_plots_topics("topics_climate_relative_subset", subset_df, "topics_new", topic_modeler, with_sorted = False,vmax = 20, relative_number = True)
save_plots_topics("topics_climate_rearranged_subset", subset_df, "topics_new", topic_modeler, with_sorted = True)
save_plots_districts_with_overlapping("countries_plots_with_overlapping", big_dataset, with_sorted=True)
save_plots_districts_unique("countries_plots_unique", big_dataset, with_sorted=True)
save_plots_districts("countries_plots", big_dataset, with_sorted=True)
save_plots_topics_interv("topic_interventions", temp_df, "topics", with_sorted = True)
for topic in [30, 81, 121, 140, 10, 25, 91, 112, 124, 97]:
save_plots_cooccur_interv_relative("topic_coocur_interv_relative_topic_%d"%topic, all_df, topic, with_sorted=True)
save_plots_cooccur_interv("topic_coocur_interv_topic_%d"%topic, all_df, topic, with_sorted=True)
save_plots_topics_cooccur("topic_coocur_topic_%d"%topic, all_df, topic, with_sorted=True)
save_plots_cooccur_interv("topic_coocur_weather_interv_topic_97", all_df, 97, with_sorted=True)
save_plots_topics_cooccur("topic_coocur_ICT_topic_111", all_df, 111, with_sorted = True)
save_plots_interventions_districts("intervention_labels_vs_topics", all_df, with_sorted = False)
| 61.053398
| 321
| 0.649598
| 3,443
| 25,154
| 4.506825
| 0.062736
| 0.074563
| 0.064897
| 0.028163
| 0.860927
| 0.82516
| 0.799381
| 0.776632
| 0.749243
| 0.696913
| 0
| 0.024797
| 0.22084
| 25,154
| 412
| 322
| 61.053398
| 0.766927
| 0
| 0
| 0.60274
| 0
| 0
| 0.130352
| 0.027827
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038356
| false
| 0
| 0.019178
| 0.00274
| 0.068493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5b8b0c0f0da668fd84bf8d52d2f48fbf993bd74f
| 69
|
py
|
Python
|
poetry_packaging/src/submod2/tests/test_submod2.py
|
tim-barnes/lang-python
|
3dbbc7f38cec598e32bd1a06827246dcab3a0ced
|
[
"MIT"
] | 1
|
2021-06-16T23:43:27.000Z
|
2021-06-16T23:43:27.000Z
|
poetry_packaging/src/submod2/tests/test_submod2.py
|
tim-barnes/lang-python
|
3dbbc7f38cec598e32bd1a06827246dcab3a0ced
|
[
"MIT"
] | null | null | null |
poetry_packaging/src/submod2/tests/test_submod2.py
|
tim-barnes/lang-python
|
3dbbc7f38cec598e32bd1a06827246dcab3a0ced
|
[
"MIT"
] | null | null | null |
from submod2.code import foo
def test_code():
assert foo() == 1
| 13.8
| 28
| 0.666667
| 11
| 69
| 4.090909
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.217391
| 69
| 4
| 29
| 17.25
| 0.796296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5b93627f882a4a1992c927cae5e468e3b16375fd
| 123
|
py
|
Python
|
mysite/chat/admin.py
|
DanHunt27/Music-Website
|
2764695997963047009bf6c74f14c370b6225c85
|
[
"MIT"
] | null | null | null |
mysite/chat/admin.py
|
DanHunt27/Music-Website
|
2764695997963047009bf6c74f14c370b6225c85
|
[
"MIT"
] | 12
|
2020-06-06T01:27:27.000Z
|
2022-02-11T03:45:47.000Z
|
mysite/chat/admin.py
|
DanHunt27/Music-Website
|
2764695997963047009bf6c74f14c370b6225c85
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Message, Chat
admin.site.register(Chat)
admin.site.register(Message)
| 20.5
| 33
| 0.813008
| 18
| 123
| 5.555556
| 0.555556
| 0.18
| 0.26
| 0.42
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 123
| 5
| 34
| 24.6
| 0.900901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5bdb7b10bb0cddede3006e48c0c0ce54766286f6
| 5,072
|
py
|
Python
|
tests/api/test_api_v3_viewsets.py
|
bmclaughlin/galaxy-api
|
f845c4fa3dc719a9f003a49b9895d437db45405d
|
[
"Apache-2.0"
] | null | null | null |
tests/api/test_api_v3_viewsets.py
|
bmclaughlin/galaxy-api
|
f845c4fa3dc719a9f003a49b9895d437db45405d
|
[
"Apache-2.0"
] | null | null | null |
tests/api/test_api_v3_viewsets.py
|
bmclaughlin/galaxy-api
|
f845c4fa3dc719a9f003a49b9895d437db45405d
|
[
"Apache-2.0"
] | null | null | null |
from unittest import mock
import galaxy_pulp
from .base import BaseTestCase, API_PREFIX
class TestCollectionViewSet(BaseTestCase):
def setUp(self):
super().setUp()
patcher = mock.patch("galaxy_pulp.GalaxyCollectionsApi")
self.collection_api = patcher.start().return_value
self.addCleanup(patcher.stop)
def test_list(self):
self.collection_api.list.return_value = galaxy_pulp.ResultsPage(
count=1, results=[]
)
response = self.client.get(f"/{API_PREFIX}/v3/collections/")
assert response.status_code == 200
self.collection_api.list.assert_called_once_with(
prefix=API_PREFIX, offset=0, limit=10
)
def test_list_limit_offset(self):
self.collection_api.list.return_value = galaxy_pulp.ResultsPage(
count=1, results=[]
)
response = self.client.get(
f"/{API_PREFIX}/v3/collections/", data={"limit": 10, "offset": 20}
)
assert response.status_code == 200
self.collection_api.list.assert_called_once_with(
prefix=API_PREFIX, limit=10, offset=20
)
def test_retrieve(self):
self.collection_api.get.return_value = {}
response = self.client.get(f"/{API_PREFIX}/v3/collections/ansible/nginx/")
assert response.status_code == 200
self.collection_api.get.assert_called_once_with(
prefix=API_PREFIX, namespace="ansible", name="nginx"
)
class TestCollectionVersionViewSet(BaseTestCase):
def setUp(self):
super().setUp()
patcher = mock.patch("galaxy_pulp.GalaxyCollectionVersionsApi", spec=True)
self.versions_api = patcher.start().return_value
self.addCleanup(patcher.stop)
def test_list_empty(self):
self.versions_api.list.return_value = galaxy_pulp.ResultsPage(
count=1, results=[]
)
response = self.client.get(
f"/{API_PREFIX}/v3/collections/ansible/nginx/versions/"
)
assert response.status_code == 404
self.versions_api.list.assert_called_once_with(
prefix=API_PREFIX, namespace="ansible", name="nginx", limit=10, offset=0
)
def test_list(self):
self.versions_api.list.return_value = galaxy_pulp.ResultsPage(
count=1, results=[{}]
)
response = self.client.get(
f"/{API_PREFIX}/v3/collections/ansible/nginx/versions/"
)
assert response.status_code == 200
self.versions_api.list.assert_called_once_with(
prefix=API_PREFIX, namespace="ansible", name="nginx", limit=10, offset=0
)
def test_list_limit_offset(self):
self.versions_api.list.return_value = galaxy_pulp.ResultsPage(
count=1, results=[{}]
)
response = self.client.get(
f"/{API_PREFIX}/v3/collections/ansible/nginx/versions/",
data={"limit": 10, "offset": 20},
)
assert response.status_code == 200
self.versions_api.list.assert_called_once_with(
prefix=API_PREFIX, namespace="ansible", name="nginx", limit=10, offset=20
)
def test_list_limit_offset_empty(self):
self.versions_api.list.return_value = galaxy_pulp.ResultsPage(
count=1, results=[]
)
response = self.client.get(
f"/{API_PREFIX}/v3/collections/ansible/nginx/versions/",
data={"limit": 10, "offset": 20},
)
assert response.status_code == 404
self.versions_api.list.assert_called_once_with(
prefix=API_PREFIX, namespace="ansible", name="nginx", limit=10, offset=20
)
def test_retrieve(self):
self.versions_api.get.return_value = {
'namespace': 'ansible',
'name': 'nginx',
'version': '1.2.3',
'download_url': '/v3/artifacts/collections/ansible-nginx-1.2.3.tar.gz',
}
response = self.client.get(
f"/{API_PREFIX}/v3/collections/ansible/nginx/versions/1.2.3/"
)
assert response.status_code == 200
assert response.data['download_url'] == \
'http://testserver/v3/artifacts/collections/ansible-nginx-1.2.3.tar.gz'
self.versions_api.get.assert_called_once_with(
prefix=API_PREFIX, namespace="ansible", name="nginx", version="1.2.3"
)
class TestCollectionImportViewSet(BaseTestCase):
def setUp(self):
super().setUp()
patcher = mock.patch("galaxy_pulp.GalaxyImportsApi", spec=True)
self.imports_api = patcher.start().return_value
self.addCleanup(patcher.stop)
def test_retrieve(self):
self.imports_api.get.return_value = {}
response = self.client.get(
f"/{API_PREFIX}/v3/imports/collections/3e26b82c-702f-4bdd-a568-7d9db17759c1/"
)
assert response.status_code == 200
self.imports_api.get.assert_called_once_with(
prefix=API_PREFIX, id="3e26b82c-702f-4bdd-a568-7d9db17759c1"
)
| 34.040268
| 89
| 0.628549
| 587
| 5,072
| 5.231687
| 0.13799
| 0.055682
| 0.053728
| 0.061543
| 0.859329
| 0.819603
| 0.809508
| 0.798763
| 0.770759
| 0.739824
| 0
| 0.034709
| 0.250197
| 5,072
| 148
| 90
| 34.27027
| 0.772811
| 0
| 0
| 0.504202
| 0
| 0.008403
| 0.171136
| 0.123817
| 0
| 0
| 0
| 0
| 0.159664
| 1
| 0.10084
| false
| 0
| 0.07563
| 0
| 0.201681
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
75590dcf97d42969fc4527523b02f32f65c1bbce
| 43
|
py
|
Python
|
deps/shock_upload_download.py
|
pranjan77/VariationSearchUtil
|
251b23ec356ea29816727ef9567f3444f41e9ffe
|
[
"MIT"
] | null | null | null |
deps/shock_upload_download.py
|
pranjan77/VariationSearchUtil
|
251b23ec356ea29816727ef9567f3444f41e9ffe
|
[
"MIT"
] | null | null | null |
deps/shock_upload_download.py
|
pranjan77/VariationSearchUtil
|
251b23ec356ea29816727ef9567f3444f41e9ffe
|
[
"MIT"
] | null | null | null |
token= "GS7YS53BGEDYD2VLAYORFN73XJZM6HJL"
| 14.333333
| 41
| 0.860465
| 2
| 43
| 18.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 0.069767
| 43
| 2
| 42
| 21.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.761905
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f3b9c03e6e0ca0e464bf747e2879a19b85b817cf
| 13,251
|
py
|
Python
|
test/test_model_gradient.py
|
tianjuchen/pyoptmat
|
6f34205f450fd884679f37522ccd0d0b65ecdb71
|
[
"MIT"
] | null | null | null |
test/test_model_gradient.py
|
tianjuchen/pyoptmat
|
6f34205f450fd884679f37522ccd0d0b65ecdb71
|
[
"MIT"
] | null | null | null |
test/test_model_gradient.py
|
tianjuchen/pyoptmat
|
6f34205f450fd884679f37522ccd0d0b65ecdb71
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import torch
from torch.autograd import Variable
import torch.nn
from pyoptmat import ode, models, flowrules, hardening, utility, damage
from pyoptmat.temperature import ConstantParameter as CP
torch.set_default_tensor_type(torch.DoubleTensor)
torch.autograd.set_detect_anomaly(True)
def differ(mfn, p0, eps=1.0e-6):
v0 = mfn(p0).numpy()
puse = p0.numpy()
result = np.zeros(puse.shape)
for ind, val in np.ndenumerate(puse):
dp = np.abs(val) * eps
if dp < eps:
dp = eps
pcurr = np.copy(puse)
pcurr[ind] += dp
v1 = mfn(torch.tensor(pcurr)).numpy()
result[ind] = (v1 - v0) / dp
return result
def simple_diff(fn, p0):
res = []
for i in range(len(p0)):
def mfn(pi):
ps = [pp for pp in p0]
ps[i] = pi
return fn(ps)
res.append(differ(mfn, p0[i]))
return res
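# Illustrative note (not part of the original test file): differ() approximates
# the derivative of a scalar-valued function w.r.t. each entry of a tensor by
# forward finite differences, e.g.
#   differ(lambda p: (p ** 2).sum(), torch.tensor([3.0]))  ->  approx. [6.0]
# simple_diff() applies this to one parameter of a list at a time.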
class CommonGradient:
def test_gradient_strain(self):
bmodel = self.model_fn([Variable(pi, requires_grad=True) for pi in self.p])
res = torch.norm(
bmodel.solve_strain(self.times, self.strains, self.temperatures)
)
res.backward()
grad = self.extract_grad(bmodel)
ngrad = simple_diff(
lambda p: torch.norm(
self.model_fn(p).solve_strain(
self.times, self.strains, self.temperatures
)
),
self.p,
)
for i, (p1, p2) in enumerate(zip(grad, ngrad)):
print(i, p1, p2)
self.assertTrue(np.allclose(p1, p2, rtol=1e-4))
def test_gradient_stress(self):
bmodel = self.model_fn([Variable(pi, requires_grad=True) for pi in self.p])
res = torch.norm(
bmodel.solve_stress(self.times, self.stresses, self.temperatures)
)
res.backward()
grad = self.extract_grad(bmodel)
ngrad = simple_diff(
lambda p: torch.norm(
self.model_fn(p).solve_stress(
self.times, self.stresses, self.temperatures
)
),
self.p,
)
# Skipping the first step helps with noise issues
for i, (p1, p2) in enumerate(zip(grad[1:], ngrad[1:])):
print(i, p1, p2)
self.assertTrue(np.allclose(p1, p2, rtol=1e-4, atol=1e-7))
class TestPerfectViscoplasticity(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.p = [self.E, self.n, self.eta]
self.model_fn = lambda p: models.ModelIntegrator(
models.InelasticModel(
CP(p[0]), flowrules.PerfectViscoplasticity(CP(p[1]), CP(p[2]))
),
use_adjoint=False,
)
self.extract_grad = lambda m: [
m.model.E.pvalue.grad.numpy(),
m.model.flowrule.n.pvalue.grad.numpy(),
m.model.flowrule.eta.pvalue.grad.numpy(),
]
self.times = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 1, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.strains = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 0.003, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.stresses = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 100.0, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.temperatures = torch.zeros_like(self.strains)
class TestIsotropicOnly(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.s0 = torch.tensor(10.0)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d]
self.model_fn = lambda p: models.ModelIntegrator(
models.InelasticModel(
CP(p[0]),
flowrules.IsoKinViscoplasticity(
CP(p[1]),
CP(p[2]),
CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]), CP(p[5])),
hardening.NoKinematicHardeningModel(),
),
),
use_adjoint=False,
)
self.extract_grad = lambda m: [
m.model.E.pvalue.grad.numpy(),
m.model.flowrule.n.pvalue.grad.numpy(),
m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(),
m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
]
self.times = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 1, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.strains = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 0.003, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.stresses = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 200.0, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.temperatures = torch.zeros_like(self.strains)
class TestHardeningViscoplasticity(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.C = torch.tensor(1000.0)
self.g = torch.tensor(10.0)
self.s0 = torch.tensor(10.0)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d, self.C, self.g]
self.model_fn = lambda p: models.ModelIntegrator(
models.InelasticModel(
CP(p[0]),
flowrules.IsoKinViscoplasticity(
CP(p[1]),
CP(p[2]),
CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]), CP(p[5])),
hardening.FAKinematicHardeningModel(CP(p[6]), CP(p[7])),
),
),
use_adjoint=False,
)
self.extract_grad = lambda m: [
m.model.E.pvalue.grad.numpy(),
m.model.flowrule.n.pvalue.grad.numpy(),
m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(),
m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
m.model.flowrule.kinematic.C.pvalue.grad.numpy(),
m.model.flowrule.kinematic.g.pvalue.grad.numpy(),
]
self.times = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 1, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.strains = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 0.003, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.stresses = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 200.0, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.temperatures = torch.zeros_like(self.strains)
class TestHardeningViscoplasticityDamage(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.C = torch.tensor(1000.0)
self.g = torch.tensor(10.0)
self.s0 = torch.tensor(10.0)
self.A = torch.tensor(2000.0)
self.xi = torch.tensor(6.5)
self.phi = torch.tensor(1.7)
self.p = [
self.E,
self.n,
self.eta,
self.s0,
self.R,
self.d,
self.C,
self.g,
self.A,
self.xi,
self.phi,
]
self.model_fn = lambda p: models.ModelIntegrator(
models.InelasticModel(
CP(p[0]),
flowrules.IsoKinViscoplasticity(
CP(p[1]),
CP(p[2]),
CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]), CP(p[5])),
hardening.FAKinematicHardeningModel(CP(p[6]), CP(p[7])),
),
dmodel=damage.HayhurstLeckie(CP(p[8]), CP(p[9]), CP(p[10])),
),
use_adjoint=False,
)
self.extract_grad = lambda m: [
m.model.E.pvalue.grad.numpy(),
m.model.flowrule.n.pvalue.grad.numpy(),
m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(),
m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
m.model.flowrule.kinematic.C.pvalue.grad.numpy(),
m.model.flowrule.kinematic.g.pvalue.grad.numpy(),
m.model.dmodel.A.pvalue.grad.numpy(),
m.model.dmodel.xi.pvalue.grad.numpy(),
m.model.dmodel.phi.pvalue.grad.numpy(),
]
self.times = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 1, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.strains = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 0.03, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.stresses = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 200, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.temperatures = torch.zeros_like(self.strains)
class TestChabocheViscoplasticity(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 4
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.C = torch.tensor([1000.0, 750.0, 100.0])
self.g = torch.tensor([10.0, 1.2, 8.6])
self.s0 = torch.tensor(10.0)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d, self.C, self.g]
self.model_fn = lambda p: models.ModelIntegrator(
models.InelasticModel(
CP(p[0]),
flowrules.IsoKinViscoplasticity(
CP(p[1]),
CP(p[2]),
CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]), CP(p[5])),
hardening.ChabocheHardeningModel(CP(p[6]), CP(p[7])),
),
),
use_adjoint=False,
)
self.extract_grad = lambda m: [
m.model.E.pvalue.grad.numpy(),
m.model.flowrule.n.pvalue.grad.numpy(),
m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(),
m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
m.model.flowrule.kinematic.C.pvalue.grad.numpy(),
m.model.flowrule.kinematic.g.pvalue.grad.numpy(),
]
self.times = torch.transpose(
torch.tensor(
np.array([np.linspace(0, 1, self.ntime) for i in range(self.nbatch)])
),
1,
0,
)
self.strains = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 0.003, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.stresses = torch.transpose(
torch.tensor(
np.array(
[np.linspace(0, 200.0, self.ntime) for i in range(self.nbatch)]
)
),
1,
0,
)
self.temperatures = torch.zeros_like(self.strains)
| 30.74478
| 88
| 0.499208
| 1,547
| 13,251
| 4.247576
| 0.106012
| 0.087049
| 0.082179
| 0.075483
| 0.842642
| 0.842642
| 0.83138
| 0.828337
| 0.799422
| 0.799422
| 0
| 0.040365
| 0.371821
| 13,251
| 430
| 89
| 30.816279
| 0.749039
| 0.003547
| 0
| 0.680108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005376
| 1
| 0.026882
| false
| 0
| 0.018817
| 0
| 0.069892
| 0.005376
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f3c2870abd334b2e579ec1e54082a5abff0a432b
| 215
|
py
|
Python
|
hds_821pr/__init__.py
|
ragingcomputer/hds_821pr
|
06ab823f0f9fd1c3347f25aed46a89ada37ef38e
|
[
"MIT"
] | 7
|
2018-12-12T13:11:54.000Z
|
2020-10-03T18:36:45.000Z
|
hds_821pr/__init__.py
|
ragingcomputer/hds_821pr
|
06ab823f0f9fd1c3347f25aed46a89ada37ef38e
|
[
"MIT"
] | null | null | null |
hds_821pr/__init__.py
|
ragingcomputer/hds_821pr
|
06ab823f0f9fd1c3347f25aed46a89ada37ef38e
|
[
"MIT"
] | 2
|
2020-01-11T22:17:51.000Z
|
2020-06-10T06:30:35.000Z
|
from .dicts import ports
from .dicts import modes
from .dicts import resolutions
from .dicts import pip_sizes
from .dicts import pip_positions
from .dicts import pip_borders
from .ascii import *
from .hex import *
| 21.5
| 32
| 0.8
| 33
| 215
| 5.121212
| 0.363636
| 0.319527
| 0.532544
| 0.319527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153488
| 215
| 9
| 33
| 23.888889
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
34364986acf37920dd288348fd8dd4e010b584a5
| 188
|
py
|
Python
|
pages/product_page.py
|
edwlad/stepik-test-final
|
c6e46e1ace867b3d9bc8f74802d7f6e4591c2a52
|
[
"MIT"
] | null | null | null |
pages/product_page.py
|
edwlad/stepik-test-final
|
c6e46e1ace867b3d9bc8f74802d7f6e4591c2a52
|
[
"MIT"
] | null | null | null |
pages/product_page.py
|
edwlad/stepik-test-final
|
c6e46e1ace867b3d9bc8f74802d7f6e4591c2a52
|
[
"MIT"
] | null | null | null |
from pages.base_page import BasePage
from pages.locators import ProductPageLocators
class ProductPage(BasePage):
pass
# alert = self.browser.switch_to.alert
# alert.accept()
| 20.888889
| 46
| 0.760638
| 23
| 188
| 6.130435
| 0.73913
| 0.12766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164894
| 188
| 8
| 47
| 23.5
| 0.898089
| 0.271277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
347072586bda54c4dbebb8dcab4479eb612523ff
| 30
|
py
|
Python
|
app/__init__.py
|
vincenira/tutorial_github
|
82cc010a19bac4db39dc92453f226a6db7c158c7
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
vincenira/tutorial_github
|
82cc010a19bac4db39dc92453f226a6db7c158c7
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
vincenira/tutorial_github
|
82cc010a19bac4db39dc92453f226a6db7c158c7
|
[
"MIT"
] | null | null | null |
from app import routes
| 6
| 23
| 0.633333
| 4
| 30
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.366667
| 30
| 4
| 24
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
caaaf42479be40690c9d876eef93d1e3950038e6
| 299
|
py
|
Python
|
Testing/First_6.py
|
Fergius/Python-projets
|
764f0e32be46093dc948a4d102c72caf3c64ae9f
|
[
"MIT"
] | null | null | null |
Testing/First_6.py
|
Fergius/Python-projets
|
764f0e32be46093dc948a4d102c72caf3c64ae9f
|
[
"MIT"
] | null | null | null |
Testing/First_6.py
|
Fergius/Python-projets
|
764f0e32be46093dc948a4d102c72caf3c64ae9f
|
[
"MIT"
] | null | null | null |
"""Примеры HTTP методов в Requests"""
import requests
print(requests.post('https://httpbin.org/post', data={'key': 'value'}))
print(requests.put('https://httpbin.org/post', data={'key': 'value'}))
print(requests.delete('https://httpbin.org/delete'))
print(requests.head('https://httpbin.org/get'))
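# Illustrative addition (not in the original snippet): a GET request with query
# parameters follows the same pattern as the calls above.
print(requests.get('https://httpbin.org/get', params={'key': 'value'}))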
| 37.375
| 71
| 0.705686
| 41
| 299
| 5.146341
| 0.439024
| 0.246446
| 0.28436
| 0.180095
| 0.417062
| 0.417062
| 0.417062
| 0.417062
| 0.417062
| 0
| 0
| 0
| 0.053512
| 299
| 7
| 72
| 42.714286
| 0.745583
| 0.103679
| 0
| 0
| 0
| 0
| 0.431298
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0.8
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
1b3726845995db98eb221414b7f86ebe3c09cb54
| 5,581
|
py
|
Python
|
drf_auto/serializers/fields.py
|
elivin/django-rest-framework-auto
|
1092d92f1f3b407b95503a821bc10e100f271a06
|
[
"Apache-2.0"
] | 1
|
2018-09-20T04:32:43.000Z
|
2018-09-20T04:32:43.000Z
|
drf_auto/serializers/fields.py
|
elivin/django-rest-framework-auto
|
1092d92f1f3b407b95503a821bc10e100f271a06
|
[
"Apache-2.0"
] | null | null | null |
drf_auto/serializers/fields.py
|
elivin/django-rest-framework-auto
|
1092d92f1f3b407b95503a821bc10e100f271a06
|
[
"Apache-2.0"
] | null | null | null |
"""
Fields for serializers.
"""
import re
from rest_framework import serializers
class GraphListMultipleChoiceField(serializers.Field):
"""
Field for passing an enumeration inside a text field. Taken from GraphQL.
>>> from rest_framework import serializers
>>>
>>>
>>> class ExampleSerializer(serializers.Serializer):
>>> test = GraphListMultipleChoiceField(allowed_values=['test1', 'test2'], separator=',')
>>>
>>> ser = ExampleSerializer({'test': 'test1,test2, test1'})
>>> ser.is_valid()
>>> ser.validated_data['test']
... ['test1', 'test2']
"""
default_error_messages = {
'empty_list': 'The field is empty: `{ITEM}`.',
'not_str': 'The field `{TYPE}` is not a string.',
'find_error_char': 'The value `{ITEM}` contains invalid characters: `{CHAR}`.',
'not_found': 'The item `{ITEM}` is not allowed. Allowed values: `{ALLOWED}`.'
}
re_space = re.compile(r'\s')
def __init__(self, allowed_values, separator=',', default_exclude_fields=None, required=False, *args, **kwargs):
"""
Create the custom field.
:param list allowed_values: List of allowed items.
:param str separator: Separator between items.
:param list default_exclude_fields: Fields to exclude when an empty value is passed.
:param bool required: Whether the field is required.
"""
super().__init__(*args, **kwargs)
self.allowed_values = allowed_values
self.separator = separator if separator else ','
self.default_exclude_fields = default_exclude_fields if default_exclude_fields else []
self.required = required
def to_representation(self, obj):
"""
Convert a list of values into a str field.
:param list obj: List of items to collapse into one string.
:return: The resulting string.
:rtype: str
"""
if not obj:
self.fail('empty_list', ITEM=obj)
return ','.join(map(str, obj))
def to_internal_value(self, data):
"""
Convert a string into a list.
:param str data: The source string.
:return: The list of values.
:rtype: list
"""
if not isinstance(data, str):
self.fail('not_str', TYPE=type(data))
# Return all fields except those that should be excluded.
if not data:
return list(set(self.allowed_values) - set(self.default_exclude_fields))
return list(set(map(self._clean_str_item, map(str, data.split(self.separator)))))
def _clean_str_item(self, itm):
"""
Validate the input item, process it and return it.
:param str itm: The item to process.
:return: The processed item.
:rtype: str
"""
itm = itm.strip()
# TODO: Probably other characters, such as `-`, should be handled here as well.
if re.search(self.re_space, itm):
self.fail('find_error_char', ITEM=itm, CHAR='Spaces')
if itm not in self.allowed_values:
self.fail('not_found', ITEM=itm, ALLOWED=self.allowed_values)
return itm
class TextToArrayField(serializers.Field):
"""
Field for passing an enumeration inside a text field. Taken from GraphQL.
>>> from rest_framework import serializers
>>>
>>>
>>> class ExampleSerializer(serializers.Serializer):
>>> test = TextToArrayField(separator=',')
>>>
>>> ser = ExampleSerializer({'test': 'test1,test2, test1'})
>>> ser.is_valid()
>>> ser.validated_data['test']
... ['test1', 'test2']
"""
default_error_messages = {
'empty_list': 'The field is empty: `{ITEM}`.',
'not_str': 'The field `{TYPE}` is not a string.',
'find_error_char': 'The value `{ITEM}` contains invalid characters: `{CHAR}`.',
}
re_space = re.compile(r'\s')
def __init__(self, separator=',', required=False, *args, **kwargs):
"""
Create the custom field.
:param str separator: Separator between items.
:param bool required: Whether the field is required.
"""
super().__init__(*args, **kwargs)
self.separator = separator if separator else ','
self.required = required
def to_representation(self, obj):
"""
Convert a list of values into a str field.
:param list obj: List of items to collapse into one string.
:return: The resulting string.
:rtype: str
"""
if not obj:
self.fail('empty_list', ITEM=obj)
return ','.join(map(str, obj))
def to_internal_value(self, data):
"""
Convert a string into a list.
:param str data: The source string.
:return: The list of values.
:rtype: list
"""
if not isinstance(data, str):
self.fail('not_str', TYPE=type(data))
return list(set(map(self._clean_str_item, map(str, data.split(self.separator)))))
def _clean_str_item(self, itm):
"""
Validate the input item, process it and return it.
:param str itm: The item to process.
:return: The processed item.
:rtype: str
"""
itm = itm.strip()
# TODO: Probably other characters, such as `-`, should be handled here as well.
if re.search(self.re_space, itm):
self.fail('find_error_char', ITEM=itm, CHAR='Spaces')
if not itm:
self.fail('empty_list', ITEM=itm)
return itm
| 29.84492
| 116
| 0.609031
| 608
| 5,581
| 5.445724
| 0.233553
| 0.03141
| 0.036243
| 0.02084
| 0.797946
| 0.791604
| 0.779825
| 0.732709
| 0.689822
| 0.689822
| 0
| 0.00293
| 0.266261
| 5,581
| 186
| 117
| 30.005376
| 0.805617
| 0.396524
| 0
| 0.711864
| 0
| 0
| 0.160783
| 0
| 0
| 0
| 0
| 0.010753
| 0
| 1
| 0.135593
| false
| 0
| 0.033898
| 0
| 0.389831
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1b5e4cc2337b52ee860bc604141ec678c12e6dee
| 1,399
|
py
|
Python
|
api/models.py
|
hardik-dadhich/MediOne
|
36874799a52853fd8f0bb5250417cfb6d1454e5d
|
[
"MIT"
] | null | null | null |
api/models.py
|
hardik-dadhich/MediOne
|
36874799a52853fd8f0bb5250417cfb6d1454e5d
|
[
"MIT"
] | 5
|
2020-08-02T17:00:44.000Z
|
2021-09-22T19:38:35.000Z
|
api/models.py
|
hardik-dadhich/MediOne
|
36874799a52853fd8f0bb5250417cfb6d1454e5d
|
[
"MIT"
] | 2
|
2020-07-31T04:00:51.000Z
|
2020-08-07T18:19:55.000Z
|
from django.db import models
from django.contrib.auth.models import User
from phonenumber_field.modelfields import PhoneNumberField
# Create your models here.
class Hospital(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=25)
phone_number = PhoneNumberField()
email = models.EmailField(max_length=100)
famous_for = models.CharField(max_length=50)
geolocation = models.CharField(max_length=80)
def __str__(self):
return self.name
class Doctor(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=25)
phone_number = PhoneNumberField()
email = models.EmailField(max_length=100)
specialist = models.CharField(max_length=50)
geolocation = models.CharField(max_length=80)
hospital = models.ForeignKey(Hospital, on_delete=models.CASCADE)
def __str__(self):
return self.name
class Patient(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=25)
phone_number = PhoneNumberField()
alternate_phone_number = PhoneNumberField()
email = models.EmailField(max_length=100)
disease = models.CharField(max_length=50)
geolocation = models.CharField(max_length=80)
doctor = models.ForeignKey(Doctor, on_delete=models.CASCADE)
def __str__(self):
return self.name
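# Illustrative note (not part of the original file): with the ForeignKeys above,
# a patient's hospital is reachable as patient.doctor.hospital, and the default
# reverse accessors are hospital.doctor_set and doctor.patient_set.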
| 29.765957
| 68
| 0.734096
| 172
| 1,399
| 5.761628
| 0.296512
| 0.108981
| 0.163471
| 0.217962
| 0.742684
| 0.742684
| 0.742684
| 0.708375
| 0.708375
| 0.64783
| 0
| 0.023296
| 0.171551
| 1,399
| 46
| 69
| 30.413043
| 0.831752
| 0.017155
| 0
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.090909
| 0.090909
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
94106fe7ebfb446466c2ab9bebf008e88e77d6e9
| 60,233
|
py
|
Python
|
shell/gen-py/CatalogService/ttypes.py
|
suifengzhuliu/impala
|
611f4c6f3b18cfcddff3b2956cbb87c295a87655
|
[
"Apache-2.0"
] | null | null | null |
shell/gen-py/CatalogService/ttypes.py
|
suifengzhuliu/impala
|
611f4c6f3b18cfcddff3b2956cbb87c295a87655
|
[
"Apache-2.0"
] | null | null | null |
shell/gen-py/CatalogService/ttypes.py
|
suifengzhuliu/impala
|
611f4c6f3b18cfcddff3b2956cbb87c295a87655
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import CatalogObjects.ttypes
import JniCatalog.ttypes
import Types.ttypes
import Status.ttypes
import Results.ttypes
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class CatalogServiceVersion:
V1 = 0
_VALUES_TO_NAMES = {
0: "V1",
}
_NAMES_TO_VALUES = {
"V1": 0,
}
class TCatalogServiceRequestHeader:
"""
Attributes:
- requesting_user
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'requesting_user', None, None, ), # 1
)
def __init__(self, requesting_user=None,):
self.requesting_user = requesting_user
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.requesting_user = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TCatalogServiceRequestHeader')
if self.requesting_user is not None:
oprot.writeFieldBegin('requesting_user', TType.STRING, 1)
oprot.writeString(self.requesting_user)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TCatalogUpdateResult:
"""
Attributes:
- catalog_service_id
- version
- status
- updated_catalog_objects
- removed_catalog_objects
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'catalog_service_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 1
(2, TType.I64, 'version', None, None, ), # 2
(3, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 3
(4, TType.LIST, 'updated_catalog_objects', (TType.STRUCT,(CatalogObjects.ttypes.TCatalogObject, CatalogObjects.ttypes.TCatalogObject.thrift_spec)), None, ), # 4
(5, TType.LIST, 'removed_catalog_objects', (TType.STRUCT,(CatalogObjects.ttypes.TCatalogObject, CatalogObjects.ttypes.TCatalogObject.thrift_spec)), None, ), # 5
)
def __init__(self, catalog_service_id=None, version=None, status=None, updated_catalog_objects=None, removed_catalog_objects=None,):
self.catalog_service_id = catalog_service_id
self.version = version
self.status = status
self.updated_catalog_objects = updated_catalog_objects
self.removed_catalog_objects = removed_catalog_objects
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.catalog_service_id = Types.ttypes.TUniqueId()
self.catalog_service_id.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.version = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.status = Status.ttypes.TStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.updated_catalog_objects = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in xrange(_size0):
_elem5 = CatalogObjects.ttypes.TCatalogObject()
_elem5.read(iprot)
self.updated_catalog_objects.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.removed_catalog_objects = []
(_etype9, _size6) = iprot.readListBegin()
for _i10 in xrange(_size6):
_elem11 = CatalogObjects.ttypes.TCatalogObject()
_elem11.read(iprot)
self.removed_catalog_objects.append(_elem11)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TCatalogUpdateResult')
if self.catalog_service_id is not None:
oprot.writeFieldBegin('catalog_service_id', TType.STRUCT, 1)
self.catalog_service_id.write(oprot)
oprot.writeFieldEnd()
if self.version is not None:
oprot.writeFieldBegin('version', TType.I64, 2)
oprot.writeI64(self.version)
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 3)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.updated_catalog_objects is not None:
oprot.writeFieldBegin('updated_catalog_objects', TType.LIST, 4)
oprot.writeListBegin(TType.STRUCT, len(self.updated_catalog_objects))
for iter12 in self.updated_catalog_objects:
iter12.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.removed_catalog_objects is not None:
oprot.writeFieldBegin('removed_catalog_objects', TType.LIST, 5)
oprot.writeListBegin(TType.STRUCT, len(self.removed_catalog_objects))
for iter13 in self.removed_catalog_objects:
iter13.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.catalog_service_id is None:
raise TProtocol.TProtocolException(message='Required field catalog_service_id is unset!')
if self.version is None:
raise TProtocol.TProtocolException(message='Required field version is unset!')
if self.status is None:
raise TProtocol.TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TDdlExecRequest:
"""
Attributes:
- protocol_version
- header
- ddl_type
- alter_table_params
- alter_view_params
- create_db_params
- create_table_params
- create_table_like_params
- create_view_params
- create_fn_params
- drop_db_params
- drop_table_or_view_params
- truncate_params
- drop_fn_params
- compute_stats_params
- create_data_source_params
- drop_data_source_params
- drop_stats_params
- create_drop_role_params
- grant_revoke_role_params
- grant_revoke_priv_params
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.I32, 'ddl_type', None, None, ), # 2
(3, TType.STRUCT, 'alter_table_params', (JniCatalog.ttypes.TAlterTableParams, JniCatalog.ttypes.TAlterTableParams.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'alter_view_params', (JniCatalog.ttypes.TCreateOrAlterViewParams, JniCatalog.ttypes.TCreateOrAlterViewParams.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'create_db_params', (JniCatalog.ttypes.TCreateDbParams, JniCatalog.ttypes.TCreateDbParams.thrift_spec), None, ), # 5
(6, TType.STRUCT, 'create_table_params', (JniCatalog.ttypes.TCreateTableParams, JniCatalog.ttypes.TCreateTableParams.thrift_spec), None, ), # 6
(7, TType.STRUCT, 'create_table_like_params', (JniCatalog.ttypes.TCreateTableLikeParams, JniCatalog.ttypes.TCreateTableLikeParams.thrift_spec), None, ), # 7
(8, TType.STRUCT, 'create_view_params', (JniCatalog.ttypes.TCreateOrAlterViewParams, JniCatalog.ttypes.TCreateOrAlterViewParams.thrift_spec), None, ), # 8
(9, TType.STRUCT, 'create_fn_params', (JniCatalog.ttypes.TCreateFunctionParams, JniCatalog.ttypes.TCreateFunctionParams.thrift_spec), None, ), # 9
(10, TType.STRUCT, 'drop_db_params', (JniCatalog.ttypes.TDropDbParams, JniCatalog.ttypes.TDropDbParams.thrift_spec), None, ), # 10
(11, TType.STRUCT, 'drop_table_or_view_params', (JniCatalog.ttypes.TDropTableOrViewParams, JniCatalog.ttypes.TDropTableOrViewParams.thrift_spec), None, ), # 11
(12, TType.STRUCT, 'drop_fn_params', (JniCatalog.ttypes.TDropFunctionParams, JniCatalog.ttypes.TDropFunctionParams.thrift_spec), None, ), # 12
(13, TType.STRUCT, 'compute_stats_params', (JniCatalog.ttypes.TComputeStatsParams, JniCatalog.ttypes.TComputeStatsParams.thrift_spec), None, ), # 13
(14, TType.STRUCT, 'create_data_source_params', (JniCatalog.ttypes.TCreateDataSourceParams, JniCatalog.ttypes.TCreateDataSourceParams.thrift_spec), None, ), # 14
(15, TType.STRUCT, 'drop_data_source_params', (JniCatalog.ttypes.TDropDataSourceParams, JniCatalog.ttypes.TDropDataSourceParams.thrift_spec), None, ), # 15
(16, TType.STRUCT, 'drop_stats_params', (JniCatalog.ttypes.TDropStatsParams, JniCatalog.ttypes.TDropStatsParams.thrift_spec), None, ), # 16
(17, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 17
(18, TType.STRUCT, 'create_drop_role_params', (JniCatalog.ttypes.TCreateDropRoleParams, JniCatalog.ttypes.TCreateDropRoleParams.thrift_spec), None, ), # 18
(19, TType.STRUCT, 'grant_revoke_role_params', (JniCatalog.ttypes.TGrantRevokeRoleParams, JniCatalog.ttypes.TGrantRevokeRoleParams.thrift_spec), None, ), # 19
(20, TType.STRUCT, 'grant_revoke_priv_params', (JniCatalog.ttypes.TGrantRevokePrivParams, JniCatalog.ttypes.TGrantRevokePrivParams.thrift_spec), None, ), # 20
(21, TType.STRUCT, 'truncate_params', (JniCatalog.ttypes.TTruncateParams, JniCatalog.ttypes.TTruncateParams.thrift_spec), None, ), # 21
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None, ddl_type=None, alter_table_params=None, alter_view_params=None, create_db_params=None, create_table_params=None, create_table_like_params=None, create_view_params=None, create_fn_params=None, drop_db_params=None, drop_table_or_view_params=None, truncate_params=None, drop_fn_params=None, compute_stats_params=None, create_data_source_params=None, drop_data_source_params=None, drop_stats_params=None, create_drop_role_params=None, grant_revoke_role_params=None, grant_revoke_priv_params=None,):
self.protocol_version = protocol_version
self.header = header
self.ddl_type = ddl_type
self.alter_table_params = alter_table_params
self.alter_view_params = alter_view_params
self.create_db_params = create_db_params
self.create_table_params = create_table_params
self.create_table_like_params = create_table_like_params
self.create_view_params = create_view_params
self.create_fn_params = create_fn_params
self.drop_db_params = drop_db_params
self.drop_table_or_view_params = drop_table_or_view_params
self.truncate_params = truncate_params
self.drop_fn_params = drop_fn_params
self.compute_stats_params = compute_stats_params
self.create_data_source_params = create_data_source_params
self.drop_data_source_params = drop_data_source_params
self.drop_stats_params = drop_stats_params
self.create_drop_role_params = create_drop_role_params
self.grant_revoke_role_params = grant_revoke_role_params
self.grant_revoke_priv_params = grant_revoke_priv_params
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.ddl_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.alter_table_params = JniCatalog.ttypes.TAlterTableParams()
self.alter_table_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.alter_view_params = JniCatalog.ttypes.TCreateOrAlterViewParams()
self.alter_view_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.create_db_params = JniCatalog.ttypes.TCreateDbParams()
self.create_db_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.create_table_params = JniCatalog.ttypes.TCreateTableParams()
self.create_table_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
self.create_table_like_params = JniCatalog.ttypes.TCreateTableLikeParams()
self.create_table_like_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRUCT:
self.create_view_params = JniCatalog.ttypes.TCreateOrAlterViewParams()
self.create_view_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.create_fn_params = JniCatalog.ttypes.TCreateFunctionParams()
self.create_fn_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.STRUCT:
self.drop_db_params = JniCatalog.ttypes.TDropDbParams()
self.drop_db_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.STRUCT:
self.drop_table_or_view_params = JniCatalog.ttypes.TDropTableOrViewParams()
self.drop_table_or_view_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.STRUCT:
self.truncate_params = JniCatalog.ttypes.TTruncateParams()
self.truncate_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.STRUCT:
self.drop_fn_params = JniCatalog.ttypes.TDropFunctionParams()
self.drop_fn_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.compute_stats_params = JniCatalog.ttypes.TComputeStatsParams()
self.compute_stats_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.STRUCT:
self.create_data_source_params = JniCatalog.ttypes.TCreateDataSourceParams()
self.create_data_source_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.STRUCT:
self.drop_data_source_params = JniCatalog.ttypes.TDropDataSourceParams()
self.drop_data_source_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.STRUCT:
self.drop_stats_params = JniCatalog.ttypes.TDropStatsParams()
self.drop_stats_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.STRUCT:
self.create_drop_role_params = JniCatalog.ttypes.TCreateDropRoleParams()
self.create_drop_role_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.STRUCT:
self.grant_revoke_role_params = JniCatalog.ttypes.TGrantRevokeRoleParams()
self.grant_revoke_role_params.read(iprot)
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.STRUCT:
self.grant_revoke_priv_params = JniCatalog.ttypes.TGrantRevokePrivParams()
self.grant_revoke_priv_params.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TDdlExecRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.ddl_type is not None:
oprot.writeFieldBegin('ddl_type', TType.I32, 2)
oprot.writeI32(self.ddl_type)
oprot.writeFieldEnd()
if self.alter_table_params is not None:
oprot.writeFieldBegin('alter_table_params', TType.STRUCT, 3)
self.alter_table_params.write(oprot)
oprot.writeFieldEnd()
if self.alter_view_params is not None:
oprot.writeFieldBegin('alter_view_params', TType.STRUCT, 4)
self.alter_view_params.write(oprot)
oprot.writeFieldEnd()
if self.create_db_params is not None:
oprot.writeFieldBegin('create_db_params', TType.STRUCT, 5)
self.create_db_params.write(oprot)
oprot.writeFieldEnd()
if self.create_table_params is not None:
oprot.writeFieldBegin('create_table_params', TType.STRUCT, 6)
self.create_table_params.write(oprot)
oprot.writeFieldEnd()
if self.create_table_like_params is not None:
oprot.writeFieldBegin('create_table_like_params', TType.STRUCT, 7)
self.create_table_like_params.write(oprot)
oprot.writeFieldEnd()
if self.create_view_params is not None:
oprot.writeFieldBegin('create_view_params', TType.STRUCT, 8)
self.create_view_params.write(oprot)
oprot.writeFieldEnd()
if self.create_fn_params is not None:
oprot.writeFieldBegin('create_fn_params', TType.STRUCT, 9)
self.create_fn_params.write(oprot)
oprot.writeFieldEnd()
if self.drop_db_params is not None:
oprot.writeFieldBegin('drop_db_params', TType.STRUCT, 10)
self.drop_db_params.write(oprot)
oprot.writeFieldEnd()
if self.drop_table_or_view_params is not None:
oprot.writeFieldBegin('drop_table_or_view_params', TType.STRUCT, 11)
self.drop_table_or_view_params.write(oprot)
oprot.writeFieldEnd()
if self.drop_fn_params is not None:
oprot.writeFieldBegin('drop_fn_params', TType.STRUCT, 12)
self.drop_fn_params.write(oprot)
oprot.writeFieldEnd()
if self.compute_stats_params is not None:
oprot.writeFieldBegin('compute_stats_params', TType.STRUCT, 13)
self.compute_stats_params.write(oprot)
oprot.writeFieldEnd()
if self.create_data_source_params is not None:
oprot.writeFieldBegin('create_data_source_params', TType.STRUCT, 14)
self.create_data_source_params.write(oprot)
oprot.writeFieldEnd()
if self.drop_data_source_params is not None:
oprot.writeFieldBegin('drop_data_source_params', TType.STRUCT, 15)
self.drop_data_source_params.write(oprot)
oprot.writeFieldEnd()
if self.drop_stats_params is not None:
oprot.writeFieldBegin('drop_stats_params', TType.STRUCT, 16)
self.drop_stats_params.write(oprot)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 17)
self.header.write(oprot)
oprot.writeFieldEnd()
if self.create_drop_role_params is not None:
oprot.writeFieldBegin('create_drop_role_params', TType.STRUCT, 18)
self.create_drop_role_params.write(oprot)
oprot.writeFieldEnd()
if self.grant_revoke_role_params is not None:
oprot.writeFieldBegin('grant_revoke_role_params', TType.STRUCT, 19)
self.grant_revoke_role_params.write(oprot)
oprot.writeFieldEnd()
if self.grant_revoke_priv_params is not None:
oprot.writeFieldBegin('grant_revoke_priv_params', TType.STRUCT, 20)
self.grant_revoke_priv_params.write(oprot)
oprot.writeFieldEnd()
if self.truncate_params is not None:
oprot.writeFieldBegin('truncate_params', TType.STRUCT, 21)
self.truncate_params.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
if self.ddl_type is None:
raise TProtocol.TProtocolException(message='Required field ddl_type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TDdlExecResponse:
"""
Attributes:
- result
- new_table_created
- result_set
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'result', (TCatalogUpdateResult, TCatalogUpdateResult.thrift_spec), None, ), # 1
(2, TType.BOOL, 'new_table_created', None, None, ), # 2
(3, TType.STRUCT, 'result_set', (Results.ttypes.TResultSet, Results.ttypes.TResultSet.thrift_spec), None, ), # 3
)
def __init__(self, result=None, new_table_created=None, result_set=None,):
self.result = result
self.new_table_created = new_table_created
self.result_set = result_set
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.result = TCatalogUpdateResult()
self.result.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
self.new_table_created = iprot.readBool();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.result_set = Results.ttypes.TResultSet()
self.result_set.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TDdlExecResponse')
if self.result is not None:
oprot.writeFieldBegin('result', TType.STRUCT, 1)
self.result.write(oprot)
oprot.writeFieldEnd()
if self.new_table_created is not None:
oprot.writeFieldBegin('new_table_created', TType.BOOL, 2)
oprot.writeBool(self.new_table_created)
oprot.writeFieldEnd()
if self.result_set is not None:
oprot.writeFieldBegin('result_set', TType.STRUCT, 3)
self.result_set.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.result is None:
raise TProtocol.TProtocolException(message='Required field result is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TUpdateCatalogRequest:
"""
Attributes:
- protocol_version
- header
- target_table
- db_name
- created_partitions
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 2
(3, TType.STRING, 'target_table', None, None, ), # 3
(4, TType.STRING, 'db_name', None, None, ), # 4
(5, TType.SET, 'created_partitions', (TType.STRING,None), None, ), # 5
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None, target_table=None, db_name=None, created_partitions=None,):
self.protocol_version = protocol_version
self.header = header
self.target_table = target_table
self.db_name = db_name
self.created_partitions = created_partitions
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.target_table = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.db_name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.SET:
self.created_partitions = set()
(_etype17, _size14) = iprot.readSetBegin()
for _i18 in xrange(_size14):
_elem19 = iprot.readString();
self.created_partitions.add(_elem19)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TUpdateCatalogRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 2)
self.header.write(oprot)
oprot.writeFieldEnd()
if self.target_table is not None:
oprot.writeFieldBegin('target_table', TType.STRING, 3)
oprot.writeString(self.target_table)
oprot.writeFieldEnd()
if self.db_name is not None:
oprot.writeFieldBegin('db_name', TType.STRING, 4)
oprot.writeString(self.db_name)
oprot.writeFieldEnd()
if self.created_partitions is not None:
oprot.writeFieldBegin('created_partitions', TType.SET, 5)
oprot.writeSetBegin(TType.STRING, len(self.created_partitions))
for iter20 in self.created_partitions:
oprot.writeString(iter20)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
if self.target_table is None:
raise TProtocol.TProtocolException(message='Required field target_table is unset!')
if self.db_name is None:
raise TProtocol.TProtocolException(message='Required field db_name is unset!')
if self.created_partitions is None:
raise TProtocol.TProtocolException(message='Required field created_partitions is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TUpdateCatalogResponse:
"""
Attributes:
- result
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'result', (TCatalogUpdateResult, TCatalogUpdateResult.thrift_spec), None, ), # 1
)
def __init__(self, result=None,):
self.result = result
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.result = TCatalogUpdateResult()
self.result.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TUpdateCatalogResponse')
if self.result is not None:
oprot.writeFieldBegin('result', TType.STRUCT, 1)
self.result.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.result is None:
raise TProtocol.TProtocolException(message='Required field result is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TResetMetadataRequest:
"""
Attributes:
- protocol_version
- header
- is_refresh
- table_name
- partition_spec
- db_name
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.BOOL, 'is_refresh', None, None, ), # 2
(3, TType.STRUCT, 'table_name', (CatalogObjects.ttypes.TTableName, CatalogObjects.ttypes.TTableName.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 4
(5, TType.LIST, 'partition_spec', (TType.STRUCT,(CatalogObjects.ttypes.TPartitionKeyValue, CatalogObjects.ttypes.TPartitionKeyValue.thrift_spec)), None, ), # 5
(6, TType.STRING, 'db_name', None, None, ), # 6
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None, is_refresh=None, table_name=None, partition_spec=None, db_name=None,):
self.protocol_version = protocol_version
self.header = header
self.is_refresh = is_refresh
self.table_name = table_name
self.partition_spec = partition_spec
self.db_name = db_name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
self.is_refresh = iprot.readBool();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.table_name = CatalogObjects.ttypes.TTableName()
self.table_name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.partition_spec = []
(_etype24, _size21) = iprot.readListBegin()
for _i25 in xrange(_size21):
_elem26 = CatalogObjects.ttypes.TPartitionKeyValue()
_elem26.read(iprot)
self.partition_spec.append(_elem26)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.db_name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TResetMetadataRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.is_refresh is not None:
oprot.writeFieldBegin('is_refresh', TType.BOOL, 2)
oprot.writeBool(self.is_refresh)
oprot.writeFieldEnd()
if self.table_name is not None:
oprot.writeFieldBegin('table_name', TType.STRUCT, 3)
self.table_name.write(oprot)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 4)
self.header.write(oprot)
oprot.writeFieldEnd()
if self.partition_spec is not None:
oprot.writeFieldBegin('partition_spec', TType.LIST, 5)
oprot.writeListBegin(TType.STRUCT, len(self.partition_spec))
for iter27 in self.partition_spec:
iter27.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.db_name is not None:
oprot.writeFieldBegin('db_name', TType.STRING, 6)
oprot.writeString(self.db_name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
if self.is_refresh is None:
raise TProtocol.TProtocolException(message='Required field is_refresh is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TResetMetadataResponse:
"""
Attributes:
- result
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'result', (TCatalogUpdateResult, TCatalogUpdateResult.thrift_spec), None, ), # 1
)
def __init__(self, result=None,):
self.result = result
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.result = TCatalogUpdateResult()
self.result.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TResetMetadataResponse')
if self.result is not None:
oprot.writeFieldBegin('result', TType.STRUCT, 1)
self.result.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.result is None:
raise TProtocol.TProtocolException(message='Required field result is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TGetFunctionsRequest:
"""
Attributes:
- protocol_version
- header
- db_name
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.STRING, 'db_name', None, None, ), # 2
(3, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 3
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None, db_name=None,):
self.protocol_version = protocol_version
self.header = header
self.db_name = db_name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.db_name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TGetFunctionsRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.db_name is not None:
oprot.writeFieldBegin('db_name', TType.STRING, 2)
oprot.writeString(self.db_name)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 3)
self.header.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TGetFunctionsResponse:
"""
Attributes:
- status
- functions
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
(2, TType.LIST, 'functions', (TType.STRUCT,(Types.ttypes.TFunction, Types.ttypes.TFunction.thrift_spec)), None, ), # 2
)
def __init__(self, status=None, functions=None,):
self.status = status
self.functions = functions
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status.ttypes.TStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.functions = []
(_etype31, _size28) = iprot.readListBegin()
for _i32 in xrange(_size28):
_elem33 = Types.ttypes.TFunction()
_elem33.read(iprot)
self.functions.append(_elem33)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TGetFunctionsResponse')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.functions is not None:
oprot.writeFieldBegin('functions', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.functions))
for iter34 in self.functions:
iter34.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TGetCatalogObjectRequest:
"""
Attributes:
- protocol_version
- header
- object_desc
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.STRUCT, 'object_desc', (CatalogObjects.ttypes.TCatalogObject, CatalogObjects.ttypes.TCatalogObject.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 3
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None, object_desc=None,):
self.protocol_version = protocol_version
self.header = header
self.object_desc = object_desc
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.object_desc = CatalogObjects.ttypes.TCatalogObject()
self.object_desc.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TGetCatalogObjectRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.object_desc is not None:
oprot.writeFieldBegin('object_desc', TType.STRUCT, 2)
self.object_desc.write(oprot)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 3)
self.header.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
if self.object_desc is None:
raise TProtocol.TProtocolException(message='Required field object_desc is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TGetCatalogObjectResponse:
"""
Attributes:
- catalog_object
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'catalog_object', (CatalogObjects.ttypes.TCatalogObject, CatalogObjects.ttypes.TCatalogObject.thrift_spec), None, ), # 1
)
def __init__(self, catalog_object=None,):
self.catalog_object = catalog_object
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.catalog_object = CatalogObjects.ttypes.TCatalogObject()
self.catalog_object.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TGetCatalogObjectResponse')
if self.catalog_object is not None:
oprot.writeFieldBegin('catalog_object', TType.STRUCT, 1)
self.catalog_object.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.catalog_object is None:
raise TProtocol.TProtocolException(message='Required field catalog_object is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TPrioritizeLoadRequest:
"""
Attributes:
- protocol_version
- header
- object_descs
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 2
(3, TType.LIST, 'object_descs', (TType.STRUCT,(CatalogObjects.ttypes.TCatalogObject, CatalogObjects.ttypes.TCatalogObject.thrift_spec)), None, ), # 3
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None, object_descs=None,):
self.protocol_version = protocol_version
self.header = header
self.object_descs = object_descs
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.object_descs = []
(_etype38, _size35) = iprot.readListBegin()
for _i39 in xrange(_size35):
_elem40 = CatalogObjects.ttypes.TCatalogObject()
_elem40.read(iprot)
self.object_descs.append(_elem40)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TPrioritizeLoadRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 2)
self.header.write(oprot)
oprot.writeFieldEnd()
if self.object_descs is not None:
oprot.writeFieldBegin('object_descs', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.object_descs))
for iter41 in self.object_descs:
iter41.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
if self.object_descs is None:
raise TProtocol.TProtocolException(message='Required field object_descs is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TPrioritizeLoadResponse:
"""
Attributes:
- status
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
)
def __init__(self, status=None,):
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status.ttypes.TStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TPrioritizeLoadResponse')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocol.TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TSentryAdminCheckRequest:
"""
Attributes:
- protocol_version
- header
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'protocol_version', None, 0, ), # 1
(2, TType.STRUCT, 'header', (TCatalogServiceRequestHeader, TCatalogServiceRequestHeader.thrift_spec), None, ), # 2
)
def __init__(self, protocol_version=thrift_spec[1][4], header=None,):
self.protocol_version = protocol_version
self.header = header
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.protocol_version = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.header = TCatalogServiceRequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TSentryAdminCheckRequest')
if self.protocol_version is not None:
oprot.writeFieldBegin('protocol_version', TType.I32, 1)
oprot.writeI32(self.protocol_version)
oprot.writeFieldEnd()
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 2)
self.header.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.protocol_version is None:
raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TSentryAdminCheckResponse:
"""
Attributes:
- status
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
)
def __init__(self, status=None,):
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status.ttypes.TStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TSentryAdminCheckResponse')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
| 36.59356
| 564
| 0.679511
| 7,071
| 60,233
| 5.5268
| 0.037477
| 0.033009
| 0.028327
| 0.034545
| 0.816633
| 0.769498
| 0.715353
| 0.673286
| 0.634161
| 0.608956
| 0
| 0.010866
| 0.211612
| 60,233
| 1,645
| 565
| 36.615805
| 0.812096
| 0.025169
| 0
| 0.702861
| 1
| 0
| 0.050908
| 0.012101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082172
| false
| 0
| 0.006603
| 0.025679
| 0.18562
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
948e0848ff774131bd21e02da4a8bd0894a5cade
| 10,080
|
py
|
Python
|
lib/enthought/traits/tests/delegate_test_case.py
|
mattfoster/matplotlib
|
0b47697b19b77226c633ec6a3d74a2199a153315
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2016-05-08T18:33:12.000Z
|
2016-05-08T18:33:12.000Z
|
lib/enthought/traits/tests/delegate_test_case.py
|
mattfoster/matplotlib
|
0b47697b19b77226c633ec6a3d74a2199a153315
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
lib/enthought/traits/tests/delegate_test_case.py
|
mattfoster/matplotlib
|
0b47697b19b77226c633ec6a3d74a2199a153315
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
import unittest
from enthought.traits.api import Delegate, HasTraits, Instance, Str, Any
# Module-level globals capture the value of self that each change handler was
# called with, so the tests can detect handlers being called with the wrong object.
baz_s_handler_self = None
baz_sd_handler_self = None
baz_t_handler_self = None
foo_s_handler_self = None
foo_t_handler_self = None
class Foo( HasTraits ):
s = Str( 'foo' )
t = Str( 'foo.t')
def _s_changed(self, name, old, new):
print 'Foo._s_changed( %s, %s, %s, %s)' % (self, name, old, new)
global foo_s_handler_self
foo_s_handler_self = self
return
def _t_changed(self, name, old, new):
print 'Foo._t_changed( %s, %s, %s, %s)' % (self, name, old, new)
global foo_t_handler_self
foo_t_handler_self = self
return
class Bar( HasTraits ):
foo = Instance( Foo, () )
s = Delegate( 'foo' )
class BazModify( HasTraits ):
foo = Instance( Foo, () )
sd = Delegate( 'foo', prefix='s', modify=True )
t = Delegate( 'foo', modify=True )
def _s_changed(self, name, old, new):
# should never be called
print 'BazModify._s_changed( %s, %s, %s, %s)' % (self, name, old, new)
global baz_s_handler_self
baz_s_handler_self = self
return
def _sd_changed(self, name, old, new):
print 'BazModify._sd_changed( %s, %s, %s, %s)' % (self, name, old, new)
global baz_sd_handler_self
baz_sd_handler_self = self
return
def _t_changed(self, name, old, new):
print 'BazModify._t_changed( %s, %s, %s, %s)' % (self, name, old, new)
global baz_t_handler_self
baz_t_handler_self = self
return
class BazNoModify( HasTraits ):
foo = Instance( Foo, () )
sd = Delegate( 'foo', prefix='s' )
t = Delegate( 'foo' )
def _s_changed(self, name, old, new):
print 'BazNoModify._s_changed( %s, %s, %s, %s)' % (self, name, old, new)
global baz_s_handler_self
baz_s_handler_self = self
return
def _sd_changed(self, name, old, new):
print 'BazNoModify._sd_changed( %s, %s, %s, %s)' % (self, name, old, new)
global baz_sd_handler_self
baz_sd_handler_self = self
return
def _t_changed(self, name, old, new):
print 'BazNoModify._t_changed( %s, %s, %s, %s)' % (self, name, old, new)
global baz_t_handler_self
baz_t_handler_self = self
return
class DelegateTestCase( unittest.TestCase ):
""" Test cases for delegated traits. """
def test_reset(self):
""" Test that a delegated trait may be reset.
Deleting the attribute should reset the trait back to its initial
delegation behavior.
"""
f = Foo()
b = Bar(foo=f)
# Check initial delegation.
self.assertEqual( f.s, b.s )
# Check that an override works.
b.s = 'bar'
self.assertNotEqual( f.s, b.s )
# Check that we can reset back to delegation. This is what we are
# really testing for.
del b.s
self.assertEqual( f.s, b.s )
return
# Below are 8 tests to check the calling of change notification handlers.
# There are 8 cases for the 2x2x2 matrix with axes:
# Delegate with prefix or not
# Delegate with modify write through or not
# Handler in the delegator and delegatee
#
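# Expected dispatch, as exercised by the tests below: a handler fires under the
# name the trait has locally (_sd_changed on Baz, _s_changed on Foo), and the
# delegatee's handler only fires when modify=True writes the change through to
# the delegated object.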
def test_modify_prefix_handler_on_delegator(self):
f = Foo()
b = BazModify(foo=f)
self.assertEqual( f.s, b.sd )
global baz_s_handler_self
global baz_sd_handler_self
baz_sd_handler_self = None
baz_s_handler_self = None
b.sd = 'changed'
self.assertEqual( f.s, b.sd )
# Don't expect _s_changed to be called because from Baz's perspective
# the trait is named 'sd'
self.assertEqual( baz_s_handler_self, None )
# Do expect '_sd_changed' to be called with b as self
self.assertEqual( baz_sd_handler_self, b )
return
def test_modify_prefix_handler_on_delegatee(self):
f = Foo()
b = BazModify(foo=f)
self.assertEqual( f.s, b.sd )
global foo_s_handler_self
foo_s_handler_self = None
b.sd = 'changed'
self.assertEqual( f.s, b.sd )
# Foo expects its '_s_changed' handler to be called with f as self
self.assertEqual( foo_s_handler_self, f )
return
def test_no_modify_prefix_handler_on_delegator(self):
f = Foo()
b = BazNoModify(foo=f)
self.assertEqual( f.s, b.sd )
global baz_s_handler_self
global baz_sd_handler_self
baz_sd_handler_self = None
baz_s_handler_self = None
b.sd = 'changed'
self.assertNotEqual( f.s, b.sd )
# Don't expect _s_changed to be called because from Baz's perspective
# the trait is named 'sd'
self.assertEqual( baz_s_handler_self, None )
# Do expect '_sd_changed' to be called with b as self
self.assertEqual( baz_sd_handler_self, b )
return
def test_no_modify_prefix_handler_on_delegatee_not_called(self):
f = Foo()
b = BazNoModify(foo=f)
self.assertEqual( f.s, b.sd )
global foo_s_handler_self
foo_s_handler_self = None
b.sd = 'changed'
self.assertNotEqual( f.s, b.sd )
# Foo expects its '_s_changed' handler to be called with f as self
self.assertEqual( foo_s_handler_self, None )
return
def test_modify_handler_on_delegator(self):
f = Foo()
b = BazModify(foo=f)
self.assertEqual( f.t, b.t )
global baz_t_handler_self
baz_t_handler_self = None
b.t = 'changed'
self.assertEqual( f.t, b.t )
# Do expect '_t_changed' to be called with b as self
self.assertEqual( baz_t_handler_self, b )
return
def test_modify_handler_on_delegatee(self):
f = Foo()
b = BazModify(foo=f)
self.assertEqual( f.t, b.t )
global foo_t_handler_self
foo_t_handler_self = None
b.t = 'changed'
self.assertEqual( f.t, b.t )
# Foo t did change so '_t_changed' handler should be called
self.assertEqual( foo_t_handler_self, f)
return
def test_no_modify_handler_on_delegator(self):
f = Foo()
b = BazNoModify(foo=f)
self.assertEqual( f.t, b.t )
global baz_t_handler_self
baz_t_handler_self = None
b.t = 'changed'
self.assertNotEqual( f.t, b.t )
# Do expect '_t_changed' to be called with b as self
self.assertEqual( baz_t_handler_self, b )
return
def test_no_modify_handler_on_delegatee_not_called(self):
f = Foo()
b = BazNoModify(foo=f)
self.assertEqual( f.t, b.t )
global foo_t_handler_self
foo_t_handler_self = None
b.t = 'changed'
self.assertNotEqual( f.t, b.t )
# Foo t did not change so '_t_changed' handler should not be called
self.assertEqual( foo_t_handler_self, None)
return
# Below are 4 tests for notification when the delegated trait is changed
# directly rather than through the delegator.
def test_no_modify_handler_on_delegatee_direct_change(self):
f = Foo()
b = BazNoModify(foo=f)
self.assertEqual( f.t, b.t )
global foo_t_handler_self
foo_t_handler_self = None
f.t = 'changed'
self.assertEqual( f.t, b.t )
# Foo t did change so '_t_changed' handler should be called
self.assertEqual( foo_t_handler_self, f)
return
def test_no_modify_handler_on_delegator_direct_change(self):
f = Foo()
b = BazNoModify(foo=f)
self.assertEqual( f.t, b.t )
global baz_t_handler_self
baz_t_handler_self = None
f.t = 'changed'
self.assertEqual( f.t, b.t )
# Do expect '_t_changed' to be called with b as self
self.assertEqual( baz_t_handler_self, b )
return
def test_modify_handler_on_delegatee_direct_change(self):
f = Foo()
b = BazModify(foo=f)
self.assertEqual( f.t, b.t )
global foo_t_handler_self
foo_t_handler_self = None
f.t = 'changed'
self.assertEqual( f.t, b.t )
# Foo t did change so '_t_changed' handler should be called
self.assertEqual( foo_t_handler_self, f)
return
def test_modify_handler_on_delegator_direct_change(self):
f = Foo()
b = BazModify(foo=f)
self.assertEqual( f.t, b.t )
global baz_t_handler_self
baz_t_handler_self = None
f.t = 'changed'
self.assertEqual( f.t, b.t )
# Do expect '_t_changed' to be called with b as self
self.assertEqual( baz_t_handler_self, b )
return
#### EOF ######################################################################
| 28.077994
| 81
| 0.568254
| 1,328
| 10,080
| 4.074548
| 0.115964
| 0.12604
| 0.068749
| 0.041397
| 0.774718
| 0.754944
| 0.733321
| 0.711698
| 0.685456
| 0.65718
| 0
| 0.001476
| 0.327679
| 10,080
| 358
| 82
| 28.156425
| 0.79696
| 0.197421
| 0
| 0.77551
| 0
| 0
| 0.051968
| 0.017237
| 0
| 0
| 0
| 0
| 0.209184
| 0
| null | null | 0
| 0.010204
| null | null | 0.040816
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
846b9c049f0059865dce31c039c587ada739d6a2
| 1,385
|
py
|
Python
|
tests/test_cdec_data_read.py
|
dwr-psandhu/cdec_maps
|
cdf58fbffe1a82fbaa652b2b9fc8d5af57b2b0b7
|
[
"MIT"
] | null | null | null |
tests/test_cdec_data_read.py
|
dwr-psandhu/cdec_maps
|
cdf58fbffe1a82fbaa652b2b9fc8d5af57b2b0b7
|
[
"MIT"
] | null | null | null |
tests/test_cdec_data_read.py
|
dwr-psandhu/cdec_maps
|
cdf58fbffe1a82fbaa652b2b9fc8d5af57b2b0b7
|
[
"MIT"
] | null | null | null |
from cdec_maps import cdec
import pytest
import pandas as pd
def test_read_simple():
c = cdec.Reader()
df = c._read_station_data('FPT', '1', 'H', '2020-01-01', '2020-02-01')
assert not df.empty
assert len(df) == 745
assert df['VALUE'].iloc[0] == pytest.approx(104.31)
assert df['VALUE'].index[0] == pd.Timestamp('2020-01-01')
def test_read_simple_dask():
c = cdec.Reader()
df = c.read_station_data('FPT', '1', 'H', '2020-01-01', '2020-02-01')
assert not df.empty
assert len(df) == 745
assert df['VALUE'].iloc[0] == pytest.approx(104.31)
assert df['VALUE'].index[0] == pd.Timestamp('2020-01-01')
def test_start_end_order():
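    # start/end are deliberately swapped here; the assertions mirror the
    # forward-order test, so the reader is expected to normalize the order.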
c = cdec.Reader()
df = c.read_station_data('FPT', '1', 'H', '2020-02-01', '2020-01-01')
assert not df.empty
assert len(df) == 745
assert df['VALUE'].iloc[0] == pytest.approx(104.31)
assert df['VALUE'].index[0] == pd.Timestamp('2020-01-01')
def test_read_station_data_kwargs():
    '''Test calling the positional args using keyword syntax.'''
c = cdec.Reader()
df = c.read_station_data(station_id='FPT', duration_code='H',
sensor_number='1', end='2020-01-01', start='2020-02-01')
assert not df.empty
assert len(df) == 745
assert df['VALUE'].iloc[0] == pytest.approx(104.31)
assert df['VALUE'].index[0] == pd.Timestamp('2020-01-01')
| 32.97619
| 85
| 0.620217
| 223
| 1,385
| 3.73991
| 0.237668
| 0.057554
| 0.076739
| 0.06235
| 0.741007
| 0.741007
| 0.741007
| 0.741007
| 0.706235
| 0.706235
| 0
| 0.124777
| 0.189892
| 1,385
| 41
| 86
| 33.780488
| 0.618538
| 0.037545
| 0
| 0.625
| 0
| 0
| 0.135644
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.125
| false
| 0
| 0.09375
| 0
| 0.21875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
846d6c4792574d53005b372c9d434cc447a52e08
| 165
|
py
|
Python
|
os_android_adb_handler/test.py
|
osfunapps/os_android_adb_handler-py
|
640f34ff10a8b404947426196e97a35b196257e2
|
[
"MIT"
] | 3
|
2022-01-31T16:57:46.000Z
|
2022-02-10T15:08:37.000Z
|
os_android_adb_handler/test.py
|
osfunapps/os_android_adb_handler-py
|
640f34ff10a8b404947426196e97a35b196257e2
|
[
"MIT"
] | null | null | null |
os_android_adb_handler/test.py
|
osfunapps/os_android_adb_handler-py
|
640f34ff10a8b404947426196e97a35b196257e2
|
[
"MIT"
] | null | null | null |
from os_android_adb_handler import adb_handler
time.sleep(7)
adb_handler.dpad_right()
time.sleep(1)
adb_handler.dpad_right()
adb_handler.dpad_right()
print("yey!")
| 18.333333
| 46
| 0.812121
| 28
| 165
| 4.428571
| 0.5
| 0.403226
| 0.33871
| 0.459677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012987
| 0.066667
| 165
| 8
| 47
| 20.625
| 0.792208
| 0
| 0
| 0.428571
| 0
| 0
| 0.024242
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.142857
| 0
| 0.142857
| 0.142857
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8491af34960dfa1d3d06b4200cbf9b78dd30ca97
| 219
|
py
|
Python
|
odoo-13.0/addons/payment/models/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/payment/models/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/payment/models/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import payment_acquirer
from . import account_invoice
from . import res_partner
from . import account_payment
from . import chart_template
from . import ir_http
from . import res_company
| 21.9
| 30
| 0.771689
| 31
| 219
| 5.225806
| 0.516129
| 0.432099
| 0.209877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005405
| 0.155251
| 219
| 9
| 31
| 24.333333
| 0.87027
| 0.09589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
84ebee73625e2d1d008d33f93fa0080ba9468ef4
| 23
|
py
|
Python
|
buildroot/support/testing/tests/package/sample_python_gobject.py
|
bramkragten/operating-system
|
27fc2de146f1ef047316a4b58a236c72d26da81c
|
[
"Apache-2.0"
] | 2
|
2020-08-07T20:14:11.000Z
|
2020-08-07T20:18:16.000Z
|
buildroot/support/testing/tests/package/sample_python_gobject.py
|
bramkragten/operating-system
|
27fc2de146f1ef047316a4b58a236c72d26da81c
|
[
"Apache-2.0"
] | 8
|
2020-04-02T22:51:47.000Z
|
2020-04-27T03:24:55.000Z
|
buildroot/support/testing/tests/package/sample_python_gobject.py
|
bramkragten/operating-system
|
27fc2de146f1ef047316a4b58a236c72d26da81c
|
[
"Apache-2.0"
] | 1
|
2021-08-16T07:28:33.000Z
|
2021-08-16T07:28:33.000Z
|
import gobject # noqa
| 11.5
| 22
| 0.73913
| 3
| 23
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 23
| 1
| 23
| 23
| 0.944444
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1708305b28a8ef762339de554fa3929d135ee1b7
| 22
|
py
|
Python
|
src/__init__.py
|
rodrigocam/shell-gtranslate
|
c7a7addf2c0e1a3eb5779c85b7d2fcfe0114e04b
|
[
"MIT"
] | 2
|
2018-01-05T16:27:05.000Z
|
2018-05-10T14:26:34.000Z
|
src/__init__.py
|
rodrigocam/shell-gtranslate
|
c7a7addf2c0e1a3eb5779c85b7d2fcfe0114e04b
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
rodrigocam/shell-gtranslate
|
c7a7addf2c0e1a3eb5779c85b7d2fcfe0114e04b
|
[
"MIT"
] | null | null | null |
from src.main import *
| 22
| 22
| 0.772727
| 4
| 22
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ca5c38a8dc433010d79a94f477e1381285e85252
| 30
|
py
|
Python
|
quoters/__init__.py
|
suman-kr/random-quotes
|
5d3241dc9647a7b16a93dece12e9f214072e64c3
|
[
"MIT"
] | 22
|
2020-01-24T08:59:18.000Z
|
2022-02-09T02:35:20.000Z
|
quoters/__init__.py
|
suman-kr/random-quotes
|
5d3241dc9647a7b16a93dece12e9f214072e64c3
|
[
"MIT"
] | 9
|
2021-04-07T00:57:09.000Z
|
2022-03-31T10:18:06.000Z
|
quoters/__init__.py
|
suman-kr/random-quotes
|
5d3241dc9647a7b16a93dece12e9f214072e64c3
|
[
"MIT"
] | 1
|
2021-06-06T19:00:55.000Z
|
2021-06-06T19:00:55.000Z
|
from quoters.main import Quote
| 30
| 30
| 0.866667
| 5
| 30
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ca854ff00d6ede2a8e90128e276004ae324a8cf1
| 126
|
py
|
Python
|
tests/test_fouriertrf.py
|
SciFin-Team/SciFin
|
e4e3d1a32e060c911e43d89df833b3ad078ba6df
|
[
"MIT"
] | 9
|
2020-08-07T02:58:44.000Z
|
2021-10-05T16:37:07.000Z
|
tests/test_fouriertrf.py
|
SciFin-Team/SciFin
|
e4e3d1a32e060c911e43d89df833b3ad078ba6df
|
[
"MIT"
] | 9
|
2020-08-30T09:55:23.000Z
|
2020-10-15T20:06:36.000Z
|
tests/test_fouriertrf.py
|
SciFin-Team/SciFin
|
e4e3d1a32e060c911e43d89df833b3ad078ba6df
|
[
"MIT"
] | 5
|
2020-08-17T10:35:26.000Z
|
2020-11-13T17:46:33.000Z
|
from scifin import fouriertrf as ft
class TestFouriertrf:
"""
Tests the functions in fouriertrf.py.
"""
pass
| 15.75
| 41
| 0.666667
| 15
| 126
| 5.6
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 126
| 7
| 42
| 18
| 0.903226
| 0.293651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
ca8e8d57f2b1cf61a528e6a3c08d59d5740f1696
| 9,336
|
py
|
Python
|
app/follow/service/tests/test_follow.py
|
ChandanaNT/BuzzBlogApp
|
7f27409b36eb2aa9c38931ad3b4a7540340e242c
|
[
"Apache-2.0"
] | 1
|
2021-02-19T00:37:29.000Z
|
2021-02-19T00:37:29.000Z
|
app/follow/service/tests/test_follow.py
|
ChandanaNT/BuzzBlogApp
|
7f27409b36eb2aa9c38931ad3b4a7540340e242c
|
[
"Apache-2.0"
] | null | null | null |
app/follow/service/tests/test_follow.py
|
ChandanaNT/BuzzBlogApp
|
7f27409b36eb2aa9c38931ad3b4a7540340e242c
|
[
"Apache-2.0"
] | 2
|
2021-04-13T01:06:06.000Z
|
2021-11-16T16:14:46.000Z
|
# Copyright (C) 2020 Georgia Tech Center for Experimental Research in Computer
# Systems
import random
import string
import time
import unittest
from buzzblog.gen.ttypes import *
from buzzblog.account_client import Client as AccountClient
from buzzblog.follow_client import Client as FollowClient
IP_ADDRESS = "localhost"
ACCOUNT_PORT = 9090
FOLLOW_PORT = 9091
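# Helper: builds a random 16-character alphanumeric id so each test account is unique.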
def random_id(size=16, chars=string.ascii_letters + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
class TestService(unittest.TestCase):
def test_follow_account_and_retrieve_standard_follow(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Follow an account and check the returned object's attributes.
follow = client.follow_account(
TRequestMetadata(id="3", requester_id=accounts[0].id), accounts[1].id)
self.assertAlmostEqual(time.time(), follow.created_at, delta=60)
self.assertEqual(accounts[0].id, follow.follower_id)
self.assertEqual(accounts[1].id, follow.followee_id)
# Check that it cannot be duplicated.
with self.assertRaises(TFollowAlreadyExistsException):
client.follow_account(
TRequestMetadata(id="4", requester_id=accounts[0].id),
accounts[1].id)
# Retrieve that follow and check its attributes.
retrieved_follow = client.retrieve_standard_follow(
TRequestMetadata(id="5", requester_id=accounts[0].id), follow.id)
self.assertEqual(follow.id, retrieved_follow.id)
self.assertEqual(follow.created_at, retrieved_follow.created_at)
self.assertEqual(follow.follower_id, retrieved_follow.follower_id)
self.assertEqual(follow.followee_id, retrieved_follow.followee_id)
def test_follow_account_and_retrieve_expanded_follow(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Follow an account and check the returned object's attributes.
follow = client.follow_account(
TRequestMetadata(id="3", requester_id=accounts[0].id), accounts[1].id)
self.assertAlmostEqual(time.time(), follow.created_at, delta=60)
self.assertEqual(accounts[0].id, follow.follower_id)
self.assertEqual(accounts[1].id, follow.followee_id)
# Check that it cannot be duplicated.
with self.assertRaises(TFollowAlreadyExistsException):
client.follow_account(
TRequestMetadata(id="4", requester_id=accounts[0].id),
accounts[1].id)
# Retrieve that follow and check its attributes.
retrieved_follow = client.retrieve_expanded_follow(
TRequestMetadata(id="5", requester_id=accounts[0].id),
follow.id)
self.assertEqual(follow.id, retrieved_follow.id)
self.assertEqual(follow.created_at, retrieved_follow.created_at)
self.assertEqual(follow.follower_id, retrieved_follow.follower_id)
self.assertEqual(follow.followee_id, retrieved_follow.followee_id)
self.assertEqual(follow.follower_id, retrieved_follow.follower.id)
self.assertEqual(follow.followee_id, retrieved_follow.followee.id)
def test_delete_follow(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Follow an account.
follow = client.follow_account(
TRequestMetadata(id="3", requester_id=accounts[0].id), accounts[1].id)
# Remove that follow and check that it can no longer be retrieved.
client.delete_follow(
TRequestMetadata(id="4", requester_id=accounts[0].id), follow.id)
with self.assertRaises(TFollowNotFoundException):
client.retrieve_standard_follow(
TRequestMetadata(id="5", requester_id=accounts[0].id), follow.id)
def test_list_follows(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="3"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Create follows.
follows = []
for followee in accounts[1:]:
follows.append(client.follow_account(
TRequestMetadata(id="4", requester_id=accounts[0].id), followee.id))
# NOTE: a 2-second sleep is needed for consistent results because creation
# times are recorded in the database with a precision of 1 second.
time.sleep(2)
# Retrieve those follows, reverse the list, and check their ids.
query = TFollowQuery(follower_id=accounts[0].id)
limit = 10
offset = 0
retrieved_follows = client.list_follows(
TRequestMetadata(id="5", requester_id=accounts[0].id), query, limit,
offset)
retrieved_follows.reverse()
for (follow, retrieved_follow) in zip(follows, retrieved_follows):
self.assertEqual(follow.id, retrieved_follow.id)
def test_check_follow(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Check that a follow does not exist.
self.assertFalse(client.check_follow(
TRequestMetadata(id="3", requester_id=accounts[0].id), accounts[0].id,
accounts[1].id))
# Create that follow.
client.follow_account(
TRequestMetadata(id="4", requester_id=accounts[0].id), accounts[1].id)
# Check that the follow now exists.
self.assertTrue(client.check_follow(
TRequestMetadata(id="5", requester_id=accounts[0].id), accounts[0].id,
accounts[1].id))
def test_count_followers(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="3"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="4"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Have three different accounts follow accounts[0].
for (i, follower) in enumerate(accounts[1:]):
client.follow_account(
TRequestMetadata(id=str(i+5), requester_id=follower.id),
accounts[0].id)
# Check that account has 3 followers.
self.assertEqual(3,
client.count_followers(
TRequestMetadata(id="8", requester_id=accounts[0].id),
accounts[0].id))
def test_count_followees(self):
with AccountClient(IP_ADDRESS, ACCOUNT_PORT) as client:
# Create test accounts.
accounts = [
client.create_account(TRequestMetadata(id="1"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="2"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="3"), random_id(), "passwd",
"George", "Burdell"),
client.create_account(TRequestMetadata(id="4"), random_id(), "passwd",
"George", "Burdell")
]
with FollowClient(IP_ADDRESS, FOLLOW_PORT) as client:
# Make an account follow 3 different accounts.
for (i, followee) in enumerate(accounts[1:]):
client.follow_account(
TRequestMetadata(id=str(i+5), requester_id=accounts[0].id),
followee.id)
# Check that account has 3 followees.
self.assertEqual(3,
client.count_followees(
TRequestMetadata(id="8", requester_id=accounts[0].id),
accounts[0].id))
if __name__ == "__main__":
unittest.main()
| 44.246445
| 80
| 0.66356
| 1,085
| 9,336
| 5.542857
| 0.134562
| 0.110742
| 0.116395
| 0.049717
| 0.798637
| 0.775191
| 0.758065
| 0.744929
| 0.731294
| 0.723977
| 0
| 0.014405
| 0.219259
| 9,336
| 210
| 81
| 44.457143
| 0.810674
| 0.112789
| 0
| 0.608434
| 0
| 0
| 0.050036
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 1
| 0.048193
| false
| 0.114458
| 0.042169
| 0.006024
| 0.10241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
ca95e350e55f0d60371884cb6defded71b895ffb
| 2,575
|
py
|
Python
|
examples/examples.py
|
CaptainDario/ETLCDB_data_reader
|
34054e507701f76dff7f47b95fdb9c9d80493516
|
[
"MIT"
] | 1
|
2021-12-27T20:43:46.000Z
|
2021-12-27T20:43:46.000Z
|
examples/examples.py
|
CaptainDario/ETL_data_reader
|
34054e507701f76dff7f47b95fdb9c9d80493516
|
[
"MIT"
] | null | null | null |
examples/examples.py
|
CaptainDario/ETL_data_reader
|
34054e507701f76dff7f47b95fdb9c9d80493516
|
[
"MIT"
] | 1
|
2021-12-27T20:44:24.000Z
|
2021-12-27T20:44:24.000Z
|
import sys
import os
import time
sys.path.append(os.path.abspath(os.getcwd()))
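# Add the current working directory to sys.path so the local 'etldr' package can be
# imported (this assumes the script is run from the repository root).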
from etldr.etl_data_reader import ETLDataReader
def load_one_data_set_file(reader : ETLDataReader):
"""The first example of the README.
Args:
reader : ETLDataReader instance to load the data set part.
"""
from etldr.etl_data_names import ETLDataNames
from etldr.etl_character_groups import ETLCharacterGroups
include = [ETLCharacterGroups.katakana, ETLCharacterGroups.number]
imgs, labels = reader.read_dataset_file(2, ETLDataNames.ETL7, include)
def load_one_data_set_part(reader : ETLDataReader):
"""The second example of the README.
Args:
reader : ETLDataReader instance to load the data set part.
"""
from etldr.etl_data_names import ETLDataNames
from etldr.etl_character_groups import ETLCharacterGroups
include = [ETLCharacterGroups.kanji, ETLCharacterGroups.hiragana]
imgs, labels = reader.read_dataset_part(ETLDataNames.ETL2, include)
def load_one_data_set_part_parallel(reader : ETLDataReader):
"""The second example of the README.
Args:
reader : ETLDataReader instance to load the data set part.
"""
from etldr.etl_data_names import ETLDataNames
from etldr.etl_character_groups import ETLCharacterGroups
include = [ETLCharacterGroups.kanji, ETLCharacterGroups.hiragana]
imgs, labels = reader.read_dataset_part(ETLDataNames.ETL2, include, 16)
def load_the_whole_data_set(reader : ETLDataReader):
"""The third example of the README.
Args:
reader : ETLDataReader instance to load the data set part.
"""
from etldr.etl_character_groups import ETLCharacterGroups
include = [ETLCharacterGroups.roman, ETLCharacterGroups.symbols]
imgs, labels = reader.read_dataset_whole(include)
def load_the_whole_data_set_parallel(reader : ETLDataReader):
"""The third example of the README.
Args:
reader : ETLDataReader instance to load the data set part.
"""
from etldr.etl_character_groups import ETLCharacterGroups
include = [ETLCharacterGroups.roman, ETLCharacterGroups.symbols]
imgs, labels = reader.read_dataset_whole(include, 16)
if __name__ == "__main__":
path_to_data_set = r"F:\data_sets\ETL_kanji"
reader = ETLDataReader(path_to_data_set)
# uncomment one of these examples
#load_one_data_set_file(reader)
#load_one_data_set_part(reader)
#load_the_whole_data_set(reader)
#load_one_data_set_part_parallel(reader)
#load_the_whole_data_set_parallel(reader)
| 27.393617
| 75
| 0.749126
| 324
| 2,575
| 5.682099
| 0.188272
| 0.064639
| 0.058664
| 0.045627
| 0.847909
| 0.831613
| 0.765345
| 0.684411
| 0.684411
| 0.684411
| 0
| 0.003786
| 0.179417
| 2,575
| 93
| 76
| 27.688172
| 0.867487
| 0.279223
| 0
| 0.387097
| 0
| 0
| 0.017045
| 0.0125
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16129
| false
| 0
| 0.387097
| 0
| 0.548387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0464599c3ac6ea9331551a68628b827688e3cf72
| 10,645
|
py
|
Python
|
track/tests/test_selectors.py
|
anthonyoteri/track-server
|
4c7e8e7dece759f120e8a96d440a6c9b214fb101
|
[
"BSD-3-Clause"
] | null | null | null |
track/tests/test_selectors.py
|
anthonyoteri/track-server
|
4c7e8e7dece759f120e8a96d440a6c9b214fb101
|
[
"BSD-3-Clause"
] | 4
|
2019-12-04T23:40:47.000Z
|
2021-03-18T23:26:59.000Z
|
track/tests/test_selectors.py
|
anthonyoteri/track-server
|
4c7e8e7dece759f120e8a96d440a6c9b214fb101
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime, date, timedelta
import pytest
from track.selectors import (
get_active_record,
get_elapsed_time,
get_elapsed_time_per_category,
get_entries_per_day,
get_entries_per_week,
)
from . import factories
@pytest.mark.django_db
def test_get_active_record_all_stopped():
factories.RecordFactory.create_batch(10)
active = get_active_record()
assert active is None
@pytest.mark.django_db
def test_get_active_record_one_is_running():
factories.RecordFactory.create_batch(9)
target = factories.RecordFactory(stop_time_epoch=None)
active = get_active_record()
assert active is not None
assert active == target
assert active.stop_time_epoch is None
@pytest.mark.django_db
def test_get_elapsed_time():
project1 = factories.ProjectFactory()
project2 = factories.ProjectFactory()
now = datetime.now().replace(microsecond=0)
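    # Create 24 consecutive one-hour records covering the past 24 hours, all for project1.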
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
assert get_elapsed_time(project=project1) == 24 * 60 * 60
assert get_elapsed_time(project=project2) == 0
@pytest.mark.django_db
def test_get_elapsed_time_with_lower_bounds():
project1 = factories.ProjectFactory()
now = datetime.now().replace(microsecond=0)
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
assert (
get_elapsed_time(project=project1, begin=now - timedelta(hours=6))
== 6 * 60 * 60
)
@pytest.mark.django_db
def test_get_elapsed_time_with_upper_bounds():
project1 = factories.ProjectFactory()
now = datetime.now().replace(microsecond=0)
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
assert (
get_elapsed_time(project=project1, end=now - timedelta(hours=6))
== 18 * 60 * 60
)
@pytest.mark.django_db
def test_get_elapsed_time_with_lower_and_upper_bounds():
project1 = factories.ProjectFactory()
now = datetime.now().replace(microsecond=0)
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
assert (
get_elapsed_time(
project=project1,
begin=now - timedelta(hours=8),
end=now - timedelta(hours=6),
)
== 2 * 60 * 60
)
@pytest.mark.django_db
def test_get_elapsed_time_per_category():
category1 = factories.CategoryFactory()
category2 = factories.CategoryFactory()
category3 = factories.CategoryFactory()
project1 = factories.ProjectFactory()
project2 = factories.ProjectFactory()
project1.categories.add(category1)
project1.categories.add(category2)
project2.categories.add(category1)
now = datetime.now().replace(microsecond=0)
for offset in range(48, 24, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project2,
)
assert get_elapsed_time_per_category(category=category1) == 48 * 60 * 60
assert get_elapsed_time_per_category(category=category2) == 24 * 60 * 60
assert get_elapsed_time_per_category(category=category3) == 0
@pytest.mark.django_db
def test_get_elapsed_time_per_category_with_lower_bounds():
category1 = factories.CategoryFactory()
category2 = factories.CategoryFactory()
project1 = factories.ProjectFactory()
project2 = factories.ProjectFactory()
project1.categories.add(category1)
project1.categories.add(category2)
project2.categories.add(category1)
now = datetime.now().replace(microsecond=0)
for offset in range(48, 24, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project2,
)
assert (
get_elapsed_time_per_category(
category=category1, begin=now - timedelta(hours=6)
)
== 6 * 60 * 60
)
assert (
get_elapsed_time_per_category(
category=category2, begin=now - timedelta(hours=6)
)
== 0
)
@pytest.mark.django_db
def test_get_elapsed_time_per_category_with_upper_bounds():
category1 = factories.CategoryFactory()
category2 = factories.CategoryFactory()
project1 = factories.ProjectFactory()
project2 = factories.ProjectFactory()
project1.categories.add(category1)
project1.categories.add(category2)
project2.categories.add(category1)
now = datetime.now().replace(microsecond=0)
for offset in range(48, 24, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
for offset in range(24, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project2,
)
assert (
get_elapsed_time_per_category(
category=category1, end=now - timedelta(hours=6)
)
== 42 * 60 * 60
)
assert (
get_elapsed_time_per_category(
category=category2, end=now - timedelta(hours=6)
)
== 24 * 60 * 60
)
@pytest.mark.django_db
def test_get_entries_per_day():
project1 = factories.ProjectFactory()
now = datetime.now().replace(microsecond=0)
for offset in range(48, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
expected = {
project1: int(
(
now.replace(minute=0, second=0)
- now.replace(hour=0, minute=0, second=0)
).total_seconds()
)
}
assert get_entries_per_day(day=now.date()) == expected
@pytest.mark.django_db
def test_get_entries_per_day_for_category():
category = factories.CategoryFactory()
project1 = factories.ProjectFactory()
project2 = factories.ProjectFactory()
category.projects.add(project1)
now = datetime.now().replace(microsecond=0)
for offset in range(48, 0, -1):
start_time = now - timedelta(hours=offset)
stop_time = start_time + timedelta(hours=1)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(
start_time + timedelta(minutes=5)
),
stop_time_epoch=datetime.timestamp(
stop_time - timedelta(minutes=5)
),
project=project2,
)
expected = {
project1: int(
(
now.replace(minute=0, second=0)
- now.replace(hour=0, minute=0, second=0)
).total_seconds()
)
}
assert (
get_entries_per_day(day=now.date(), category=category.name) == expected
)
@pytest.mark.parametrize("with_category", [False, True])
@pytest.mark.django_db
def test_get_entries_per_week(with_category):
category = factories.CategoryFactory()
project1 = factories.ProjectFactory()
category.projects.add(project1)
now = datetime(2019, 7, 9) # A Tuesday
for offset in range(24 * 14, 0, -1):
start_time = now - timedelta(hours=offset) + timedelta(minutes=15)
stop_time = start_time + timedelta(minutes=30)
factories.RecordFactory(
start_time_epoch=datetime.timestamp(start_time),
stop_time_epoch=datetime.timestamp(stop_time),
project=project1,
)
expected = {
"week_number": "2019-W27",
"projects": [project1.name],
"days": [
{
"date": date(2019, 7, n),
"records": {project1.name: 24 * 30 * 60},
"total": 24 * 30 * 60,
}
for n in range(1, 8)
],
}
category_param = None
if with_category:
expected["category"] = category.name
category_param = category.name
assert (
get_entries_per_week(week_number="2019-W27", category=category_param)
== expected
)
| 28.013158
| 79
| 0.641804
| 1,199
| 10,645
| 5.460384
| 0.087573
| 0.074232
| 0.072705
| 0.111196
| 0.873377
| 0.845425
| 0.829388
| 0.799145
| 0.763403
| 0.720941
| 0
| 0.033989
| 0.259277
| 10,645
| 379
| 80
| 28.087071
| 0.796322
| 0.000845
| 0
| 0.61324
| 0
| 0
| 0.007147
| 0
| 0
| 0
| 0
| 0
| 0.066202
| 1
| 0.041812
| false
| 0
| 0.013937
| 0
| 0.055749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
048574a3348ddacc80b883a612bd6e876bee894b
| 73
|
py
|
Python
|
fatgraph_v.1.0/configuration/__init__.py
|
HaruNegami/Fatgraph_v.1.0
|
af640c4ea44bdb3ca17cbb7158c2daf7dae979f8
|
[
"MIT"
] | null | null | null |
fatgraph_v.1.0/configuration/__init__.py
|
HaruNegami/Fatgraph_v.1.0
|
af640c4ea44bdb3ca17cbb7158c2daf7dae979f8
|
[
"MIT"
] | null | null | null |
fatgraph_v.1.0/configuration/__init__.py
|
HaruNegami/Fatgraph_v.1.0
|
af640c4ea44bdb3ca17cbb7158c2daf7dae979f8
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from . import classFilePath
from . import configuration
| 14.6
| 27
| 0.767123
| 9
| 73
| 6.222222
| 0.777778
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.164384
| 73
| 4
| 28
| 18.25
| 0.901639
| 0.178082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
04dd07277042f5440a58d4e01a3065b12a0c5f5d
| 719
|
py
|
Python
|
Screen/views.py
|
charanreddyvaddhi/BigScreen
|
73e378572d1416f47aaeb02fc1de7d005230fa06
|
[
"Apache-2.0"
] | null | null | null |
Screen/views.py
|
charanreddyvaddhi/BigScreen
|
73e378572d1416f47aaeb02fc1de7d005230fa06
|
[
"Apache-2.0"
] | null | null | null |
Screen/views.py
|
charanreddyvaddhi/BigScreen
|
73e378572d1416f47aaeb02fc1de7d005230fa06
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render, HttpResponse
# Create your views here.
def page1(request):
    return render(request, 'home_page.html')
def page2(request):
    return render(request, 'world.html')
def page3(request):
    return render(request, 'india.html')
def page4(request):
    return render(request, 'media.html')
def page5(request):
    return render(request, 'worldnews.html')
def page6(request):
    return render(request, 'worldmovies.html')
def page7(request):
    return render(request, 'worldbusiness.html')
def page8(request):
    return render(request, 'indianews.html')
def page9(request):
    return render(request, 'indiamovies.html')
def page10(request):
    return render(request, 'indiabusiness.html')
| 28.76
| 49
| 0.741307
| 91
| 719
| 5.846154
| 0.384615
| 0.244361
| 0.357143
| 0.488722
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.127955
| 719
| 24
| 50
| 29.958333
| 0.830941
| 0.031989
| 0
| 0
| 0
| 0
| 0.201729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.47619
| false
| 0
| 0.047619
| 0.47619
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0 | 6 |
04fc5ec7b90b60a46768bba6ac232d1cbe975a59 | 31 | py | Python |
MIDSEM/midsem/midsem/envs/__init__.py | ShivenTripathi/CS698-Deep-Reinforcement-Learning | 184f7887cea3065d2bfa4ba05bfb249838c3dab4 | ["MIT"] | null | null | null |
MIDSEM/midsem/midsem/envs/__init__.py | ShivenTripathi/CS698-Deep-Reinforcement-Learning | 184f7887cea3065d2bfa4ba05bfb249838c3dab4 | ["MIT"] | null | null | null |
MIDSEM/midsem/midsem/envs/__init__.py | ShivenTripathi/CS698-Deep-Reinforcement-Learning | 184f7887cea3065d2bfa4ba05bfb249838c3dab4 | ["MIT"] | null | null | null |
from midsem.envs.RME import RME
| 31
| 31
| 0.83871
| 6
| 31
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 31
| 1
| 31
| 31
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0 | 6 |
8e489e0a44b65e1681b98cfea8d90cbda6e1d300 | 99 | py | Python |
OnlySnarf/__init__.py | sec-js/onlysnarf | c8c32abb5d6b22c08fc7e29b41211530fb583b85 | ["MIT"] | null | null | null |
OnlySnarf/__init__.py | sec-js/onlysnarf | c8c32abb5d6b22c08fc7e29b41211530fb583b85 | ["MIT"] | null | null | null |
OnlySnarf/__init__.py | sec-js/onlysnarf | c8c32abb5d6b22c08fc7e29b41211530fb583b85 | ["MIT"] | null | null | null |
#from . import config as Config
#from . import menu as Menu
from .src.util.settings import Settings
| 33
| 39
| 0.777778
| 16
| 99
| 4.8125
| 0.5
| 0.25974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 99
| 3
| 39
| 33
| 0.916667
| 0.565657
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0 | 6 |
6d4577dd499043d47c9837228a317685452fe08e | 152 | py | Python |
openbci_stream/utils/__init__.py | dunderlab/openbci-stream | 9a481d82c3a1e39c3c5c9a5a50350f397bb7221a | ["BSD-2-Clause"] | 5 | 2021-04-13T13:14:59.000Z | 2022-03-22T10:47:06.000Z |
openbci_stream/utils/__init__.py | dunderlab/openbci-stream | 9a481d82c3a1e39c3c5c9a5a50350f397bb7221a | ["BSD-2-Clause"] | 1 | 2021-10-04T10:04:34.000Z | 2021-10-04T10:04:34.000Z |
openbci_stream/utils/__init__.py | dunderlab/openbci-stream | 9a481d82c3a1e39c3c5c9a5a50350f397bb7221a | ["BSD-2-Clause"] | 2 | 2021-10-04T19:45:35.000Z | 2021-11-18T22:42:59.000Z |
from .pid_admin import autokill_process
from .hdf5 import HDF5Reader, HDF5Writer, interpolate_datetime
from .scan_wifi_modules import scan_wifi_modules
| 38
| 62
| 0.875
| 21
| 152
| 6
| 0.666667
| 0.126984
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.092105
| 152
| 3
| 63
| 50.666667
| 0.891304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0 | 6 |
6d5754fb972ed82e4cd563572083eb9793761d58 | 114 | py | Python |
deep_nilmtk/utils/__init__.py | DBRTII/Deep-NILMtk | cf6c50ec9891c17f9c626e23c0e1b6f488fefcb3 | ["MIT"] | 1 | 2022-03-18T08:56:18.000Z | 2022-03-18T08:56:18.000Z |
deep_nilmtk/utils/__init__.py | DBRTII/Deep-NILMtk | cf6c50ec9891c17f9c626e23c0e1b6f488fefcb3 | ["MIT"] | null | null | null |
deep_nilmtk/utils/__init__.py | DBRTII/Deep-NILMtk | cf6c50ec9891c17f9c626e23c0e1b6f488fefcb3 | ["MIT"] | null | null | null |
from .test import *
from .experiment_starter import setup
from .logger import *
from .check_compatibility import *
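Wildcard imports like the ones above re-export whatever each submodule exposes; a submodule can limit that surface by defining __all__. A minimal, hypothetical sketch (not taken from Deep-NILMtk):

# hypothetical logger.py illustrating how __all__ scopes "from .logger import *"
__all__ = ["log_results"]

def log_results(results):
    # exported: reachable after "from .logger import *"
    print(results)

def _format_internal(results):
    # not exported: omitted from __all__ (and underscore-prefixed), so it stays module-private
    return str(results)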
| 28.5
| 37
| 0.807018
| 15
| 114
| 6
| 0.6
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 114
| 4
| 38
| 28.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0 | 6 |
edd1fab44ca9a55b8074ca8c4c69c256f0e1c0d1 | 98 | py | Python |
get_pop/tests/conftest.py | SimmonsRitchie/get-pop | e54e966e1d2841eea09ba297ae421bb37b1530e3 | ["MIT"] | 1 | 2020-05-07T14:56:25.000Z | 2020-05-07T14:56:25.000Z |
get_pop/tests/conftest.py | SimmonsRitchie/get-pop | e54e966e1d2841eea09ba297ae421bb37b1530e3 | ["MIT"] | 3 | 2020-07-14T01:58:59.000Z | 2020-07-15T02:43:30.000Z |
get_pop/tests/conftest.py | SimmonsRitchie/us_pop_extracter | e54e966e1d2841eea09ba297ae421bb37b1530e3 | ["MIT"] | null | null | null |
import pytest


@pytest.fixture(scope="module")
def states():
    return ["ny", "tn", "tx", "pa"]
| 14
| 35
| 0.602041
| 13
| 98
| 4.538462
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 98
| 6
| 36
| 16.333333
| 0.719512
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0 | 6 |