hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aea7dad91b92ff592563fe72a5286a418a28967b
| 216
|
py
|
Python
|
TranscriptomicPipelines/t_utilities/t_gff_exceptions.py
|
g-simmons/OCB
|
217d9b8eaaefad97c52741b3eac9c18ae1def51a
|
[
"Apache-2.0"
] | null | null | null |
TranscriptomicPipelines/t_utilities/t_gff_exceptions.py
|
g-simmons/OCB
|
217d9b8eaaefad97c52741b3eac9c18ae1def51a
|
[
"Apache-2.0"
] | null | null | null |
TranscriptomicPipelines/t_utilities/t_gff_exceptions.py
|
g-simmons/OCB
|
217d9b8eaaefad97c52741b3eac9c18ae1def51a
|
[
"Apache-2.0"
] | null | null | null |
class FailedToExtractGFF3Attributes(Exception):
pass
class FailedToOutputBEDFile(Exception):
pass
class FailedToOutputGFFFile(Exception):
pass
class InvalidIDSelectionInGFFFile(Exception):
pass
| 19.636364
| 47
| 0.787037
| 16
| 216
| 10.625
| 0.4375
| 0.305882
| 0.317647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005495
| 0.157407
| 216
| 11
| 48
| 19.636364
| 0.928571
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
9d6ac65ba5c2b0c57ee79798d4b8a4d919a78658
| 10,053
|
py
|
Python
|
gcloud/analysis_statistics/migrations/0001_initial.py
|
brookylin/bk-sops
|
6c0cf78879849921c4ff6ad6bf3bb82dfdf5b973
|
[
"Apache-2.0"
] | 881
|
2019-03-25T02:45:42.000Z
|
2022-03-30T09:10:49.000Z
|
gcloud/analysis_statistics/migrations/0001_initial.py
|
m0re-work/bk-sops
|
d03ba8a4ee0781c6daaf0dd38a7369dc82669f7d
|
[
"Apache-2.0"
] | 3,303
|
2019-03-25T04:18:03.000Z
|
2022-03-31T11:52:03.000Z
|
gcloud/analysis_statistics/migrations/0001_initial.py
|
m0re-work/bk-sops
|
d03ba8a4ee0781c6daaf0dd38a7369dc82669f7d
|
[
"Apache-2.0"
] | 395
|
2019-03-25T02:53:36.000Z
|
2022-03-31T08:37:28.000Z
|
# Generated by Django 2.2.24 on 2021-11-01 03:46
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="TaskflowExecutedNodeStatistics",
fields=[
("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="id")),
("component_code", models.CharField(db_index=True, max_length=255, verbose_name="组件编码")),
("instance_id", models.BigIntegerField(db_index=True, verbose_name="Pipeline实例ID")),
("task_instance_id", models.BigIntegerField(db_index=True, verbose_name="Task实例ID")),
("node_id", models.CharField(max_length=32, verbose_name="节点ID")),
("is_sub", models.BooleanField(default=False, verbose_name="是否子流程引用")),
("subprocess_stack", models.TextField(default="[]", help_text="JSON 格式的列表", verbose_name="子流程堆栈")),
("started_time", models.DateTimeField(verbose_name="标准插件执行开始时间")),
("archived_time", models.DateTimeField(blank=True, null=True, verbose_name="标准插件执行结束时间")),
("elapsed_time", models.IntegerField(blank=True, null=True, verbose_name="标准插件执行耗时(s)")),
("status", models.BooleanField(default=False, verbose_name="是否执行成功")),
("is_skip", models.BooleanField(default=False, verbose_name="是否跳过")),
("is_retry", models.BooleanField(default=False, verbose_name="是否重试记录")),
("version", models.CharField(default="legacy", max_length=255, verbose_name="插件版本")),
("template_id", models.CharField(max_length=32, verbose_name="Pipeline模板ID")),
("task_template_id", models.CharField(max_length=32, verbose_name="Task模板ID")),
(
"project_id",
models.IntegerField(db_index=True, default=-1, help_text="模板所属project id", verbose_name="项目 ID"),
),
("instance_create_time", models.DateTimeField(db_index=True, verbose_name="Pipeline实例创建时间")),
("instance_start_time", models.DateTimeField(blank=True, null=True, verbose_name="Pipeline实例启动时间")),
("instance_finish_time", models.DateTimeField(blank=True, null=True, verbose_name="Pipeline实例结束时间")),
],
options={
"verbose_name": "Pipeline标准插件执行数据",
"verbose_name_plural": "Pipeline标准插件执行数据",
"ordering": ["-id"],
},
),
migrations.CreateModel(
name="TaskflowStatistics",
fields=[
("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="id")),
("instance_id", models.BigIntegerField(db_index=True, verbose_name="Pipeline实例ID")),
("task_instance_id", models.BigIntegerField(db_index=True, verbose_name="Task实例ID")),
("atom_total", models.IntegerField(verbose_name="标准插件总数")),
("subprocess_total", models.IntegerField(verbose_name="子流程总数")),
("gateways_total", models.IntegerField(verbose_name="网关总数")),
("project_id", models.IntegerField(default=-1, help_text="模板所属project id", verbose_name="项目 ID")),
(
"category",
models.CharField(
choices=[
("OpsTools", "运维工具"),
("MonitorAlarm", "监控告警"),
("ConfManage", "配置管理"),
("DevTools", "开发工具"),
("EnterpriseIT", "企业IT"),
("OfficeApp", "办公应用"),
("Other", "其它"),
("Default", "默认分类"),
],
default="Default",
max_length=255,
verbose_name="模板类型",
),
),
("template_id", models.CharField(db_index=True, max_length=255, verbose_name="Pipeline模板ID")),
("task_template_id", models.CharField(db_index=True, max_length=255, verbose_name="Task模板ID")),
("creator", models.CharField(blank=True, max_length=32, verbose_name="创建者")),
("create_time", models.DateTimeField(db_index=True, verbose_name="创建时间")),
("start_time", models.DateTimeField(blank=True, null=True, verbose_name="启动时间")),
("finish_time", models.DateTimeField(blank=True, null=True, verbose_name="结束时间")),
("elapsed_time", models.IntegerField(blank=True, null=True, verbose_name="实例执行耗时(s)")),
(
"create_method",
models.CharField(
choices=[
("app", "手动"),
("api", "API网关"),
("app_maker", "轻应用"),
("periodic", "周期任务"),
("clocked", "计划任务"),
("mobile", "移动端"),
],
default="app",
max_length=30,
verbose_name="实例创建方式",
),
),
],
options={
"verbose_name": "Pipeline实例引用数据",
"verbose_name_plural": "Pipeline实例引用数据",
},
),
migrations.CreateModel(
name="TemplateNodeStatistics",
fields=[
("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="id")),
("component_code", models.CharField(db_index=True, max_length=255, verbose_name="组件编码")),
("template_id", models.BigIntegerField(db_index=True, verbose_name="Pipeline模板ID")),
("task_template_id", models.BigIntegerField(db_index=True, verbose_name="Task模板ID")),
(
"project_id",
models.IntegerField(db_index=True, default=-1, help_text="模板所属project id", verbose_name="项目 ID"),
),
(
"category",
models.CharField(
choices=[
("OpsTools", "运维工具"),
("MonitorAlarm", "监控告警"),
("ConfManage", "配置管理"),
("DevTools", "开发工具"),
("EnterpriseIT", "企业IT"),
("OfficeApp", "办公应用"),
("Other", "其它"),
("Default", "默认分类"),
],
default="Default",
max_length=255,
verbose_name="模板类型",
),
),
("node_id", models.CharField(max_length=32, verbose_name="节点ID")),
("is_sub", models.BooleanField(default=False, verbose_name="是否子流程引用")),
("subprocess_stack", models.TextField(default="[]", help_text="JSON 格式的列表", verbose_name="子流程堆栈")),
("version", models.CharField(default="legacy", max_length=255, verbose_name="插件版本")),
("template_creator", models.CharField(blank=True, max_length=255, null=True, verbose_name="创建者")),
("template_create_time", models.DateTimeField(null=True, verbose_name="模版创建时间")),
("template_edit_time", models.DateTimeField(null=True, verbose_name="模板最近编辑时间")),
],
options={
"verbose_name": "Pipeline标准插件被引用数据",
"verbose_name_plural": "Pipeline标准插件被引用数据",
},
),
migrations.CreateModel(
name="TemplateStatistics",
fields=[
("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="id")),
("template_id", models.BigIntegerField(db_index=True, verbose_name="Pipeline模板ID")),
("task_template_id", models.BigIntegerField(db_index=True, verbose_name="Task模板ID")),
("atom_total", models.IntegerField(verbose_name="标准插件总数")),
("subprocess_total", models.IntegerField(verbose_name="子流程总数")),
("gateways_total", models.IntegerField(verbose_name="网关总数")),
(
"project_id",
models.IntegerField(db_index=True, default=-1, help_text="模板所属project id", verbose_name="项目 ID"),
),
(
"category",
models.CharField(
choices=[
("OpsTools", "运维工具"),
("MonitorAlarm", "监控告警"),
("ConfManage", "配置管理"),
("DevTools", "开发工具"),
("EnterpriseIT", "企业IT"),
("OfficeApp", "办公应用"),
("Other", "其它"),
("Default", "默认分类"),
],
default="Default",
max_length=255,
verbose_name="模板类型",
),
),
("template_creator", models.CharField(blank=True, max_length=255, null=True, verbose_name="创建者")),
("template_create_time", models.DateTimeField(db_index=True, null=True, verbose_name="创建时间")),
("template_edit_time", models.DateTimeField(null=True, verbose_name="最近编辑时间")),
("output_count", models.IntegerField(default=-1, verbose_name="输出变量数")),
("input_count", models.IntegerField(default=-1, verbose_name="输入变量数")),
],
options={
"verbose_name": "Pipeline模板引用数据",
"verbose_name_plural": "Pipeline模板引用数据",
},
),
]
| 53.473404
| 117
| 0.501144
| 818
| 10,053
| 5.933985
| 0.195599
| 0.158632
| 0.071075
| 0.050886
| 0.798723
| 0.79213
| 0.76679
| 0.735064
| 0.728471
| 0.65925
| 0
| 0.010448
| 0.362081
| 10,053
| 187
| 118
| 53.759358
| 0.746453
| 0.004576
| 0
| 0.638889
| 1
| 0
| 0.194303
| 0.005197
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005556
| 0
| 0.027778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9d756bab240dbdf2f2f5c2358b3103e881cf25ec
| 8,440
|
py
|
Python
|
tests/addon/test_async_addon_study.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 9
|
2020-04-20T23:45:44.000Z
|
2021-04-18T11:22:17.000Z
|
tests/addon/test_async_addon_study.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 13
|
2020-02-08T16:15:05.000Z
|
2021-09-13T22:55:28.000Z
|
tests/addon/test_async_addon_study.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 6
|
2020-03-25T17:47:45.000Z
|
2021-04-18T11:22:19.000Z
|
import inspect
from pathlib import Path
import pytest
from dynaconf import settings
@pytest.mark.asyncio
class TestAsyncAddonStudy:
"""Test addon study namespace."""
async def test_upload_dir(
self,
async_api,
async_account,
async_auto_remove,
):
"""Test study upload from path method."""
study_dir = Path(__file__) \
.parents[1] \
.joinpath('dicoms', 'read_only')
namespace_id = async_account.account.namespace_id
_, images_params = await async_api.Addon.Study.upload_dir(
study_dir=study_dir,
namespace_id=namespace_id,
)
image_params = images_params[0]
new_study = await async_api.Addon.Study.wait(
study_uid=image_params.study_uid,
namespace_id=namespace_id,
timeout=settings.API.upload_study_timeout,
ws_timeout=settings.API.upload_study_timeout,
)
assert new_study
async_auto_remove(new_study)
async def test_upload_paths(
self,
async_api,
async_account,
async_auto_remove,
):
"""Test study upload dicoms method."""
study_dir = Path(__file__) \
.parents[1] \
.joinpath('dicoms', 'read_only')
namespace_id = async_account.account.namespace_id
_, images_params = await async_api.Addon.Study.upload_paths(
dicom_paths=study_dir.glob('**/*.dcm'),
namespace_id=namespace_id,
)
image_params = images_params[0]
new_study = await async_api.Addon.Study.wait(
study_uid=image_params.study_uid,
namespace_id=namespace_id,
timeout=settings.API.upload_study_timeout,
ws_timeout=settings.API.upload_study_timeout,
)
assert new_study
async_auto_remove(new_study)
async def test_upload_dir_and_get(
self,
async_api,
async_account,
async_auto_remove,
):
"""Test study upload dir and get method."""
study_dir = Path(__file__) \
.parents[1] \
.joinpath('dicoms', 'read_only')
namespace_id = async_account.account.namespace_id
new_study = await async_api.Addon.Study.upload_dir_and_get(
study_dir=study_dir,
namespace_id=namespace_id,
timeout=settings.API.upload_study_timeout,
ws_timeout=settings.API.upload_study_timeout,
)
assert new_study
async_auto_remove(new_study)
async def test_upload_paths_and_get(
self,
async_api,
async_account,
async_auto_remove,
):
"""Test study upload paths and get method."""
study_dir = Path(__file__) \
.parents[1] \
.joinpath('dicoms', 'read_only')
namespace_id = async_account.account.namespace_id
new_study = await async_api.Addon.Study.upload_paths_and_get(
dicom_paths=study_dir.glob('**/*.dcm'),
namespace_id=namespace_id,
timeout=settings.API.upload_study_timeout,
ws_timeout=settings.API.upload_study_timeout,
)
assert new_study
async_auto_remove(new_study)
async def test_duplicate_and_get(
self,
async_api,
async_account,
async_readonly_study,
async_create_group,
):
"""Test duplicate study and get."""
group = await async_create_group()
duplicated_study = await async_api.Addon.Study.duplicate_and_get(
uuid=async_readonly_study.uuid,
namespace_id=group.namespace_id,
include_attachments=False,
timeout=settings.API['upload_study_timeout'],
ws_timeout=settings.API['ws_timeout'],
)
assert duplicated_study.uuid != async_readonly_study.uuid
assert duplicated_study.study_uid == async_readonly_study.study_uid
assert duplicated_study.phi_namespace == group.namespace_id
await async_api.Study.delete(uuid=duplicated_study.uuid).get()
async def test_duplicate_fixture(
self,
async_api,
async_readonly_study,
async_duplicate,
):
"""Test duplicate study fixture."""
duplicated_study = await async_duplicate(async_readonly_study.uuid)
assert duplicated_study.uuid != async_readonly_study.uuid
assert duplicated_study.study_uid == async_readonly_study.study_uid
def test_anonymize_and_wait_signature(self, async_api):
"""Test anonymize_and_wait signature."""
anonymize_and_wait_s = inspect.signature(
async_api.Addon.Study.anonymize_and_wait,
)
anonymize_s = inspect.signature(async_api.Storage.Study.anonymize)
assert set(anonymize_and_wait_s.parameters) - \
{'timeout', 'ws_timeout'} == set(anonymize_s.parameters)
async def test_anonymize_and_wait(
self,
async_api,
async_readonly_study,
async_auto_remove,
async_storage_auto_remove,
):
"""Test anonymize_and_wait."""
engine_fqdn = async_readonly_study.engine_fqdn
storage_namespace = async_readonly_study.storage_namespace
study_uid = async_readonly_study.study_uid
series_uid = '1.2.840.113619.2.278.3.2831165743.908.1345078604.948'
region = {
'series': {
series_uid: {
'regions': [
{
'x': 10,
'y': 10,
'width': 30,
'height': 40,
},
],
},
},
}
new_study_uid = await async_api.Addon.Study.anonymize_and_wait(
engine_fqdn=engine_fqdn,
namespace=storage_namespace,
to_namespace=storage_namespace,
phi_namespace=async_readonly_study.phi_namespace,
study_uid=study_uid,
region=region,
color='121197149',
)
assert new_study_uid != study_uid
async_storage_auto_remove(
engine_fqdn,
storage_namespace,
new_study_uid,
)
new_study = await async_api.Addon.Study.wait(
study_uid=new_study_uid,
namespace_id=storage_namespace,
timeout=settings.API['upload_study_timeout'],
ws_timeout=settings.API['ws_timeout'],
)
async_auto_remove(new_study)
def test_anonymize_and_get_signature(self, async_api):
"""Test anonymize_and_get signature."""
anonymize_and_get_s = inspect.signature(
async_api.Addon.Study.anonymize_and_get,
)
anonymize_s = inspect.signature(async_api.Storage.Study.anonymize)
assert set(anonymize_and_get_s.parameters) - \
{'timeout', 'ws_timeout'} == set(anonymize_s.parameters)
async def test_anonymize_and_get(
self,
async_api,
async_readonly_study,
async_auto_remove,
async_storage_auto_remove,
):
"""Test anonymize_and_get."""
engine_fqdn = async_readonly_study.engine_fqdn
storage_namespace = async_readonly_study.storage_namespace
study_uid = async_readonly_study.study_uid
series_uid = '1.2.840.113619.2.278.3.2831165743.908.1345078604.948'
region = {
'series': {
series_uid: {
'regions': [
{
'x': 10,
'y': 10,
'width': 30,
'height': 40,
},
],
},
},
}
new_study = await async_api.Addon.Study.anonymize_and_get(
engine_fqdn=engine_fqdn,
namespace=storage_namespace,
to_namespace=storage_namespace,
phi_namespace=async_readonly_study.phi_namespace,
study_uid=study_uid,
region=region,
color='121197149',
)
assert new_study.study_uid != study_uid
async_storage_auto_remove(
engine_fqdn,
storage_namespace,
new_study.study_uid,
)
async_auto_remove(new_study)
| 32.96875
| 75
| 0.59372
| 906
| 8,440
| 5.12362
| 0.102649
| 0.043085
| 0.069798
| 0.046532
| 0.871176
| 0.828738
| 0.822059
| 0.772296
| 0.757863
| 0.737613
| 0
| 0.021766
| 0.325
| 8,440
| 255
| 76
| 33.098039
| 0.793049
| 0.011374
| 0
| 0.688073
| 0
| 0.009174
| 0.042829
| 0.012948
| 0
| 0
| 0
| 0
| 0.059633
| 1
| 0.009174
| false
| 0
| 0.018349
| 0
| 0.03211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9da01d9e1fcce6dbb8327ebcfaf954d5466e6cdd
| 92
|
py
|
Python
|
core/__init__.py
|
DeveloperNeon/reaction-light
|
32a64d76e14be486fc8c766152b777212a7d4217
|
[
"MIT"
] | 1
|
2021-06-10T14:50:51.000Z
|
2021-06-10T14:50:51.000Z
|
core/__init__.py
|
DeveloperNeon/reaction-light
|
32a64d76e14be486fc8c766152b777212a7d4217
|
[
"MIT"
] | null | null | null |
core/__init__.py
|
DeveloperNeon/reaction-light
|
32a64d76e14be486fc8c766152b777212a7d4217
|
[
"MIT"
] | null | null | null |
from .activity import *
from .database import *
from .github import *
from .schema import *
| 18.4
| 23
| 0.73913
| 12
| 92
| 5.666667
| 0.5
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 92
| 4
| 24
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9dd00412e522dd3ff2a0bc82872c210cc53b2c47
| 179
|
py
|
Python
|
layers/poky/scripts/lib/checklayer/case.py
|
dtischler/px30-test
|
55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f
|
[
"Apache-2.0"
] | 53
|
2018-02-28T08:51:32.000Z
|
2022-02-28T06:49:23.000Z
|
layers/poky/scripts/lib/checklayer/case.py
|
dtischler/px30-test
|
55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f
|
[
"Apache-2.0"
] | 27
|
2018-01-25T00:26:53.000Z
|
2020-08-09T05:20:04.000Z
|
layers/poky/scripts/lib/checklayer/case.py
|
dtischler/px30-test
|
55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f
|
[
"Apache-2.0"
] | 51
|
2018-02-21T04:46:08.000Z
|
2022-03-02T04:20:41.000Z
|
# Copyright (C) 2017 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
from oeqa.core.case import OETestCase
class OECheckLayerTestCase(OETestCase):
pass
| 22.375
| 50
| 0.782123
| 23
| 179
| 6.086957
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.150838
| 179
| 7
| 51
| 25.571429
| 0.894737
| 0.47486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
9deb4478641d86962eca4050f0a326415639d8e1
| 22
|
py
|
Python
|
ezmodel/__init__.py
|
UBC-MDS/ezmodel
|
66f7d38c41778c65bbdbec85533c496bb796be47
|
[
"MIT"
] | 3
|
2018-02-15T01:31:50.000Z
|
2018-03-03T18:56:45.000Z
|
ezmodel/__init__.py
|
UBC-MDS/ezmodel
|
66f7d38c41778c65bbdbec85533c496bb796be47
|
[
"MIT"
] | 15
|
2018-02-15T01:38:55.000Z
|
2018-03-24T19:31:51.000Z
|
ezmodel/__init__.py
|
UBC-MDS/ezmodel
|
66f7d38c41778c65bbdbec85533c496bb796be47
|
[
"MIT"
] | null | null | null |
from . import ezmodel
| 11
| 21
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3b0265ff51f7a7d04bef4bc072a9037cc691452c
| 42
|
py
|
Python
|
edgify/functional/__init__.py
|
scale-lab/BitTrain
|
3a15f96cc32222e3d6fceb00a622521e31745d4c
|
[
"BSD-3-Clause"
] | 3
|
2021-12-14T23:52:51.000Z
|
2021-12-26T19:38:55.000Z
|
edgify/functional/__init__.py
|
scale-lab/BitTrain
|
3a15f96cc32222e3d6fceb00a622521e31745d4c
|
[
"BSD-3-Clause"
] | null | null | null |
edgify/functional/__init__.py
|
scale-lab/BitTrain
|
3a15f96cc32222e3d6fceb00a622521e31745d4c
|
[
"BSD-3-Clause"
] | null | null | null |
from .linear import *
from .relu import *
| 21
| 22
| 0.714286
| 6
| 42
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 23
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d1c04bb8baa8ece891420d8c97c2de56d0529cd6
| 665
|
py
|
Python
|
clab/torch/__init__.py
|
chengjianglong/clab
|
504a111a5ffbaa119dc64b30c8f7cb14288923a8
|
[
"Apache-2.0"
] | null | null | null |
clab/torch/__init__.py
|
chengjianglong/clab
|
504a111a5ffbaa119dc64b30c8f7cb14288923a8
|
[
"Apache-2.0"
] | null | null | null |
clab/torch/__init__.py
|
chengjianglong/clab
|
504a111a5ffbaa119dc64b30c8f7cb14288923a8
|
[
"Apache-2.0"
] | 1
|
2020-10-15T00:03:40.000Z
|
2020-10-15T00:03:40.000Z
|
# -*- coding: utf-8 -*-
"""
python -c "import ubelt._internal as a; a.autogen_init('clab.torch', attrs=False)"
"""
# flake8: noqa
from __future__ import absolute_import, division, print_function, unicode_literals
from clab.torch import criterions
from clab.torch import layers
from clab.torch import filters
from clab.torch import fit_harness
from clab.torch import hyperparams
from clab.torch import im_loaders
from clab.torch import lr_schedule
from clab.torch import metrics
from clab.torch import models
from clab.torch import netinfo
from clab.torch import nninit
from clab.torch import nnio
from clab.torch import transforms
from clab.torch import xpu_device
| 31.666667
| 82
| 0.807519
| 103
| 665
| 5.087379
| 0.427184
| 0.257634
| 0.347328
| 0.507634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003425
| 0.121805
| 665
| 20
| 83
| 33.25
| 0.893836
| 0.177444
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.066667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d1d8ee77c604cf464ea8b3fd653baf5751002534
| 156
|
py
|
Python
|
pyasyncserver/pipes/__init__.py
|
agratoth/pyasyncserver
|
6125b8de327b2eb22699e7961ad6bef50ab55a70
|
[
"MIT"
] | null | null | null |
pyasyncserver/pipes/__init__.py
|
agratoth/pyasyncserver
|
6125b8de327b2eb22699e7961ad6bef50ab55a70
|
[
"MIT"
] | null | null | null |
pyasyncserver/pipes/__init__.py
|
agratoth/pyasyncserver
|
6125b8de327b2eb22699e7961ad6bef50ab55a70
|
[
"MIT"
] | null | null | null |
from .pipe import Pipe
from .http_pipe import HTTPPipe
from .nats_pipe import NATSPipe
from .directions import PipeDirections
from .helpers import PipePool
| 26
| 38
| 0.839744
| 22
| 156
| 5.863636
| 0.5
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 156
| 6
| 39
| 26
| 0.948529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d1de40e4c2e1cecf4c4718ef17003e430bd9feb2
| 816
|
py
|
Python
|
Ta-Irikulau/framework/pomdp/model/observation.py
|
mjtsai1974/DevBlog
|
f1429e28e7ea618a64f5e111be4d7f42ae616ce8
|
[
"MIT"
] | null | null | null |
Ta-Irikulau/framework/pomdp/model/observation.py
|
mjtsai1974/DevBlog
|
f1429e28e7ea618a64f5e111be4d7f42ae616ce8
|
[
"MIT"
] | null | null | null |
Ta-Irikulau/framework/pomdp/model/observation.py
|
mjtsai1974/DevBlog
|
f1429e28e7ea618a64f5e111be4d7f42ae616ce8
|
[
"MIT"
] | null | null | null |
class ObservationModel(object):
def __init__(self):
self._observation_matrix = dict()
def __str__(self):
str = ''
for k, v in self._observation_matrix.items():
str += '{}:{}\n'.format(k, v)
return str
@property
def observation_matrix(self):
return self._observation_matrix
"""
Configure the observation probability of o from state sj by action a with probability p
"""
def ConfigureObservationMatrix(self, o, sj, a, p):
self._observation_matrix['{},{},{}'.format(o.Name, sj.Name, a.Name)] = p
#self._observation_matrix[o, sj, a] = p
def Probability(self, o, sj, a):
return self._observation_matrix.get('{},{},{}'.format(o.Name, sj.Name, a.Name))
#return self._observation_matrix[o, sj, a]
| 29.142857
| 91
| 0.610294
| 103
| 816
| 4.61165
| 0.339806
| 0.286316
| 0.309474
| 0.170526
| 0.197895
| 0.197895
| 0.092632
| 0
| 0
| 0
| 0
| 0
| 0.246324
| 816
| 28
| 92
| 29.142857
| 0.772358
| 0.096814
| 0
| 0
| 0
| 0
| 0.03645
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.133333
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d1e162b0dac745f18f723ba55bb2886814b8a042
| 54
|
py
|
Python
|
csv_combinator/__init__.py
|
Blanen/csv_combinator
|
19335f73f434d11b700ad875ae6e244a9656bed5
|
[
"MIT"
] | null | null | null |
csv_combinator/__init__.py
|
Blanen/csv_combinator
|
19335f73f434d11b700ad875ae6e244a9656bed5
|
[
"MIT"
] | null | null | null |
csv_combinator/__init__.py
|
Blanen/csv_combinator
|
19335f73f434d11b700ad875ae6e244a9656bed5
|
[
"MIT"
] | null | null | null |
from csv_combinator.csv_combinator import Combinator
| 27
| 53
| 0.888889
| 7
| 54
| 6.571429
| 0.571429
| 0.565217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092593
| 54
| 1
| 54
| 54
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d1e8b9c42dd9fc705161aa6ccfc0c83fdda9fa65
| 34,103
|
py
|
Python
|
tests/test_base_elements.py
|
benjaminleroy/cowpatch
|
1ef2685de78f3ec10d42271148d20dadf9d2c00c
|
[
"MIT"
] | null | null | null |
tests/test_base_elements.py
|
benjaminleroy/cowpatch
|
1ef2685de78f3ec10d42271148d20dadf9d2c00c
|
[
"MIT"
] | 10
|
2022-02-21T04:05:40.000Z
|
2022-03-26T11:33:12.000Z
|
tests/test_base_elements.py
|
benjaminleroy/cowpatch
|
1ef2685de78f3ec10d42271148d20dadf9d2c00c
|
[
"MIT"
] | null | null | null |
from pytest_regressions import image_regression
from hypothesis import given, strategies as st, settings
import numpy as np
import cowpatch as cow
from cowpatch.utils import inherits, _flatten_nested_list, \
_transform_size_to_pt
import pytest
import io
import plotnine as p9
import plotnine.data as p9_data
import re
import matplotlib.pyplot as plt
import pdb
# inner functions -----
def test_patch__init__():
    """Check that cow.patch collects grobs via keyword, *args, and nesting.

    Note:
        this will likely need updating once patch accepts objects
        beyond plotnine plots and other patches.
    """
    mtcars = p9_data.mpg
    plot0 = (p9.ggplot(p9_data.mpg)
             + p9.geom_bar(p9.aes(x="hwy"))
             + p9.facet_wrap("cyl")
             + p9.labs(title='Plot 0'))
    plot1 = (p9.ggplot(p9_data.mpg)
             + p9.geom_point(p9.aes(x="hwy", y="displ", color="class"))
             + p9.labs(title='Plot 1'))
    plot2 = (p9.ggplot(p9_data.mpg)
             + p9.geom_point(p9.aes(x="hwy", y="displ"))
             + p9.labs(title='Plot 2'))

    # grobs supplied through the keyword parameter
    via_kwarg = cow.patch(grobs=[plot0, plot1, plot2])
    assert len(via_kwarg.grobs) == 3, \
        "grobs can be passed through the grobs parameter directly"

    # grobs supplied positionally through *args
    via_args = cow.patch(plot0, plot1, plot2)
    assert len(via_args.grobs) == 3, \
        "grobs can be passed through the grobs parameter indirectly"

    # supplying grobs both positionally AND via the keyword must error
    with pytest.raises(Exception) as e_info:
        cow.patch(plot0, plot1, plot2,
                  grobs=[plot0, plot1, plot2])

    # structure allows for nesting: a patch can itself be a grob
    nested_kwarg = cow.patch(grobs=[plot0, plot1, via_kwarg])
    assert len(nested_kwarg.grobs) == 3, \
        "grobs can be passed through the grobs parameter directly"

    nested_args = cow.patch(plot0, plot1, via_kwarg)
    assert len(nested_args.grobs) == 3, \
        "grobs can be passed through the grobs parameter indirectly"
def test_patch__size_dive():
    """Test patch._size_dive: suggested overall size, relative sizes, depths.

    Covers a flat patch and a nested patch, each both without parent areas
    (returns suggested width/height + max depth) and with ``parents_areas``
    provided (returns per-image relative widths/heights + per-image depths).
    """
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    # NOTE(review): g3 is built but never used below -- presumably shared
    # setup copied between tests; confirm before removing
    g3 = p9.ggplot(p9_data.mpg[p9_data.mpg["class"].isin(["compact",
                                                          "suv",
                                                          "pickup"])]) +\
        p9.geom_histogram(p9.aes(x="hwy"),bins=10) +\
        p9.facet_wrap("class")
    # basic option ----------
    # design: g0 spans the full left column; g1/g2 split the right column 1:2
    vis1 = cow.patch(g0,g1,g2) +\
        cow.layout(design = np.array([[0,1],
                                      [0,2]]),
                   rel_heights = [1,2])
    sug_width, sug_height, max_depth = \
        vis1._size_dive()
    assert np.allclose(sug_width,
                       (2 * # 1/ rel width of smallest width of images
                        cow.rcParams["base_height"] *
                        cow.rcParams["base_aspect_ratio"])), \
        "suggested width incorrectly sizes the smallest width of the images (v1)"
    assert np.allclose(sug_height,
                       (3 * # 1/ rel width of smallest width of images
                        cow.rcParams["base_height"])), \
        "suggested height incorrectly sizes the smallest height of the images (v1)"
    assert max_depth == 1, \
        "expected depth for basic cow.patch (of depth 1) is incorrect (v1)"
    # of note: the internal uses "pt", but they're actually defined relatively...
    image_rel_widths, image_rel_heights, depths = \
        vis1._size_dive(parents_areas=[cow.area(width=1/6,
                                                height=1/6,
                                                x_left=0,
                                                y_top=0,
                                                _type="pt")])
    assert np.allclose(image_rel_widths, [.5*1/6]*3), \
        "expected widths if input into a smaller image incorrect "+\
        "(v1.1, rel width to top 1/6)"
    assert np.allclose(image_rel_heights, [1/6, 1/6*1/3, 1/6*2/3]), \
        "expected heights if input into a smaller image incorrect "+\
        "(v1.1, rel heights to top 1/6)"
    assert np.allclose(depths, [1+1]*3), \
        "expected depths in basic cow.patch (all of depth 1) input into a "+\
        "1 level deep smaller image is incorrect (v1.1)"
    # two parent areas -> each image is two levels below the top
    image_rel_widths2, image_rel_heights2, depths2 = \
        vis1._size_dive(parents_areas=[cow.area(width=1/3,
                                                height=1/2,
                                                x_left=0,
                                                y_top=0,
                                                _type="pt"),
                                       cow.area(width=1/2,
                                                height=1/3,
                                                x_left=1/2,
                                                y_top=0,
                                                _type="pt")])
    assert np.allclose(image_rel_widths2, [.5*1/6]*3), \
        "expected widths if input into a smaller image incorrect "+\
        "(v1.2, rel width to top 1/6)"
    assert np.allclose(image_rel_heights2, [1/6, 1/6*1/3, 1/6*2/3]), \
        "expected heights if input into a smaller image incorrect "+\
        "(v1.2, rel heights to top 1/6)"
    assert np.allclose(depths2, [1+2]*3), \
        "expected depths in basic cow.patch (all of depth 1) input into a "+\
        "2 levels deep smaller image is incorrect (v1.2)"
    # nested option --------
    # same visual arrangement as vis1, but g1/g2 sit inside an inner patch
    vis_nested = cow.patch(g0,cow.patch(g1,g2)+\
                           cow.layout(ncol=1, rel_heights = [1,2])) +\
        cow.layout(nrow=1)
    sug_width_n, sug_height_n, max_depth_n = \
        vis_nested._size_dive()
    assert np.allclose(sug_width_n,
                       (2 * # 1/ rel width of smallest width of images
                        cow.rcParams["base_height"] *
                        cow.rcParams["base_aspect_ratio"])), \
        "suggested width incorrectly sizes the smallest width of the images "+\
        "(v2 - nested)"
    assert np.allclose(sug_height_n,
                       (3 * # 1/ rel width of smallest width of images
                        cow.rcParams["base_height"])), \
        "suggested height incorrectly sizes the smallest height of the images "+\
        "(v2 - nested)"
    assert max_depth_n == 2, \
        "expected depth for nested cow.patch (of depth 1) is incorrect "+\
        "(v2 - nested)"
    # of note: the internal uses "pt", but they're actually defined relatively...
    image_rel_widths_n, image_rel_heights_n, depths_n = \
        vis_nested._size_dive(parents_areas=[cow.area(width=1/6,
                                                      height=1/6,
                                                      x_left=0,
                                                      y_top=0,
                                                      _type="pt")])
    assert np.allclose(image_rel_widths_n, [.5*1/6]*3), \
        "expected widths if input into a smaller image incorrect "+\
        "(v2.1 - nested, rel width to top 1/6)"
    assert np.allclose(image_rel_heights_n, [1/6, 1/6*1/3, 1/6*2/3]), \
        "expected heights if input into a smaller image incorrect "+\
        "(v2.1 - nested, rel heights to top 1/6)"
    # g0 is depth 1; g1/g2 are depth 2 (inside the inner patch)
    assert np.allclose(depths_n, list(np.array([1,2,2])+1)), \
        "expected depths in nested cow.patch (all of depth 1) input into a "+\
        "1 level deep smaller image is incorrect (v2.1 - nested)"
    image_rel_widths2_n, image_rel_heights2_n, depths2_n = \
        vis_nested._size_dive(parents_areas=[cow.area(width=1/3,
                                                      height=1/2,
                                                      x_left=0,
                                                      y_top=0,
                                                      _type="pt"),
                                             cow.area(width=1/2,
                                                      height=1/3,
                                                      x_left=1/2,
                                                      y_top=0,
                                                      _type="pt")])
    assert np.allclose(image_rel_widths2_n, [.5*1/6]*3), \
        "expected widths if input into a smaller image incorrect "+\
        "(v2.2 - nested, rel width to top 1/6)"
    assert np.allclose(image_rel_heights2_n, [1/6, 1/6*1/3, 1/6*2/3]), \
        "expected heights if input into a smaller image incorrect "+\
        "(v2.2 - nested, rel heights to top 1/6)"
    assert np.allclose(depths2_n, list(np.array([1,2,2])+2)), \
        "expected depths in nested cow.patch (all of depth 1) input into a "+\
        "1 levels deep smaller image is incorrect (v2.2 - nested)"
def test_patch__default_size__both_none():
    """_default_size(None, None) should fall back to _size_dive suggestions
    for both a flat and a nested patch."""
    bar_plot = (p9.ggplot(p9_data.mpg)
                + p9.geom_bar(p9.aes(x="hwy"))
                + p9.labs(title='Plot 0'))
    scatter_plot = (p9.ggplot(p9_data.mpg)
                    + p9.geom_point(p9.aes(x="hwy", y="displ"))
                    + p9.labs(title='Plot 1'))
    colored_scatter = (p9.ggplot(p9_data.mpg)
                       + p9.geom_point(p9.aes(x="hwy", y="displ", color="class"))
                       + p9.labs(title='Plot 2'))
    subset_data = p9_data.mpg[p9_data.mpg["class"].isin(["compact",
                                                         "suv",
                                                         "pickup"])]
    hist_plot = (p9.ggplot(subset_data)
                 + p9.geom_histogram(p9.aes(x="hwy"), bins=10)
                 + p9.facet_wrap("class"))

    # expected fallback sizes (driven by the smallest image in the design)
    expected_width = (2 *  # 1/ rel width of smallest width of images
                      cow.rcParams["base_height"] *
                      cow.rcParams["base_aspect_ratio"])
    expected_height = (3 *  # 1/ rel height of smallest height of images
                       cow.rcParams["base_height"])

    # flat patch ----------
    flat_vis = (cow.patch(bar_plot, scatter_plot, colored_scatter)
                + cow.layout(design=np.array([[0, 1],
                                              [0, 2]]),
                             rel_heights=[1, 2]))
    out_w, out_h = flat_vis._default_size(height=None, width=None)
    assert np.allclose(out_w, expected_width), \
        "_default_size incorrectly connects with _size_dive output - width (v1)"
    assert np.allclose(out_h, expected_height), \
        "_default_size incorrectly connects with _size_dive output - height (v1)"

    # nested patch --------
    inner_patch = (cow.patch(scatter_plot, colored_scatter)
                   + cow.layout(ncol=1, rel_heights=[1, 2]))
    nested_vis = cow.patch(bar_plot, inner_patch) + cow.layout(nrow=1)
    out_w_n, out_h_n = nested_vis._default_size(height=None, width=None)
    assert np.allclose(out_w_n, expected_width), \
        "_default_size incorrectly connects with _size_dive output - width (v2-nested)"
    assert np.allclose(out_h_n, expected_height), \
        "_default_size incorrectly connects with _size_dive output - height (v2-nested)"
@given(st.floats(min_value=.5, max_value=49),
       st.floats(min_value=.5, max_value=49))
def test_patch__default_size__both_not_none(height,width):
    """When both height and width are given, _default_size must echo them
    back unchanged (flat and nested patches)."""
    bar_plot = (p9.ggplot(p9_data.mpg)
                + p9.geom_bar(p9.aes(x="hwy"))
                + p9.labs(title='Plot 0'))
    scatter_plot = (p9.ggplot(p9_data.mpg)
                    + p9.geom_point(p9.aes(x="hwy", y="displ"))
                    + p9.labs(title='Plot 1'))
    colored_scatter = (p9.ggplot(p9_data.mpg)
                       + p9.geom_point(p9.aes(x="hwy", y="displ", color="class"))
                       + p9.labs(title='Plot 2'))
    subset_data = p9_data.mpg[p9_data.mpg["class"].isin(["compact",
                                                         "suv",
                                                         "pickup"])]
    hist_plot = (p9.ggplot(subset_data)
                 + p9.geom_histogram(p9.aes(x="hwy"), bins=10)
                 + p9.facet_wrap("class"))

    # flat patch ----------
    flat_vis = (cow.patch(bar_plot, scatter_plot, colored_scatter)
                + cow.layout(design=np.array([[0, 1],
                                              [0, 2]]),
                             rel_heights=[1, 2]))
    out_w, out_h = flat_vis._default_size(height=height, width=width)
    assert out_w == width and out_h == height, \
        "if height and width are provided, they shouldn't be changed by "+\
        "default size function (v1 - no nesting)"

    # nested patch --------
    inner_patch = (cow.patch(scatter_plot, colored_scatter)
                   + cow.layout(ncol=1, rel_heights=[1, 2]))
    nested_vis = cow.patch(bar_plot, inner_patch) + cow.layout(nrow=1)
    out_w_n, out_h_n = nested_vis._default_size(height=height, width=width)
    assert out_w_n == width and out_h_n == height, \
        "if height and width are provided, they shouldn't be changed by "+\
        "default size function (v2 - nesting)"
@given(st.floats(min_value=.5, max_value=49))
def test_patch__default_size__one_none(height_or_width):
    """Test _default_size when exactly one of height/width is provided.

    The missing dimension should be filled in so the result keeps the
    aspect ratio that _default_size(None, None) would have suggested.
    """
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    # NOTE(review): g3 is built but never used below -- confirm before removing
    g3 = p9.ggplot(p9_data.mpg[p9_data.mpg["class"].isin(["compact",
                                                          "suv",
                                                          "pickup"])]) +\
        p9.geom_histogram(p9.aes(x="hwy"),bins=10) +\
        p9.facet_wrap("class")
    # basic option ----------
    vis1 = cow.patch(g0,g1,g2) +\
        cow.layout(design = np.array([[0,1],
                                      [0,2]]),
                   rel_heights = [1,2])
    # baseline aspect ratio when neither dimension is specified
    default_w, default_h = vis1._default_size(None,None)
    static_aspect_ratio = default_h / default_w
    # provide width ----
    out_w, out_h = vis1._default_size(height=None,width=height_or_width)
    assert np.allclose(out_w, height_or_width) and \
        np.allclose(out_h, height_or_width * static_aspect_ratio), \
        "if *only width* is provided, suggested height is relative to aspect "+\
        "ratio that would be suggested if neither provided (v1)"
    # provide height ----
    out_w, out_h = vis1._default_size(height=height_or_width,width=None)
    assert np.allclose(out_h, height_or_width) and \
        np.allclose(out_w, height_or_width / static_aspect_ratio), \
        "if *only height* is provided, suggested width is relative to aspect "+\
        "ratio that would be suggested if neither provided (v1)"
    # nested option --------
    vis_nested = cow.patch(g0,cow.patch(g1,g2)+\
                           cow.layout(ncol=1, rel_heights = [1,2])) +\
        cow.layout(nrow=1)
    default_w_n, default_h_n = vis_nested._default_size(None,None)
    static_aspect_ratio_n = default_h_n / default_w_n
    # provide width ----
    out_w, out_h = vis_nested._default_size(height=None,width=height_or_width)
    assert np.allclose(out_w, height_or_width) and \
        np.allclose(out_h, height_or_width * static_aspect_ratio_n), \
        "if *only width* is provided, suggested height is relative to aspect "+\
        "ratio that would be suggested if neither provided (v1)"
    # provide height ----
    out_w, out_h = vis_nested._default_size(height=height_or_width,width=None)
    assert np.allclose(out_h, height_or_width) and \
        np.allclose(out_w, height_or_width / static_aspect_ratio_n), \
        "if *only height* is provided, suggested width is relative to aspect "+\
        "ratio that would be suggested if neither provided (v1)"
def test_patch__svg_get_sizes():
    """Test patch._svg_get_sizes for flat and nested patches.

    Checks a large requested size (every inner plot can be sized, all
    logic flags True) and a small requested size where the smallest
    plot fails (its flag is False and its "size" entries are scalings
    below 1 rather than actual sizes).
    """
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    # NOTE(review): g3 is built but never used below -- confirm before removing
    g3 = p9.ggplot(p9_data.mpg[p9_data.mpg["class"].isin(["compact",
                                                          "suv",
                                                          "pickup"])]) +\
        p9.geom_histogram(p9.aes(x="hwy"),bins=10) +\
        p9.facet_wrap("class")
    # basic option ----------
    vis1 = cow.patch(g0,g1,g2) +\
        cow.layout(design = np.array([[0,1],
                                      [0,2]]),
                   rel_heights = [4,1])
    # successful sizings ----
    # 20in x 20in requested (72 pt per inch)
    sizes, logics = vis1._svg_get_sizes(width_pt = 20 * 72,
                                        height_pt = 20 * 72)
    requested_sizes = [(10,20), (10,16), (10,4)]
    assert np.all(logics), \
        "expected all plotnine objects to be able to be sized correctly "+\
        "in very large output (v1)"
    assert type(sizes) is list and \
        np.all([len(s) == 2 and type(s) is tuple for s in sizes]), \
        "expected structure of sizes list is incorrect (v1)"
    assert np.all([2/3 < (sizes[s_idx][0]/requested_sizes[s_idx][0]) < 1.5 and \
                   2/3 < (sizes[s_idx][1]/requested_sizes[s_idx][1]) < 1.5
                   for s_idx in [0,1,2]]), \
        "suggested sizing in sizes isn't too extreme relative to true "+\
        "requested sizes- this is just a sanity check, "+\
        "not a robust test (v1)"
    # failed sizings ------
    sizes_f, logics_f = vis1._svg_get_sizes(width_pt = 10 * 72,
                                            height_pt = 10 * 72)
    requested_sizes_f = [(5,10), (5,8), (5,2)] # final one should fail...
    assert not np.all(logics_f) and (logics_f == [True, True, False]), \
        "expected not all plotnine objects to be able to be sized correctly "+\
        "in small output (v1.1 - failed)"
    assert type(sizes_f) is list and \
        np.all([len(s) == 2 and type(s) is tuple for s in sizes_f]), \
        "expected structure of sizes list is incorrect (v1.1 - failed)"
    assert np.all([2/3 < (sizes_f[s_idx][0]/requested_sizes_f[s_idx][0]) < 1.5 and \
                   2/3 < (sizes_f[s_idx][1]/requested_sizes_f[s_idx][1]) < 1.5
                   for s_idx in [0,1]]), \
        "suggested sizing in sizes (that didn't fail) isn't too extreme "+\
        "relative to true "+\
        "requested sizes- this is just a sanity check, "+\
        "not a robust test (v1.1 - failed)"
    assert sizes_f[2][0] < 1 and sizes_f[2][1] < 1, \
        "expected failed sizing (due to being too small, to return a scaling" +\
        "below 1 (note the correction to scaling should be 1/suggested scaling))," +\
        "(v1.1 - failed)"
    # nested option --------
    # same arrangement, but g1/g2 live in an inner patch -> nested output lists
    vis_nested = cow.patch(g0,cow.patch(g1, g2)+\
                           cow.layout(ncol=1, rel_heights = [4,1])) +\
        cow.layout(nrow=1)
    # successful sizings ----
    sizes_n, logics_n = vis_nested._svg_get_sizes(width_pt = 20 * 72,
                                                  height_pt = 20 * 72)
    requested_sizes_n = [(10,20), (10,16), (10,4)]
    assert np.all(_flatten_nested_list(logics_n)), \
        "expected all plotnine objects to be able to be sized correctly "+\
        "in very large output (v2 - nested)"
    assert type(sizes_n) is list and len(sizes_n) == 2 and \
        type(sizes_n[0]) is tuple and type(sizes_n[1]) is list and \
        len(sizes_n[0]) == 2 and len(sizes_n[1]) == 2 and \
        np.all([len(s) == 2 and type(s) is tuple for s in sizes_n[1]]), \
        "expected structure of sizes list is incorrect (v2 - nested)"
    sizes_n_flattened = _flatten_nested_list(sizes_n)
    assert np.all([2/3 < (sizes_n_flattened[s_idx][0]/requested_sizes[s_idx][0]) < 1.5 and \
                   2/3 < (sizes_n_flattened[s_idx][1]/requested_sizes[s_idx][1]) < 1.5
                   for s_idx in [0,1,2]]), \
        "suggested sizing in sizes isn't too extreme relative to true "+\
        "requested sizes- this is just a sanity check, "+\
        "not a robust test (v2 - nested)"
    # nested vs flat should agree once flattened
    assert np.allclose(sizes_n_flattened, sizes), \
        "expected nested and non-nested suggested sizes to be equal (v1 vs v2)"
    # failed sizings ------
    sizes_f_n, logics_f_n = vis_nested._svg_get_sizes(width_pt = 10 * 72,
                                                      height_pt = 10 * 72)
    requested_sizes_f = [(5,10), (5,8), (5,2)] # final one should fail ...
    logic_f_n_flat = _flatten_nested_list(logics_f_n)
    sizes_f_n_flat = _flatten_nested_list(sizes_f_n)
    assert not np.all(logic_f_n_flat) and \
        (logic_f_n_flat == [True, True, False]), \
        "expected not all plotnine objects to be able to be sized correctly "+\
        "in smaller output (v2.1 - nested, failed)"
    assert type(sizes_f_n) is list and len(sizes_f_n) == 2 and \
        type(sizes_f_n[0]) is tuple and type(sizes_f_n[1]) is list and \
        len(sizes_f_n[0]) == 2 and len(sizes_f_n[1]) == 2 and \
        np.all([len(s) == 2 and type(s) is tuple for s in sizes_f_n[1]]), \
        "expected structure of sizes list is incorrect (v2.1 - nested, failed)"
    assert np.all([2/3 < (sizes_f_n_flat[s_idx][0]/requested_sizes_f[s_idx][0]) < 1.5 and \
                   2/3 < (sizes_f_n_flat[s_idx][1]/requested_sizes_f[s_idx][1]) < 1.5
                   for s_idx in [0,1]]), \
        "suggested sizing in sizes (that didn't fail) isn't too extreme "+\
        "relative to true "+\
        "requested sizes- this is just a sanity check, "+\
        "not a robust test (v2.1 - nested, failed)"
    assert sizes_f_n_flat[2][0] < 1 and sizes_f_n_flat[2][1] < 1, \
        "expected failed sizing (due to being too small, to return a scaling" +\
        "below 1 (note the correction to scaling should be 1/suggested scaling))," +\
        "(v2.1 - nested, failed)"
    assert np.allclose(sizes_f_n_flat, sizes_f), \
        "expected nested and non-nested suggested sizes to be equal (v1.1 vs v2.1 - failed)"
@given(st.floats(min_value=.5, max_value=49),
       st.floats(min_value=.5, max_value=49),
       st.floats(min_value=.5, max_value=49),
       st.floats(min_value=.5, max_value=49),
       st.floats(min_value=.5, max_value=49),
       st.floats(min_value=.5, max_value=49))
def test_patch__process_sizes(w1,h1,w2,h2,w3,h3):
    """Test patch._process_sizes for flat and nested sizes/logics.

    If every logic flag is True the sizes pass through unchanged;
    otherwise a single rescaling value (1 / smallest failing scale)
    is returned.
    """
    # a grob-less patch suffices: _process_sizes only looks at its arguments
    empty_patch = cow.patch()
    # not nested -------
    sizes = [(w1,h1),(w2,h2),(w3,h3)]
    # all true ---
    logics = [True, True, True]
    out_s = empty_patch._process_sizes(sizes = sizes, logics = logics)
    assert out_s == sizes, \
        "expected sizes to return if all logics true"
    # not all true ----
    logics_f = [True, True, False]
    out_s1 = empty_patch._process_sizes(sizes = sizes, logics = logics_f)
    assert np.allclose(out_s1, 1/np.min(sizes[2])), \
        "expected max_scaling should be the max of 1/width_scale and "+\
        "1/height_scale assoicated with failed plot(s) (v1.1 - 1 plot failed)"
    logics_f2 = [True, False, False]
    out_s2 = empty_patch._process_sizes(sizes = sizes, logics = logics_f2)
    assert np.allclose(out_s2, 1/np.min([w2,h2,w3,h3])), \
        "expected max_scaling should be the max of 1/width_scale and "+\
        "1/height_scale assoicated with failed plot(s) (v1.2 - 2 plot failed)"
    # nested ---------
    sizes_n = [(w1,h1),[(w2,h2),(w3,h3)]]
    # all true ---
    logics_n = [True, [True, True]]
    out_s_n = empty_patch._process_sizes(sizes = sizes_n, logics = logics_n)
    assert out_s_n == sizes_n, \
        "expected unflatted sizes to return if all logics true (v2 - nested)"
    # not all true ----
    logics_n_f = [True, [True, False]]
    out_s1 = empty_patch._process_sizes(sizes = sizes_n, logics = logics_n_f)
    assert np.allclose(out_s1, 1/np.min(sizes_n[1][1])), \
        "expected max_scaling should be the max of 1/width_scale and "+\
        "1/height_scale assoicated with failed plot(s) (v2.1 - 1 plot failed)"
    logics_f2 = [True, [False, False]]
    # BUG FIX: this case previously passed the *flat* `sizes` together with
    # the nested `logics_f2`, mismatching the two structures; use the
    # nested `sizes_n` exactly as the v2.1 case above does.
    out_s2 = empty_patch._process_sizes(sizes = sizes_n, logics = logics_f2)
    assert np.allclose(out_s2, 1/np.min([w2,h2,w3,h3])), \
        "expected max_scaling should be the max of 1/width_scale and "+\
        "1/height_scale assoicated with failed plot(s) (v2.2 - 2 plot failed)"
# global savings and showing and creating ------
def _layouts_and_patches_patch_plus_layout(idx):
    """Return a (patch, layout) pair for parametrized test case ``idx`` (0-6).

    Cases: 0 = design as integer matrix, 1 = design as string,
    2 = ncol only, 3 = nrow only, 4 = nrow + ncol,
    5 = nrow + rel_widths, 6 = nrow + rel_widths + rel_heights.
    """
    # creation of some some ggplot objects
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    g3 = p9.ggplot(p9_data.mpg[p9_data.mpg["class"].isin(["compact",
                                                          "suv",
                                                          "pickup"])]) +\
        p9.geom_histogram(p9.aes(x="hwy"), bins=10) +\
        p9.facet_wrap("class")
    if idx == 0:
        patch_obj = cow.patch(g0,g1,g2)
        layout_obj = cow.layout(design = np.array([[0,0,0,1,1,1],
                                                   [0,0,0,2,2,2],
                                                   [0,0,0,2,2,2]]))
    elif idx == 1:
        # same arrangement as idx 0, expressed as a letter-grid string
        patch_obj = cow.patch(g0,g1,g2)
        layout_obj = cow.layout(design = """
        AB
        AC
        AC
        """)
    elif idx == 2:
        patch_obj = cow.patch(g0,g1,g2,g3)
        layout_obj = cow.layout(ncol=3)
    elif idx == 3:
        patch_obj = cow.patch(g0,g1,g2,g3)
        layout_obj = cow.layout(nrow=2)
    elif idx == 4:
        patch_obj = cow.patch(g0,g1,g2,g3)
        layout_obj = cow.layout(nrow=2,ncol=3)
    elif idx == 5:
        patch_obj = cow.patch(g0,g1,g2)
        layout_obj = cow.layout(nrow=1, rel_widths = [1,1,2])
    elif idx == 6:
        patch_obj = cow.patch(g0,g1,g2)
        layout_obj = cow.layout(nrow=2, rel_widths = [1,2],
                                rel_heights = [1,2])
    return patch_obj, layout_obj
@pytest.mark.parametrize("idx", np.arange(7,dtype=int))
def test_patch_plus_layout_second(image_regression, idx):
    """Image-regression test of patch + layout across the 7 layout specs
    produced by _layouts_and_patches_patch_plus_layout."""
    grobs_patch, layout_spec = _layouts_and_patches_patch_plus_layout(idx)
    combined = grobs_patch + layout_spec
    with io.BytesIO() as png_buffer:
        combined.save(filename=png_buffer, width=12, height=10,
                      dpi=96, _format="png", verbose=False)
        image_regression.check(png_buffer.getvalue(), diff_threshold=.1)
def test_patch_plus_layout(image_regression):
    """
    test patch + layout
    """
    # creation of some some ggplot objects
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1_no_legend = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1 no color')
    g1_legend = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 1 color')
    vis_patch = cow.patch(g0,g1_no_legend,g1_legend)
    # in-place addition should attach the layout and still yield a patch
    vis_patch += cow.layout(design = np.array([[0,0,0,1,1,1],
                                               [0,0,0,2,2,2],
                                               [0,0,0,2,2,2]]))
    assert inherits(vis_patch, cow.patch), \
        "check patch addition correctly returns patch object"
    # layout should be reachable both via the name-mangled attribute and
    # the public .layout attribute, and compare equal to a fresh layout
    assert vis_patch._patch__layout == \
        cow.layout(design = np.array([[0,0,0,1,1,1],
                                      [0,0,0,2,2,2],
                                      [0,0,0,2,2,2]])) and \
        vis_patch.layout == \
        cow.layout(design = np.array([[0,0,0,1,1,1],
                                      [0,0,0,2,2,2],
                                      [0,0,0,2,2,2]])), \
        "layout incorrectly saved internally"
    # image-regression check of the rendered png
    with io.BytesIO() as fid2:
        vis_patch.save(filename=fid2, width=12, height=10,
                       dpi=96, _format="png", verbose=False)
        image_regression.check(fid2.getvalue(), diff_threshold=.1)
def test_patch_nesting(image_regression):
    """Image-regression check that a patch nested inside another patch
    renders as expected."""
    bar_plot = (p9.ggplot(p9_data.mpg)
                + p9.geom_bar(p9.aes(x="hwy"))
                + p9.labs(title='Plot 0'))
    scatter_plot = (p9.ggplot(p9_data.mpg)
                    + p9.geom_point(p9.aes(x="hwy", y="displ"))
                    + p9.labs(title='Plot 1'))
    colored_scatter = (p9.ggplot(p9_data.mpg)
                       + p9.geom_point(p9.aes(x="hwy", y="displ", color="class"))
                       + p9.labs(title='Plot 2'))
    # inner column: scatter over colored scatter, 1:2 height split
    left_column = (cow.patch(scatter_plot, colored_scatter)
                   + cow.layout(ncol=1, rel_heights=[1, 2]))
    combined = cow.patch(bar_plot, left_column) + cow.layout(nrow=1)
    with io.BytesIO() as png_buffer:
        combined.save(filename=png_buffer, width=12, height=7,
                      dpi=96, _format="png", verbose=False)
        image_regression.check(png_buffer.getvalue(), diff_threshold=.1)
def test_patch__svg():
    """
    static due to time it takes to run this test :(
    """
    # requested size in inches; converted to points below using 76 pt/in
    # NOTE(review): 76 (not 72) pt/in -- presumably intentional, confirm
    height_in, width_in = 10, 10
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    vis_left = cow.patch(g1,g2) + cow.layout(ncol = 1, rel_heights = [4,1])
    vis_patch = cow.patch(g0, vis_left) + cow.layout(nrow = 1)
    # with 2 attempts, _svg may enlarge the output while keeping aspect ratio
    svg_out, size = vis_patch._svg(width_pt = width_in*76,
                                   height_pt = height_in*76,
                                   num_attempts = 2)
    assert np.allclose(_transform_size_to_pt(svg_out.get_size()),
                       size), \
        "returned svg's size should match what was returned by _svg"
    assert np.allclose(size[0]/size[1], width_in/height_in) and \
        size[0] > width_in*76 and size[1] > height_in*76, \
        "if _svg has attempts to correct the size of the image, then " +\
        "it will change the size relative to requested, but keep aspect ratio"
    # with only 1 attempt there is no room to adjust -> StopIteration
    with pytest.raises(Exception) as e_info:
        svg_out, size = vis_patch._svg(width_pt = width_in*76,
                                       height_pt = height_in*76,
                                       num_attempts = 1) # no attempts to adjust
    assert e_info.typename == "StopIteration" and \
        e_info.value.args[0] == "Attempts to find the correct sizing of "+\
            "innerplots failed with provided parameters", \
        "expected failure to create correct size image to be a certain "+\
        "class of error"
# printing ----------
def test_patch__repr__(monkeypatch,capsys):
    """Check patch's printed representation matches ``<patch (num)>\\n``.

    NOTE(review): despite the name, this exercises ``print(vis_patch)``
    (the __str__ path); ``test_patch__str__`` below exercises ``repr`` --
    the two names look swapped, confirm before renaming.
    """
    # keep the print from opening a matplotlib window
    monkeypatch.setattr(plt, "show", lambda:None)
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    vis_left = cow.patch(g1,g2) + cow.layout(ncol = 1, rel_heights = [1,1])
    vis_patch = cow.patch(g0, vis_left) + cow.layout(nrow = 1)
    print(vis_patch)
    captured = capsys.readouterr()
    # raw string (byte-identical pattern to the old escaped literal) avoids
    # the invalid-escape-sequence warning for "\(" on newer Pythons (W605)
    re_cap = re.search(r"<patch \(-{0,1}[0-9]+\)>\n", captured.out)
    assert re_cap is not None and \
        re_cap.start() == 0 and re_cap.end() == len(captured.out),\
        "expected __str__ expression for patch to be of <patch (num)> format"
def test_patch__str__(capsys):
    """Check patch's repr includes the grob count and layout description.

    NOTE(review): despite the name, this exercises ``repr(vis_patch)``
    (the __repr__ path); ``test_patch__repr__`` above exercises print/str --
    the two names look swapped, confirm before renaming.
    """
    g0 = p9.ggplot(p9_data.mpg) +\
        p9.geom_bar(p9.aes(x="hwy")) +\
        p9.labs(title = 'Plot 0')
    g1 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
        p9.labs(title = 'Plot 1')
    g2 = p9.ggplot(p9_data.mpg) +\
        p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
        p9.labs(title = 'Plot 2')
    vis_left = cow.patch(g1,g2) + cow.layout(ncol = 1, rel_heights = [1,1])
    vis_patch = cow.patch(g0, vis_left) + cow.layout(nrow = 1)
    print(repr(vis_patch))
    captured = capsys.readouterr()
    # raw strings (byte-identical patterns to the old escaped literals) avoid
    # the invalid-escape-sequence warning for "\(" on newer Pythons (W605)
    re_cap = re.search(r"^<patch \(-{0,1}[0-9]+\)>\nnum_grobs: 2"+
                       r"\n---\nlayout:\n<layout \(-{0,1}[0-9]+\)>\n",
                       captured.out)
    assert re_cap is not None and \
        re_cap.start() == 0,\
        "expected __repr__ expression for patch more descriptive w.r.t."+\
        " # grobs and layout"
# grammar -----------
def test_patch__and__(image_regression):
    """Placeholder for patch addition-grammar tests -- not yet implemented.

    NOTE(review): the body below is intentionally commented out; only
    ``pass`` executes. Kept as a sketch of the intended test cases.
    """
    # # creation of some some ggplot objects
    # g0 = p9.ggplot(p9_data.mpg) +\
    #     p9.geom_bar(p9.aes(x="hwy")) +\
    #     p9.labs(title = 'Plot 0')
    # g1 = p9.ggplot(p9_data.mpg) +\
    #     p9.geom_point(p9.aes(x="hwy", y = "displ")) +\
    #     p9.labs(title = 'Plot 1')
    # g2 = p9.ggplot(p9_data.mpg) +\
    #     p9.geom_point(p9.aes(x="hwy", y = "displ", color="class")) +\
    #     p9.labs(title = 'Plot 2')
    # g3 = p9.ggplot(p9_data.mpg[p9_data.mpg["class"].isin(["compact",
    #                                                       "suv",
    #                                                       "pickup"])]) +\
    #     p9.geom_histogram(p9.aes(x="hwy")) +\
    #     p9.facet_wrap("class")
    # g0p = cow.patch(g0)
    # g1p = cow.patch(g1)
    # g2p = cow.patch(g2)
    # g3p = cow.patch(g3)
    # g01 = g0p + g1p
    # g02 = g0p + g2p
    # g012 = g0p + g1p + g2p
    # g012_2 = g01 + g2p
    pass
| 39.243959
| 92
| 0.544351
| 4,798
| 34,103
| 3.697165
| 0.077115
| 0.018603
| 0.027397
| 0.036304
| 0.81831
| 0.791758
| 0.769378
| 0.736682
| 0.724393
| 0.713625
| 0
| 0.050884
| 0.315397
| 34,103
| 868
| 93
| 39.289171
| 0.708913
| 0.072369
| 0
| 0.558923
| 0
| 0.001684
| 0.219782
| 0.00178
| 0
| 0
| 0
| 0
| 0.102694
| 1
| 0.025253
| false
| 0.008418
| 0.020202
| 0
| 0.047138
| 0.003367
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
060096acb0f8ddb5884b9a68c8a2ae60c07396fc
| 6,326
|
py
|
Python
|
tests/invertible_resnet.py
|
paulhfu/FrEIA
|
e5694e596503a930395cde843f6523da09b6b6e7
|
[
"MIT"
] | 1
|
2021-05-14T03:34:36.000Z
|
2021-05-14T03:34:36.000Z
|
tests/invertible_resnet.py
|
paulhfu/FrEIA
|
e5694e596503a930395cde843f6523da09b6b6e7
|
[
"MIT"
] | null | null | null |
tests/invertible_resnet.py
|
paulhfu/FrEIA
|
e5694e596503a930395cde843f6523da09b6b6e7
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import torch
import torch.nn as nn
import torch.optim
import sys
sys.path.append('../')
from FrEIA.modules import *
from FrEIA.framework import *
class ActNormTest(unittest.TestCase):
    """Tests that an ActNorm node's output is channel-wise normalized."""

    def __init__(self, *args):
        super().__init__(*args)
        self.batch_size = 256
        # input shapes for the two graphs: 1D (linear) and CHW (conv)
        self.inp_size_linear = (20,)
        self.inp_size_conv = (3, 10, 10)
        # fixed seed so both networks initialize deterministically
        torch.manual_seed(0)
        # linear graph: input -> ActNorm -> output
        nodes = [InputNode(*self.inp_size_linear, name='input')]
        nodes.append(Node(nodes[-1], ActNorm, {},
                          name=f'actnorm'))
        nodes.append(OutputNode(nodes[-1], name='output'))
        self.net_linear = ReversibleGraphNet(nodes, verbose=False)
        # convolutional graph: input -> ActNorm -> output
        nodes = [InputNode(*self.inp_size_conv, name='input')]
        nodes.append(Node(nodes[-1], ActNorm, {},
                          name=f'actnorm'))
        nodes.append(OutputNode(nodes[-1], name='output'))
        self.net_conv = ReversibleGraphNet(nodes, verbose=False)

    def test_init(self):
        """Output channels should have mean 0 and std 1 after the first
        forward pass (ActNorm's data-dependent init -- presumed; the
        assertions below are what actually pin the behavior)."""
        x = torch.randn(self.batch_size, *self.inp_size_linear)
        # give the input a non-trivial mean/scale per element
        x = x * torch.rand_like(x) + torch.randn_like(x)
        y = self.net_linear(x)
        # Channel-wise mean should be zero
        self.assertTrue(torch.allclose(y.transpose(0,1).contiguous().view(self.inp_size_linear[0], -1).mean(dim=-1),
                                       torch.zeros(self.inp_size_linear[0]), atol=1e-06))
        # Channel-wise std should be one
        self.assertTrue(torch.allclose(y.transpose(0,1).contiguous().view(self.inp_size_linear[0], -1).std(dim=-1),
                                       torch.ones(self.inp_size_linear[0]), atol=1e-06))
        x = torch.randn(self.batch_size, *self.inp_size_conv)
        x = x * torch.rand_like(x) + torch.randn_like(x)
        y = self.net_conv(x)
        # Channel-wise mean should be zero
        self.assertTrue(torch.allclose(y.transpose(0,1).contiguous().view(self.inp_size_conv[0], -1).mean(dim=-1),
                                       torch.zeros(self.inp_size_conv[0]), atol=1e-06))
        # Channel-wise std should be one
        self.assertTrue(torch.allclose(y.transpose(0,1).contiguous().view(self.inp_size_conv[0], -1).std(dim=-1),
                                       torch.ones(self.inp_size_conv[0]), atol=1e-06))
class IResNetTest(unittest.TestCase):
    """Tests invertibility and log-Jacobian estimates of i-ResNet graphs."""

    def __init__(self, *args):
        super().__init__(*args)
        self.batch_size = 7
        self.inp_size_linear = (20,)
        self.inp_size_conv = (3, 10, 10)
        # tolerance for the inverse-reconstruction check
        self.tol = 1e-6
        torch.manual_seed(0)
        # conditional linear graph: 5 x (ActNorm -> IResNetLayer); all
        # IResNet layers share the single condition node `cond`
        nodes = [InputNode(*self.inp_size_linear, name='input')]
        cond = ConditionNode(*self.inp_size_linear, name='cond')
        for i in range(5):
            nodes.append(Node(nodes[-1], ActNorm, {},
                              name=f'actnorm_{i}'))
            nodes.append(Node(nodes[-1], IResNetLayer,
                              {'hutchinson_samples': 20,
                               'internal_size': 100,
                               'n_internal_layers': 3},
                              conditions=[cond],
                              name=f'i_resnet_{i}'))
        nodes.append(OutputNode(nodes[-1], name='output'))
        self.i_resnet_linear = ReversibleGraphNet(nodes + [cond,], verbose=False)
        # apply the layers' Lipschitz correction -- presumably required
        # for the fixed-point inverse to converge; TODO confirm
        for node in self.i_resnet_linear.node_list:
            if isinstance(node.module, IResNetLayer):
                node.module.lipschitz_correction()
        # unconditional convolutional graph with the same 5-block structure
        nodes = [InputNode(*self.inp_size_conv, name='input')]
        for i in range(5):
            nodes.append(Node(nodes[-1], ActNorm, {},
                              name=f'actnorm_{i}'))
            nodes.append(Node(nodes[-1], IResNetLayer, {'hutchinson_samples': 20},
                              name=f'i_resnet_{i}'))
        nodes.append(OutputNode(nodes[-1], name='output'))
        self.i_resnet_conv = ReversibleGraphNet(nodes, verbose=False)
        for node in self.i_resnet_conv.node_list:
            if isinstance(node.module, IResNetLayer):
                node.module.lipschitz_correction()

    def test_inverse(self):
        """Forward then reverse pass should reconstruct the input."""
        x = torch.randn(self.batch_size, *self.inp_size_linear)
        # perturb scale and shift so the input isn't standard normal
        x = x * torch.randn_like(x)
        x = x + torch.randn_like(x)
        c = torch.randn(self.batch_size, *self.inp_size_linear)
        y = self.i_resnet_linear(x, c)
        x_hat = self.i_resnet_linear(y, c, rev=True)
        # Check that inverse is close to input
        self.assertTrue(torch.allclose(x, x_hat, atol=self.tol))
        x = torch.randn(self.batch_size, *self.inp_size_conv)
        x = x * torch.randn_like(x)
        x = x + torch.randn_like(x)
        y = self.i_resnet_conv(x)
        x_hat = self.i_resnet_conv(y, rev=True)
        # Check that inverse is close to input
        self.assertTrue(torch.allclose(x, x_hat, atol=self.tol))

    def test_jacobian(self):
        """Power-series log-det estimate should roughly match the
        numerical approximation (loose tolerances by design)."""
        x = torch.randn(self.batch_size, *self.inp_size_linear)
        # per-sample random scale and shift (broadcast over feature dims)
        x = x * torch.randn(self.batch_size, *[1 for i in range(len(self.inp_size_linear))])
        x = x + torch.randn(self.batch_size, *[1 for i in range(len(self.inp_size_linear))])
        c = torch.randn(self.batch_size, *self.inp_size_linear)
        # Estimate log det of Jacobian via power series
        logdet = self.i_resnet_linear.log_jacobian(x, c=c)
        # Approximate log det of Jacobian numerically
        logdet_num = self.i_resnet_linear.log_jacobian_numerical(x, c=c)
        # Check that they are the same (with huge tolerance)
        # print(f'\n{logdet}\n{logdet_num}')
        self.assertTrue(torch.allclose(logdet, logdet_num, atol=1.5, rtol=0.15))
        x = torch.randn(self.batch_size, *self.inp_size_conv)
        x = x * torch.randn(self.batch_size, *[1 for i in range(len(self.inp_size_conv))])
        x = x + torch.randn(self.batch_size, *[1 for i in range(len(self.inp_size_conv))])
        # Estimate log det of Jacobian via power series
        logdet = self.i_resnet_conv.log_jacobian(x=x)
        # Approximate log det of Jacobian numerically
        logdet_num = self.i_resnet_conv.log_jacobian_numerical(x)
        # Check that they are the same (with huge tolerance)
        # print(f'\n{logdet}\n{logdet_num}')
        self.assertTrue(torch.allclose(logdet, logdet_num, atol=1.5, rtol=0.1))
# Allow running this test module directly with `python <this file>`.
if __name__ == '__main__':
    unittest.main()
| 41.077922
| 116
| 0.601328
| 867
| 6,326
| 4.193772
| 0.155709
| 0.055831
| 0.087734
| 0.074807
| 0.866887
| 0.834708
| 0.817107
| 0.815732
| 0.769252
| 0.769252
| 0
| 0.01965
| 0.267942
| 6,326
| 153
| 117
| 41.346405
| 0.765493
| 0.087417
| 0
| 0.471698
| 0
| 0
| 0.032129
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 1
| 0.04717
| false
| 0
| 0.075472
| 0
| 0.141509
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ae46d74f3b7ebd3f6111d974721adce0cc3a286a
| 5,694
|
py
|
Python
|
couch/tests/test_fields.py
|
madron/django-couch
|
21e4c3a0022bdb7cfaff017f72025afbf5220b3b
|
[
"MIT"
] | null | null | null |
couch/tests/test_fields.py
|
madron/django-couch
|
21e4c3a0022bdb7cfaff017f72025afbf5220b3b
|
[
"MIT"
] | null | null | null |
couch/tests/test_fields.py
|
madron/django-couch
|
21e4c3a0022bdb7cfaff017f72025afbf5220b3b
|
[
"MIT"
] | null | null | null |
import pytz
from datetime import datetime
from django.test import override_settings
from django.test import SimpleTestCase
from django.utils import timezone
from .. import exceptions
from .. import fields
class TextFieldTest(SimpleTestCase):
    """TextField should pass string values through unchanged in both directions."""

    def test_to_json(self):
        result = fields.TextField().to_json('Text')
        self.assertEqual(result, 'Text')

    def test_to_python(self):
        result = fields.TextField().to_python('Text')
        self.assertEqual(result, 'Text')
class DateTimeFieldTest(SimpleTestCase):
    """Serialization tests for DateTimeField.

    ``to_json`` must reject naive datetimes and normalize aware ones to UTC;
    ``to_python`` parses ISO strings, treating naive values as UTC and
    converting the result to the currently active timezone.

    Fixes in this revision: removed a stray no-op ``localize(...)`` expression
    in ``test_to_python_naive`` and corrected the swapped DST comments in
    ``test_to_json_aware_dst`` (February in Rome is standard time, August is
    daylight saving time).
    """

    def test_to_json_naive(self):
        # Naive datetimes are ambiguous and must be rejected outright.
        with self.assertRaises(exceptions.CouchError) as context:
            fields.DateTimeField().to_json(datetime(2013, 5, 1, 12, 0))
        self.assertEqual(context.exception.args, ('Naive datetimes are not supported.',))

    def test_to_json_aware(self):
        # Athens (UTC+3 in May)
        data = pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 12, 0))
        value = fields.DateTimeField().to_json(data)
        self.assertEqual(value, '2013-05-01T09:00:00+00:00')
        # Rome (UTC+2 in May)
        data = pytz.timezone('Europe/Rome').localize(datetime(2013, 5, 1, 12, 0))
        value = fields.DateTimeField().to_json(data)
        self.assertEqual(value, '2013-05-01T10:00:00+00:00')

    def test_to_json_aware_dst(self):
        # No daylight saving time (February, Rome is UTC+1)
        data = pytz.timezone('Europe/Rome').localize(datetime(2013, 2, 1, 12, 0))
        value = fields.DateTimeField().to_json(data)
        self.assertEqual(value, '2013-02-01T11:00:00+00:00')
        # Daylight saving time (August, Rome is UTC+2)
        data = pytz.timezone('Europe/Rome').localize(datetime(2013, 8, 1, 12, 0))
        value = fields.DateTimeField().to_json(data)
        self.assertEqual(value, '2013-08-01T10:00:00+00:00')

    def test_to_python_naive(self):
        # Naive datetimes should not be present in couch but, if present, we assume they
        # are stored in utc
        with timezone.override(pytz.utc):
            value = fields.DateTimeField().to_python('2013-05-01T12:00:00')
            self.assertEqual(value, pytz.utc.localize(datetime(2013, 5, 1, 12, 0)))
        with timezone.override(pytz.timezone('Europe/Athens')):
            value = fields.DateTimeField().to_python('2013-05-01T12:00:00')
            self.assertEqual(value, pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 15, 0)))

    def test_to_python_aware(self):
        with timezone.override(pytz.utc):
            value = fields.DateTimeField().to_python('2013-05-01T12:00:00+00:00')
            self.assertEqual(value, pytz.utc.localize(datetime(2013, 5, 1, 12, 0)))
        with timezone.override(pytz.timezone('Europe/Athens')):
            value = fields.DateTimeField().to_python('2013-05-01T12:00:00+00:00')
            self.assertEqual(value, pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 15, 0)))

    def test_to_python_no_utc(self):
        # Datetime should be stored in utc to permit ordering and filtering in couch.
        # Nethertheless different timezones are correctly parsed
        with timezone.override(pytz.utc):
            value = fields.DateTimeField().to_python('2013-05-01T12:00:00+06:00')
            self.assertEqual(value, pytz.utc.localize(datetime(2013, 5, 1, 6, 0)))
        with timezone.override(pytz.timezone('Europe/Athens')):
            value = fields.DateTimeField().to_python('2013-05-01T12:00:00+06:00')
            self.assertEqual(value, pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 9, 0)))

    @override_settings(TIME_ZONE='Europe/Athens')
    def test_to_python_naive_override(self):
        # Naive datetimes should not be present in couch but, if present, we assume they
        # are stored in utc
        value = fields.DateTimeField().to_python('2013-05-01T12:00:00')
        self.assertEqual(value, pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 15, 0)))

    @override_settings(TIME_ZONE='Europe/Athens')
    def test_to_python_aware_override(self):
        value = fields.DateTimeField().to_python('2013-05-01T12:00:00+00:00')
        self.assertEqual(value, pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 15, 0)))

    @override_settings(TIME_ZONE='Europe/Athens')
    def test_to_python_no_utc_override(self):
        # Datetime should be stored in utc to permit ordering and filtering in couch.
        # Nethertheless different timezones are correctly parsed
        value = fields.DateTimeField().to_python('2013-05-01T12:00:00+06:00')
        self.assertEqual(value, pytz.timezone('Europe/Athens').localize(datetime(2013, 5, 1, 9, 0)))
class JsonFieldTest(SimpleTestCase):
    """JsonField passes JSON-compatible values through unchanged."""

    def test_to_json_dict(self):
        payload = {
            'name': 'Alex Martelli',
            'age': 61,
            'books': [
                {'title': 'Python Cookbok'},
                {'title': 'Python in a Nutshell'},
            ],
        }
        self.assertEqual(fields.JsonField().to_json(payload), payload)

    def test_to_json_list(self):
        payload = ['red', {'name': 'Alex Martelli', 'age': 61}, ['cat', 'dog', 28]]
        self.assertEqual(fields.JsonField().to_json(payload), payload)

    def test_to_python_dict(self):
        raw = (
            '{"name": "Alex Martelli", "age": 61, "books": '
            '[{"title": "Python Cookbok"}, {"title": "Python in a Nutshell"}]}'
        )
        self.assertEqual(fields.JsonField().to_python(raw), raw)

    def test_to_python_list(self):
        raw = '["red", {"name": "Alex Martelli", "age": 61}, ["cat", "dog", 28]]'
        self.assertEqual(fields.JsonField().to_python(raw), raw)
| 46.292683
| 101
| 0.654549
| 754
| 5,694
| 4.842175
| 0.143236
| 0.029581
| 0.104081
| 0.049849
| 0.837031
| 0.770748
| 0.746919
| 0.746919
| 0.727746
| 0.709942
| 0
| 0.08083
| 0.204777
| 5,694
| 122
| 102
| 46.672131
| 0.725486
| 0.089919
| 0
| 0.434783
| 0
| 0.01087
| 0.158058
| 0.048365
| 0
| 0
| 0
| 0
| 0.228261
| 1
| 0.163043
| false
| 0
| 0.076087
| 0
| 0.271739
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
883bb0fe8219df6a1f658ce0cd57579e65be22ef
| 126
|
py
|
Python
|
api/views.py
|
Bahman-Ahmadi/mail_api
|
75d946fc36e23f2772fa675ec125f69ab528e26c
|
[
"MIT"
] | 2
|
2021-12-06T16:22:21.000Z
|
2021-12-07T07:37:17.000Z
|
api/views.py
|
Bahman-Ahmadi/mail_api
|
75d946fc36e23f2772fa675ec125f69ab528e26c
|
[
"MIT"
] | null | null | null |
api/views.py
|
Bahman-Ahmadi/mail_api
|
75d946fc36e23f2772fa675ec125f69ab528e26c
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def index(request):
    """Render the API landing page template."""
    # The template path is constant, so a plain string literal suffices
    # (the original used an f-string with no placeholders).
    return render(request, 'api/index.html')
| 21
| 42
| 0.769841
| 19
| 126
| 5.105263
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 126
| 5
| 43
| 25.2
| 0.881818
| 0.18254
| 0
| 0
| 0
| 0
| 0.138614
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
88770fcbb95bdaad15d6a320f9fdae236af207b7
| 1,599
|
py
|
Python
|
S4/S4 Library/simulation/postures/posture_tunables.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/postures/posture_tunables.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/postures/posture_tunables.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from postures.posture_cost import TunablePostureCostVariant
from postures.posture_validators import TunablePostureValidatorVariant
from sims4.tuning.tunable import OptionalTunable, TunableTuple, TunableList
class TunableSupportedPostureTransitionData(OptionalTunable):
    # Optional tuning wrapper pairing a posture-transition cost with a list of
    # validators; the optional tunable is enabled by default.
    # NOTE(review): this is decompiled game tuning code — the embedded
    # description string is authored tuning documentation, left untouched.
    def __init__(self, *args, **kwargs):
        # Delegate to OptionalTunable with a fixed TunableTuple payload
        # (cost variant + validator list) and enabled_by_default=True.
        super().__init__(*args, tunable=TunableTuple(cost=TunablePostureCostVariant(), validators=TunableList(description='\n                Define under what circumstances this transition is valid.\n                There are performance implications of adding tested edges to\n                the posture graph. \n                \n                In general, this should be handled by testing posture-\n                providing interactions altogether. This should really only\n                be used to restrict the ability to go from a specific\n                posture to another specific posture under certain\n                circumstances.\n                \n                e.g. Prevent Squeamish Sims from sitting on dirty toilets.\n                * Do not use this tuning. Instead, test out the interaction\n                directly.\n                \n                e.g. Prevent Toddlers with low motor skill from entering the\n                High Chair posture from stand. However, allow them to be\n                placed on the High Chair from carry.\n                * Use this tuning.\n                ', tunable=TunablePostureValidatorVariant())), enabled_by_default=True, **kwargs)
| 177.666667
| 1,286
| 0.623515
| 171
| 1,599
| 5.760234
| 0.567251
| 0.006091
| 0.038579
| 0.008122
| 0.022335
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000924
| 0.323327
| 1,599
| 8
| 1,287
| 199.875
| 0.909427
| 0
| 0
| 0
| 0
| 0.166667
| 0.676673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
31fff98ed02dcd37f938f1038b71b5268fc1d5ed
| 125
|
py
|
Python
|
cvpl-cms/cms/exceptions.py
|
robinsax/canvas-plugin-multirepo
|
20fd6a3cc42af5f2cde73e3b100d3edeb4e50c01
|
[
"Apache-2.0"
] | null | null | null |
cvpl-cms/cms/exceptions.py
|
robinsax/canvas-plugin-multirepo
|
20fd6a3cc42af5f2cde73e3b100d3edeb4e50c01
|
[
"Apache-2.0"
] | null | null | null |
cvpl-cms/cms/exceptions.py
|
robinsax/canvas-plugin-multirepo
|
20fd6a3cc42af5f2cde73e3b100d3edeb4e50c01
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
'''
Package exceptions.
'''
class NoSuchContent(Exception): pass
class ContentSyntaxError(Exception): pass
| 13.888889
| 41
| 0.752
| 13
| 125
| 7.230769
| 0.769231
| 0.276596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009091
| 0.12
| 125
| 8
| 42
| 15.625
| 0.845455
| 0.272
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
ee02a38de985251928ab46696007ef3a937cc03c
| 143
|
py
|
Python
|
ctypes_generation/extended_structs/_SYMBOL_INFO.py
|
lucasg/PythonForWindows
|
578b4d9193f0daf31b4f522304f1d8fd3346d281
|
[
"BSD-3-Clause"
] | null | null | null |
ctypes_generation/extended_structs/_SYMBOL_INFO.py
|
lucasg/PythonForWindows
|
578b4d9193f0daf31b4f522304f1d8fd3346d281
|
[
"BSD-3-Clause"
] | null | null | null |
ctypes_generation/extended_structs/_SYMBOL_INFO.py
|
lucasg/PythonForWindows
|
578b4d9193f0daf31b4f522304f1d8fd3346d281
|
[
"BSD-3-Clause"
] | null | null | null |
# NOTE(review): this is a template fragment for generated ctypes code —
# `_SYMBOL_INFO` and `SymTagEnum` must be defined by the surrounding
# generated module before this snippet is spliced in.
old_SYMBOL_INFO = _SYMBOL_INFO
class _SYMBOL_INFO(old_SYMBOL_INFO):
    # Extends the generated structure with a convenience accessor.
    @property
    def tag(self):
        # Translate the raw integer Tag field via SymTagEnum.mapper
        # (presumably int -> enum member; confirm against the generator).
        return SymTagEnum.mapper[self.Tag]
| 28.6
| 42
| 0.741259
| 20
| 143
| 4.9
| 0.55
| 0.408163
| 0.265306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174825
| 143
| 5
| 42
| 28.6
| 0.830508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ee0ef75541d443d6c940e88c2a8751ee9ab90933
| 24
|
py
|
Python
|
terminal/__init__.py
|
gvb84/term-emu
|
ef38abb0d45f0368666112642114a24ad2bb15ad
|
[
"BSD-3-Clause"
] | 5
|
2019-04-17T19:17:59.000Z
|
2021-02-17T08:36:28.000Z
|
terminal/__init__.py
|
gvb84/term-emu
|
ef38abb0d45f0368666112642114a24ad2bb15ad
|
[
"BSD-3-Clause"
] | null | null | null |
terminal/__init__.py
|
gvb84/term-emu
|
ef38abb0d45f0368666112642114a24ad2bb15ad
|
[
"BSD-3-Clause"
] | 6
|
2018-12-16T23:38:52.000Z
|
2021-07-02T10:56:17.000Z
|
from . import emulator
| 8
| 22
| 0.75
| 3
| 24
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 24
| 2
| 23
| 12
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ee29deb0a8fb92a7535179aa9654194554095252
| 42
|
py
|
Python
|
test.py
|
HengyueLi/PyDictFileEncy
|
67ed23c99eda2577e36e43eafd2c6a1823f615c2
|
[
"MIT"
] | null | null | null |
test.py
|
HengyueLi/PyDictFileEncy
|
67ed23c99eda2577e36e43eafd2c6a1823f615c2
|
[
"MIT"
] | null | null | null |
test.py
|
HengyueLi/PyDictFileEncy
|
67ed23c99eda2577e36e43eafd2c6a1823f615c2
|
[
"MIT"
] | null | null | null |
from pydictfileency import PyDictFileEncy
| 21
| 41
| 0.904762
| 4
| 42
| 9.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ee42eb6a3d4074b264d2d52063a484bfca8b608e
| 299
|
py
|
Python
|
backend/tests/test_polarity.py
|
DKeen0123/SentiMind
|
0ffb702e88879b3e2e02d3d94a703b1f8a785bd3
|
[
"MIT"
] | 5
|
2018-04-09T16:47:53.000Z
|
2018-07-05T11:03:25.000Z
|
backend/tests/test_polarity.py
|
DKeen0123/SentiMind
|
0ffb702e88879b3e2e02d3d94a703b1f8a785bd3
|
[
"MIT"
] | 2
|
2018-04-09T17:40:40.000Z
|
2020-07-07T21:12:07.000Z
|
backend/tests/test_polarity.py
|
marcusfgardiner/SentiMind
|
d14b366ab36190df0bf3c867a149b7260ed1e2e4
|
[
"MIT"
] | 3
|
2018-04-12T22:14:55.000Z
|
2018-04-17T10:36:58.000Z
|
from .context import polarity
def test_pos_polarity_result():
    # Scores well above zero are labelled 'positive'.
    label = polarity.polarity_result(0.8)
    assert label == 'positive'
def test_neg_polarity_result():
    # Scores well below zero are labelled 'negative'.
    label = polarity.polarity_result(-0.8)
    assert label == 'negative'
def test_neutral_polarity_result():
    # A zero score is labelled 'neutral'.
    label = polarity.polarity_result(0)
    assert label == 'neutral'
| 21.357143
| 55
| 0.752508
| 39
| 299
| 5.461538
| 0.384615
| 0.394366
| 0.28169
| 0.394366
| 0.615023
| 0.615023
| 0.615023
| 0.413146
| 0
| 0
| 0
| 0.019305
| 0.133779
| 299
| 13
| 56
| 23
| 0.803089
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.428571
| true
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c99ea9698482e399619c296762fdbe9797abfcb4
| 6,510
|
py
|
Python
|
coba/tests/test_environments_formats.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
coba/tests/test_environments_formats.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
coba/tests/test_environments_formats.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import unittest
from coba.environments.formats import EnvironmentFileFmtV1
from coba.environments.core import SimulatedEnvironment
class EnvironmentFileFmtV1_Tests(unittest.TestCase):
    """Tests for the V1 environment-definition file format.

    Each test decodes a JSON experiment description with
    ``EnvironmentFileFmtV1().filter(json.loads(...))`` and checks the
    resulting environments' types and params.  The parse and assertion
    boilerplate, previously duplicated in every test, lives in the two
    private helpers below.
    """

    def _parse(self, json_txt):
        # Shared parsing step: decode the JSON text and run it through the
        # V1 format filter to produce environments.
        return EnvironmentFileFmtV1().filter(json.loads(json_txt))

    def _assert_environments(self, simulations, expected_params, count=None):
        # Verify type and params for each expected environment.  `count`
        # additionally pins the total number of environments, and is passed
        # only where the original test asserted the length.
        if count is not None:
            self.assertEqual(count, len(simulations))
        for i, params in enumerate(expected_params):
            self.assertIsInstance(simulations[i], SimulatedEnvironment)
            self.assertEqual(params, simulations[i].params)

    def test_one_simulation(self):
        json_txt = """{
            "simulations" : [
                { "OpenmlSimulation": 150 }
            ]
        }"""
        self._assert_environments(self._parse(json_txt), [{'openml': 150}])

    def test_raw_simulation(self):
        json_txt = """{
            "simulations" : { "OpenmlSimulation": 150 }
        }"""
        self._assert_environments(self._parse(json_txt), [{'openml': 150}])

    def test_one_simulation_one_filter(self):
        json_txt = """{
            "simulations" : [
                [{ "OpenmlSimulation": 150 }, {"Take":10} ]
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150, 'take': 10}])

    def test_one_simulation_two_filters(self):
        json_txt = """{
            "simulations" : [
                [{ "OpenmlSimulation": 150 }, {"Take":[10,20], "method":"foreach"} ]
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150, 'take': 10},
                                   {'openml': 150, 'take': 20}],
                                  count=2)

    def test_two_simulations_two_filters(self):
        json_txt = """{
            "simulations" : [
                [{ "OpenmlSimulation": [150,151], "method":"foreach" }, { "Take":[10,20], "method":"foreach" }]
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150, 'take': 10},
                                   {'openml': 150, 'take': 20},
                                   {'openml': 151, 'take': 10},
                                   {'openml': 151, 'take': 20}],
                                  count=4)

    def test_two_singular_simulations(self):
        json_txt = """{
            "simulations" : [
                {"OpenmlSimulation": 150},
                {"OpenmlSimulation": 151}
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150}, {'openml': 151}])

    def test_one_foreach_simulation(self):
        json_txt = """{
            "simulations" : [
                {"OpenmlSimulation": [150,151], "method":"foreach"}
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150}, {'openml': 151}])

    def test_one_variable(self):
        json_txt = """{
            "variables": {"$openml_sims": {"OpenmlSimulation": [150,151], "method":"foreach"} },
            "simulations" : [ "$openml_sims" ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150}, {'openml': 151}])

    def test_two_variables(self):
        json_txt = """{
            "variables": {
                "$openmls": {"OpenmlSimulation": [150,151], "method":"foreach"},
                "$takes"  : {"Take":[10,20], "method":"foreach"}
            },
            "simulations" : [
                ["$openmls", "$takes"],
                "$openmls"
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150, 'take': 10},
                                   {'openml': 150, 'take': 20},
                                   {'openml': 151, 'take': 10},
                                   {'openml': 151, 'take': 20},
                                   {'openml': 150},
                                   {'openml': 151}],
                                  count=6)

    def test_pipe_list(self):
        json_txt = """{
            "simulations" : [
                [ {"OpenmlSimulation":150}, [ {"Take":10}, {"Take":20} ] ]
            ]
        }"""
        self._assert_environments(self._parse(json_txt),
                                  [{'openml': 150, 'take': 10},
                                   {'openml': 150, 'take': 20}],
                                  count=2)
# Allow running this test module directly with `python <this file>`.
if __name__ == '__main__':
    unittest.main()
| 39.93865
| 111
| 0.61828
| 572
| 6,510
| 6.933566
| 0.097902
| 0.102118
| 0.179778
| 0.090772
| 0.873424
| 0.855774
| 0.812153
| 0.801311
| 0.762229
| 0.719617
| 0
| 0.044053
| 0.232873
| 6,510
| 163
| 112
| 39.93865
| 0.7501
| 0
| 0
| 0.630769
| 0
| 0.007692
| 0.252496
| 0.00384
| 0
| 0
| 0
| 0
| 0.384615
| 1
| 0.076923
| false
| 0
| 0.030769
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c9c07093d9dd30024a3544b65390abfc817346db
| 2,275
|
py
|
Python
|
tests/contracts/KT1Cx5ohe4r8QgtP647eidHgZBJhr9L5DSJA/test_michelson_coding_KT1Cx5.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2020-08-11T02:31:24.000Z
|
2020-08-11T02:31:24.000Z
|
tests/contracts/KT1Cx5ohe4r8QgtP647eidHgZBJhr9L5DSJA/test_michelson_coding_KT1Cx5.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2020-12-30T16:44:56.000Z
|
2020-12-30T16:44:56.000Z
|
tests/contracts/KT1Cx5ohe4r8QgtP647eidHgZBJhr9L5DSJA/test_michelson_coding_KT1Cx5.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2022-03-20T19:01:00.000Z
|
2022-03-20T19:01:00.000Z
|
from unittest import TestCase
from tests import get_data
from pytezos.michelson.micheline import michelson_to_micheline
from pytezos.michelson.formatter import micheline_to_michelson
class MichelsonCodingTestKT1Cx5(TestCase):
    """Michelson <-> Micheline round-trip coding tests for contract KT1Cx5...

    Checks, for both the contract code and its storage, that:
    * parsing the ``.tz`` source yields the stored Micheline JSON,
    * formatting the Micheline JSON reproduces the ``.tz`` source,
    * format followed by parse is the identity on the Micheline JSON.

    The fixture directory path, previously duplicated as a long literal in
    every test, is defined once below.
    """

    # Single source of truth for this contract's fixture directory.
    CONTRACT_DIR = 'contracts/KT1Cx5ohe4r8QgtP647eidHgZBJhr9L5DSJA'

    def setUp(self):
        self.maxDiff = None  # show full diffs when large trees differ

    def _path(self, filename):
        # Fixture path for a file inside the contract directory.
        return self.CONTRACT_DIR + '/' + filename

    def test_michelson_parse_code_KT1Cx5(self):
        expected = get_data(path=self._path('code_KT1Cx5.json'))
        actual = michelson_to_micheline(get_data(path=self._path('code_KT1Cx5.tz')))
        self.assertEqual(expected, actual)

    def test_michelson_format_code_KT1Cx5(self):
        expected = get_data(path=self._path('code_KT1Cx5.tz'))
        actual = micheline_to_michelson(get_data(path=self._path('code_KT1Cx5.json')),
                                        inline=True)
        self.assertEqual(expected, actual)

    def test_michelson_inverse_code_KT1Cx5(self):
        expected = get_data(path=self._path('code_KT1Cx5.json'))
        actual = michelson_to_micheline(micheline_to_michelson(expected))
        self.assertEqual(expected, actual)

    def test_michelson_parse_storage_KT1Cx5(self):
        expected = get_data(path=self._path('storage_KT1Cx5.json'))
        actual = michelson_to_micheline(get_data(path=self._path('storage_KT1Cx5.tz')))
        self.assertEqual(expected, actual)

    def test_michelson_format_storage_KT1Cx5(self):
        expected = get_data(path=self._path('storage_KT1Cx5.tz'))
        actual = micheline_to_michelson(get_data(path=self._path('storage_KT1Cx5.json')),
                                        inline=True)
        self.assertEqual(expected, actual)

    def test_michelson_inverse_storage_KT1Cx5(self):
        expected = get_data(path=self._path('storage_KT1Cx5.json'))
        actual = michelson_to_micheline(micheline_to_michelson(expected))
        self.assertEqual(expected, actual)
| 42.12963
| 88
| 0.735824
| 222
| 2,275
| 7.247748
| 0.162162
| 0.047856
| 0.068365
| 0.124301
| 0.845245
| 0.845245
| 0.845245
| 0.835301
| 0.821628
| 0.821628
| 0
| 0.067391
| 0.191209
| 2,275
| 53
| 89
| 42.924528
| 0.807065
| 0
| 0
| 0.55814
| 0
| 0
| 0.28
| 0.28
| 0
| 0
| 0
| 0
| 0.139535
| 1
| 0.162791
| false
| 0
| 0.093023
| 0
| 0.27907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c9e3fe30243476c10915f7f270526901151fc5b7
| 167
|
py
|
Python
|
app/recipe/admin.py
|
sazzadrupak/recipe-app-api
|
0e108e5446d0d4c47a209df4a92aa6246e811c8f
|
[
"MIT"
] | null | null | null |
app/recipe/admin.py
|
sazzadrupak/recipe-app-api
|
0e108e5446d0d4c47a209df4a92aa6246e811c8f
|
[
"MIT"
] | null | null | null |
app/recipe/admin.py
|
sazzadrupak/recipe-app-api
|
0e108e5446d0d4c47a209df4a92aa6246e811c8f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from recipe import models
# Expose the recipe models in the Django admin, in the same order as before.
for _model in (models.Tag, models.Ingredient, models.Recipe):
    admin.site.register(_model)
| 20.875
| 38
| 0.826347
| 24
| 167
| 5.75
| 0.458333
| 0.195652
| 0.369565
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077844
| 167
| 7
| 39
| 23.857143
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a0208df79d782e9296ccfee15a7e9939a2f8c801
| 42
|
py
|
Python
|
hello_world.py
|
rjamesg/profiles-rest-api
|
9d270ce64affbd257a007edcd1bbe0134b7679f2
|
[
"MIT"
] | null | null | null |
hello_world.py
|
rjamesg/profiles-rest-api
|
9d270ce64affbd257a007edcd1bbe0134b7679f2
|
[
"MIT"
] | null | null | null |
hello_world.py
|
rjamesg/profiles-rest-api
|
9d270ce64affbd257a007edcd1bbe0134b7679f2
|
[
"MIT"
] | null | null | null |
print("Hello World! We are on Vagrant!")
| 21
| 41
| 0.690476
| 7
| 42
| 4.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 1
| 42
| 42
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
4e426a88b11a6b742b80bb53a5f8f67d668251b7
| 82
|
py
|
Python
|
01-Lesson-Plans/02-Python/3/Activities/05-Ins_Imports/Solved/functions.py
|
tatianegercina/FinTech
|
b40687aa362d78674e223eb15ecf14bc59f90b62
|
[
"ADSL"
] | 1
|
2021-04-13T07:14:34.000Z
|
2021-04-13T07:14:34.000Z
|
01-Lesson-Plans/02-Python/3/Activities/05-Ins_Imports/Solved/functions.py
|
tatianegercina/FinTech
|
b40687aa362d78674e223eb15ecf14bc59f90b62
|
[
"ADSL"
] | 2
|
2021-06-02T03:14:19.000Z
|
2022-02-11T23:21:24.000Z
|
01-Lesson-Plans/02-Python/3/Activities/05-Ins_Imports/Solved/functions.py
|
tatianegercina/FinTech
|
b40687aa362d78674e223eb15ecf14bc59f90b62
|
[
"ADSL"
] | 1
|
2021-05-07T13:26:50.000Z
|
2021-05-07T13:26:50.000Z
|
def print_hello():
    """Write the greeting 'hello!' to stdout."""
    greeting = "hello!"
    print(greeting)
def print_goodbye():
    """Write the farewell 'goodbye!' to stdout."""
    farewell = "goodbye!"
    print(farewell)
| 16.4
| 21
| 0.634146
| 10
| 82
| 5
| 0.4
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 82
| 5
| 21
| 16.4
| 0.735294
| 0
| 0
| 0
| 0
| 0
| 0.168675
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
4e96f66b91dd4f41805527533c53f8654924a725
| 46
|
py
|
Python
|
papuanvoices/tests/conftest.py
|
blurks/papuanvoices
|
03f02643d47ce62ab75129ed88af7ceee3681595
|
[
"Apache-2.0"
] | null | null | null |
papuanvoices/tests/conftest.py
|
blurks/papuanvoices
|
03f02643d47ce62ab75129ed88af7ceee3681595
|
[
"Apache-2.0"
] | null | null | null |
papuanvoices/tests/conftest.py
|
blurks/papuanvoices
|
03f02643d47ce62ab75129ed88af7ceee3681595
|
[
"Apache-2.0"
] | 1
|
2021-11-16T13:36:04.000Z
|
2021-11-16T13:36:04.000Z
|
from papuanvoices import models
import pytest
| 15.333333
| 31
| 0.869565
| 6
| 46
| 6.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 46
| 2
| 32
| 23
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4ed90efc74402e5d0b76964cf1b13b76d720b282
| 78
|
py
|
Python
|
Calculator_Application/calculations/sub.py
|
jpweldon/Module_2_Practice
|
cb546bbfcf5ffb7c6388f854e0eb8873834cfab9
|
[
"MIT"
] | null | null | null |
Calculator_Application/calculations/sub.py
|
jpweldon/Module_2_Practice
|
cb546bbfcf5ffb7c6388f854e0eb8873834cfab9
|
[
"MIT"
] | null | null | null |
Calculator_Application/calculations/sub.py
|
jpweldon/Module_2_Practice
|
cb546bbfcf5ffb7c6388f854e0eb8873834cfab9
|
[
"MIT"
] | null | null | null |
def sub(num1, num2):
    """Return the difference ``num1 - num2``."""
    difference = num1 - num2
    return difference
| 13
| 31
| 0.692308
| 11
| 78
| 4.909091
| 0.818182
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.230769
| 78
| 5
| 32
| 15.6
| 0.833333
| 0.371795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
14cad1f2e0afa17c911b2fce48a465fd3e36ed39
| 340
|
py
|
Python
|
terrascript/profitbricks/d.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/profitbricks/d.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/profitbricks/d.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/profitbricks/d.py
import terrascript
class profitbricks_datacenter(terrascript.Data):
    """Terraform data source ``profitbricks_datacenter``."""
class profitbricks_location(terrascript.Data):
    """Terraform data source ``profitbricks_location``."""
class profitbricks_image(terrascript.Data):
    """Terraform data source ``profitbricks_image``."""
class profitbricks_resource(terrascript.Data):
    """Terraform data source ``profitbricks_resource``."""
class profitbricks_snapshot(terrascript.Data):
    """Empty marker subclass of ``terrascript.Data`` for the ``profitbricks_snapshot`` data source."""
    pass
| 14.782609
| 48
| 0.785294
| 36
| 340
| 7.277778
| 0.361111
| 0.324427
| 0.362595
| 0.366412
| 0.549618
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144118
| 340
| 22
| 49
| 15.454545
| 0.900344
| 0.085294
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.454545
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
090f4e6e05a6575b5f342af8eea7aa2d2e420946
| 114
|
py
|
Python
|
external/loaders/loaders/mappers/_nudged/__init__.py
|
jacnugent/fv3net
|
84958651bdd17784fdab98f87ad0d65414c03368
|
[
"MIT"
] | 5
|
2021-03-20T22:42:40.000Z
|
2021-06-30T18:39:36.000Z
|
external/loaders/loaders/mappers/_nudged/__init__.py
|
jacnugent/fv3net
|
84958651bdd17784fdab98f87ad0d65414c03368
|
[
"MIT"
] | 195
|
2021-09-16T05:47:18.000Z
|
2022-03-31T22:03:15.000Z
|
external/loaders/loaders/mappers/_nudged/__init__.py
|
ai2cm/fv3net
|
e62038aee0a97d6207e66baabd8938467838cf51
|
[
"MIT"
] | 1
|
2021-06-16T22:04:24.000Z
|
2021-06-16T22:04:24.000Z
|
from ._nudged import (
open_nudge_to_obs,
open_nudge_to_fine,
open_nudge_to_fine_multiple_datasets,
)
| 19
| 41
| 0.77193
| 17
| 114
| 4.470588
| 0.588235
| 0.355263
| 0.434211
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 114
| 5
| 42
| 22.8
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
091274132c89a6f8a3c04e39228dc741dcff192c
| 2,000
|
py
|
Python
|
pinboard/test_text_transforms.py
|
bellisk/junkdrawer
|
82aed500678ed60be61170c91371a4fc357b0e0c
|
[
"MIT"
] | null | null | null |
pinboard/test_text_transforms.py
|
bellisk/junkdrawer
|
82aed500678ed60be61170c91371a4fc357b0e0c
|
[
"MIT"
] | null | null | null |
pinboard/test_text_transforms.py
|
bellisk/junkdrawer
|
82aed500678ed60be61170c91371a4fc357b0e0c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8
import pytest
from text_transforms import apply_markdown_blockquotes, cleanup_blockquote_whitespace
@pytest.mark.parametrize('description, expected', [
    ("hello world", "hello world"),
    ("<blockquote>hello world</blockquote>", "<blockquote>hello world</blockquote>"),
    ("<blockquote>\nhello world</blockquote>", "<blockquote>hello world</blockquote>"),
    ("<blockquote>\n hello world</blockquote>", "<blockquote>hello world</blockquote>"),
    ("<blockquote>\r\nhello world</blockquote>", "<blockquote>hello world</blockquote>"),
    ("<blockquote>hello world\n</blockquote>", "<blockquote>hello world</blockquote>"),
    ("<blockquote>hello world\r\n</blockquote>", "<blockquote>hello world</blockquote>"),
    ("<blockquote>hello world\n\n</blockquote>", "<blockquote>hello world</blockquote>"),
])
def test_cleanup_blockquote_whitespace(description, expected):
    """Newlines/spaces just inside <blockquote> tags are stripped; other text is untouched."""
    cleaned = cleanup_blockquote_whitespace(description)
    assert cleaned == expected
@pytest.mark.parametrize('description, expected', [
    ("hello world", "hello world"),
    ("> hello world", "<blockquote>hello world</blockquote>"),
    ("> hello world\n\nfoo bar", "<blockquote>hello world</blockquote>\n\nfoo bar"),
    ("foo bar\n\n> hello world", "foo bar\n\n<blockquote>hello world</blockquote>"),
    ("foo bar\n\n> hello world\n\nbar baz", "foo bar\n\n<blockquote>hello world</blockquote>\n\nbar baz"),
    ("> hello world\n\n> howdy friend", "<blockquote>hello world\n\nhowdy friend</blockquote>"),
    ("> hello world\n\n> howdy friend\n\nfoo bar", "<blockquote>hello world\n\nhowdy friend</blockquote>\n\nfoo bar"),
    ("> hello world\n>\n> howdy friend\n\nfoo bar", "<blockquote>hello world\n\nhowdy friend</blockquote>\n\nfoo bar"),
    ("foo bar\n\n> hello world\n\n> howdy friend", "foo bar\n\n<blockquote>hello world\n\nhowdy friend</blockquote>"),
])
def test_apply_markdown_blockquotes(description, expected):
    """Markdown '> ' quote lines become <blockquote> elements; adjacent quote blocks merge."""
    converted = apply_markdown_blockquotes(description)
    assert converted == expected
| 55.555556
| 119
| 0.709
| 250
| 2,000
| 5.612
| 0.152
| 0.228083
| 0.299359
| 0.256593
| 0.888097
| 0.753386
| 0.679259
| 0.598004
| 0.371347
| 0.155381
| 0
| 0.000562
| 0.11
| 2,000
| 35
| 120
| 57.142857
| 0.78764
| 0.02
| 0
| 0.296296
| 0
| 0
| 0.659857
| 0.122063
| 0
| 0
| 0
| 0
| 0.074074
| 1
| 0.074074
| false
| 0
| 0.074074
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
092d632a8428978fd8582b0b57c394066ed46e63
| 1,529
|
py
|
Python
|
AI_takeoff/customGym/custom_gym/envs/myxpc/actions/top.py
|
Skillerde6de/Minor-AI-2019_2020
|
57f6aed2d8066e48e2d99c8b97d5839b4f6ae7bc
|
[
"MIT"
] | 1
|
2021-01-08T08:14:34.000Z
|
2021-01-08T08:14:34.000Z
|
AI_takeoff/customGym/custom_gym/envs/myxpc/actions/top.py
|
Skillerde6de/Minor-AI-2019_2020
|
57f6aed2d8066e48e2d99c8b97d5839b4f6ae7bc
|
[
"MIT"
] | 1
|
2020-07-04T20:42:17.000Z
|
2020-07-04T20:43:40.000Z
|
AI_takeoff/customGym/custom_gym/envs/myxpc/actions/top.py
|
Skillerde6de/Minor-AI-2019_2020
|
57f6aed2d8066e48e2d99c8b97d5839b4f6ae7bc
|
[
"MIT"
] | null | null | null |
from custom_gym.envs.myxpc import xpc2 as xpc
def top_left():
    """Send the 'top_left' control vector to X-Plane via XPlaneConnect.

    Prints a diagnostic label, verifies the connection with a probe DREF read,
    then sends the control array. Returns early (after logging) if X-Plane is
    unreachable.
    """
    print('top_left')
    with xpc.XPlaneConnect() as client:
        # Verify connection
        try:
            # If X-Plane does not respond to the request, a timeout error
            # will be raised.
            client.getDREF("sim/test/test_float")
        # Was a bare ``except:`` -- narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed while still treating any runtime failure
        # as "no connection".
        except Exception:
            print("Error establishing connection to X-Plane.")
            print("Exiting...")
            return
        # First two entries are -1.0/-1.0; the second element varies across
        # top_left/top_mid/top_right (-1.0/0.0/1.0), presumably lateral input,
        # and -998 appears to mean "leave channel unchanged" -- TODO confirm
        # against the XPC sendCTRL documentation.
        top_l = [-1.0, -1.0, -998, -998, -998, -998, -998]
        client.sendCTRL(top_l)
def top_mid():
    """Send the 'top_mid' control vector to X-Plane via XPlaneConnect.

    Prints a diagnostic label, verifies the connection with a probe DREF read,
    then sends the control array. Returns early (after logging) if X-Plane is
    unreachable.
    """
    print('top_mid')
    with xpc.XPlaneConnect() as client:
        # Verify connection
        try:
            # If X-Plane does not respond to the request, a timeout error
            # will be raised.
            client.getDREF("sim/test/test_float")
        # Was a bare ``except:`` -- narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed while still treating any runtime failure
        # as "no connection".
        except Exception:
            print("Error establishing connection to X-Plane.")
            print("Exiting...")
            return
        # Second element 0.0 distinguishes "mid" from left (-1.0) / right (1.0);
        # -998 appears to mean "leave channel unchanged" -- TODO confirm
        # against the XPC sendCTRL documentation.
        top_m = [-1.0, 0.0, -998, -998, -998, -998, -998]
        client.sendCTRL(top_m)
def top_right():
    """Send the 'top_right' control vector to X-Plane via XPlaneConnect.

    Prints a diagnostic label, verifies the connection with a probe DREF read,
    then sends the control array. Returns early (after logging) if X-Plane is
    unreachable.
    """
    print('top_right')
    with xpc.XPlaneConnect() as client:
        # Verify connection
        try:
            # If X-Plane does not respond to the request, a timeout error
            # will be raised.
            client.getDREF("sim/test/test_float")
        # Was a bare ``except:`` -- narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed while still treating any runtime failure
        # as "no connection".
        except Exception:
            print("Error establishing connection to X-Plane.")
            print("Exiting...")
            return
        # Second element 1.0 distinguishes "right" from left (-1.0) / mid (0.0);
        # -998 appears to mean "leave channel unchanged" -- TODO confirm
        # against the XPC sendCTRL documentation.
        top_r = [-1.0, 1.0, -998, -998, -998, -998, -998]
        client.sendCTRL(top_r)
| 30.58
| 73
| 0.548725
| 192
| 1,529
| 4.286458
| 0.265625
| 0.087485
| 0.09842
| 0.087485
| 0.863913
| 0.863913
| 0.863913
| 0.863913
| 0.863913
| 0.823815
| 0
| 0.056919
| 0.333551
| 1,529
| 49
| 74
| 31.204082
| 0.750736
| 0.18378
| 0
| 0.617647
| 0
| 0
| 0.189015
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0.029412
| 0
| 0.205882
| 0.264706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
116dfb0e1da539cb219eba67e8a79452f7f508c4
| 5,654
|
py
|
Python
|
tests/agent_test.py
|
Ostorlab/agent_tsunami
|
405ca0629a1ac42103d5f04719f3d8b87ddca406
|
[
"Apache-2.0"
] | 2
|
2022-03-04T11:56:13.000Z
|
2022-03-05T23:07:36.000Z
|
tests/agent_test.py
|
Ostorlab/agent_tsunami
|
405ca0629a1ac42103d5f04719f3d8b87ddca406
|
[
"Apache-2.0"
] | null | null | null |
tests/agent_test.py
|
Ostorlab/agent_tsunami
|
405ca0629a1ac42103d5f04719f3d8b87ddca406
|
[
"Apache-2.0"
] | null | null | null |
"""Unittests for agent."""
import pytest
from ostorlab.agent import message
from ostorlab.agent.kb import kb
from ostorlab.agent.mixins import agent_report_vulnerability_mixin
from agent.tsunami import tsunami
def testTsunamiAgent_WhenMessageHaveInvalidIpVersion_ShouldRaiseValueErrorException(tsunami_agent):
    """Processing a message whose ip 'version' is not a supported one raises ValueError.

    Tsunami supports ipv4, ipv6 and hostname targets, so a message carrying a
    nonsensical version number must be rejected by the agent.
    """
    invalid_message = message.Message.from_data(
        selector='v3.asset.ip.v4', data={'version': 15631, 'host': '0.0.0.0'})
    with pytest.raises(ValueError):
        tsunami_agent.process(invalid_message)
def testTsunamiAgent_WhenTsunamiScanIsCalled_ShouldRaiseValueErrorException(mocker, tsunami_agent):
    """The agent turns a valid ipv4 message into a Target and passes it to Tsunami.scan.

    Tsunami.scan is patched out; the test inspects the keyword arguments of the
    single recorded call and compares them to the expected Target.
    """
    scan_mock = mocker.patch('agent.tsunami.tsunami.Tsunami.scan', return_value={'target': 0})
    scan_message = message.Message.from_data(
        selector='v3.asset.ip.v4', data={'version': 4, 'host': '0.0.0.0'})
    expected_target = tsunami.Target(address='0.0.0.0', version='v4')

    tsunami_agent.process(scan_message)

    scan_mock.assert_called_once()
    forwarded_target = scan_mock.call_args.kwargs['target']
    assert forwarded_target.address == expected_target.address
    assert forwarded_target.version == expected_target.version
def testTsunamiAgent_WhenTsunamiScanHasVulnerabilities_ShouldReportVulnerabilities(mocker, tsunami_agent):
    """Findings in a mocked Tsunami scan result are reported via report_vulnerability.

    The scan output is stubbed to contain one vulnerability; the test checks that
    the agent forwards a matching kb.Entry with the raw scan data embedded in a
    fenced json block as the technical detail.
    """
    # Minimal Tsunami scan result carrying a single finding.
    data = {
        'scanStatus': 'SUCCEEDED',
        'vulnerabilities': [
            {
                'vulnerability': {
                    'title': 'Ostorlab Platform',
                    'description': 'Ostorlab is not password protected'
                }
            }
        ]
    }
    risk_rating = 'HIGH'
    description = 'Ostorlab is not password protected'
    # Expected knowledge-base entry the agent should build from the finding above.
    kb_entry = kb.Entry(
        title='Ostorlab Platform',
        risk_rating=risk_rating,
        short_description=description,
        description=description,
        recommendation = '',
        references = {},
        security_issue = True,
        privacy_issue = False,
        has_public_exploit = True,
        targeted_by_malware = True,
        targeted_by_ransomware = True,
        targeted_by_nation_state = True
    )
    mocker.patch('agent.tsunami.tsunami.Tsunami.scan', return_value=data)
    mock_report_vulnerability = mocker.patch('agent.tsunami_agent.AgentTsunami.report_vulnerability', return_value=None)
    msg = message.Message.from_data(selector='v3.asset.ip.v4', data={'version': 4, 'host': '0.0.0.0'})
    # NOTE(review): result of this call is never used -- looks like dead code
    # left over from the previous test; confirm and consider removing.
    tsunami.Target(address='0.0.0.0', version='v4')
    tsunami_agent.process(msg)
    mock_report_vulnerability.assert_called_once_with(entry=kb_entry,
        technical_detail=f'```json\n{data}\n```', risk_rating=agent_report_vulnerability_mixin.RiskRating.HIGH)
def testTsunamiAgent_WhenLinkAssetAndTsunamiScanHasVulnerabilities_ShouldReportVulnerabilities(mocker, tsunami_agent):
    """Same as the ip-asset test above, but driven by a link (URL) asset message.

    The scan output is stubbed to contain one vulnerability; the test checks that
    the agent forwards a matching kb.Entry with the raw scan data embedded in a
    fenced json block as the technical detail.
    """
    # Minimal Tsunami scan result carrying a single finding.
    data = {
        'scanStatus': 'SUCCEEDED',
        'vulnerabilities': [
            {
                'vulnerability': {
                    'title': 'Ostorlab Platform',
                    'description': 'Ostorlab is not password protected'
                }
            }
        ]
    }
    risk_rating = 'HIGH'
    description = 'Ostorlab is not password protected'
    # Expected knowledge-base entry the agent should build from the finding above.
    kb_entry = kb.Entry(
        title='Ostorlab Platform',
        risk_rating=risk_rating,
        short_description=description,
        description=description,
        recommendation = '',
        references = {},
        security_issue = True,
        privacy_issue = False,
        has_public_exploit = True,
        targeted_by_malware = True,
        targeted_by_ransomware = True,
        targeted_by_nation_state = True
    )
    mocker.patch('agent.tsunami.tsunami.Tsunami.scan', return_value=data)
    mock_report_vulnerability = mocker.patch('agent.tsunami_agent.AgentTsunami.report_vulnerability', return_value=None)
    msg = message.Message.from_data(selector='v3.asset.link', data={'url': 'https://test.ostorlab.co',
                                                                    'method': 'GET'})
    # NOTE(review): result of this call is never used -- looks like dead code;
    # confirm and consider removing.
    tsunami.Target(domain='test.ostorlab.co')
    tsunami_agent.process(msg)
    mock_report_vulnerability.assert_called_once_with(entry=kb_entry,
        technical_detail=f'```json\n{data}\n```', risk_rating=agent_report_vulnerability_mixin.RiskRating.HIGH)
| 44.519685
| 120
| 0.631411
| 581
| 5,654
| 5.969019
| 0.209983
| 0.008651
| 0.008651
| 0.005767
| 0.8094
| 0.807093
| 0.807093
| 0.805363
| 0.781719
| 0.766724
| 0
| 0.011013
| 0.277326
| 5,654
| 126
| 121
| 44.873016
| 0.837739
| 0.169791
| 0
| 0.636364
| 0
| 0
| 0.170812
| 0.045375
| 0
| 0
| 0
| 0
| 0.056818
| 1
| 0.045455
| false
| 0.045455
| 0.056818
| 0
| 0.102273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
11ee5816e7344113fe17ce6358248bce0f83ed3e
| 19,503
|
py
|
Python
|
cottonformation/res/ecr.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/ecr.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/ecr.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
# NOTE(review): the repetitive attr.ib declarations in this module suggest it is
# machine-generated from the CloudFormation resource spec -- prefer regenerating
# over hand-editing.
@attr.s
class RepositoryLifecyclePolicy(Property):
    """
    AWS Object Type = "AWS::ECR::Repository.LifecyclePolicy"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-lifecyclepolicy.html

    Property Document:

    - ``p_LifecyclePolicyText``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-lifecyclepolicy.html#cfn-ecr-repository-lifecyclepolicy-lifecyclepolicytext
    - ``p_RegistryId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-lifecyclepolicy.html#cfn-ecr-repository-lifecyclepolicy-registryid
    """
    AWS_OBJECT_TYPE = "AWS::ECR::Repository.LifecyclePolicy"

    # Optional property (p_ prefix): validator wrapped in optional().
    p_LifecyclePolicyText: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "LifecyclePolicyText"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-lifecyclepolicy.html#cfn-ecr-repository-lifecyclepolicy-lifecyclepolicytext"""

    p_RegistryId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "RegistryId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-lifecyclepolicy.html#cfn-ecr-repository-lifecyclepolicy-registryid"""
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class ReplicationConfigurationReplicationDestination(Property):
    """
    AWS Object Type = "AWS::ECR::ReplicationConfiguration.ReplicationDestination"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationdestination.html

    Property Document:

    - ``rp_Region``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationdestination.html#cfn-ecr-replicationconfiguration-replicationdestination-region
    - ``rp_RegistryId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationdestination.html#cfn-ecr-replicationconfiguration-replicationdestination-registryid
    """
    AWS_OBJECT_TYPE = "AWS::ECR::ReplicationConfiguration.ReplicationDestination"

    # Required property (rp_ prefix): validator is NOT wrapped in optional().
    rp_Region: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Region"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationdestination.html#cfn-ecr-replicationconfiguration-replicationdestination-region"""

    rp_RegistryId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "RegistryId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationdestination.html#cfn-ecr-replicationconfiguration-replicationdestination-registryid"""
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class ReplicationConfigurationReplicationRule(Property):
    """
    AWS Object Type = "AWS::ECR::ReplicationConfiguration.ReplicationRule"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationrule.html

    Property Document:

    - ``rp_Destinations``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationrule.html#cfn-ecr-replicationconfiguration-replicationrule-destinations
    """
    AWS_OBJECT_TYPE = "AWS::ECR::ReplicationConfiguration.ReplicationRule"

    # Required list property; dicts are converted to Destination objects via from_list.
    rp_Destinations: typing.List[typing.Union['ReplicationConfigurationReplicationDestination', dict]] = attr.ib(
        default=None,
        converter=ReplicationConfigurationReplicationDestination.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ReplicationConfigurationReplicationDestination), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "Destinations"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationrule.html#cfn-ecr-replicationconfiguration-replicationrule-destinations"""
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class RepositoryImageScanningConfiguration(Property):
    """
    AWS Object Type = "AWS::ECR::Repository.ImageScanningConfiguration"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-imagescanningconfiguration.html

    Property Document:

    - ``p_ScanOnPush``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-imagescanningconfiguration.html#cfn-ecr-repository-imagescanningconfiguration-scanonpush
    """
    AWS_OBJECT_TYPE = "AWS::ECR::Repository.ImageScanningConfiguration"

    # Optional boolean property.
    p_ScanOnPush: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "ScanOnPush"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-imagescanningconfiguration.html#cfn-ecr-repository-imagescanningconfiguration-scanonpush"""
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class RepositoryEncryptionConfiguration(Property):
    """
    AWS Object Type = "AWS::ECR::Repository.EncryptionConfiguration"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-encryptionconfiguration.html

    Property Document:

    - ``rp_EncryptionType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-encryptionconfiguration.html#cfn-ecr-repository-encryptionconfiguration-encryptiontype
    - ``p_KmsKey``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-encryptionconfiguration.html#cfn-ecr-repository-encryptionconfiguration-kmskey
    """
    AWS_OBJECT_TYPE = "AWS::ECR::Repository.EncryptionConfiguration"

    # Required property (rp_ prefix): validator is NOT wrapped in optional().
    rp_EncryptionType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "EncryptionType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-encryptionconfiguration.html#cfn-ecr-repository-encryptionconfiguration-encryptiontype"""

    # Optional property (p_ prefix).
    p_KmsKey: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKey"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-repository-encryptionconfiguration.html#cfn-ecr-repository-encryptionconfiguration-kmskey"""
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class ReplicationConfigurationReplicationConfiguration(Property):
    """
    AWS Object Type = "AWS::ECR::ReplicationConfiguration.ReplicationConfiguration"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationconfiguration.html

    Property Document:

    - ``rp_Rules``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationconfiguration.html#cfn-ecr-replicationconfiguration-replicationconfiguration-rules
    """
    AWS_OBJECT_TYPE = "AWS::ECR::ReplicationConfiguration.ReplicationConfiguration"

    # Required list property; dicts are converted to Rule objects via from_list.
    rp_Rules: typing.List[typing.Union['ReplicationConfigurationReplicationRule', dict]] = attr.ib(
        default=None,
        converter=ReplicationConfigurationReplicationRule.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ReplicationConfigurationReplicationRule), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "Rules"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecr-replicationconfiguration-replicationconfiguration.html#cfn-ecr-replicationconfiguration-replicationconfiguration-rules"""
#--- Resource declaration ---
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class Repository(Resource):
    """
    AWS Object Type = "AWS::ECR::Repository"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html

    Property Document:

    - ``p_EncryptionConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-encryptionconfiguration
    - ``p_ImageScanningConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-imagescanningconfiguration
    - ``p_ImageTagMutability``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-imagetagmutability
    - ``p_LifecyclePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-lifecyclepolicy
    - ``p_RepositoryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-repositoryname
    - ``p_RepositoryPolicyText``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-repositorypolicytext
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-tags
    """
    AWS_OBJECT_TYPE = "AWS::ECR::Repository"

    # All properties below are optional (p_ prefix): validators wrapped in optional().
    p_EncryptionConfiguration: typing.Union['RepositoryEncryptionConfiguration', dict] = attr.ib(
        default=None,
        converter=RepositoryEncryptionConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(RepositoryEncryptionConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "EncryptionConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-encryptionconfiguration"""

    p_ImageScanningConfiguration: typing.Union['RepositoryImageScanningConfiguration', dict] = attr.ib(
        default=None,
        converter=RepositoryImageScanningConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(RepositoryImageScanningConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageScanningConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-imagescanningconfiguration"""

    p_ImageTagMutability: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageTagMutability"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-imagetagmutability"""

    p_LifecyclePolicy: typing.Union['RepositoryLifecyclePolicy', dict] = attr.ib(
        default=None,
        converter=RepositoryLifecyclePolicy.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(RepositoryLifecyclePolicy)),
        metadata={AttrMeta.PROPERTY_NAME: "LifecyclePolicy"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-lifecyclepolicy"""

    p_RepositoryName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "RepositoryName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-repositoryname"""

    p_RepositoryPolicyText: dict = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(dict)),
        metadata={AttrMeta.PROPERTY_NAME: "RepositoryPolicyText"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-repositorypolicytext"""

    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#cfn-ecr-repository-tags"""

    # rv_ properties expose CloudFormation return values as GetAtt references.
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#aws-resource-ecr-repository-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_RepositoryUri(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html#aws-resource-ecr-repository-return-values"""
        return GetAtt(resource=self, attr_name="RepositoryUri")
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class ReplicationConfiguration(Resource):
    """
    AWS Object Type = "AWS::ECR::ReplicationConfiguration"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-replicationconfiguration.html

    Property Document:

    - ``rp_ReplicationConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-replicationconfiguration.html#cfn-ecr-replicationconfiguration-replicationconfiguration
    """
    AWS_OBJECT_TYPE = "AWS::ECR::ReplicationConfiguration"

    # Required property (rp_ prefix); dicts converted via from_dict.
    rp_ReplicationConfiguration: typing.Union['ReplicationConfigurationReplicationConfiguration', dict] = attr.ib(
        default=None,
        converter=ReplicationConfigurationReplicationConfiguration.from_dict,
        validator=attr.validators.instance_of(ReplicationConfigurationReplicationConfiguration),
        metadata={AttrMeta.PROPERTY_NAME: "ReplicationConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-replicationconfiguration.html#cfn-ecr-replicationconfiguration-replicationconfiguration"""

    # CloudFormation return value exposed as a GetAtt reference.
    @property
    def rv_RegistryId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-replicationconfiguration.html#aws-resource-ecr-replicationconfiguration-return-values"""
        return GetAtt(resource=self, attr_name="RegistryId")
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class PublicRepository(Resource):
    """
    AWS Object Type = "AWS::ECR::PublicRepository"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html

    Property Document:

    - ``p_RepositoryCatalogData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-repositorycatalogdata
    - ``p_RepositoryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-repositoryname
    - ``p_RepositoryPolicyText``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-repositorypolicytext
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-tags
    """
    AWS_OBJECT_TYPE = "AWS::ECR::PublicRepository"

    # All properties below are optional (p_ prefix): validators wrapped in optional().
    p_RepositoryCatalogData: dict = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(dict)),
        metadata={AttrMeta.PROPERTY_NAME: "RepositoryCatalogData"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-repositorycatalogdata"""

    p_RepositoryName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "RepositoryName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-repositoryname"""

    p_RepositoryPolicyText: dict = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(dict)),
        metadata={AttrMeta.PROPERTY_NAME: "RepositoryPolicyText"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-repositorypolicytext"""

    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#cfn-ecr-publicrepository-tags"""

    # CloudFormation return value exposed as a GetAtt reference.
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-publicrepository.html#aws-resource-ecr-publicrepository-return-values"""
        return GetAtt(resource=self, attr_name="Arn")
# NOTE(review): looks machine-generated from the CloudFormation spec -- prefer
# regenerating over hand-editing.
@attr.s
class RegistryPolicy(Resource):
    """
    AWS Object Type = "AWS::ECR::RegistryPolicy"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-registrypolicy.html

    Property Document:

    - ``rp_PolicyText``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-registrypolicy.html#cfn-ecr-registrypolicy-policytext
    """
    AWS_OBJECT_TYPE = "AWS::ECR::RegistryPolicy"

    # Required dict property (rp_ prefix): validator is NOT wrapped in optional().
    rp_PolicyText: dict = attr.ib(
        default=None,
        validator=attr.validators.instance_of(dict),
        metadata={AttrMeta.PROPERTY_NAME: "PolicyText"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-registrypolicy.html#cfn-ecr-registrypolicy-policytext"""

    # CloudFormation return value exposed as a GetAtt reference.
    @property
    def rv_RegistryId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-registrypolicy.html#aws-resource-ecr-registrypolicy-return-values"""
        return GetAtt(resource=self, attr_name="RegistryId")
| 56.043103
| 225
| 0.767215
| 2,003
| 19,503
| 7.385921
| 0.047928
| 0.056239
| 0.043869
| 0.067798
| 0.846897
| 0.841693
| 0.810058
| 0.762336
| 0.762066
| 0.745843
| 0
| 0.000057
| 0.107214
| 19,503
| 347
| 226
| 56.204611
| 0.849586
| 0.35856
| 0
| 0.443787
| 0
| 0
| 0.114421
| 0.082506
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029586
| false
| 0
| 0.023669
| 0
| 0.331361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ee9beaec517bbad64990f3f3106bc0b86c51696a
| 2,080
|
py
|
Python
|
battles/tests/tests_forms.py
|
pamella/pokebattle
|
a90b27d87b12dd079356ad4b5a15a196004e1c5e
|
[
"MIT"
] | 2
|
2019-02-27T16:11:11.000Z
|
2019-12-10T14:01:18.000Z
|
battles/tests/tests_forms.py
|
pamella/pokebattle
|
a90b27d87b12dd079356ad4b5a15a196004e1c5e
|
[
"MIT"
] | 14
|
2019-02-21T14:36:22.000Z
|
2022-02-26T09:49:44.000Z
|
battles/tests/tests_forms.py
|
pamella/pokebattle
|
a90b27d87b12dd079356ad4b5a15a196004e1c5e
|
[
"MIT"
] | null | null | null |
from model_mommy import mommy
from battles.forms import CreateBattleForm, SelectTrainerTeamForm
from battles.tests.tests_helpers import PokeBattleTestCase
class CreateBattleFormTest(PokeBattleTestCase):
    """Validation tests for ``CreateBattleForm``."""

    def test_pokemon_team_sum_invalid(self):
        """A team whose combined stats exceed 600 points must be rejected."""
        # Build the fixtures up front; only ids reach the form data,
        # mirroring what a real POST payload would carry.
        opponent = mommy.make('users.User')
        slowbro = mommy.make('pokemons.Pokemon', name='slowbro')
        golem = mommy.make('pokemons.Pokemon', name='golem')
        doduo = mommy.make('pokemons.Pokemon', name='doduo')
        form = CreateBattleForm(
            initial={'trainer_creator': self.user},
            data={
                'trainer_opponent': opponent,
                'order_1': 0,
                'order_2': 1,
                'order_3': 2,
                'pokemon_1': slowbro.id,
                'pokemon_2': golem.id,
                'pokemon_3': doduo.id,
            },
        )
        self.assertFalse(form.is_valid())
        expected = ('Trainer, your pokemon team stats can not sum '
                    'more than 600 points.')
        with self.assertRaisesMessage(Exception, expected):
            form.clean()
class SelectTrainerTeamFormTest(PokeBattleTestCase):
    """Validation tests for ``SelectTrainerTeamForm``."""

    def test_pokemon_team_sum_invalid(self):
        """A team whose combined stats exceed 600 points must be rejected."""
        # Fixtures first; the form receives the creator's id (not the
        # instance) in ``initial``, matching this form's contract.
        opponent = mommy.make('users.User')
        slowbro = mommy.make('pokemons.Pokemon', name='slowbro')
        golem = mommy.make('pokemons.Pokemon', name='golem')
        doduo = mommy.make('pokemons.Pokemon', name='doduo')
        form = SelectTrainerTeamForm(
            initial={'trainer_creator': self.user.id},
            data={
                'trainer_opponent': opponent,
                'order_1': 0,
                'order_2': 1,
                'order_3': 2,
                'pokemon_1': slowbro.id,
                'pokemon_2': golem.id,
                'pokemon_3': doduo.id,
            },
        )
        self.assertFalse(form.is_valid())
        expected = ('Trainer, your pokemon team stats can not sum '
                    'more than 600 points.')
        with self.assertRaisesMessage(Exception, expected):
            form.clean()
| 34.098361
| 86
| 0.5125
| 189
| 2,080
| 5.492063
| 0.301587
| 0.069364
| 0.098266
| 0.138728
| 0.77842
| 0.77842
| 0.77842
| 0.77842
| 0.77842
| 0.77842
| 0
| 0.018462
| 0.375
| 2,080
| 60
| 87
| 34.666667
| 0.78
| 0
| 0
| 0.716981
| 0
| 0
| 0.222115
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 1
| 0.037736
| false
| 0
| 0.056604
| 0
| 0.132075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eeafbdce1107f03d7966ec273d2ca2553756833f
| 2,930
|
py
|
Python
|
complex_venv/lib/python3.7/site-packages/test/test_remove_empty_sahpes.py
|
lubianat/complex_bot
|
e0ddabcc0487c52b14fb94950c5a812f0bdb2283
|
[
"MIT"
] | 1
|
2021-10-06T00:21:10.000Z
|
2021-10-06T00:21:10.000Z
|
complex_venv/lib/python3.7/site-packages/test/test_remove_empty_sahpes.py
|
lubianat/complex_bot
|
e0ddabcc0487c52b14fb94950c5a812f0bdb2283
|
[
"MIT"
] | 14
|
2021-01-15T21:51:38.000Z
|
2021-11-10T10:08:22.000Z
|
complex_venv/lib/python3.7/site-packages/test/test_remove_empty_sahpes.py
|
lubianat/complex_bot
|
e0ddabcc0487c52b14fb94950c5a812f0bdb2283
|
[
"MIT"
] | 1
|
2021-01-18T10:32:56.000Z
|
2021-01-18T10:32:56.000Z
|
import unittest
from shexer.shaper import Shaper
from test.const import G1, BASE_FILES, G1_ALL_CLASSES_NO_COMMENTS
from test.t_utils import file_vs_str_tunned_comparison, number_of_shapes
import os.path as pth
_BASE_DIR = BASE_FILES + "empty_shapes" + pth.sep
class TestRemoveEmptyShapes(unittest.TestCase):
    """Exercises the ``remove_empty_shapes`` switch of ``Shaper``."""

    def _run_shaper(self, target_classes, remove_empty_shapes):
        # Every test shares the same graph and configuration; only the
        # target classes and the remove_empty_shapes flag vary.
        shaper = Shaper(target_classes=target_classes,
                        graph_file_input=G1,
                        all_classes_mode=False,
                        input_format="turtle",
                        disable_comments=True,
                        remove_empty_shapes=remove_empty_shapes)
        return shaper.shex_graph(string_output=True)

    def test_one_empty_remove(self):
        result = self._run_shaper(
            ["http://xmlns.com/foaf/0.1/Machine"],
            remove_empty_shapes=True)
        self.assertTrue(number_of_shapes(result) == 0)

    def test_one_empty_not_remove(self):
        result = self._run_shaper(
            ["http://xmlns.com/foaf/0.1/Machine"],
            remove_empty_shapes=False)
        self.assertTrue(
            file_vs_str_tunned_comparison(
                file_path=_BASE_DIR + "one_empty_not_remove.shex",
                str_target=result))

    def test_some_empty_remove(self):
        targets = ["http://xmlns.com/foaf/0.1/Machine",
                   "http://xmlns.com/foaf/0.1/Person",
                   "http://xmlns.com/foaf/0.1/Document"]
        result = self._run_shaper(targets, remove_empty_shapes=True)
        self.assertTrue(
            file_vs_str_tunned_comparison(
                file_path=G1_ALL_CLASSES_NO_COMMENTS,
                str_target=result))

    def test_some_empty_not_remove(self):
        targets = ["http://xmlns.com/foaf/0.1/Machine",
                   "http://xmlns.com/foaf/0.1/Person",
                   "http://xmlns.com/foaf/0.1/Document"]
        result = self._run_shaper(targets, remove_empty_shapes=False)
        self.assertTrue(
            file_vs_str_tunned_comparison(
                file_path=_BASE_DIR + "some_empty_not_remove.shex",
                str_target=result))
| 51.403509
| 106
| 0.549829
| 317
| 2,930
| 4.716088
| 0.198738
| 0.048161
| 0.064214
| 0.085619
| 0.833445
| 0.787291
| 0.787291
| 0.787291
| 0.712375
| 0.712375
| 0
| 0.012938
| 0.366894
| 2,930
| 57
| 107
| 51.403509
| 0.792992
| 0
| 0
| 0.7
| 0
| 0
| 0.122087
| 0.017739
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.08
| false
| 0
| 0.1
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eec19a406637dd86d08d3549bf432eb984f5c749
| 159
|
py
|
Python
|
apps/base/coinbase.py
|
silverlogic/bit-tag-api
|
22bc1d79b3d48493ff3880f4c976c4e7c8135973
|
[
"MIT"
] | null | null | null |
apps/base/coinbase.py
|
silverlogic/bit-tag-api
|
22bc1d79b3d48493ff3880f4c976c4e7c8135973
|
[
"MIT"
] | null | null | null |
apps/base/coinbase.py
|
silverlogic/bit-tag-api
|
22bc1d79b3d48493ff3880f4c976c4e7c8135973
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from coinbase.wallet.client import Client
# Shared Coinbase API client, created once at import time from the
# credentials in Django settings (COINBASE_API_KEY / COINBASE_API_SECRET).
# NOTE(review): importing this module requires both settings to be defined.
coinbase_client = Client(settings.COINBASE_API_KEY, settings.COINBASE_API_SECRET)
| 26.5
| 81
| 0.855346
| 22
| 159
| 5.954545
| 0.5
| 0.244275
| 0.290076
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08805
| 159
| 5
| 82
| 31.8
| 0.903448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eefb432424d517dc189fcac77b9ac5aa8c6dcf86
| 86
|
py
|
Python
|
apps/libs/__init__.py
|
panla/fastapi_sockets
|
9d425b0835f295245f38c71314bcaba9615d5cb7
|
[
"MIT"
] | 2
|
2021-12-28T10:58:33.000Z
|
2022-01-14T15:50:43.000Z
|
apps/libs/__init__.py
|
panla/fastapi_sockets
|
9d425b0835f295245f38c71314bcaba9615d5cb7
|
[
"MIT"
] | null | null | null |
apps/libs/__init__.py
|
panla/fastapi_sockets
|
9d425b0835f295245f38c71314bcaba9615d5cb7
|
[
"MIT"
] | null | null | null |
from .middleware import register_middleware
from .exception import register_exception
| 28.666667
| 43
| 0.883721
| 10
| 86
| 7.4
| 0.5
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 44
| 43
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e1034ac12ae11a7bcc86a642943a634a22f68811
| 126
|
py
|
Python
|
fmd/__init__.py
|
keshav47/cnn-facial-landmark
|
2c4011db2917428ebebf70d7fc9ad59c89002bfc
|
[
"MIT"
] | null | null | null |
fmd/__init__.py
|
keshav47/cnn-facial-landmark
|
2c4011db2917428ebebf70d7fc9ad59c89002bfc
|
[
"MIT"
] | 1
|
2021-09-07T10:19:43.000Z
|
2021-09-07T10:19:43.000Z
|
fmd/__init__.py
|
keshav47/cnn-facial-landmark
|
2c4011db2917428ebebf70d7fc9ad59c89002bfc
|
[
"MIT"
] | null | null | null |
"""Facial landmark dataset toolkit.
Useage: https://github.com/yinguobing/facial-landmark-dataset
"""
from .wflw import WFLW
| 21
| 61
| 0.769841
| 16
| 126
| 6.0625
| 0.75
| 0.28866
| 0.43299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 126
| 5
| 62
| 25.2
| 0.850877
| 0.753968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e11a78b4569881b504b9569e785ba0326b601d0f
| 81
|
py
|
Python
|
converter/convertDocxToText.py
|
rsandagon/cv-ner
|
d7fb6e55b2caa6aff614ffc6d44ce0b3074c4044
|
[
"MIT"
] | 4
|
2020-05-24T04:11:35.000Z
|
2021-09-14T16:58:02.000Z
|
converter/convertDocxToText.py
|
rsandagon/cv-ner
|
d7fb6e55b2caa6aff614ffc6d44ce0b3074c4044
|
[
"MIT"
] | 8
|
2021-03-19T02:09:19.000Z
|
2022-03-11T23:53:28.000Z
|
converter/convertDocxToText.py
|
rsandagon/cv-ner
|
d7fb6e55b2caa6aff614ffc6d44ce0b3074c4044
|
[
"MIT"
] | 2
|
2020-10-09T12:08:59.000Z
|
2020-12-20T22:19:56.000Z
|
import docx2txt
def convert_docx_to_text(path):
    """Extract the plain-text content of a ``.docx`` file.

    :param path: filesystem path to the ``.docx`` document.
    :return: the value of ``docx2txt.process`` — presumably the document's
        text as a string; confirm against the docx2txt API.
    """
    return docx2txt.process(path)
| 13.5
| 31
| 0.814815
| 12
| 81
| 5.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.111111
| 81
| 5
| 32
| 16.2
| 0.847222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
016d7cd0591f5befd0bbac8b9199d8ba989f70b5
| 144,911
|
py
|
Python
|
loopchain/protos/loopchain_pb2.py
|
extendjh/loopchain
|
bb0cd6270677f796d90289fa31d2bb2bc9015f11
|
[
"Apache-2.0"
] | 2
|
2017-08-14T00:38:01.000Z
|
2021-07-02T06:44:32.000Z
|
loopchain/protos/loopchain_pb2.py
|
extendjh/loopchain
|
bb0cd6270677f796d90289fa31d2bb2bc9015f11
|
[
"Apache-2.0"
] | null | null | null |
loopchain/protos/loopchain_pb2.py
|
extendjh/loopchain
|
bb0cd6270677f796d90289fa31d2bb2bc9015f11
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: loopchain.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='loopchain.proto',
package='',
syntax='proto2',
serialized_pb=_b('\n\x0floopchain.proto\"F\n\x07Message\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x0c\n\x04meta\x18\x03 \x01(\t\x12\x0e\n\x06object\x18\x04 \x01(\x0c\"]\n\x15\x43omplainLeaderRequest\x12\x1c\n\x14\x63omplained_leader_id\x18\x01 \x02(\t\x12\x15\n\rnew_leader_id\x18\x02 \x02(\t\x12\x0f\n\x07message\x18\x03 \x02(\t\"\x1d\n\x08PeerList\x12\x11\n\tpeer_list\x18\x01 \x02(\x0c\"\x1f\n\x0f\x43reateTxRequest\x12\x0c\n\x04\x64\x61ta\x18\x01 \x02(\t\"J\n\rCreateTxReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x0f\n\x07tx_hash\x18\x02 \x02(\t\x12\x11\n\tmore_info\x18\x03 \x02(\t\"\x14\n\x06TxSend\x12\n\n\x02tx\x18\x01 \x02(\x0c\"r\n\x0fGetBlockRequest\x12\x12\n\nblock_hash\x18\x01 \x01(\t\x12\x18\n\x0c\x62lock_height\x18\x02 \x01(\x05:\x02-1\x12\x19\n\x11\x62lock_data_filter\x18\x03 \x02(\t\x12\x16\n\x0etx_data_filter\x18\x04 \x02(\t\"i\n\rGetBlockReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x12\n\nblock_hash\x18\x02 \x02(\t\x12\x17\n\x0f\x62lock_data_json\x18\x03 \x02(\t\x12\x14\n\x0ctx_data_json\x18\x04 \x03(\t\"\x1e\n\x0cQueryRequest\x12\x0e\n\x06params\x18\x01 \x02(\t\"5\n\nQueryReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x10\n\x08response\x18\x02 \x02(\t\"\x1f\n\x0cGetTxRequest\x12\x0f\n\x07tx_hash\x18\x01 \x02(\t\"R\n\nGetTxReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x0c\n\x04meta\x18\x02 \x02(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x02(\t\x12\x11\n\tmore_info\x18\x04 \x02(\t\")\n\x16GetInvokeResultRequest\x12\x0f\n\x07tx_hash\x18\x01 \x02(\t\"=\n\x14GetInvokeResultReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x0e\n\x06result\x18\x02 \x01(\t\"&\n\x10\x42lockSyncRequest\x12\x12\n\nblock_hash\x18\x01 \x02(\t\"f\n\x0e\x42lockSyncReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x14\n\x0c\x62lock_height\x18\x02 \x02(\x05\x12\x18\n\x10max_block_height\x18\x03 \x02(\x05\x12\r\n\x05\x62lock\x18\x04 \x02(\x0c\"\x1a\n\tBlockSend\x12\r\n\x05\x62lock\x18\x01 
\x02(\x0c\"H\n\nBlockReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x0f\n\x07message\x18\x02 \x02(\t\x12\x12\n\nblock_hash\x18\x03 \x02(\t\"f\n\tBlockVote\x12\x11\n\tvote_code\x18\x01 \x02(\x05\x12\x0f\n\x07message\x18\x02 \x02(\t\x12\x12\n\nblock_hash\x18\x03 \x02(\t\x12\x0f\n\x07peer_id\x18\x04 \x02(\t\x12\x10\n\x08group_id\x18\x05 \x02(\t\"2\n\rBlockAnnounce\x12\x12\n\nblock_hash\x18\x01 \x02(\t\x12\r\n\x05\x62lock\x18\x02 \x01(\x0c\"2\n\rCommonRequest\x12\x0f\n\x07request\x18\x01 \x02(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\"5\n\x0b\x43ommonReply\x12\x15\n\rresponse_code\x18\x01 \x02(\x05\x12\x0f\n\x07message\x18\x02 \x02(\t\"h\n\x10SubscribeRequest\x12\x13\n\x0bpeer_target\x18\x01 \x02(\t\x12\x1c\n\tpeer_type\x18\x02 \x02(\x0e\x32\t.PeerType\x12\x0f\n\x07peer_id\x18\x03 \x01(\t\x12\x10\n\x08group_id\x18\x04 \x01(\t\" \n\rStatusRequest\x12\x0f\n\x07request\x18\x01 \x02(\t\"E\n\x0bStatusReply\x12\x0e\n\x06status\x18\x01 \x02(\t\x12\x14\n\x0c\x62lock_height\x18\x02 \x02(\x05\x12\x10\n\x08total_tx\x18\x03 \x02(\x05\"\x1d\n\x0bStopRequest\x12\x0e\n\x06reason\x18\x01 \x02(\t\"\x1b\n\tStopReply\x12\x0e\n\x06status\x18\x01 \x02(\t\"\x9b\x01\n\x0bPeerRequest\x12\x13\n\x0bpeer_object\x18\x01 \x02(\x0c\x12\x0f\n\x07peer_id\x18\x02 \x02(\t\x12\x13\n\x0bpeer_target\x18\x03 \x02(\t\x12\x10\n\x08group_id\x18\x04 \x02(\t\x12\x1c\n\tpeer_type\x18\x05 \x02(\x0e\x32\t.PeerType\x12\r\n\x05token\x18\x06 \x01(\t\x12\x12\n\npeer_order\x18\x07 \x01(\x05\"+\n\x06PeerID\x12\x0f\n\x07peer_id\x18\x02 \x02(\t\x12\x10\n\x08group_id\x18\x04 \x02(\t\"A\n\tPeerReply\x12\x0e\n\x06status\x18\x01 \x02(\x05\x12\x11\n\tpeer_list\x18\x02 \x02(\x0c\x12\x11\n\tmore_info\x18\x03 
\x01(\t*<\n\x08PeerType\x12\x08\n\x04PEER\x10\x00\x12\x13\n\x0f\x42LOCK_GENERATOR\x10\x01\x12\x11\n\rRADIO_STATION\x10\x02\x32\xfc\x07\n\x0cInnerService\x12\x1f\n\x07Request\x12\x08.Message\x1a\x08.Message\"\x00\x12+\n\tGetStatus\x12\x0e.StatusRequest\x1a\x0c.StatusReply\"\x00\x12\x30\n\x0eGetScoreStatus\x12\x0e.StatusRequest\x1a\x0c.StatusReply\"\x00\x12\"\n\x04Stop\x12\x0c.StopRequest\x1a\n.StopReply\"\x00\x12%\n\x05GetTx\x12\r.GetTxRequest\x1a\x0b.GetTxReply\"\x00\x12.\n\x08GetBlock\x12\x10.GetBlockRequest\x1a\x0e.GetBlockReply\"\x00\x12%\n\x05Query\x12\r.QueryRequest\x1a\x0b.QueryReply\"\x00\x12\x31\n\tBlockSync\x12\x11.BlockSyncRequest\x1a\x0f.BlockSyncReply\"\x00\x12\x36\n\x18\x41nnounceUnconfirmedBlock\x12\n.BlockSend\x1a\x0c.CommonReply\"\x00\x12\x38\n\x16\x41nnounceConfirmedBlock\x12\x0e.BlockAnnounce\x1a\x0c.CommonReply\"\x00\x12/\n\x0f\x41nnounceNewPeer\x12\x0c.PeerRequest\x1a\x0c.CommonReply\"\x00\x12&\n\x04\x45\x63ho\x12\x0e.CommonRequest\x1a\x0c.CommonReply\"\x00\x12\x38\n\x0e\x43omplainLeader\x12\x16.ComplainLeaderRequest\x1a\x0c.CommonReply\"\x00\x12;\n\x11\x41nnounceNewLeader\x12\x16.ComplainLeaderRequest\x1a\x0c.CommonReply\"\x00\x12\x31\n\x10GetLastBlockHash\x12\x0e.CommonRequest\x1a\x0b.BlockReply\"\x00\x12.\n\tSubscribe\x12\x11.SubscribeRequest\x1a\x0c.CommonReply\"\x00\x12\x30\n\x0bUnSubscribe\x12\x11.SubscribeRequest\x1a\x0c.CommonReply\"\x00\x12 
\n\x05\x41\x64\x64Tx\x12\x07.TxSend\x1a\x0c.CommonReply\"\x00\x12\x32\n\x14VoteUnconfirmedBlock\x12\n.BlockVote\x1a\x0c.CommonReply\"\x00\x12\x34\n\x12NotifyLeaderBroken\x12\x0e.CommonRequest\x1a\x0c.CommonReply\"\x00\x12\x34\n\x12NotifyProcessError\x12\x0e.CommonRequest\x1a\x0c.CommonReply\"\x00\x32\xb3\x08\n\x0bPeerService\x12\x1f\n\x07Request\x12\x08.Message\x1a\x08.Message\"\x00\x12+\n\tGetStatus\x12\x0e.StatusRequest\x1a\x0c.StatusReply\"\x00\x12\x30\n\x0eGetScoreStatus\x12\x0e.StatusRequest\x1a\x0c.StatusReply\"\x00\x12\"\n\x04Stop\x12\x0c.StopRequest\x1a\n.StopReply\"\x00\x12.\n\x08\x43reateTx\x12\x10.CreateTxRequest\x1a\x0e.CreateTxReply\"\x00\x12%\n\x05GetTx\x12\r.GetTxRequest\x1a\x0b.GetTxReply\"\x00\x12.\n\x08GetBlock\x12\x10.GetBlockRequest\x1a\x0e.GetBlockReply\"\x00\x12%\n\x05Query\x12\r.QueryRequest\x1a\x0b.QueryReply\"\x00\x12\x43\n\x0fGetInvokeResult\x12\x17.GetInvokeResultRequest\x1a\x15.GetInvokeResultReply\"\x00\x12\x31\n\tBlockSync\x12\x11.BlockSyncRequest\x1a\x0f.BlockSyncReply\"\x00\x12\x36\n\x18\x41nnounceUnconfirmedBlock\x12\n.BlockSend\x1a\x0c.CommonReply\"\x00\x12\x38\n\x16\x41nnounceConfirmedBlock\x12\x0e.BlockAnnounce\x1a\x0c.CommonReply\"\x00\x12/\n\x0f\x41nnounceNewPeer\x12\x0c.PeerRequest\x1a\x0c.CommonReply\"\x00\x12-\n\x12\x41nnounceDeletePeer\x12\x07.PeerID\x1a\x0c.CommonReply\"\x00\x12&\n\x04\x45\x63ho\x12\x0e.CommonRequest\x1a\x0c.CommonReply\"\x00\x12\x38\n\x0e\x43omplainLeader\x12\x16.ComplainLeaderRequest\x1a\x0c.CommonReply\"\x00\x12;\n\x11\x41nnounceNewLeader\x12\x16.ComplainLeaderRequest\x1a\x0c.CommonReply\"\x00\x12\x31\n\x10GetLastBlockHash\x12\x0e.CommonRequest\x1a\x0b.BlockReply\"\x00\x12.\n\tSubscribe\x12\x11.SubscribeRequest\x1a\x0c.CommonReply\"\x00\x12\x30\n\x0bUnSubscribe\x12\x11.SubscribeRequest\x1a\x0c.CommonReply\"\x00\x12 
\n\x05\x41\x64\x64Tx\x12\x07.TxSend\x1a\x0c.CommonReply\"\x00\x12\x32\n\x14VoteUnconfirmedBlock\x12\n.BlockVote\x1a\x0c.CommonReply\"\x00\x32\xa0\x03\n\x0cRadioStation\x12\x1f\n\x07Request\x12\x08.Message\x1a\x08.Message\"\x00\x12+\n\tGetStatus\x12\x0e.StatusRequest\x1a\x0c.StatusReply\"\x00\x12\"\n\x04Stop\x12\x0c.StopRequest\x1a\n.StopReply\"\x00\x12)\n\x0b\x43onnectPeer\x12\x0c.PeerRequest\x1a\n.PeerReply\"\x00\x12*\n\x0bGetPeerList\x12\x0e.CommonRequest\x1a\t.PeerList\"\x00\x12(\n\rGetPeerStatus\x12\x07.PeerID\x1a\x0c.StatusReply\"\x00\x12;\n\x11\x41nnounceNewLeader\x12\x16.ComplainLeaderRequest\x1a\x0c.CommonReply\"\x00\x12.\n\tSubscribe\x12\x11.SubscribeRequest\x1a\x0c.CommonReply\"\x00\x12\x30\n\x0bUnSubscribe\x12\x11.SubscribeRequest\x1a\x0c.CommonReply\"\x00\x32,\n\tContainer\x12\x1f\n\x07Request\x12\x08.Message\x1a\x08.Message\"\x00')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Descriptor for the top-level `PeerType` enum declared in loopchain.proto.
# NOTE(review): this file is protoc output ("DO NOT EDIT" header above);
# comments added here will be lost when the .proto is recompiled.
_PEERTYPE = _descriptor.EnumDescriptor(
  name='PeerType',
  full_name='PeerType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='PEER', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BLOCK_GENERATOR', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='RADIO_STATION', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  # Byte offsets of this enum inside the serialized FileDescriptorProto.
  serialized_start=1931,
  serialized_end=1991,
)
_sym_db.RegisterEnumDescriptor(_PEERTYPE)
# Public enum wrapper plus module-level aliases mirroring the .proto numbers.
PeerType = enum_type_wrapper.EnumTypeWrapper(_PEERTYPE)
PEER = 0
BLOCK_GENERATOR = 1
RADIO_STATION = 2
_MESSAGE = _descriptor.Descriptor(
name='Message',
full_name='Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='Message.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message', full_name='Message.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='meta', full_name='Message.meta', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='object', full_name='Message.object', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=19,
serialized_end=89,
)
_COMPLAINLEADERREQUEST = _descriptor.Descriptor(
name='ComplainLeaderRequest',
full_name='ComplainLeaderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='complained_leader_id', full_name='ComplainLeaderRequest.complained_leader_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_leader_id', full_name='ComplainLeaderRequest.new_leader_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message', full_name='ComplainLeaderRequest.message', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=91,
serialized_end=184,
)
_PEERLIST = _descriptor.Descriptor(
name='PeerList',
full_name='PeerList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='peer_list', full_name='PeerList.peer_list', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=186,
serialized_end=215,
)
_CREATETXREQUEST = _descriptor.Descriptor(
name='CreateTxRequest',
full_name='CreateTxRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='data', full_name='CreateTxRequest.data', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=217,
serialized_end=248,
)
_CREATETXREPLY = _descriptor.Descriptor(
name='CreateTxReply',
full_name='CreateTxReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='response_code', full_name='CreateTxReply.response_code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tx_hash', full_name='CreateTxReply.tx_hash', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='more_info', full_name='CreateTxReply.more_info', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=250,
serialized_end=324,
)
_TXSEND = _descriptor.Descriptor(
name='TxSend',
full_name='TxSend',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tx', full_name='TxSend.tx', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=326,
serialized_end=346,
)
_GETBLOCKREQUEST = _descriptor.Descriptor(
name='GetBlockRequest',
full_name='GetBlockRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='block_hash', full_name='GetBlockRequest.block_hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='block_height', full_name='GetBlockRequest.block_height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=-1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='block_data_filter', full_name='GetBlockRequest.block_data_filter', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tx_data_filter', full_name='GetBlockRequest.tx_data_filter', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=348,
serialized_end=462,
)
_GETBLOCKREPLY = _descriptor.Descriptor(
name='GetBlockReply',
full_name='GetBlockReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='response_code', full_name='GetBlockReply.response_code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='block_hash', full_name='GetBlockReply.block_hash', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='block_data_json', full_name='GetBlockReply.block_data_json', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tx_data_json', full_name='GetBlockReply.tx_data_json', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=464,
serialized_end=569,
)
_QUERYREQUEST = _descriptor.Descriptor(
name='QueryRequest',
full_name='QueryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='params', full_name='QueryRequest.params', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=571,
serialized_end=601,
)
_QUERYREPLY = _descriptor.Descriptor(
name='QueryReply',
full_name='QueryReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='response_code', full_name='QueryReply.response_code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='response', full_name='QueryReply.response', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=603,
serialized_end=656,
)
_GETTXREQUEST = _descriptor.Descriptor(
name='GetTxRequest',
full_name='GetTxRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tx_hash', full_name='GetTxRequest.tx_hash', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=658,
serialized_end=689,
)
# ---------------------------------------------------------------------------
# Message descriptors (machine-generated by the protocol buffer compiler from
# loopchain.proto -- do not edit these values by hand; regenerate instead).
#
# Key to the generated numeric codes (see google.protobuf.descriptor):
#   type:  5 = int32, 9 = string, 12 = bytes, 14 = enum
#   label: 1 = optional, 2 = required   (proto2 syntax)
# serialized_start/serialized_end are byte offsets of each message inside the
# serialized FileDescriptorProto held by DESCRIPTOR.
# ---------------------------------------------------------------------------

# GetTxReply: response_code (int32) plus meta/data/more_info strings, all required.
_GETTXREPLY = _descriptor.Descriptor(
  name='GetTxReply',
  full_name='GetTxReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='response_code', full_name='GetTxReply.response_code', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='meta', full_name='GetTxReply.meta', index=1,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='data', full_name='GetTxReply.data', index=2,
      number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='more_info', full_name='GetTxReply.more_info', index=3,
      number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=691,
  serialized_end=773,
)


# GetInvokeResultRequest: a single required tx_hash string.
_GETINVOKERESULTREQUEST = _descriptor.Descriptor(
  name='GetInvokeResultRequest',
  full_name='GetInvokeResultRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='tx_hash', full_name='GetInvokeResultRequest.tx_hash', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=775,
  serialized_end=816,
)


# GetInvokeResultReply: required response_code (int32), optional result string.
_GETINVOKERESULTREPLY = _descriptor.Descriptor(
  name='GetInvokeResultReply',
  full_name='GetInvokeResultReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='response_code', full_name='GetInvokeResultReply.response_code', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='result', full_name='GetInvokeResultReply.result', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=818,
  serialized_end=879,
)


# BlockSyncRequest: a single required block_hash string.
_BLOCKSYNCREQUEST = _descriptor.Descriptor(
  name='BlockSyncRequest',
  full_name='BlockSyncRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='block_hash', full_name='BlockSyncRequest.block_hash', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=881,
  serialized_end=919,
)


# BlockSyncReply: response_code / block_height / max_block_height (int32)
# and the serialized block payload (bytes); all required.
_BLOCKSYNCREPLY = _descriptor.Descriptor(
  name='BlockSyncReply',
  full_name='BlockSyncReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='response_code', full_name='BlockSyncReply.response_code', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='BlockSyncReply.block_height', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_block_height', full_name='BlockSyncReply.max_block_height', index=2,
      number=3, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='block', full_name='BlockSyncReply.block', index=3,
      number=4, type=12, cpp_type=9, label=2,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=921,
  serialized_end=1023,
)


# BlockSend: a single required serialized block (bytes).
_BLOCKSEND = _descriptor.Descriptor(
  name='BlockSend',
  full_name='BlockSend',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='block', full_name='BlockSend.block', index=0,
      number=1, type=12, cpp_type=9, label=2,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1025,
  serialized_end=1051,
)


# BlockReply: required response_code (int32), message and block_hash strings.
_BLOCKREPLY = _descriptor.Descriptor(
  name='BlockReply',
  full_name='BlockReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='response_code', full_name='BlockReply.response_code', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message', full_name='BlockReply.message', index=1,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='block_hash', full_name='BlockReply.block_hash', index=2,
      number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1053,
  serialized_end=1125,
)


# BlockVote: vote_code (int32) plus message/block_hash/peer_id/group_id
# strings; all required.
_BLOCKVOTE = _descriptor.Descriptor(
  name='BlockVote',
  full_name='BlockVote',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='vote_code', full_name='BlockVote.vote_code', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message', full_name='BlockVote.message', index=1,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='block_hash', full_name='BlockVote.block_hash', index=2,
      number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_id', full_name='BlockVote.peer_id', index=3,
      number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='BlockVote.group_id', index=4,
      number=5, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1127,
  serialized_end=1229,
)


# BlockAnnounce: required block_hash string, optional serialized block (bytes).
_BLOCKANNOUNCE = _descriptor.Descriptor(
  name='BlockAnnounce',
  full_name='BlockAnnounce',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='block_hash', full_name='BlockAnnounce.block_hash', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='block', full_name='BlockAnnounce.block', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1231,
  serialized_end=1281,
)


# CommonRequest: required request string, optional group_id string.
_COMMONREQUEST = _descriptor.Descriptor(
  name='CommonRequest',
  full_name='CommonRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='request', full_name='CommonRequest.request', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='CommonRequest.group_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1283,
  serialized_end=1333,
)


# CommonReply: required response_code (int32) and message string.
_COMMONREPLY = _descriptor.Descriptor(
  name='CommonReply',
  full_name='CommonReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='response_code', full_name='CommonReply.response_code', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message', full_name='CommonReply.message', index=1,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1335,
  serialized_end=1388,
)


# SubscribeRequest: required peer_target string and peer_type enum
# (enum_type is wired to _PEERTYPE after all descriptors are built),
# optional peer_id/group_id strings.
_SUBSCRIBEREQUEST = _descriptor.Descriptor(
  name='SubscribeRequest',
  full_name='SubscribeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='peer_target', full_name='SubscribeRequest.peer_target', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_type', full_name='SubscribeRequest.peer_type', index=1,
      number=2, type=14, cpp_type=8, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_id', full_name='SubscribeRequest.peer_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='SubscribeRequest.group_id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1390,
  serialized_end=1494,
)


# StatusRequest: a single required request string.
_STATUSREQUEST = _descriptor.Descriptor(
  name='StatusRequest',
  full_name='StatusRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='request', full_name='StatusRequest.request', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1496,
  serialized_end=1528,
)


# StatusReply: required status string, block_height and total_tx (int32).
_STATUSREPLY = _descriptor.Descriptor(
  name='StatusReply',
  full_name='StatusReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='StatusReply.status', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='StatusReply.block_height', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='total_tx', full_name='StatusReply.total_tx', index=2,
      number=3, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1530,
  serialized_end=1599,
)


# StopRequest: a single required reason string.
_STOPREQUEST = _descriptor.Descriptor(
  name='StopRequest',
  full_name='StopRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='reason', full_name='StopRequest.reason', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1601,
  serialized_end=1630,
)


# StopReply: a single required status string.
_STOPREPLY = _descriptor.Descriptor(
  name='StopReply',
  full_name='StopReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='StopReply.status', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1632,
  serialized_end=1659,
)


# PeerRequest: required peer_object (bytes), peer_id/peer_target/group_id
# strings and peer_type enum; optional token string and peer_order (int32).
_PEERREQUEST = _descriptor.Descriptor(
  name='PeerRequest',
  full_name='PeerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='peer_object', full_name='PeerRequest.peer_object', index=0,
      number=1, type=12, cpp_type=9, label=2,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_id', full_name='PeerRequest.peer_id', index=1,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_target', full_name='PeerRequest.peer_target', index=2,
      number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='PeerRequest.group_id', index=3,
      number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_type', full_name='PeerRequest.peer_type', index=4,
      number=5, type=14, cpp_type=8, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='token', full_name='PeerRequest.token', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_order', full_name='PeerRequest.peer_order', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1662,
  serialized_end=1817,
)


# PeerID: required peer_id and group_id strings.  NOTE: the field numbers
# (2 and 4) are non-contiguous as declared in the .proto source.
_PEERID = _descriptor.Descriptor(
  name='PeerID',
  full_name='PeerID',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='peer_id', full_name='PeerID.peer_id', index=0,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='PeerID.group_id', index=1,
      number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1819,
  serialized_end=1862,
)


# PeerReply: required status (int32) and peer_list (bytes), optional
# more_info string.
_PEERREPLY = _descriptor.Descriptor(
  name='PeerReply',
  full_name='PeerReply',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='PeerReply.status', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='peer_list', full_name='PeerReply.peer_list', index=1,
      number=2, type=12, cpp_type=9, label=2,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='more_info', full_name='PeerReply.more_info', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1864,
  serialized_end=1929,
)
# --- Descriptor wiring (generated) -----------------------------------------
# Resolve cross-references that could not be filled in while the descriptors
# above were being constructed: hook the PeerType enum descriptor into the
# two fields that use it, then register every message descriptor (and the
# enum) on the file-level DESCRIPTOR so reflection can look them up by name.
_SUBSCRIBEREQUEST.fields_by_name['peer_type'].enum_type = _PEERTYPE
_PEERREQUEST.fields_by_name['peer_type'].enum_type = _PEERTYPE
DESCRIPTOR.message_types_by_name['Message'] = _MESSAGE
DESCRIPTOR.message_types_by_name['ComplainLeaderRequest'] = _COMPLAINLEADERREQUEST
DESCRIPTOR.message_types_by_name['PeerList'] = _PEERLIST
DESCRIPTOR.message_types_by_name['CreateTxRequest'] = _CREATETXREQUEST
DESCRIPTOR.message_types_by_name['CreateTxReply'] = _CREATETXREPLY
DESCRIPTOR.message_types_by_name['TxSend'] = _TXSEND
DESCRIPTOR.message_types_by_name['GetBlockRequest'] = _GETBLOCKREQUEST
DESCRIPTOR.message_types_by_name['GetBlockReply'] = _GETBLOCKREPLY
DESCRIPTOR.message_types_by_name['QueryRequest'] = _QUERYREQUEST
DESCRIPTOR.message_types_by_name['QueryReply'] = _QUERYREPLY
DESCRIPTOR.message_types_by_name['GetTxRequest'] = _GETTXREQUEST
DESCRIPTOR.message_types_by_name['GetTxReply'] = _GETTXREPLY
DESCRIPTOR.message_types_by_name['GetInvokeResultRequest'] = _GETINVOKERESULTREQUEST
DESCRIPTOR.message_types_by_name['GetInvokeResultReply'] = _GETINVOKERESULTREPLY
DESCRIPTOR.message_types_by_name['BlockSyncRequest'] = _BLOCKSYNCREQUEST
DESCRIPTOR.message_types_by_name['BlockSyncReply'] = _BLOCKSYNCREPLY
DESCRIPTOR.message_types_by_name['BlockSend'] = _BLOCKSEND
DESCRIPTOR.message_types_by_name['BlockReply'] = _BLOCKREPLY
DESCRIPTOR.message_types_by_name['BlockVote'] = _BLOCKVOTE
DESCRIPTOR.message_types_by_name['BlockAnnounce'] = _BLOCKANNOUNCE
DESCRIPTOR.message_types_by_name['CommonRequest'] = _COMMONREQUEST
DESCRIPTOR.message_types_by_name['CommonReply'] = _COMMONREPLY
DESCRIPTOR.message_types_by_name['SubscribeRequest'] = _SUBSCRIBEREQUEST
DESCRIPTOR.message_types_by_name['StatusRequest'] = _STATUSREQUEST
DESCRIPTOR.message_types_by_name['StatusReply'] = _STATUSREPLY
DESCRIPTOR.message_types_by_name['StopRequest'] = _STOPREQUEST
DESCRIPTOR.message_types_by_name['StopReply'] = _STOPREPLY
DESCRIPTOR.message_types_by_name['PeerRequest'] = _PEERREQUEST
DESCRIPTOR.message_types_by_name['PeerID'] = _PEERID
DESCRIPTOR.message_types_by_name['PeerReply'] = _PEERREPLY
DESCRIPTOR.enum_types_by_name['PeerType'] = _PEERTYPE
# --- Message class generation (generated) ----------------------------------
# For every descriptor, synthesize the concrete Python message class via the
# protobuf reflection metaclass and register it with the default symbol
# database so FromString / SerializeToString and symbol lookup work.
Message = _reflection.GeneratedProtocolMessageType('Message', (_message.Message,), dict(
  DESCRIPTOR = _MESSAGE,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:Message)
  ))
_sym_db.RegisterMessage(Message)

ComplainLeaderRequest = _reflection.GeneratedProtocolMessageType('ComplainLeaderRequest', (_message.Message,), dict(
  DESCRIPTOR = _COMPLAINLEADERREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:ComplainLeaderRequest)
  ))
_sym_db.RegisterMessage(ComplainLeaderRequest)

PeerList = _reflection.GeneratedProtocolMessageType('PeerList', (_message.Message,), dict(
  DESCRIPTOR = _PEERLIST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:PeerList)
  ))
_sym_db.RegisterMessage(PeerList)

CreateTxRequest = _reflection.GeneratedProtocolMessageType('CreateTxRequest', (_message.Message,), dict(
  DESCRIPTOR = _CREATETXREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:CreateTxRequest)
  ))
_sym_db.RegisterMessage(CreateTxRequest)

CreateTxReply = _reflection.GeneratedProtocolMessageType('CreateTxReply', (_message.Message,), dict(
  DESCRIPTOR = _CREATETXREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:CreateTxReply)
  ))
_sym_db.RegisterMessage(CreateTxReply)

TxSend = _reflection.GeneratedProtocolMessageType('TxSend', (_message.Message,), dict(
  DESCRIPTOR = _TXSEND,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:TxSend)
  ))
_sym_db.RegisterMessage(TxSend)

GetBlockRequest = _reflection.GeneratedProtocolMessageType('GetBlockRequest', (_message.Message,), dict(
  DESCRIPTOR = _GETBLOCKREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:GetBlockRequest)
  ))
_sym_db.RegisterMessage(GetBlockRequest)

GetBlockReply = _reflection.GeneratedProtocolMessageType('GetBlockReply', (_message.Message,), dict(
  DESCRIPTOR = _GETBLOCKREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:GetBlockReply)
  ))
_sym_db.RegisterMessage(GetBlockReply)

QueryRequest = _reflection.GeneratedProtocolMessageType('QueryRequest', (_message.Message,), dict(
  DESCRIPTOR = _QUERYREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:QueryRequest)
  ))
_sym_db.RegisterMessage(QueryRequest)

QueryReply = _reflection.GeneratedProtocolMessageType('QueryReply', (_message.Message,), dict(
  DESCRIPTOR = _QUERYREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:QueryReply)
  ))
_sym_db.RegisterMessage(QueryReply)

GetTxRequest = _reflection.GeneratedProtocolMessageType('GetTxRequest', (_message.Message,), dict(
  DESCRIPTOR = _GETTXREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:GetTxRequest)
  ))
_sym_db.RegisterMessage(GetTxRequest)

GetTxReply = _reflection.GeneratedProtocolMessageType('GetTxReply', (_message.Message,), dict(
  DESCRIPTOR = _GETTXREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:GetTxReply)
  ))
_sym_db.RegisterMessage(GetTxReply)

GetInvokeResultRequest = _reflection.GeneratedProtocolMessageType('GetInvokeResultRequest', (_message.Message,), dict(
  DESCRIPTOR = _GETINVOKERESULTREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:GetInvokeResultRequest)
  ))
_sym_db.RegisterMessage(GetInvokeResultRequest)

GetInvokeResultReply = _reflection.GeneratedProtocolMessageType('GetInvokeResultReply', (_message.Message,), dict(
  DESCRIPTOR = _GETINVOKERESULTREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:GetInvokeResultReply)
  ))
_sym_db.RegisterMessage(GetInvokeResultReply)

BlockSyncRequest = _reflection.GeneratedProtocolMessageType('BlockSyncRequest', (_message.Message,), dict(
  DESCRIPTOR = _BLOCKSYNCREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:BlockSyncRequest)
  ))
_sym_db.RegisterMessage(BlockSyncRequest)

BlockSyncReply = _reflection.GeneratedProtocolMessageType('BlockSyncReply', (_message.Message,), dict(
  DESCRIPTOR = _BLOCKSYNCREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:BlockSyncReply)
  ))
_sym_db.RegisterMessage(BlockSyncReply)

BlockSend = _reflection.GeneratedProtocolMessageType('BlockSend', (_message.Message,), dict(
  DESCRIPTOR = _BLOCKSEND,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:BlockSend)
  ))
_sym_db.RegisterMessage(BlockSend)

BlockReply = _reflection.GeneratedProtocolMessageType('BlockReply', (_message.Message,), dict(
  DESCRIPTOR = _BLOCKREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:BlockReply)
  ))
_sym_db.RegisterMessage(BlockReply)

BlockVote = _reflection.GeneratedProtocolMessageType('BlockVote', (_message.Message,), dict(
  DESCRIPTOR = _BLOCKVOTE,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:BlockVote)
  ))
_sym_db.RegisterMessage(BlockVote)

BlockAnnounce = _reflection.GeneratedProtocolMessageType('BlockAnnounce', (_message.Message,), dict(
  DESCRIPTOR = _BLOCKANNOUNCE,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:BlockAnnounce)
  ))
_sym_db.RegisterMessage(BlockAnnounce)

CommonRequest = _reflection.GeneratedProtocolMessageType('CommonRequest', (_message.Message,), dict(
  DESCRIPTOR = _COMMONREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:CommonRequest)
  ))
_sym_db.RegisterMessage(CommonRequest)

CommonReply = _reflection.GeneratedProtocolMessageType('CommonReply', (_message.Message,), dict(
  DESCRIPTOR = _COMMONREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:CommonReply)
  ))
_sym_db.RegisterMessage(CommonReply)

SubscribeRequest = _reflection.GeneratedProtocolMessageType('SubscribeRequest', (_message.Message,), dict(
  DESCRIPTOR = _SUBSCRIBEREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:SubscribeRequest)
  ))
_sym_db.RegisterMessage(SubscribeRequest)

StatusRequest = _reflection.GeneratedProtocolMessageType('StatusRequest', (_message.Message,), dict(
  DESCRIPTOR = _STATUSREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:StatusRequest)
  ))
_sym_db.RegisterMessage(StatusRequest)

StatusReply = _reflection.GeneratedProtocolMessageType('StatusReply', (_message.Message,), dict(
  DESCRIPTOR = _STATUSREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:StatusReply)
  ))
_sym_db.RegisterMessage(StatusReply)

StopRequest = _reflection.GeneratedProtocolMessageType('StopRequest', (_message.Message,), dict(
  DESCRIPTOR = _STOPREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:StopRequest)
  ))
_sym_db.RegisterMessage(StopRequest)

StopReply = _reflection.GeneratedProtocolMessageType('StopReply', (_message.Message,), dict(
  DESCRIPTOR = _STOPREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:StopReply)
  ))
_sym_db.RegisterMessage(StopReply)

PeerRequest = _reflection.GeneratedProtocolMessageType('PeerRequest', (_message.Message,), dict(
  DESCRIPTOR = _PEERREQUEST,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:PeerRequest)
  ))
_sym_db.RegisterMessage(PeerRequest)

PeerID = _reflection.GeneratedProtocolMessageType('PeerID', (_message.Message,), dict(
  DESCRIPTOR = _PEERID,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:PeerID)
  ))
_sym_db.RegisterMessage(PeerID)

PeerReply = _reflection.GeneratedProtocolMessageType('PeerReply', (_message.Message,), dict(
  DESCRIPTOR = _PEERREPLY,
  __module__ = 'loopchain_pb2'
  # @@protoc_insertion_point(class_scope:PeerReply)
  ))
_sym_db.RegisterMessage(PeerReply)
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
class InnerServiceStub(object):
"""########################################################################
Peer Service for Inner Process
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Request = channel.unary_unary(
'/InnerService/Request',
request_serializer=Message.SerializeToString,
response_deserializer=Message.FromString,
)
self.GetStatus = channel.unary_unary(
'/InnerService/GetStatus',
request_serializer=StatusRequest.SerializeToString,
response_deserializer=StatusReply.FromString,
)
self.GetScoreStatus = channel.unary_unary(
'/InnerService/GetScoreStatus',
request_serializer=StatusRequest.SerializeToString,
response_deserializer=StatusReply.FromString,
)
self.Stop = channel.unary_unary(
'/InnerService/Stop',
request_serializer=StopRequest.SerializeToString,
response_deserializer=StopReply.FromString,
)
self.GetTx = channel.unary_unary(
'/InnerService/GetTx',
request_serializer=GetTxRequest.SerializeToString,
response_deserializer=GetTxReply.FromString,
)
self.GetBlock = channel.unary_unary(
'/InnerService/GetBlock',
request_serializer=GetBlockRequest.SerializeToString,
response_deserializer=GetBlockReply.FromString,
)
self.Query = channel.unary_unary(
'/InnerService/Query',
request_serializer=QueryRequest.SerializeToString,
response_deserializer=QueryReply.FromString,
)
self.BlockSync = channel.unary_unary(
'/InnerService/BlockSync',
request_serializer=BlockSyncRequest.SerializeToString,
response_deserializer=BlockSyncReply.FromString,
)
self.AnnounceUnconfirmedBlock = channel.unary_unary(
'/InnerService/AnnounceUnconfirmedBlock',
request_serializer=BlockSend.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.AnnounceConfirmedBlock = channel.unary_unary(
'/InnerService/AnnounceConfirmedBlock',
request_serializer=BlockAnnounce.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.AnnounceNewPeer = channel.unary_unary(
'/InnerService/AnnounceNewPeer',
request_serializer=PeerRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.Echo = channel.unary_unary(
'/InnerService/Echo',
request_serializer=CommonRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.ComplainLeader = channel.unary_unary(
'/InnerService/ComplainLeader',
request_serializer=ComplainLeaderRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.AnnounceNewLeader = channel.unary_unary(
'/InnerService/AnnounceNewLeader',
request_serializer=ComplainLeaderRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.GetLastBlockHash = channel.unary_unary(
'/InnerService/GetLastBlockHash',
request_serializer=CommonRequest.SerializeToString,
response_deserializer=BlockReply.FromString,
)
self.Subscribe = channel.unary_unary(
'/InnerService/Subscribe',
request_serializer=SubscribeRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.UnSubscribe = channel.unary_unary(
'/InnerService/UnSubscribe',
request_serializer=SubscribeRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.AddTx = channel.unary_unary(
'/InnerService/AddTx',
request_serializer=TxSend.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.VoteUnconfirmedBlock = channel.unary_unary(
'/InnerService/VoteUnconfirmedBlock',
request_serializer=BlockVote.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.NotifyLeaderBroken = channel.unary_unary(
'/InnerService/NotifyLeaderBroken',
request_serializer=CommonRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
self.NotifyProcessError = channel.unary_unary(
'/InnerService/NotifyProcessError',
request_serializer=CommonRequest.SerializeToString,
response_deserializer=CommonReply.FromString,
)
class InnerServiceServicer(object):
    """########################################################################
    Peer service exposed to inner processes.
    Every handler is a placeholder: concrete servicers must override them.
    """

    def _fail_unimplemented(self, context):
        # Shared placeholder behaviour for every RPC below: flag the call
        # as UNIMPLEMENTED on the gRPC context, then raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Request(self, request, context):
        """Connection."""
        self._fail_unimplemented(context)

    def GetStatus(self, request, context):
        self._fail_unimplemented(context)

    def GetScoreStatus(self, request, context):
        self._fail_unimplemented(context)

    def Stop(self, request, context):
        self._fail_unimplemented(context)

    def GetTx(self, request, context):
        self._fail_unimplemented(context)

    def GetBlock(self, request, context):
        self._fail_unimplemented(context)

    def Query(self, request, context):
        self._fail_unimplemented(context)

    def BlockSync(self, request, context):
        """Interface for correcting a peer's block height."""
        self._fail_unimplemented(context)

    def AnnounceUnconfirmedBlock(self, request, context):
        """Interfaces that receive broadcasts after Subscribe start with Announce-."""
        self._fail_unimplemented(context)

    def AnnounceConfirmedBlock(self, request, context):
        self._fail_unimplemented(context)

    def AnnounceNewPeer(self, request, context):
        self._fail_unimplemented(context)

    def Echo(self, request, context):
        """Interface used for test verification."""
        self._fail_unimplemented(context)

    def ComplainLeader(self, request, context):
        """Interface used for leader election."""
        self._fail_unimplemented(context)

    def AnnounceNewLeader(self, request, context):
        self._fail_unimplemented(context)

    def GetLastBlockHash(self, request, context):
        """/////////////////////////////////////////////////////////////////////
        Interface that should only be allowed for the BlockGenerator (leader).
        /////////////////////////////////////////////////////////////////////
        The RadioStation uses GetLastBlockHash for block height sync.
        """
        self._fail_unimplemented(context)

    def Subscribe(self, request, context):
        """Subscribe and UnSubscribe are broadcast-related methods and must be
        kept identical to the ones in radiostation.proto.
        """
        self._fail_unimplemented(context)

    def UnSubscribe(self, request, context):
        self._fail_unimplemented(context)

    def AddTx(self, request, context):
        self._fail_unimplemented(context)

    def VoteUnconfirmedBlock(self, request, context):
        self._fail_unimplemented(context)

    def NotifyLeaderBroken(self, request, context):
        """/////////////////////////////////////////////////////////////////////
        Inner-channel-only messages, requested by internal processes.
        They start with Notify-.
        /////////////////////////////////////////////////////////////////////
        """
        self._fail_unimplemented(context)

    def NotifyProcessError(self, request, context):
        self._fail_unimplemented(context)
def add_InnerServiceServicer_to_server(servicer, server):
    """Register every InnerService RPC handler of ``servicer`` on ``server``."""
    # (method name, request message class, reply message class) per unary RPC.
    method_specs = (
        ('Request', Message, Message),
        ('GetStatus', StatusRequest, StatusReply),
        ('GetScoreStatus', StatusRequest, StatusReply),
        ('Stop', StopRequest, StopReply),
        ('GetTx', GetTxRequest, GetTxReply),
        ('GetBlock', GetBlockRequest, GetBlockReply),
        ('Query', QueryRequest, QueryReply),
        ('BlockSync', BlockSyncRequest, BlockSyncReply),
        ('AnnounceUnconfirmedBlock', BlockSend, CommonReply),
        ('AnnounceConfirmedBlock', BlockAnnounce, CommonReply),
        ('AnnounceNewPeer', PeerRequest, CommonReply),
        ('Echo', CommonRequest, CommonReply),
        ('ComplainLeader', ComplainLeaderRequest, CommonReply),
        ('AnnounceNewLeader', ComplainLeaderRequest, CommonReply),
        ('GetLastBlockHash', CommonRequest, BlockReply),
        ('Subscribe', SubscribeRequest, CommonReply),
        ('UnSubscribe', SubscribeRequest, CommonReply),
        ('AddTx', TxSend, CommonReply),
        ('VoteUnconfirmedBlock', BlockVote, CommonReply),
        ('NotifyLeaderBroken', CommonRequest, CommonReply),
        ('NotifyProcessError', CommonRequest, CommonReply),
    )
    rpc_method_handlers = {
        rpc_name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, rpc_name),
            request_deserializer=request_cls.FromString,
            response_serializer=reply_cls.SerializeToString,
        )
        for rpc_name, request_cls, reply_cls in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'InnerService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class PeerServiceStub(object):
    """Client-side stub for the peer service used by outer clients or other peers."""

    # (method name, request message class, reply message class) per unary RPC.
    _RPC_SPECS = (
        ('Request', Message, Message),
        ('GetStatus', StatusRequest, StatusReply),
        ('GetScoreStatus', StatusRequest, StatusReply),
        ('Stop', StopRequest, StopReply),
        ('CreateTx', CreateTxRequest, CreateTxReply),
        ('GetTx', GetTxRequest, GetTxReply),
        ('GetBlock', GetBlockRequest, GetBlockReply),
        ('Query', QueryRequest, QueryReply),
        ('GetInvokeResult', GetInvokeResultRequest, GetInvokeResultReply),
        ('BlockSync', BlockSyncRequest, BlockSyncReply),
        ('AnnounceUnconfirmedBlock', BlockSend, CommonReply),
        ('AnnounceConfirmedBlock', BlockAnnounce, CommonReply),
        ('AnnounceNewPeer', PeerRequest, CommonReply),
        ('AnnounceDeletePeer', PeerID, CommonReply),
        ('Echo', CommonRequest, CommonReply),
        ('ComplainLeader', ComplainLeaderRequest, CommonReply),
        ('AnnounceNewLeader', ComplainLeaderRequest, CommonReply),
        ('GetLastBlockHash', CommonRequest, BlockReply),
        ('Subscribe', SubscribeRequest, CommonReply),
        ('UnSubscribe', SubscribeRequest, CommonReply),
        ('AddTx', TxSend, CommonReply),
        ('VoteUnconfirmedBlock', BlockVote, CommonReply),
    )

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Bind one unary-unary callable per RPC; each becomes an instance
        # attribute named after its method (e.g. self.GetStatus).
        for rpc_name, request_cls, reply_cls in self._RPC_SPECS:
            setattr(self, rpc_name, channel.unary_unary(
                '/PeerService/' + rpc_name,
                request_serializer=request_cls.SerializeToString,
                response_deserializer=reply_cls.FromString,
            ))
class PeerServiceServicer(object):
    """Peer service for outer clients or other peers.
    Every handler is a placeholder: concrete servicers must override them.
    """

    def _fail_unimplemented(self, context):
        # Shared placeholder behaviour for every RPC below: flag the call
        # as UNIMPLEMENTED on the gRPC context, then raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Request(self, request, context):
        """Connection."""
        self._fail_unimplemented(context)

    def GetStatus(self, request, context):
        self._fail_unimplemented(context)

    def GetScoreStatus(self, request, context):
        self._fail_unimplemented(context)

    def Stop(self, request, context):
        self._fail_unimplemented(context)

    def CreateTx(self, request, context):
        self._fail_unimplemented(context)

    def GetTx(self, request, context):
        self._fail_unimplemented(context)

    def GetBlock(self, request, context):
        self._fail_unimplemented(context)

    def Query(self, request, context):
        self._fail_unimplemented(context)

    def GetInvokeResult(self, request, context):
        self._fail_unimplemented(context)

    def BlockSync(self, request, context):
        """Interface for correcting a peer's block height."""
        self._fail_unimplemented(context)

    def AnnounceUnconfirmedBlock(self, request, context):
        """Interfaces that receive broadcasts after Subscribe start with Announce-."""
        self._fail_unimplemented(context)

    def AnnounceConfirmedBlock(self, request, context):
        self._fail_unimplemented(context)

    def AnnounceNewPeer(self, request, context):
        self._fail_unimplemented(context)

    def AnnounceDeletePeer(self, request, context):
        self._fail_unimplemented(context)

    def Echo(self, request, context):
        """Interface used for test verification."""
        self._fail_unimplemented(context)

    def ComplainLeader(self, request, context):
        """Interface used for leader election."""
        self._fail_unimplemented(context)

    def AnnounceNewLeader(self, request, context):
        self._fail_unimplemented(context)

    def GetLastBlockHash(self, request, context):
        """/////////////////////////////////////////////////////////////////////
        Interface that should only be allowed for the BlockGenerator (leader).
        /////////////////////////////////////////////////////////////////////
        The RadioStation uses GetLastBlockHash for block height sync.
        """
        self._fail_unimplemented(context)

    def Subscribe(self, request, context):
        """Subscribe and UnSubscribe are broadcast-related methods and must be
        kept identical to the ones in radiostation.proto.
        """
        self._fail_unimplemented(context)

    def UnSubscribe(self, request, context):
        self._fail_unimplemented(context)

    def AddTx(self, request, context):
        self._fail_unimplemented(context)

    def VoteUnconfirmedBlock(self, request, context):
        self._fail_unimplemented(context)
def add_PeerServiceServicer_to_server(servicer, server):
    """Register every PeerService RPC handler of ``servicer`` on ``server``."""
    # (method name, request message class, reply message class) per unary RPC.
    method_specs = (
        ('Request', Message, Message),
        ('GetStatus', StatusRequest, StatusReply),
        ('GetScoreStatus', StatusRequest, StatusReply),
        ('Stop', StopRequest, StopReply),
        ('CreateTx', CreateTxRequest, CreateTxReply),
        ('GetTx', GetTxRequest, GetTxReply),
        ('GetBlock', GetBlockRequest, GetBlockReply),
        ('Query', QueryRequest, QueryReply),
        ('GetInvokeResult', GetInvokeResultRequest, GetInvokeResultReply),
        ('BlockSync', BlockSyncRequest, BlockSyncReply),
        ('AnnounceUnconfirmedBlock', BlockSend, CommonReply),
        ('AnnounceConfirmedBlock', BlockAnnounce, CommonReply),
        ('AnnounceNewPeer', PeerRequest, CommonReply),
        ('AnnounceDeletePeer', PeerID, CommonReply),
        ('Echo', CommonRequest, CommonReply),
        ('ComplainLeader', ComplainLeaderRequest, CommonReply),
        ('AnnounceNewLeader', ComplainLeaderRequest, CommonReply),
        ('GetLastBlockHash', CommonRequest, BlockReply),
        ('Subscribe', SubscribeRequest, CommonReply),
        ('UnSubscribe', SubscribeRequest, CommonReply),
        ('AddTx', TxSend, CommonReply),
        ('VoteUnconfirmedBlock', BlockVote, CommonReply),
    )
    rpc_method_handlers = {
        rpc_name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, rpc_name),
            request_deserializer=request_cls.FromString,
            response_serializer=reply_cls.SerializeToString,
        )
        for rpc_name, request_cls, reply_cls in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'PeerService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class RadioStationStub(object):
    """#######################################################
    Client-side stub for the RadioStation service.
    """

    # (method name, request message class, reply message class) per unary RPC.
    _RPC_SPECS = (
        ('Request', Message, Message),
        ('GetStatus', StatusRequest, StatusReply),
        ('Stop', StopRequest, StopReply),
        ('ConnectPeer', PeerRequest, PeerReply),
        ('GetPeerList', CommonRequest, PeerList),
        ('GetPeerStatus', PeerID, StatusReply),
        ('AnnounceNewLeader', ComplainLeaderRequest, CommonReply),
        ('Subscribe', SubscribeRequest, CommonReply),
        ('UnSubscribe', SubscribeRequest, CommonReply),
    )

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Bind one unary-unary callable per RPC; each becomes an instance
        # attribute named after its method (e.g. self.ConnectPeer).
        for rpc_name, request_cls, reply_cls in self._RPC_SPECS:
            setattr(self, rpc_name, channel.unary_unary(
                '/RadioStation/' + rpc_name,
                request_serializer=request_cls.SerializeToString,
                response_deserializer=reply_cls.FromString,
            ))
class RadioStationServicer(object):
    """#######################################################
    RadioStation service.
    Every handler is a placeholder: concrete servicers must override them.
    """

    def _fail_unimplemented(self, context):
        # Shared placeholder behaviour for every RPC below: flag the call
        # as UNIMPLEMENTED on the gRPC context, then raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Request(self, request, context):
        self._fail_unimplemented(context)

    def GetStatus(self, request, context):
        self._fail_unimplemented(context)

    def Stop(self, request, context):
        self._fail_unimplemented(context)

    def ConnectPeer(self, request, context):
        """Peer connection."""
        self._fail_unimplemented(context)

    def GetPeerList(self, request, context):
        self._fail_unimplemented(context)

    def GetPeerStatus(self, request, context):
        self._fail_unimplemented(context)

    def AnnounceNewLeader(self, request, context):
        self._fail_unimplemented(context)

    def Subscribe(self, request, context):
        """Subscribe and UnSubscribe are broadcast-related methods and must be
        kept identical to the ones in loopchain.proto.
        """
        self._fail_unimplemented(context)

    def UnSubscribe(self, request, context):
        self._fail_unimplemented(context)
def add_RadioStationServicer_to_server(servicer, server):
    """Register every RadioStation RPC handler of ``servicer`` on ``server``."""
    # (method name, request message class, reply message class) per unary RPC.
    method_specs = (
        ('Request', Message, Message),
        ('GetStatus', StatusRequest, StatusReply),
        ('Stop', StopRequest, StopReply),
        ('ConnectPeer', PeerRequest, PeerReply),
        ('GetPeerList', CommonRequest, PeerList),
        ('GetPeerStatus', PeerID, StatusReply),
        ('AnnounceNewLeader', ComplainLeaderRequest, CommonReply),
        ('Subscribe', SubscribeRequest, CommonReply),
        ('UnSubscribe', SubscribeRequest, CommonReply),
    )
    rpc_method_handlers = {
        rpc_name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, rpc_name),
            request_deserializer=request_cls.FromString,
            response_serializer=reply_cls.SerializeToString,
        )
        for rpc_name, request_cls, reply_cls in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'RadioStation', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class ContainerStub(object):
    """#######################################################
    Client-side stub for the Container service.
    """

    # (method name, request message class, reply message class) per unary RPC.
    _RPC_SPECS = (
        ('Request', Message, Message),
    )

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Bind one unary-unary callable per RPC as an instance attribute.
        for rpc_name, request_cls, reply_cls in self._RPC_SPECS:
            setattr(self, rpc_name, channel.unary_unary(
                '/Container/' + rpc_name,
                request_serializer=request_cls.SerializeToString,
                response_deserializer=reply_cls.FromString,
            ))
class ContainerServicer(object):
    """#######################################################
    Container service.
    The handler is a placeholder: concrete servicers must override it.
    """

    def Request(self, request, context):
        """Placeholder handler: flag the RPC as unimplemented, then raise."""
        status = grpc.StatusCode.UNIMPLEMENTED
        context.set_code(status)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ContainerServicer_to_server(servicer, server):
    """Register every Container RPC handler of ``servicer`` on ``server``."""
    # (method name, request message class, reply message class) per unary RPC.
    method_specs = (
        ('Request', Message, Message),
    )
    rpc_method_handlers = {
        rpc_name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, rpc_name),
            request_deserializer=request_cls.FromString,
            response_serializer=reply_cls.SerializeToString,
        )
        for rpc_name, request_cls, reply_cls in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'Container', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class BetaInnerServiceServicer(object):
    """Beta-API servicer for the inner peer service.

    The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This class was generated
    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.
    """

    def _fail_unimplemented(self, context):
        # Beta API signals "unimplemented" through the context only
        # (no exception is raised, matching generated beta behaviour).
        context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)

    def Request(self, request, context):
        """Connection."""
        self._fail_unimplemented(context)

    def GetStatus(self, request, context):
        self._fail_unimplemented(context)

    def GetScoreStatus(self, request, context):
        self._fail_unimplemented(context)

    def Stop(self, request, context):
        self._fail_unimplemented(context)

    def GetTx(self, request, context):
        self._fail_unimplemented(context)

    def GetBlock(self, request, context):
        self._fail_unimplemented(context)

    def Query(self, request, context):
        self._fail_unimplemented(context)

    def BlockSync(self, request, context):
        """Interface for correcting a peer's block height."""
        self._fail_unimplemented(context)

    def AnnounceUnconfirmedBlock(self, request, context):
        """Interfaces that receive broadcasts after Subscribe start with Announce-."""
        self._fail_unimplemented(context)

    def AnnounceConfirmedBlock(self, request, context):
        self._fail_unimplemented(context)

    def AnnounceNewPeer(self, request, context):
        self._fail_unimplemented(context)

    def Echo(self, request, context):
        """Interface used for test verification."""
        self._fail_unimplemented(context)

    def ComplainLeader(self, request, context):
        """Interface used for leader election."""
        self._fail_unimplemented(context)

    def AnnounceNewLeader(self, request, context):
        self._fail_unimplemented(context)

    def GetLastBlockHash(self, request, context):
        """/////////////////////////////////////////////////////////////////////
        Interface that should only be allowed for the BlockGenerator (leader).
        /////////////////////////////////////////////////////////////////////
        The RadioStation uses GetLastBlockHash for block height sync.
        """
        self._fail_unimplemented(context)

    def Subscribe(self, request, context):
        """Subscribe and UnSubscribe are broadcast-related methods and must be
        kept identical to the ones in radiostation.proto.
        """
        self._fail_unimplemented(context)

    def UnSubscribe(self, request, context):
        self._fail_unimplemented(context)

    def AddTx(self, request, context):
        self._fail_unimplemented(context)

    def VoteUnconfirmedBlock(self, request, context):
        self._fail_unimplemented(context)

    def NotifyLeaderBroken(self, request, context):
        """/////////////////////////////////////////////////////////////////////
        Inner-channel-only messages, requested by internal processes.
        They start with Notify-.
        /////////////////////////////////////////////////////////////////////
        """
        self._fail_unimplemented(context)

    def NotifyProcessError(self, request, context):
        self._fail_unimplemented(context)
class BetaInnerServiceStub(object):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This class was generated
  only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
  """########################################################################
  Peer Service for Inner Process
  """
  # Every method below is a generated synchronous placeholder.
  # beta_create_InnerService_stub returns a dynamic stub whose methods (and
  # their `.future` asynchronous variants) actually issue the RPCs; this class
  # only documents the call signatures.
  def Request(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Connection
    """
    raise NotImplementedError()
  Request.future = None
  def GetStatus(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetStatus.future = None
  def GetScoreStatus(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetScoreStatus.future = None
  def Stop(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  Stop.future = None
  def GetTx(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetTx.future = None
  def GetBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetBlock.future = None
  def Query(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  Query.future = None
  def BlockSync(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interface for correcting (syncing) a Peer's Block Height.
    """
    raise NotImplementedError()
  BlockSync.future = None
  def AnnounceUnconfirmedBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interfaces that receive broadcasts after Subscribe start with the Announce- prefix.
    """
    raise NotImplementedError()
  AnnounceUnconfirmedBlock.future = None
  def AnnounceConfirmedBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceConfirmedBlock.future = None
  def AnnounceNewPeer(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceNewPeer.future = None
  def Echo(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interface for test verification.
    """
    raise NotImplementedError()
  Echo.future = None
  def ComplainLeader(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interface for leader election.
    """
    raise NotImplementedError()
  ComplainLeader.future = None
  def AnnounceNewLeader(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceNewLeader.future = None
  def GetLastBlockHash(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """/////////////////////////////////////////////////////////////////////
    Interface that must only be allowed for the BlockGenerator (leader).
    /////////////////////////////////////////////////////////////////////
    In RadioStation, GetLastBlockHash is used for Block Height Sync.
    """
    raise NotImplementedError()
  GetLastBlockHash.future = None
  def Subscribe(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Subscribe and UnSubscribe are broadcast-related methods and must be
    structured identically to radiostation.proto.
    """
    raise NotImplementedError()
  Subscribe.future = None
  def UnSubscribe(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  UnSubscribe.future = None
  def AddTx(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AddTx.future = None
  def VoteUnconfirmedBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  VoteUnconfirmedBlock.future = None
  def NotifyLeaderBroken(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """/////////////////////////////////////////////////////////////////////
    Messages dedicated to the inner channel, requested by an internal process.
    These interfaces start with the Notify- prefix.
    /////////////////////////////////////////////////////////////////////
    """
    raise NotImplementedError()
  NotifyLeaderBroken.future = None
  def NotifyProcessError(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  NotifyProcessError.future = None
def beta_create_InnerService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This function was
  generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
  # One row per RPC: method name -> (request type, response type, handler).
  # All InnerService methods are unary-unary, so the three beta tables
  # (deserializers, serializers, implementations) are derived from this map.
  _rpcs = {
      'AddTx': (TxSend, CommonReply, servicer.AddTx),
      'AnnounceConfirmedBlock': (BlockAnnounce, CommonReply, servicer.AnnounceConfirmedBlock),
      'AnnounceNewLeader': (ComplainLeaderRequest, CommonReply, servicer.AnnounceNewLeader),
      'AnnounceNewPeer': (PeerRequest, CommonReply, servicer.AnnounceNewPeer),
      'AnnounceUnconfirmedBlock': (BlockSend, CommonReply, servicer.AnnounceUnconfirmedBlock),
      'BlockSync': (BlockSyncRequest, BlockSyncReply, servicer.BlockSync),
      'ComplainLeader': (ComplainLeaderRequest, CommonReply, servicer.ComplainLeader),
      'Echo': (CommonRequest, CommonReply, servicer.Echo),
      'GetBlock': (GetBlockRequest, GetBlockReply, servicer.GetBlock),
      'GetLastBlockHash': (CommonRequest, BlockReply, servicer.GetLastBlockHash),
      'GetScoreStatus': (StatusRequest, StatusReply, servicer.GetScoreStatus),
      'GetStatus': (StatusRequest, StatusReply, servicer.GetStatus),
      'GetTx': (GetTxRequest, GetTxReply, servicer.GetTx),
      'NotifyLeaderBroken': (CommonRequest, CommonReply, servicer.NotifyLeaderBroken),
      'NotifyProcessError': (CommonRequest, CommonReply, servicer.NotifyProcessError),
      'Query': (QueryRequest, QueryReply, servicer.Query),
      'Request': (Message, Message, servicer.Request),
      'Stop': (StopRequest, StopReply, servicer.Stop),
      'Subscribe': (SubscribeRequest, CommonReply, servicer.Subscribe),
      'UnSubscribe': (SubscribeRequest, CommonReply, servicer.UnSubscribe),
      'VoteUnconfirmedBlock': (BlockVote, CommonReply, servicer.VoteUnconfirmedBlock),
  }
  request_deserializers = {
      ('InnerService', name): req.FromString
      for name, (req, _rep, _fn) in _rpcs.items()}
  response_serializers = {
      ('InnerService', name): rep.SerializeToString
      for name, (_req, rep, _fn) in _rpcs.items()}
  method_implementations = {
      ('InnerService', name): face_utilities.unary_unary_inline(fn)
      for name, (_req, _rep, fn) in _rpcs.items()}
  server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
  return beta_implementations.server(method_implementations, options=server_options)
def beta_create_InnerService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This function was
  generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
  # One row per RPC: method name -> (request type, response type).
  # Every InnerService method is unary-unary; serializer, deserializer and
  # cardinality tables are all derived from this single map.
  _rpcs = {
      'AddTx': (TxSend, CommonReply),
      'AnnounceConfirmedBlock': (BlockAnnounce, CommonReply),
      'AnnounceNewLeader': (ComplainLeaderRequest, CommonReply),
      'AnnounceNewPeer': (PeerRequest, CommonReply),
      'AnnounceUnconfirmedBlock': (BlockSend, CommonReply),
      'BlockSync': (BlockSyncRequest, BlockSyncReply),
      'ComplainLeader': (ComplainLeaderRequest, CommonReply),
      'Echo': (CommonRequest, CommonReply),
      'GetBlock': (GetBlockRequest, GetBlockReply),
      'GetLastBlockHash': (CommonRequest, BlockReply),
      'GetScoreStatus': (StatusRequest, StatusReply),
      'GetStatus': (StatusRequest, StatusReply),
      'GetTx': (GetTxRequest, GetTxReply),
      'NotifyLeaderBroken': (CommonRequest, CommonReply),
      'NotifyProcessError': (CommonRequest, CommonReply),
      'Query': (QueryRequest, QueryReply),
      'Request': (Message, Message),
      'Stop': (StopRequest, StopReply),
      'Subscribe': (SubscribeRequest, CommonReply),
      'UnSubscribe': (SubscribeRequest, CommonReply),
      'VoteUnconfirmedBlock': (BlockVote, CommonReply),
  }
  request_serializers = {
      ('InnerService', name): req.SerializeToString
      for name, (req, _rep) in _rpcs.items()}
  response_deserializers = {
      ('InnerService', name): rep.FromString
      for name, (_req, rep) in _rpcs.items()}
  cardinalities = {
      name: cardinality.Cardinality.UNARY_UNARY for name in _rpcs}
  stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
  return beta_implementations.dynamic_stub(channel, 'InnerService', cardinalities, options=stub_options)
class BetaPeerServiceServicer(object):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This class was generated
  only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
  """Peer Service for Outer Client or other Peer
  """
  # Every handler below is a generated default that reports UNIMPLEMENTED;
  # a concrete servicer subclass overrides the methods it actually serves.
  def Request(self, request, context):
    """Connection
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetStatus(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetScoreStatus(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def Stop(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def CreateTx(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetTx(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetBlock(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def Query(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetInvokeResult(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def BlockSync(self, request, context):
    """Interface for correcting (syncing) a Peer's Block Height.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AnnounceUnconfirmedBlock(self, request, context):
    """Interfaces that receive broadcasts after Subscribe start with the Announce- prefix.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AnnounceConfirmedBlock(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AnnounceNewPeer(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AnnounceDeletePeer(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def Echo(self, request, context):
    """Interface for test verification.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def ComplainLeader(self, request, context):
    """Interface for leader election.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AnnounceNewLeader(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetLastBlockHash(self, request, context):
    """/////////////////////////////////////////////////////////////////////
    Interface that must only be allowed for the BlockGenerator (leader).
    /////////////////////////////////////////////////////////////////////
    In RadioStation, GetLastBlockHash is used for Block Height Sync.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def Subscribe(self, request, context):
    """Subscribe and UnSubscribe are broadcast-related methods and must be
    structured identically to radiostation.proto.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def UnSubscribe(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AddTx(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def VoteUnconfirmedBlock(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
class BetaPeerServiceStub(object):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This class was generated
  only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
  """Peer Service for Outer Client or other Peer
  """
  # Every method below is a generated synchronous placeholder.
  # beta_create_PeerService_stub returns a dynamic stub whose methods (and
  # their `.future` asynchronous variants) actually issue the RPCs; this class
  # only documents the call signatures.
  def Request(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Connection
    """
    raise NotImplementedError()
  Request.future = None
  def GetStatus(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetStatus.future = None
  def GetScoreStatus(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetScoreStatus.future = None
  def Stop(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  Stop.future = None
  def CreateTx(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  CreateTx.future = None
  def GetTx(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetTx.future = None
  def GetBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetBlock.future = None
  def Query(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  Query.future = None
  def GetInvokeResult(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  GetInvokeResult.future = None
  def BlockSync(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interface for correcting (syncing) a Peer's Block Height.
    """
    raise NotImplementedError()
  BlockSync.future = None
  def AnnounceUnconfirmedBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interfaces that receive broadcasts after Subscribe start with the Announce- prefix.
    """
    raise NotImplementedError()
  AnnounceUnconfirmedBlock.future = None
  def AnnounceConfirmedBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceConfirmedBlock.future = None
  def AnnounceNewPeer(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceNewPeer.future = None
  def AnnounceDeletePeer(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceDeletePeer.future = None
  def Echo(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interface for test verification.
    """
    raise NotImplementedError()
  Echo.future = None
  def ComplainLeader(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Interface for leader election.
    """
    raise NotImplementedError()
  ComplainLeader.future = None
  def AnnounceNewLeader(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AnnounceNewLeader.future = None
  def GetLastBlockHash(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """/////////////////////////////////////////////////////////////////////
    Interface that must only be allowed for the BlockGenerator (leader).
    /////////////////////////////////////////////////////////////////////
    In RadioStation, GetLastBlockHash is used for Block Height Sync.
    """
    raise NotImplementedError()
  GetLastBlockHash.future = None
  def Subscribe(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Subscribe and UnSubscribe are broadcast-related methods and must be
    structured identically to radiostation.proto.
    """
    raise NotImplementedError()
  Subscribe.future = None
  def UnSubscribe(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  UnSubscribe.future = None
  def AddTx(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  AddTx.future = None
  def VoteUnconfirmedBlock(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    raise NotImplementedError()
  VoteUnconfirmedBlock.future = None
def beta_create_PeerService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This function was
  generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
  # One row per RPC: method name -> (request type, response type, handler).
  # All PeerService methods are unary-unary, so the three beta tables
  # (deserializers, serializers, implementations) are derived from this map.
  _rpcs = {
      'AddTx': (TxSend, CommonReply, servicer.AddTx),
      'AnnounceConfirmedBlock': (BlockAnnounce, CommonReply, servicer.AnnounceConfirmedBlock),
      'AnnounceDeletePeer': (PeerID, CommonReply, servicer.AnnounceDeletePeer),
      'AnnounceNewLeader': (ComplainLeaderRequest, CommonReply, servicer.AnnounceNewLeader),
      'AnnounceNewPeer': (PeerRequest, CommonReply, servicer.AnnounceNewPeer),
      'AnnounceUnconfirmedBlock': (BlockSend, CommonReply, servicer.AnnounceUnconfirmedBlock),
      'BlockSync': (BlockSyncRequest, BlockSyncReply, servicer.BlockSync),
      'ComplainLeader': (ComplainLeaderRequest, CommonReply, servicer.ComplainLeader),
      'CreateTx': (CreateTxRequest, CreateTxReply, servicer.CreateTx),
      'Echo': (CommonRequest, CommonReply, servicer.Echo),
      'GetBlock': (GetBlockRequest, GetBlockReply, servicer.GetBlock),
      'GetInvokeResult': (GetInvokeResultRequest, GetInvokeResultReply, servicer.GetInvokeResult),
      'GetLastBlockHash': (CommonRequest, BlockReply, servicer.GetLastBlockHash),
      'GetScoreStatus': (StatusRequest, StatusReply, servicer.GetScoreStatus),
      'GetStatus': (StatusRequest, StatusReply, servicer.GetStatus),
      'GetTx': (GetTxRequest, GetTxReply, servicer.GetTx),
      'Query': (QueryRequest, QueryReply, servicer.Query),
      'Request': (Message, Message, servicer.Request),
      'Stop': (StopRequest, StopReply, servicer.Stop),
      'Subscribe': (SubscribeRequest, CommonReply, servicer.Subscribe),
      'UnSubscribe': (SubscribeRequest, CommonReply, servicer.UnSubscribe),
      'VoteUnconfirmedBlock': (BlockVote, CommonReply, servicer.VoteUnconfirmedBlock),
  }
  request_deserializers = {
      ('PeerService', name): req.FromString
      for name, (req, _rep, _fn) in _rpcs.items()}
  response_serializers = {
      ('PeerService', name): rep.SerializeToString
      for name, (_req, rep, _fn) in _rpcs.items()}
  method_implementations = {
      ('PeerService', name): face_utilities.unary_unary_inline(fn)
      for name, (_req, _rep, fn) in _rpcs.items()}
  server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
  return beta_implementations.server(method_implementations, options=server_options)
def beta_create_PeerService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This function was
  generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
  # One row per RPC: method name -> (request type, response type).
  # Every PeerService method is unary-unary; serializer, deserializer and
  # cardinality tables are all derived from this single map.
  _rpcs = {
      'AddTx': (TxSend, CommonReply),
      'AnnounceConfirmedBlock': (BlockAnnounce, CommonReply),
      'AnnounceDeletePeer': (PeerID, CommonReply),
      'AnnounceNewLeader': (ComplainLeaderRequest, CommonReply),
      'AnnounceNewPeer': (PeerRequest, CommonReply),
      'AnnounceUnconfirmedBlock': (BlockSend, CommonReply),
      'BlockSync': (BlockSyncRequest, BlockSyncReply),
      'ComplainLeader': (ComplainLeaderRequest, CommonReply),
      'CreateTx': (CreateTxRequest, CreateTxReply),
      'Echo': (CommonRequest, CommonReply),
      'GetBlock': (GetBlockRequest, GetBlockReply),
      'GetInvokeResult': (GetInvokeResultRequest, GetInvokeResultReply),
      'GetLastBlockHash': (CommonRequest, BlockReply),
      'GetScoreStatus': (StatusRequest, StatusReply),
      'GetStatus': (StatusRequest, StatusReply),
      'GetTx': (GetTxRequest, GetTxReply),
      'Query': (QueryRequest, QueryReply),
      'Request': (Message, Message),
      'Stop': (StopRequest, StopReply),
      'Subscribe': (SubscribeRequest, CommonReply),
      'UnSubscribe': (SubscribeRequest, CommonReply),
      'VoteUnconfirmedBlock': (BlockVote, CommonReply),
  }
  request_serializers = {
      ('PeerService', name): req.SerializeToString
      for name, (req, _rep) in _rpcs.items()}
  response_deserializers = {
      ('PeerService', name): rep.FromString
      for name, (_req, rep) in _rpcs.items()}
  cardinalities = {
      name: cardinality.Cardinality.UNARY_UNARY for name in _rpcs}
  stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
  return beta_implementations.dynamic_stub(channel, 'PeerService', cardinalities, options=stub_options)
class BetaRadioStationServicer(object):
  """The Beta API is deprecated for 0.15.0 and later.
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This class was generated
  only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
  """#######################################################
  RadioStation Service
  """
  # Every handler below is a generated default that reports UNIMPLEMENTED;
  # a concrete servicer subclass overrides the methods it actually serves.
  def Request(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetStatus(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def Stop(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def ConnectPeer(self, request, context):
    """Peer connection
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetPeerList(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def GetPeerStatus(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def AnnounceNewLeader(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def Subscribe(self, request, context):
    """Subscribe and UnSubscribe are broadcast-related methods and must be
    structured identically to loopchain.proto.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def UnSubscribe(self, request, context):
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
class BetaRadioStationStub(object):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This class was generated
    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""

    # RadioStation client stub: every RPC placeholder raises until the
    # dynamic-stub machinery supplies real implementations.

    def Request(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    def GetStatus(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    def Stop(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    def ConnectPeer(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        """Peer connection (translated from the original Korean docstring)."""
        raise NotImplementedError()

    def GetPeerList(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    def GetPeerStatus(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    def AnnounceNewLeader(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    def Subscribe(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        """Subscribe and UnSubscribe are broadcast-related methods and must be
        kept in sync with loopchain.proto (translated from the original
        Korean docstring)."""
        raise NotImplementedError()

    def UnSubscribe(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    # The real generated stub exposes a ``.future`` attribute on each RPC;
    # the placeholders carry None.  Grouped here for readability.
    Request.future = None
    GetStatus.future = None
    Stop.future = None
    ConnectPeer.future = None
    GetPeerList.future = None
    GetPeerStatus.future = None
    AnnounceNewLeader.future = None
    Subscribe.future = None
    UnSubscribe.future = None
def beta_create_RadioStation_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This function was
    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
    # (service, method) -> parser used to decode incoming request bytes.
    request_deserializers = {
        ('RadioStation', 'AnnounceNewLeader'): ComplainLeaderRequest.FromString,
        ('RadioStation', 'ConnectPeer'): PeerRequest.FromString,
        ('RadioStation', 'GetPeerList'): CommonRequest.FromString,
        ('RadioStation', 'GetPeerStatus'): PeerID.FromString,
        ('RadioStation', 'GetStatus'): StatusRequest.FromString,
        ('RadioStation', 'Request'): Message.FromString,
        ('RadioStation', 'Stop'): StopRequest.FromString,
        ('RadioStation', 'Subscribe'): SubscribeRequest.FromString,
        ('RadioStation', 'UnSubscribe'): SubscribeRequest.FromString,
    }
    # (service, method) -> serializer used to encode the response message.
    response_serializers = {
        ('RadioStation', 'AnnounceNewLeader'): CommonReply.SerializeToString,
        ('RadioStation', 'ConnectPeer'): PeerReply.SerializeToString,
        ('RadioStation', 'GetPeerList'): PeerList.SerializeToString,
        ('RadioStation', 'GetPeerStatus'): StatusReply.SerializeToString,
        ('RadioStation', 'GetStatus'): StatusReply.SerializeToString,
        ('RadioStation', 'Request'): Message.SerializeToString,
        ('RadioStation', 'Stop'): StopReply.SerializeToString,
        ('RadioStation', 'Subscribe'): CommonReply.SerializeToString,
        ('RadioStation', 'UnSubscribe'): CommonReply.SerializeToString,
    }
    # Every RadioStation RPC is a plain request/response call, so each
    # servicer method is wrapped as a unary-unary inline implementation.
    method_implementations = {
        ('RadioStation', 'AnnounceNewLeader'): face_utilities.unary_unary_inline(servicer.AnnounceNewLeader),
        ('RadioStation', 'ConnectPeer'): face_utilities.unary_unary_inline(servicer.ConnectPeer),
        ('RadioStation', 'GetPeerList'): face_utilities.unary_unary_inline(servicer.GetPeerList),
        ('RadioStation', 'GetPeerStatus'): face_utilities.unary_unary_inline(servicer.GetPeerStatus),
        ('RadioStation', 'GetStatus'): face_utilities.unary_unary_inline(servicer.GetStatus),
        ('RadioStation', 'Request'): face_utilities.unary_unary_inline(servicer.Request),
        ('RadioStation', 'Stop'): face_utilities.unary_unary_inline(servicer.Stop),
        ('RadioStation', 'Subscribe'): face_utilities.unary_unary_inline(servicer.Subscribe),
        ('RadioStation', 'UnSubscribe'): face_utilities.unary_unary_inline(servicer.UnSubscribe),
    }
    # Bundle the codec tables plus threading/timeout knobs into server options.
    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
    return beta_implementations.server(method_implementations, options=server_options)
def beta_create_RadioStation_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This function was
    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
    # (service, method) -> serializer used to encode outgoing requests.
    request_serializers = {
        ('RadioStation', 'AnnounceNewLeader'): ComplainLeaderRequest.SerializeToString,
        ('RadioStation', 'ConnectPeer'): PeerRequest.SerializeToString,
        ('RadioStation', 'GetPeerList'): CommonRequest.SerializeToString,
        ('RadioStation', 'GetPeerStatus'): PeerID.SerializeToString,
        ('RadioStation', 'GetStatus'): StatusRequest.SerializeToString,
        ('RadioStation', 'Request'): Message.SerializeToString,
        ('RadioStation', 'Stop'): StopRequest.SerializeToString,
        ('RadioStation', 'Subscribe'): SubscribeRequest.SerializeToString,
        ('RadioStation', 'UnSubscribe'): SubscribeRequest.SerializeToString,
    }
    # (service, method) -> parser used to decode incoming response bytes.
    response_deserializers = {
        ('RadioStation', 'AnnounceNewLeader'): CommonReply.FromString,
        ('RadioStation', 'ConnectPeer'): PeerReply.FromString,
        ('RadioStation', 'GetPeerList'): PeerList.FromString,
        ('RadioStation', 'GetPeerStatus'): StatusReply.FromString,
        ('RadioStation', 'GetStatus'): StatusReply.FromString,
        ('RadioStation', 'Request'): Message.FromString,
        ('RadioStation', 'Stop'): StopReply.FromString,
        ('RadioStation', 'Subscribe'): CommonReply.FromString,
        ('RadioStation', 'UnSubscribe'): CommonReply.FromString,
    }
    # Every RPC on this service is unary request -> unary response.
    cardinalities = {
        'AnnounceNewLeader': cardinality.Cardinality.UNARY_UNARY,
        'ConnectPeer': cardinality.Cardinality.UNARY_UNARY,
        'GetPeerList': cardinality.Cardinality.UNARY_UNARY,
        'GetPeerStatus': cardinality.Cardinality.UNARY_UNARY,
        'GetStatus': cardinality.Cardinality.UNARY_UNARY,
        'Request': cardinality.Cardinality.UNARY_UNARY,
        'Stop': cardinality.Cardinality.UNARY_UNARY,
        'Subscribe': cardinality.Cardinality.UNARY_UNARY,
        'UnSubscribe': cardinality.Cardinality.UNARY_UNARY,
    }
    # Bundle codecs and threading configuration, then build a dynamic stub
    # bound to the 'RadioStation' service on the given channel.
    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
    return beta_implementations.dynamic_stub(channel, 'RadioStation', cardinalities, options=stub_options)
class BetaContainerServicer(object):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This class was generated
    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""

    # Container service: the single beta handler reports UNIMPLEMENTED until
    # a concrete servicer overrides it.

    def Request(self, request, context):
        context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
class BetaContainerStub(object):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This class was generated
    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""

    # Container client stub: the RPC placeholder raises until the dynamic
    # stub machinery supplies a real implementation.

    def Request(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
        raise NotImplementedError()

    # The real generated stub exposes a ``.future`` attribute on the RPC.
    Request.future = None
def beta_create_Container_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This function was
    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
    # (service, method) -> request parser / response serializer tables.
    request_deserializers = {
        ('Container', 'Request'): Message.FromString,
    }
    response_serializers = {
        ('Container', 'Request'): Message.SerializeToString,
    }
    # The single Container RPC is a plain request/response (unary-unary) call.
    method_implementations = {
        ('Container', 'Request'): face_utilities.unary_unary_inline(servicer.Request),
    }
    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
    return beta_implementations.server(method_implementations, options=server_options)
def beta_create_Container_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
    """The Beta API is deprecated for 0.15.0 and later.
    It is recommended to use the GA API (classes and functions in this
    file not marked beta) for all further purposes. This function was
    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
    # Codec tables for the single Container RPC.
    request_serializers = {
        ('Container', 'Request'): Message.SerializeToString,
    }
    response_deserializers = {
        ('Container', 'Request'): Message.FromString,
    }
    # Unary request -> unary response.
    cardinalities = {
        'Request': cardinality.Cardinality.UNARY_UNARY,
    }
    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
    return beta_implementations.dynamic_stub(channel, 'Container', cardinalities, options=stub_options)
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
| 42.873077
| 7,702
| 0.712865
| 14,992
| 144,911
| 6.68083
| 0.037086
| 0.020447
| 0.01905
| 0.016474
| 0.754341
| 0.732316
| 0.717509
| 0.702044
| 0.655328
| 0.648559
| 0
| 0.021095
| 0.161582
| 144,911
| 3,379
| 7,703
| 42.885765
| 0.803286
| 0.073024
| 0
| 0.681611
| 1
| 0.00035
| 0.177325
| 0.079367
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061296
| false
| 0.00035
| 0.004553
| 0
| 0.074256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
017d1f9358cdad31c3825831d61287d902db0024
| 1,455
|
py
|
Python
|
tests/test_anonymous.py
|
psentinelli/filebrowser-safe
|
8677982c3d32280f92d2a1499612553a4cbea6c0
|
[
"BSD-3-Clause"
] | 25
|
2015-04-11T15:51:50.000Z
|
2021-11-01T03:53:35.000Z
|
tests/test_anonymous.py
|
psentinelli/filebrowser-safe
|
8677982c3d32280f92d2a1499612553a4cbea6c0
|
[
"BSD-3-Clause"
] | 88
|
2015-01-06T20:59:10.000Z
|
2022-03-29T14:05:10.000Z
|
tests/test_anonymous.py
|
psentinelli/filebrowser-safe
|
8677982c3d32280f92d2a1499612553a4cbea6c0
|
[
"BSD-3-Clause"
] | 85
|
2015-01-06T00:45:22.000Z
|
2021-12-18T20:40:46.000Z
|
from django.test import TestCase
from django.urls import reverse
class FilebrowserAnonymousTestCase(TestCase):
    """Anonymous users must be redirected to the admin login page for every
    filebrowser view."""

    def _assert_login_required(self, url_name):
        """Resolve *url_name* and assert an anonymous GET is redirected to login."""
        url = reverse(url_name)
        response = self.client.get(url)
        self.assertEqual(302, response.status_code)
        self.assertEqual("/admin/login/?next=" + url, response.url)

    def test_browse(self):
        self._assert_login_required("fb_browse")

    def test_mkdir(self):
        # BUG FIX: previously tested "fb_browse" (copy/paste error), so the
        # mkdir view was never exercised.  Assumes an "fb_mkdir" url name is
        # declared — confirm against the project's urls.py.
        self._assert_login_required("fb_mkdir")

    def test_rename(self):
        self._assert_login_required("fb_rename")

    def test_delete(self):
        self._assert_login_required("fb_delete")

    def test_upload(self):
        self._assert_login_required("fb_upload")

    def test_do_upload(self):
        self._assert_login_required("fb_do_upload")
| 35.487805
| 67
| 0.650859
| 177
| 1,455
| 5.237288
| 0.163842
| 0.194175
| 0.090615
| 0.10356
| 0.812298
| 0.764833
| 0.764833
| 0.764833
| 0.764833
| 0.764833
| 0
| 0.015817
| 0.217869
| 1,455
| 40
| 68
| 36.375
| 0.79877
| 0
| 0
| 0.606061
| 0
| 0
| 0.117526
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 1
| 0.181818
| false
| 0
| 0.060606
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6d7e8fa257e87b632f8db45c59eeff9555c5fc3d
| 5,617
|
py
|
Python
|
AjkSpider/tools/db_final.py
|
Justyer/AnjukeSpider
|
495c036a4c2236a11da9b17f75ddffd9f2fab2f6
|
[
"MIT"
] | null | null | null |
AjkSpider/tools/db_final.py
|
Justyer/AnjukeSpider
|
495c036a4c2236a11da9b17f75ddffd9f2fab2f6
|
[
"MIT"
] | null | null | null |
AjkSpider/tools/db_final.py
|
Justyer/AnjukeSpider
|
495c036a4c2236a11da9b17f75ddffd9f2fab2f6
|
[
"MIT"
] | null | null | null |
# import psycopg2
#
# conn = psycopg2.connect(database='lj_db', user='postgres', password='495495', host='127.0.0.1', port='5432')
# cur = conn.cursor()
import pymysql.cursors

# Open the target MySQL connection used by the CREATE TABLE statements below.
# NOTE(review): credentials are hard-coded; move them to config/env before
# sharing or deploying this script.
conn = pymysql.connect(host='localhost',
                       user='root',
                       password='162534',
                       db='dashuju',
                       charset='utf8mb4',
                       cursorclass=pymysql.cursors.DictCursor)  # rows come back as dicts
cur = conn.cursor()
# cur.execute('''
# create table proxy(
# id serial primary key,
# ip varchar(255) not null
# )
# ''')
# cur.execute('''
# create table test(
# id serial primary key,
# t1 varchar(255) not null,
# t2 int not null
# )
# ''')
cur.execute('''
create table t_web_lj_city(
id serial primary key,
cn_name varchar(255) not null,
route varchar(255) not null,
url varchar(255) not null
)
''')
cur.execute('''
create table t_web_lj_district(
id serial primary key,
cn_name varchar(255) not null,
route varchar(255) not null,
city_id int not null
)
''')
cur.execute('''
create table t_web_lj_community(
id serial primary key,
cn_name varchar(255) not null,
route varchar(255) not null,
district_id int not null
)
''')
cur.execute('''
create table t_web_lj_residence(
id serial primary key,
residence_name varchar(255) not null,
avg_price varchar(255) not null,
avg_time varchar(255) not null,
address varchar(255) not null,
coordinate varchar(255) not null,
build_time varchar(255) not null,
property_price varchar(255) not null,
property_company varchar(255) not null,
developer varchar(255) not null,
total_buildings varchar(255) not null,
total_houses varchar(255) not null,
bsn_dt varchar(255) not null,
tms varchar(255) not null,
url varchar(255) not null,
webst_nm varchar(255) not null,
crawl_time varchar(255) not null,
community_id int not null
)
''')
#
# cur.execute('''
# create table lj_residence_around(
# id serial primary key,
#
# title varchar(255) not null,
# description varchar(255) not null,
# distance varchar(255) not null,
# type2 varchar(255) not null,
# type1 varchar(255) not null,
#
# url varchar(255) not null,
# crawl_time varchar(255) not null,
# residence_id int not null
# )
# ''')
cur.execute('''
create table t_web_lj_esf(
id serial primary key,
structure varchar(255) not null,
orientation varchar(255) not null,
area varchar(255) not null,
inner_area varchar(255) not null,
heating_style varchar(255) not null,
decoration varchar(255) not null,
floor varchar(255) not null,
total_floor varchar(255) not null,
house_type_struct varchar(255) not null,
build_type varchar(255) not null,
build_struct varchar(255) not null,
household varchar(255) not null,
elevator varchar(255) not null,
ring_num varchar(255) not null,
lj_num varchar(255) not null,
house_age varchar(255) not null,
property_type varchar(255) not null,
house_type varchar(255) not null,
house_owner varchar(255) not null,
listing_date varchar(255) not null,
total_price varchar(255) not null,
unit_price varchar(255) not null,
last_deal varchar(255) not null,
mortgage varchar(255) not null,
house_backup varchar(255) not null,
bsn_dt varchar(255) not null,
tms varchar(255) not null,
url varchar(255) not null,
webbst_nm varchar(255) not null,
crawl_time varchar(255) not null,
residence_url varchar(255) not null,
residence_id int not null
)
''')
cur.execute('''
create table t_web_lj_deal(
id serial primary key,
structure varchar(255) not null,
orientation varchar(255) not null,
area varchar(255) not null,
inner_area varchar(255) not null,
heating_style varchar(255) not null,
decoration varchar(255) not null,
floor varchar(255) not null,
total_floor varchar(255) not null,
house_type_struct varchar(255) not null,
build_type varchar(255) not null,
build_struct varchar(255) not null,
household varchar(255) not null,
elevator varchar(255) not null,
house_age varchar(255) not null,
property_type varchar(255) not null,
house_type varchar(255) not null,
house_owner varchar(255) not null,
listing_price varchar(255) not null,
listing_date varchar(255) not null,
total_price varchar(255) not null,
transaction_date varchar(255) not null,
last_deal varchar(255) not null,
deal_cycle varchar(255) not null,
look_times varchar(255) not null,
bsn_dt varchar(255) not null,
tms varchar(255) not null,
url varchar(255) not null,
webbst_nm varchar(255) not null,
crawl_time varchar(255) not null,
residence_url varchar(255) not null,
residence_id int not null
)
''')
# Signal completion, persist the schema changes, then release DB resources.
# BUG FIX: the original used the Python-2-only statement form
# ``print 'db_final ok'``; the parenthesized call below is valid on both
# Python 2 and Python 3 (the rest of the script is already py3-compatible).
print('db_final ok')
conn.commit()
cur.close()
conn.close()
| 30.527174
| 111
| 0.589461
| 714
| 5,617
| 4.515406
| 0.161064
| 0.217122
| 0.375
| 0.490385
| 0.765819
| 0.687035
| 0.687035
| 0.687035
| 0.665012
| 0.644231
| 0
| 0.081273
| 0.323126
| 5,617
| 183
| 112
| 30.693989
| 0.766702
| 0.142247
| 0
| 0.625954
| 0
| 0
| 0.89524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.007634
| 0.007634
| null | null | 0.007634
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a3041c6f7b5d29f798e962855bef511409f20e9c
| 75
|
py
|
Python
|
util/__init__.py
|
metaperl/freegold-focus
|
49cf86be57c76b265f711e337f85d8ee2b156301
|
[
"MIT"
] | null | null | null |
util/__init__.py
|
metaperl/freegold-focus
|
49cf86be57c76b265f711e337f85d8ee2b156301
|
[
"MIT"
] | null | null | null |
util/__init__.py
|
metaperl/freegold-focus
|
49cf86be57c76b265f711e337f85d8ee2b156301
|
[
"MIT"
] | null | null | null |
def full_path(p=""):
    """Return *p* resolved relative to this package's directory.

    With the default empty *p*, returns the package directory itself.
    """
    # BUG FIX: the module never imported ``os``, so every call raised
    # NameError.  A function-local import keeps the fix self-contained.
    import os
    return os.path.join(os.path.dirname(__file__), p)
| 25
| 53
| 0.68
| 13
| 75
| 3.538462
| 0.692308
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 75
| 2
| 54
| 37.5
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
099040c419923597189bc548a0b6f41cc897df36
| 24
|
py
|
Python
|
utils/__init__.py
|
Lars-H/hdt_sampler
|
c30ab87dbe0c489ed274d52af1af090045cade9d
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
Lars-H/hdt_sampler
|
c30ab87dbe0c489ed274d52af1af090045cade9d
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
Lars-H/hdt_sampler
|
c30ab87dbe0c489ed274d52af1af090045cade9d
|
[
"MIT"
] | 1
|
2020-11-12T15:07:22.000Z
|
2020-11-12T15:07:22.000Z
|
from .rdf_utils import *
| 24
| 24
| 0.791667
| 4
| 24
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
09e6b4830587e9b61ed1a9b552f158cf1f3a37d7
| 49
|
py
|
Python
|
utils/losses/__init__.py
|
rbg-research/Floor-Plan-Detection
|
c6e63d880e8996eb31eebd70aa7331018392db05
|
[
"MIT"
] | 3
|
2022-01-11T16:42:23.000Z
|
2022-02-21T09:05:25.000Z
|
utils/losses/__init__.py
|
rbg-research/Floor-Plan-Detection
|
c6e63d880e8996eb31eebd70aa7331018392db05
|
[
"MIT"
] | null | null | null |
utils/losses/__init__.py
|
rbg-research/Floor-Plan-Detection
|
c6e63d880e8996eb31eebd70aa7331018392db05
|
[
"MIT"
] | 1
|
2021-11-19T07:01:57.000Z
|
2021-11-19T07:01:57.000Z
|
from floortrans.losses.uncertainty_loss import *
| 24.5
| 48
| 0.857143
| 6
| 49
| 6.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
09e83c3999ea88dd92d57e165272d1d34c9a854a
| 34
|
py
|
Python
|
env/local/lib/python2.7/site-packages/pyteomics/openms/__init__.py
|
sdgroeve/Omega_server
|
e788af5d5e2717c19100a88467c7126c8fcef033
|
[
"MIT"
] | 1
|
2018-08-20T08:59:42.000Z
|
2018-08-20T08:59:42.000Z
|
env/local/lib/python2.7/site-packages/pyteomics/openms/__init__.py
|
sdgroeve/Omega_server
|
e788af5d5e2717c19100a88467c7126c8fcef033
|
[
"MIT"
] | 9
|
2018-02-14T14:24:39.000Z
|
2021-12-13T19:45:34.000Z
|
python_env/lib/python2.7/site-packages/pyteomics/openms/__init__.py
|
Rappsilber-Laboratory/xiSPEC_ms_parser
|
3c8f9ab1a5ca449ddca35be7e5fd1eb58b0fa943
|
[
"Apache-2.0"
] | 2
|
2017-12-11T14:47:35.000Z
|
2018-02-17T09:56:37.000Z
|
from . import featurexml, trafoxml
| 34
| 34
| 0.823529
| 4
| 34
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
61dd325a5bbd7a2d4d1490d59970a7329c7efe11
| 56
|
py
|
Python
|
Chapter04/list_sort.py
|
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
|
805d00c7a54927ba94c9077e9a580508ee3c5e56
|
[
"MIT"
] | 13
|
2018-06-21T01:44:49.000Z
|
2021-12-01T10:49:53.000Z
|
Chapter04/list_sort.py
|
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
|
805d00c7a54927ba94c9077e9a580508ee3c5e56
|
[
"MIT"
] | null | null | null |
Chapter04/list_sort.py
|
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
|
805d00c7a54927ba94c9077e9a580508ee3c5e56
|
[
"MIT"
] | 6
|
2018-10-05T08:29:24.000Z
|
2022-01-11T14:49:50.000Z
|
>>> l.sort()
>>> l
[[1, 43], [2, 34], [3, 56], [6, 98]]
| 14
| 36
| 0.321429
| 11
| 56
| 1.636364
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.214286
| 56
| 3
| 37
| 18.666667
| 0.136364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1106570e272354757eb2ce10c92335b8486b1215
| 5,263
|
py
|
Python
|
pendulum/tests/test_realism.py
|
PabRod/pendulum
|
957409d44f6d0bbf31adad1b6778c3fcd8c0fdf3
|
[
"MIT"
] | 1
|
2022-02-24T21:02:47.000Z
|
2022-02-24T21:02:47.000Z
|
pendulum/tests/test_realism.py
|
PabRod/pendulum
|
957409d44f6d0bbf31adad1b6778c3fcd8c0fdf3
|
[
"MIT"
] | 8
|
2019-07-16T13:34:13.000Z
|
2019-08-09T09:20:46.000Z
|
pendulum/tests/test_realism.py
|
PabRod/pendulum
|
957409d44f6d0bbf31adad1b6778c3fcd8c0fdf3
|
[
"MIT"
] | 1
|
2019-07-26T09:09:14.000Z
|
2019-07-26T09:09:14.000Z
|
from pendulum.models import *
from pendulum.models import _format_accelerations
import numpy as np
import pytest
@pytest.mark.parametrize("input, exp_output", [
    ((0, 0), (0, 0)),        # stable equilibrium (hanging down, at rest)
    ((np.pi, 0), (0, 0)),    # unstable equilibrium (upright, at rest)
])
def test_dpendulum(input, exp_output):
    """Equilibrium states must yield zero derivatives."""
    derivative = dpendulum(input)
    assert derivative == pytest.approx(exp_output, 1e-8), \
        'pendulum is not behaving as expected'
def test_damped_pendulum():
    """A damped pendulum must settle to the resting equilibrium long-term."""
    times = np.linspace(0, 100, 100)          # simulation time
    # Initial condition (th_0, w_0) with damping coefficient 2.
    trajectory = pendulum((0, 1), times, d=2)
    final_theta = trajectory[-1, 0]
    final_omega = trajectory[-1, 1]
    assert final_theta == pytest.approx(0.0, 1e-8)
    assert final_omega == pytest.approx(0.0, 1e-8)
def test_undamped_pendulum():
    """Without damping the pendulum keeps moving: it must NOT end at rest."""
    times = np.linspace(0, 100, 100)          # simulation time
    trajectory = pendulum((0, 1), times)      # initial condition (th_0, w_0)
    final_theta = trajectory[-1, 0]
    final_omega = trajectory[-1, 1]
    assert final_theta != pytest.approx(0.0, 1e-8)
    assert final_omega != pytest.approx(0.0, 1e-8)
def test_freefall_pendulum():
    """A pendulum whose pivot free-falls feels no effective gravity, so no
    relative movement is expected."""
    tol = 1e-4
    g = 9.8                                   # acceleration of gravity
    times = np.linspace(0, 10, 1000)          # simulation time
    start = (np.pi / 2, 0)                    # initial condition (th_0, w_0)
    # Scenario expressed via the pivot's position: fixed in x, free-falling in y.
    sol = pendulum(start, times, lambda t: 0.0 * t, lambda t: -g / 2 * t ** 2, g=g)
    assert sol[-1, 0] == pytest.approx(start[0], tol)
    # Same scenario expressed via the pivot's acceleration instead.
    sol_acc = pendulum(start, times, lambda t: 0.0 * t, lambda t: 0.0 * t - g,
                       is_acceleration=True, g=g)
    assert sol_acc[-1, 0] == pytest.approx(start[0], tol)
def test_noninertial_pendulum_no_acceleration():
    """A pivot moving at constant velocity must not alter the dynamics
    (Galileo's relativity)."""
    state = (0, 0)                  # initial condition (th_0, w_0)
    drift_x = lambda t: 1.0 * t     # uniform-speed pivot motion
    drift_y = lambda t: 2.0 * t
    inertial = dpendulum(state, 0.0)
    non_inertial = dpendulum(state, 0.0, drift_x, drift_y)
    assert inertial == non_inertial
def test_noninertial_pendulum():
    """An accelerating pivot must change the dynamics."""
    state = (0, 0)                    # initial condition (th_0, w_0)
    acc_x = lambda t: 1.0 * t ** 2    # accelerated pivot motion
    acc_y = lambda t: 2.0 * t
    inertial = dpendulum(state, 0.0)
    non_inertial = dpendulum(state, 0.0, acc_x, acc_y)
    assert inertial != non_inertial
@pytest.mark.parametrize("input, exp_output", [
    ((0, 0, 0, 0), (0, 0, 0, 0)),            # stable equilibrium
    ((np.pi, 0, 0, 0), (0, 0, 0, 0)),        # unstable equilibria
    ((0, 0, np.pi, 0), (0, 0, 0, 0)),
    ((np.pi, 0, np.pi, 0), (0, 0, 0, 0)),
])
def test_ddouble_pendulum(input, exp_output):
    """Every equilibrium of the double pendulum must yield zero derivatives."""
    derivative = ddouble_pendulum(input, 0)
    assert derivative == pytest.approx(exp_output, 1e-8), \
        'pendulum is not behaving as expected'
def test_ni_double_pendulum_no_acceleration():
    """A uniformly moving pivot must not alter the double-pendulum dynamics
    (Galileo's relativity principle)."""
    state = (0, 0, 0, 0)            # initial condition (th_0, w_0, th_1, w_1)
    drift_x = lambda t: 1.0 * t     # uniform-speed pivot motion
    drift_y = lambda t: 2.0 * t
    inertial = ddouble_pendulum(state, 0.0)
    non_inertial = ddouble_pendulum(state, 0.0, drift_x, drift_y)
    assert inertial == non_inertial
def test_freefall_double_pendulum():
    """A double pendulum whose pivot free-falls feels no effective gravity,
    so no relative movement is expected."""
    tol = 1e-4
    g = 9.8                                       # acceleration of gravity
    times = np.linspace(0, 10, 1000)              # simulation time
    start = (np.pi / 2, 0, np.pi / 2, 0)          # initial condition (th_0, w_0)
    # Scenario expressed via the pivot's position: fixed in x, free-falling in y.
    sol = double_pendulum(start, times, lambda t: 0.0 * t, lambda t: -g / 2 * t ** 2, g=g)
    assert sol[-1, 0] == pytest.approx(start[0], tol)
    # Same scenario expressed via the pivot's acceleration instead.
    sol_acc = double_pendulum(start, times, lambda t: 0.0 * t, lambda t: 0.0 * t - g,
                              is_acceleration=True, g=g)
    assert sol_acc[-1, 0] == pytest.approx(start[0], tol)
| 31.327381
| 83
| 0.633099
| 846
| 5,263
| 3.808511
| 0.140662
| 0.029795
| 0.020484
| 0.018622
| 0.870267
| 0.865922
| 0.844507
| 0.80478
| 0.781813
| 0.781813
| 0
| 0.05125
| 0.232567
| 5,263
| 167
| 84
| 31.51497
| 0.746472
| 0.291469
| 0
| 0.580645
| 0
| 0
| 0.029274
| 0
| 0
| 0
| 0
| 0
| 0.139785
| 1
| 0.096774
| false
| 0
| 0.043011
| 0
| 0.139785
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
111309dacfa9a9a6f5b8d63238a364a77e6bc248
| 156
|
py
|
Python
|
Codes/converters.py
|
Muntaha-Islam0019/Hello-World
|
a650bd32e039076ea10caea850453fdaa4744975
|
[
"MIT"
] | 1
|
2019-12-18T09:59:44.000Z
|
2019-12-18T09:59:44.000Z
|
Codes/converters.py
|
Muntaha-Islam0019/Hello-World
|
a650bd32e039076ea10caea850453fdaa4744975
|
[
"MIT"
] | null | null | null |
Codes/converters.py
|
Muntaha-Islam0019/Hello-World
|
a650bd32e039076ea10caea850453fdaa4744975
|
[
"MIT"
] | null | null | null |
# This code is connected with '18 - Modules.py'
def kgs_to_lbs(weight):
    """Convert a weight in kilograms to pounds."""
    return weight * 2.20462
def lbs_t_kgs(weight):
    """Convert a weight in pounds to kilograms.

    NOTE(review): the name looks like a typo for ``lbs_to_kgs`` — kept
    unchanged for backward compatibility with existing callers.
    """
    return weight * 0.453592
| 15.6
| 47
| 0.692308
| 26
| 156
| 4
| 0.730769
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 0.211538
| 156
| 9
| 48
| 17.333333
| 0.723577
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
11513e05d23220f9294ce46d5bdbe84362f48720
| 76
|
py
|
Python
|
src/icupy/number.py
|
miute/icupy
|
253dd666936c23577abd1fdd94bc9cea012017b8
|
[
"MIT"
] | null | null | null |
src/icupy/number.py
|
miute/icupy
|
253dd666936c23577abd1fdd94bc9cea012017b8
|
[
"MIT"
] | null | null | null |
src/icupy/number.py
|
miute/icupy
|
253dd666936c23577abd1fdd94bc9cea012017b8
|
[
"MIT"
] | null | null | null |
"""
Module for icu::number namespace
"""
from .icu.number import * # noqa
| 12.666667
| 33
| 0.657895
| 10
| 76
| 5
| 0.8
| 0.36
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 76
| 5
| 34
| 15.2
| 0.806452
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
28f1a2976e4303be5114da583b652ed4711125d0
| 27
|
py
|
Python
|
vmsshgen/__init__.py
|
dimon222/py-vmsshgen
|
4d5278c7d22e1d4dc217b1ac017377ca5af8b3a3
|
[
"Apache-2.0"
] | null | null | null |
vmsshgen/__init__.py
|
dimon222/py-vmsshgen
|
4d5278c7d22e1d4dc217b1ac017377ca5af8b3a3
|
[
"Apache-2.0"
] | 1
|
2022-03-04T06:35:48.000Z
|
2022-03-04T06:35:48.000Z
|
vmsshgen/__init__.py
|
dimon222/py-vmsshgen
|
4d5278c7d22e1d4dc217b1ac017377ca5af8b3a3
|
[
"Apache-2.0"
] | null | null | null |
from .vmsshgen import main
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e94090e9c786612ccd50c0f325b5ba2d5cfb5b2f
| 11,118
|
py
|
Python
|
modelmaker/predictor.py
|
wangjm12138/modelmaker
|
aa42ce9d504cc13a636b0c9f4ac49b71538c7cda
|
[
"MIT"
] | null | null | null |
modelmaker/predictor.py
|
wangjm12138/modelmaker
|
aa42ce9d504cc13a636b0c9f4ac49b71538c7cda
|
[
"MIT"
] | null | null | null |
modelmaker/predictor.py
|
wangjm12138/modelmaker
|
aa42ce9d504cc13a636b0c9f4ac49b71538c7cda
|
[
"MIT"
] | null | null | null |
import requests
#from .infers_images_api import *
from .client.api import *
from abc import ABCMeta, abstractmethod
from six import with_metaclass
from json import JSONEncoder
import json
import logging
import os
logging.basicConfig()
LOGGER = logging.getLogger('modelmaker-sdk/Predictor')
LOGGER_LEVEL = os.getenv("MODELMAKER_LEVEL", logging.INFO) #cloud
LOGGER.setLevel(int(LOGGER_LEVEL))
class Predictor(object):
"""
A ModelMaker Predictor that can be predicted, got service information and list,
changed service state and configuration.
"""
def __init__(self, session, service_id=None):
"""
Initialize a Predictor, determine the predictor authorize type.
param session: Building interactions with Wangsu Cloud service.
param service_id: The deployed model service id
"""
self.session = session
if service_id is not None and isinstance(service_id,int) == False:
raise TypeError("service_id type is int!")
self.service_id = service_id
self.predictor_instance = PredictorApiAccountImpl(self.session, service_id)
def info(self, service_id = None):
""" Get the deployed model service information
Args:
service_id: The deployed model service id
return: The deployed service information,including model service access address.
"""
result = self.predictor_instance.get_service_info(service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def info_list(self):
"""
return User service list
"""
result = self.predictor_instance.get_service_list()
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def start(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service start tasks result.
"""
result = self.predictor_instance.change_service_state('start', service_id=service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def stop(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service stop tasks result.
"""
result = self.predictor_instance.change_service_state('stop', service_id=service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def update(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service update tasks result.
"""
result = self.predictor_instance.update_request(service_id=service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def delete(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service delete tasks result.
"""
result = self.predictor_instance.delete_request(service_id=service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
# def update_service_config(self, service_id=None, **config_body):
# """ update a service configuration
# Args:
# service_id: service id
# config_body: service configuration parameters
# :return: Service update configuration tasks result.
# """
# return self.predictor_instance.update_service_config(service_id=service_id, **config_body)
#
# def get_service_monitor(self, service_id=None):
#
# """ service monitor information
# Args: service_id:
# return: monitor information
# """
# return self.predictor_instance.get_service_monitor(service_id=service_id)
#
# def get_service_logs(self, service_id=None):
# """ service logs
# Args: service_id:
# return: monitor information
# """
# return self.predictor_instance.get_service_logs(service_id=service_id)
class PredictorApiBase(with_metaclass(ABCMeta, object)):
""" Make prediction requests to a ModelMaker model service endpoint
"""
def __init__(self):
""" Initialize Predictor
service_id: The deployed model service id
"""
# @abstractmethod
# def get_service_info(self):
# """ Get the deployed model service information
# return: The deployed service information,including model service access address.
# """
# pass
#
# @abstractmethod
# def predict(self, data, data_type):
# """
# data(object): Input data for which you want the model to provide inference.
# data_type: {files, images}
# """
# pass
#
# @abstractmethod
# def get_service_list(self):
# """
# return User service list
# """
# pass
#
# @abstractmethod
# def change_service_state(self, node_id, action_body, service_id=None):
# """ change a service state.
# Args:
# node_id: node id
# action_body: Operate type, {stop, run}
# return: Service stop or start tasks result.
# """
# pass
#
# @abstractmethod
# def update_service_config(self, service_id=None, **config_body):
# """ update a service configuration
# Args:
# service_id: service id
# config_body: service configuration parameters
# :return: Service update configuration tasks result.
# """
# pass
#
# @abstractmethod
# def get_service_monitor(self, service_id=None):
# """ service monitor information
# Args: service_id:
# return: monitor information
# """
# pass
#
# @abstractmethod
# def get_service_logs(self, service_id=None):
# """ service logs
# Args: service_id:
# return: monitor information
# """
# pass
class PredictorApiAccountImpl(PredictorApiBase):
""" Make prediction requests to a ModelMaker model service endpoint
"""
def __init__(self, session, service_id):
""" Initialize Predictor
Args:
session: Building interactions with Wangsu Cloud Service, including project id.
service_id: The deployed model service id
"""
self.session = session
if service_id is not None and isinstance(service_id,int) == False:
raise TypeError("service_id type is int!")
self.service_id = service_id
self.service_api = ServiceApi(session.client)
def get_service_info(self, service_id=None):
""" Get the deployed model service information
"""
if service_id is None and self.service_id:
service_id = self.service_id
elif service_id and self.service_id and service_id !=self.service_id:
print("Current service_id is %s, but it will replace by service_id %s"%(str(self.service_id),str(service_id)))
elif service_id is None and self.service_id is None:
raise ValueError("service_id is need")
body={}
return self.service_api.get_service_info(self.session.project_id, body=body, service_id=service_id)
def get_service_list(self):
"""
return User service list
"""
body={}
return self.service_api.get_service_info(self.session.project_id, body=body, service_id=None)
def info(self, service_id = None):
""" Get the deployed model service information
Args:
service_id: The deployed model service id
return: The deployed service information,including model service access address.
"""
result = self.get_service_info(service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def info_list(self):
"""
return User service list
"""
result = self.get_service_list()
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def start(self, service_id=None):
result = self.change_service_state('start', service_id=None)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def stop(self, service_id=None):
result = self.change_service_state('stop', service_id=None)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def change_service_state(self, action_body, service_id=None):
""" change a service state.
Args:
service_id: service_id
action_body: Operate type, {start, stop}
return: Service stop or start tasks result.
"""
if service_id is None and self.service_id:
service_id = self.service_id
elif service_id and self.service_id and service_id != self.service_id:
print("Current service_id is %s, but it will replace by service_id %s"%(str(self.service_id),str(service_id)))
elif service_id is None and self.service_id is None:
raise ValueError("service_id is need")
body = {}
return self.service_api.operate_a_service(self.session.project_id, body, service_id, action_body)
def update_request(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service update tasks result.
"""
if service_id is None:
service_id = self.service_id
body = {}
return self.service_api.update_micro_service(project_id=self.session.project_id, body=body, service_id=service_id)
def update(self, service_id=None):
result = self.update_request(service_id=service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
def delete_request(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service delete tasks result.
"""
if service_id is None:
service_id = self.service_id
body = {}
return self.service_api.delete_micro_service(project_id=self.session.project_id, body=body, service_id=service_id)
def delete(self, service_id=None):
""" change a service state.
Args:
service_id: service_id
return: Service delete tasks result.
"""
result = self.delete_request(service_id=service_id)
LOGGER.info(json.loads(result.data.decode('utf-8')))
return json.loads(result.data.decode('utf-8'))
# def update_service_config(self, service_id=None, **config_body):
# """ update a service configuration
# Args:
# service_id: service id
# config_body: service configuration parameters
# :return: Service update configuration tasks result.
# """
# service_config_body = config_body
#
# if 'config' in config_body:
# service_config_body['config'] = super(PredictorApiAccountImpl,self).convert_config_format(config_body['config'])
#
# if service_id is None:
# service_id = self.service_id
#
# return self.service_api.update_service_config(self.session.project_id, service_id, service_config_body)
#
# def get_service_monitor(self, service_id=None):
#
# """ service monitor information
# Args: service_id:
# return: monitor information
# """
# if service_id is None:
# service_id = self.service_id
#
# return self.service_api.get_service_monitor(self.session.project_id, service_id)
#
# def get_service_logs(self, service_id=None):
# """ service logs
# Args: service_id:
# return: monitor information
# """
# if service_id is None:
# service_id = self.service_id
#
# return self.service_api.get_service_logs(self.session.project_id, service_id)
| 32.132948
| 117
| 0.711009
| 1,500
| 11,118
| 5.072667
| 0.089333
| 0.18097
| 0.070049
| 0.063872
| 0.838086
| 0.807991
| 0.768169
| 0.73873
| 0.73873
| 0.706006
| 0
| 0.002621
| 0.176471
| 11,118
| 345
| 118
| 32.226087
| 0.828419
| 0.46807
| 0
| 0.607477
| 0
| 0
| 0.072221
| 0.004514
| 0
| 0
| 0
| 0
| 0
| 1
| 0.186916
| false
| 0
| 0.074766
| 0
| 0.448598
| 0.018692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3a9ce59ef357f8082e0a5a1745c5d82c5d7946d8
| 34
|
py
|
Python
|
appyratus/asynchronous/__init__.py
|
gigaquads/appyratus
|
09d88c000e0f3d254f5e1875388ccf87cec4be7c
|
[
"MIT"
] | 1
|
2021-02-26T15:31:01.000Z
|
2021-02-26T15:31:01.000Z
|
appyratus/asynchronous/__init__.py
|
gigaquads/appyratus
|
09d88c000e0f3d254f5e1875388ccf87cec4be7c
|
[
"MIT"
] | null | null | null |
appyratus/asynchronous/__init__.py
|
gigaquads/appyratus
|
09d88c000e0f3d254f5e1875388ccf87cec4be7c
|
[
"MIT"
] | null | null | null |
from .http import AsyncHttpClient
| 17
| 33
| 0.852941
| 4
| 34
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c90457cff2921decd8ebca2698bdf5105eebafc9
| 180
|
py
|
Python
|
aws_xray_lambda_segment_shim/exceptions.py
|
Sam-Martin/aws-xray-sqs-lambda-segment-shim
|
5e3fa50d9e5f6ec72e04ce417c0d8db5d95a8246
|
[
"MIT"
] | 5
|
2021-08-23T16:06:08.000Z
|
2022-03-24T20:55:44.000Z
|
aws_xray_lambda_segment_shim/exceptions.py
|
Sam-Martin/aws-xray-sqs-lambda-segment-shim
|
5e3fa50d9e5f6ec72e04ce417c0d8db5d95a8246
|
[
"MIT"
] | 1
|
2021-11-04T08:06:29.000Z
|
2021-11-05T17:19:55.000Z
|
aws_xray_lambda_segment_shim/exceptions.py
|
Sam-Martin/aws-xray-sqs-lambda-segment-shim
|
5e3fa50d9e5f6ec72e04ce417c0d8db5d95a8246
|
[
"MIT"
] | null | null | null |
class InvalidTraceHeader(Exception):
"""Thrown if a bad trace header was passed in."""
class InvalidMessageID(Exception):
"""Thrown if a bad message id was passed in."""
| 25.714286
| 53
| 0.711111
| 24
| 180
| 5.333333
| 0.625
| 0.234375
| 0.265625
| 0.28125
| 0.328125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 180
| 6
| 54
| 30
| 0.864865
| 0.472222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c911f589d4393255f7d75ac32976015e4036986d
| 6,294
|
py
|
Python
|
ActionTree/tests/timing.py
|
Python3pkg/ActionTree
|
cfa62a95bf40241e6d6b98f1bfbc1b3748e6fc78
|
[
"MIT"
] | 1
|
2021-09-10T19:29:30.000Z
|
2021-09-10T19:29:30.000Z
|
ActionTree/tests/timing.py
|
Python3pkg/ActionTree
|
cfa62a95bf40241e6d6b98f1bfbc1b3748e6fc78
|
[
"MIT"
] | null | null | null |
ActionTree/tests/timing.py
|
Python3pkg/ActionTree
|
cfa62a95bf40241e6d6b98f1bfbc1b3748e6fc78
|
[
"MIT"
] | 1
|
2017-09-03T21:25:00.000Z
|
2017-09-03T21:25:00.000Z
|
# coding: utf8
# Copyright 2013-2017 Vincent Jacques <vincent@vincent-jacques.net>
from __future__ import division, absolute_import, print_function
import unittest
from ActionTree import *
from . import *
class TimingTestCase(ActionTreeTestCase):
def test_success(self):
a = self._action("a")
report = execute(a)
self.assertIsInstance(report.get_action_status(a).pending_time, datetime.datetime)
self.assertEqual(report.get_action_status(a).ready_time, report.get_action_status(a).pending_time)
self.assertIsNone(report.get_action_status(a).cancel_time)
self.assertEqual(report.get_action_status(a).start_time, report.get_action_status(a).ready_time)
self.assertGreater(report.get_action_status(a).success_time, report.get_action_status(a).start_time)
self.assertIsNone(report.get_action_status(a).failure_time)
def test_failure(self):
a = self._action("a", exception=Exception())
report = execute(a, do_raise=False)
self.assertIsInstance(report.get_action_status(a).pending_time, datetime.datetime)
self.assertEqual(report.get_action_status(a).ready_time, report.get_action_status(a).pending_time)
self.assertIsNone(report.get_action_status(a).cancel_time)
self.assertEqual(report.get_action_status(a).start_time, report.get_action_status(a).ready_time)
self.assertIsNone(report.get_action_status(a).success_time)
self.assertGreater(report.get_action_status(a).failure_time, report.get_action_status(a).start_time)
def test_cancelation_before_ready(self):
a = self._action("a")
b = self._action("b", exception=Exception())
a.add_dependency(b)
report = execute(a, do_raise=False)
self.assertIsInstance(report.get_action_status(b).pending_time, datetime.datetime)
self.assertEqual(report.get_action_status(b).ready_time, report.get_action_status(b).pending_time)
self.assertIsNone(report.get_action_status(b).cancel_time)
self.assertEqual(report.get_action_status(b).start_time, report.get_action_status(b).ready_time)
self.assertIsNone(report.get_action_status(b).success_time)
self.assertGreater(report.get_action_status(b).failure_time, report.get_action_status(b).start_time)
self.assertIsInstance(report.get_action_status(a).pending_time, datetime.datetime)
self.assertIsNone(report.get_action_status(a).ready_time)
self.assertEqual(report.get_action_status(a).cancel_time, report.get_action_status(b).failure_time)
self.assertIsNone(report.get_action_status(a).start_time)
self.assertIsNone(report.get_action_status(a).success_time)
self.assertIsNone(report.get_action_status(a).failure_time)
def test_cancelation_with_keep_going(self):
a = self._action("a")
b = self._action("b")
a.add_dependency(b)
c = self._action("c", exception=Exception())
b.add_dependency(c)
report = execute(a, keep_going=True, do_raise=False)
self.assertEqual(report.get_action_status(b).cancel_time, report.get_action_status(c).failure_time)
self.assertEqual(report.get_action_status(a).cancel_time, report.get_action_status(b).cancel_time)
def test_leaves_have_same_ready_time(self):
a = self._action("a")
b = self._action("b")
c = self._action("c")
d = self._action("d")
a.add_dependency(b)
a.add_dependency(c)
a.add_dependency(d)
report = execute(a)
self.assertEqual(report.get_action_status(c).ready_time, report.get_action_status(b).ready_time)
self.assertEqual(report.get_action_status(d).ready_time, report.get_action_status(b).ready_time)
def test_many_dependencies_with_unlimited_cpu_cores(self):
MANY = 20
a = self._action("a")
deps = [self._action(str(i)) for i in range(MANY)]
for dep in deps:
a.add_dependency(dep)
report = execute(a, cpu_cores=UNLIMITED)
for dep in deps[1:]:
self.assertEqual(report.get_action_status(dep).start_time, report.get_action_status(deps[0]).start_time)
def test_many_dependencies_with_one_cpu_cores(self):
MANY = 20
a = self._action("a")
deps = [self._action(str(i)) for i in range(MANY)]
for dep in deps:
a.add_dependency(dep)
report = execute(a, cpu_cores=1)
# No two actions have started at the same time
start_times = set(report.get_action_status(dep).start_time for dep in deps)
self.assertEqual(len(start_times), MANY)
def test_many_dependencies_with_limited_cpu_cores(self):
MANY = 20
a = self._action("a")
deps = [self._action(str(i)) for i in range(MANY)]
for dep in deps:
a.add_dependency(dep)
report = execute(a, cpu_cores=3)
# Only the first three actions have started at the same time
start_times = set(report.get_action_status(dep).start_time for dep in deps)
self.assertEqual(len(start_times), MANY - 2)
def test_scarce_resource_with_many_cpu_cores(self):
r = Resource(1)
a = self._action("a")
b = self._action("b")
b.require_resource(r, 1)
c = self._action("c")
c.require_resource(r, 1)
a.add_dependency(b)
a.add_dependency(c)
report = execute(a, cpu_cores=6)
# @todo Start next action at the same timestamp
self.assertTrue(
report.get_action_status(b).start_time > report.get_action_status(c).success_time or
report.get_action_status(c).start_time > report.get_action_status(b).success_time
)
def test_abundant_resource_with_many_cpu_cores(self):
r = Resource(2)
a = self._action("a")
b = self._action("b")
b.require_resource(r, 1)
c = self._action("c")
c.require_resource(r, 1)
a.add_dependency(b)
a.add_dependency(c)
report = execute(a, cpu_cores=6)
self.assertEqual(report.get_action_status(c).ready_time, report.get_action_status(b).ready_time)
self.assertEqual(report.get_action_status(c).start_time, report.get_action_status(b).start_time)
| 40.87013
| 116
| 0.692882
| 884
| 6,294
| 4.633484
| 0.114253
| 0.118652
| 0.197754
| 0.276855
| 0.843018
| 0.82251
| 0.787109
| 0.744385
| 0.631836
| 0.598145
| 0
| 0.005541
| 0.197172
| 6,294
| 153
| 117
| 41.137255
| 0.805066
| 0.036225
| 0
| 0.557522
| 0
| 0
| 0.0033
| 0
| 0
| 0
| 0
| 0.006536
| 0.300885
| 1
| 0.088496
| false
| 0
| 0.035398
| 0
| 0.132743
| 0.00885
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a33063cb9d23553d95922194461fd9f2c2facb7a
| 3,980
|
py
|
Python
|
pyscf/nao/test/test_0018_fermi_energy.py
|
robert-anderson/pyscf
|
cdc56e168cb15f47e8cdc791a92d689fa9b655af
|
[
"Apache-2.0"
] | 3
|
2021-02-28T00:52:53.000Z
|
2021-03-01T06:23:33.000Z
|
pyscf/nao/test/test_0018_fermi_energy.py
|
robert-anderson/pyscf
|
cdc56e168cb15f47e8cdc791a92d689fa9b655af
|
[
"Apache-2.0"
] | 36
|
2018-08-22T19:44:03.000Z
|
2020-05-09T10:02:36.000Z
|
pyscf/nao/test/test_0018_fermi_energy.py
|
robert-anderson/pyscf
|
cdc56e168cb15f47e8cdc791a92d689fa9b655af
|
[
"Apache-2.0"
] | 4
|
2018-02-14T16:28:28.000Z
|
2019-08-12T16:40:30.000Z
|
from __future__ import print_function, division
import os,unittest,numpy as np
from pyscf.nao.m_fermi_dirac import fermi_dirac_occupations
from pyscf.nao.m_fermi_energy import fermi_energy as get_fermi_energy
class KnowValues(unittest.TestCase):
def test_fermi_energy_spin_saturated(self):
""" This is to test the determination of Fermi level"""
ee = np.arange(-10.13, 100.0, 0.1)
#print('0: ', ee.shape)
nelec = 5.0
telec = 0.01
fermi_energy = get_fermi_energy(ee, nelec, telec)
occ = 2.0*fermi_dirac_occupations(telec, ee, fermi_energy)
self.assertAlmostEqual(occ.sum(), 5.0)
self.assertAlmostEqual(fermi_energy, -9.93)
#print(occ)
#print(occ.sum())
#print(fermi_energy)
def test_fermi_energy_spin_resolved_spin1(self):
""" This is to test the determination of Fermi level"""
ee = np.linspace(-10.13, 99.97, 1102).reshape((1,1102))
#print('1: ', ee.shape)
nelec = 5.0
telec = 0.01
fermi_energy = get_fermi_energy(ee, nelec, telec)
occ = 2.0*fermi_dirac_occupations(telec, ee, fermi_energy)
self.assertAlmostEqual(occ.sum(), 5.0)
self.assertAlmostEqual(fermi_energy, -9.93)
#print(occ)
#print(occ.sum())
#print(fermi_energy)
def test_fermi_energy_spin_resolved(self):
""" This is to test the determination of Fermi level in spin-resolved case"""
ee = np.row_stack((np.linspace(-10.3, 100.0, 1003), np.linspace(-10.0, 100.0, 1003)))
nelec = 11.0
telec = 0.02
#print(ee)
fermi_energy = get_fermi_energy(ee, nelec, telec)
occ = fermi_dirac_occupations(telec, ee, fermi_energy)
self.assertAlmostEqual(occ.sum(), 11.0)
self.assertAlmostEqual(fermi_energy, -9.60016955367)
#print(occ)
#print(occ.sum())
#print(fermi_energy)
def test_fermi_energy_spin_resolved_even(self):
""" This is to test the determination of Fermi level in spin-resolved case"""
ee = np.row_stack((np.linspace(-10.3, 100.0, 1003), np.linspace(-10.0, 100.0, 1003)))
nelec = 20.0
telec = 0.02
#print(ee)
fermi_energy = get_fermi_energy(ee, nelec, telec)
occ = fermi_dirac_occupations(telec, ee, fermi_energy)
self.assertAlmostEqual(occ.sum(), 20.0)
self.assertAlmostEqual(fermi_energy, -9.10544404859)
#print(occ)
#print(occ.sum())
#print(fermi_energy)
def test_fermi_energy_spin_resolved_even_kpoints(self):
""" This is to test the determination of Fermi level in spin-resolved case"""
ee = np.row_stack((np.linspace(-10.1, 100.0, 1003),
np.linspace(-10.2, 100.0, 1003),
np.linspace(-10.3, 100.0, 1003),
np.linspace(-10.4, 100.0, 1003))).reshape((4,1,1003))
nelec = 20.0
telec = 0.02
nkpts = ee.shape[0]
nspin = ee.shape[-2]
#print(ee)
fermi_energy = get_fermi_energy(ee, nelec, telec)
occ = (3.0-nspin)*fermi_dirac_occupations(telec, ee, fermi_energy)
#print(occ)
#print(occ.sum()/nkpts)
#print(fermi_energy)
self.assertAlmostEqual(occ.sum()/nkpts, 20.0)
self.assertAlmostEqual(fermi_energy, -9.2045998319213016)
def test_fermi_energy_spin_resolved_even_kpoints_spin2(self):
""" This is to test the determination of Fermi level in spin-resolved case"""
ee = np.row_stack((np.linspace(-10.1, 100.0, 1003),
np.linspace(-10.2, 100.0, 1003),
np.linspace(-10.3, 100.0, 1003),
np.linspace(-10.4, 100.0, 1003))).reshape((2,2,1003))
nelec = 20.0
telec = 0.02
nkpts = ee.shape[0]
nspin = ee.shape[-2]
#print(ee)
fermi_energy = get_fermi_energy(ee, nelec, telec)
occ = (3.0-nspin)*fermi_dirac_occupations(telec, ee, fermi_energy)
#print(occ)
#print(occ.sum()/nkpts)
#print(fermi_energy)
self.assertAlmostEqual(occ.sum()/nkpts, 20.0)
self.assertAlmostEqual(fermi_energy, -9.2045998319213016)
if __name__ == "__main__" : unittest.main()
| 34.912281
| 89
| 0.657789
| 589
| 3,980
| 4.266553
| 0.135823
| 0.170712
| 0.062077
| 0.031834
| 0.88261
| 0.85953
| 0.846001
| 0.830879
| 0.811779
| 0.811779
| 0
| 0.095073
| 0.204523
| 3,980
| 113
| 90
| 35.221239
| 0.698673
| 0.186935
| 0
| 0.671875
| 0
| 0
| 0.002519
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 1
| 0.09375
| false
| 0
| 0.0625
| 0
| 0.171875
| 0.015625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a36d21a5d5d7253acf618bce2f433a993a57f559
| 195
|
py
|
Python
|
gg/__init__.py
|
willieLjohnson/pygg
|
b204e4b6ab01a5156f7b38e99e8077875d038c86
|
[
"MIT"
] | null | null | null |
gg/__init__.py
|
willieLjohnson/pygg
|
b204e4b6ab01a5156f7b38e99e8077875d038c86
|
[
"MIT"
] | null | null | null |
gg/__init__.py
|
willieLjohnson/pygg
|
b204e4b6ab01a5156f7b38e99e8077875d038c86
|
[
"MIT"
] | null | null | null |
from .display import *
from .game import *
from .ecs import *
from .gen import *
from .player import *
from .structures import *
from .style import *
from .world import *
from .particles import *
| 21.666667
| 25
| 0.728205
| 27
| 195
| 5.259259
| 0.407407
| 0.56338
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 195
| 9
| 26
| 21.666667
| 0.8875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a374c049dbdd294680125fd54cc07c232b43e367
| 175
|
py
|
Python
|
plugins/__init__.py
|
wolfy1339/Python-IRC-Bot
|
87ecacf3968d310dbdd19537914be731abec599b
|
[
"MIT"
] | 3
|
2016-11-25T17:16:49.000Z
|
2017-11-10T02:19:39.000Z
|
plugins/__init__.py
|
wolfy1339/Python-IRC-Bot
|
87ecacf3968d310dbdd19537914be731abec599b
|
[
"MIT"
] | 21
|
2016-11-09T15:45:05.000Z
|
2017-10-23T16:57:37.000Z
|
plugins/__init__.py
|
wolfy1339/Python-IRC-Bot
|
87ecacf3968d310dbdd19537914be731abec599b
|
[
"MIT"
] | null | null | null |
# pylint: disable=unused-import
from . import admin
from . import channel
from . import general
from . import Hash
from . import weather
# pylint: enable=unused-import
| 21.875
| 32
| 0.737143
| 23
| 175
| 5.608696
| 0.478261
| 0.387597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188571
| 175
| 7
| 33
| 25
| 0.908451
| 0.331429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a37bbc62a7634ce8b19720602b18e06425a6e032
| 411
|
py
|
Python
|
xu/src/python/Request/Model/__init__.py
|
sonnts996/XuCompa-Request
|
f343e7bfd1b4263eb76438c96d347c549cc75ce3
|
[
"Apache-2.0"
] | null | null | null |
xu/src/python/Request/Model/__init__.py
|
sonnts996/XuCompa-Request
|
f343e7bfd1b4263eb76438c96d347c549cc75ce3
|
[
"Apache-2.0"
] | null | null | null |
xu/src/python/Request/Model/__init__.py
|
sonnts996/XuCompa-Request
|
f343e7bfd1b4263eb76438c96d347c549cc75ce3
|
[
"Apache-2.0"
] | null | null | null |
from xu.src.python.Request.Model.APIAnalysis import APIAnalysis
from xu.src.python.Request.Model.APILink import APILink
from xu.src.python.Request.Model.MyFile import MyFile
from xu.src.python.Request.Model.APIConfig import APIConfig
from xu.src.python.Request.Model.APIResponse import APIResponse
from xu.src.python.Request.Model.APISave import APISave
from xu.src.python.Request.Model.APIData import APIData
| 45.666667
| 63
| 0.844282
| 63
| 411
| 5.507937
| 0.222222
| 0.121037
| 0.181556
| 0.302594
| 0.544669
| 0.544669
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07056
| 411
| 8
| 64
| 51.375
| 0.908377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6e997857bc31490a820e7b3003f8aad3a0d956e5
| 2,322
|
py
|
Python
|
train/read_loss.py
|
Hypersonichen/Deep-Flow-Prediction
|
e893da63be2cdd8541cdf0da286d097a721d9686
|
[
"Apache-2.0"
] | null | null | null |
train/read_loss.py
|
Hypersonichen/Deep-Flow-Prediction
|
e893da63be2cdd8541cdf0da286d097a721d9686
|
[
"Apache-2.0"
] | null | null | null |
train/read_loss.py
|
Hypersonichen/Deep-Flow-Prediction
|
e893da63be2cdd8541cdf0da286d097a721d9686
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import math
from matplotlib import pyplot as plt
for index in range(1):
file="./L1.txt"
f=open(file,"r")
lines=f.readlines()
x = []
#z = []
#theta = []
result=[]
drag = []
drag_visc = []
i=0
for xline in lines:
if i>=1:
# tempx=(float(xline.split()[0]))
temp_result=(float(xline.split()[0]))
#temp2_result=(float(xline.split()[4]))
#temp3_result=(float(xline.split()[7]))
#x.append(tempx)
x.append(i)
result.append(temp_result)
# tempz=(float(xline.split()[5]))
# tempr = math.sqrt(tempx*tempx + tempz*tempz)
# temptheta = math.asin(tempz/tempr)*180/math.pi
# if tempx<=0:
# theta.append(temptheta)
# z.append(tempz)
# temp_result=(float(xline.split()[9]))*p_1/pt_f
# result.append(temp_result)
i=i+1
f.close()
plt.figure()
plt.plot(x,result,label="Loss")
#plt.ylim(0.4,0.65)
plt.legend()
for index in range(1):
file="./L1val.txt"
f=open(file,"r")
lines=f.readlines()
x = []
#z = []
#theta = []
result=[]
drag = []
drag_visc = []
i=0
for xline in lines:
if i>=1:
# tempx=(float(xline.split()[0]))
temp_result=(float(xline.split()[0]))
#temp2_result=(float(xline.split()[4]))
#temp3_result=(float(xline.split()[7]))
#x.append(tempx)
x.append(i)
result.append(temp_result)
# tempz=(float(xline.split()[5]))
# tempr = math.sqrt(tempx*tempx + tempz*tempz)
# temptheta = math.asin(tempz/tempr)*180/math.pi
# if tempx<=0:
# theta.append(temptheta)
# z.append(tempz)
# temp_result=(float(xline.split()[9]))*p_1/pt_f
# result.append(temp_result)
i=i+1
f.close()
# plt.figure()
plt.plot(x,result,label="Loss val.")
#plt.ylim(0.4,0.65)
plt.legend()
plt.xlim(-5,550)
plt.ylim(0,0.025)
plt.xlabel(r'Epochs', fontsize=14)
plt.ylabel(r'Loss', fontsize=14)
plt.savefig('Loss_Re1.eps', format='eps', dpi=1200)
#print [x.split(' ')[1] for xline in open(file).readlines()]
plt.show()
| 23.454545
| 62
| 0.518519
| 311
| 2,322
| 3.810289
| 0.250804
| 0.101266
| 0.151899
| 0.141772
| 0.813502
| 0.813502
| 0.779747
| 0.779747
| 0.744304
| 0.744304
| 0
| 0.040123
| 0.302326
| 2,322
| 98
| 63
| 23.693878
| 0.691358
| 0.44832
| 0
| 0.695652
| 0
| 0
| 0.047504
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.065217
| 0
| 0.065217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6eb1530f50028ad3233e4d72226150fc6eb9bcc6
| 179
|
py
|
Python
|
custom/ewsghana/reminders/utils.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2020-05-05T13:10:01.000Z
|
2020-05-05T13:10:01.000Z
|
custom/ewsghana/reminders/utils.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2022-03-12T01:03:25.000Z
|
2022-03-12T01:03:25.000Z
|
custom/ewsghana/reminders/utils.py
|
johan--/commcare-hq
|
86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd
|
[
"BSD-3-Clause"
] | 5
|
2015-11-30T13:12:45.000Z
|
2019-07-01T19:27:07.000Z
|
def user_has_reporting_location(user):
sql_location = user.sql_location
if not sql_location:
return False
return not sql_location.location_type.administrative
| 29.833333
| 56
| 0.77095
| 24
| 179
| 5.416667
| 0.5
| 0.338462
| 0.230769
| 0.353846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184358
| 179
| 5
| 57
| 35.8
| 0.890411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
6edaf5115323077480e06a77c3d846c77fd81d65
| 47
|
py
|
Python
|
web_parsers/extractors/__init__.py
|
invanalabs/web-parser
|
dca9c6354317ec7187f46fd270092372b39f63f8
|
[
"Apache-2.0"
] | 1
|
2019-10-06T23:11:32.000Z
|
2019-10-06T23:11:32.000Z
|
web_parsers/extractors/__init__.py
|
crawlerflow/extraction-engine
|
dca9c6354317ec7187f46fd270092372b39f63f8
|
[
"Apache-2.0"
] | 2
|
2020-03-11T09:33:03.000Z
|
2020-03-18T21:12:28.000Z
|
web_parsers/extractors/__init__.py
|
crawlerflow/extraction-engine
|
dca9c6354317ec7187f46fd270092372b39f63f8
|
[
"Apache-2.0"
] | null | null | null |
from .python import PythonExtractorManifest
| 9.4
| 43
| 0.829787
| 4
| 47
| 9.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 4
| 44
| 11.75
| 0.975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6edd656bf08006916687932ba107a71691b4142f
| 184
|
py
|
Python
|
src/parser/AST/statements/statement.py
|
ARtoriouSs/sanya-script
|
bb421ce0f32f99eb4f157ca91809eab37ced1630
|
[
"WTFPL"
] | 1
|
2020-09-23T21:20:47.000Z
|
2020-09-23T21:20:47.000Z
|
src/parser/AST/statements/statement.py
|
ARtoriouSs/sanya-script
|
bb421ce0f32f99eb4f157ca91809eab37ced1630
|
[
"WTFPL"
] | null | null | null |
src/parser/AST/statements/statement.py
|
ARtoriouSs/sanya-script
|
bb421ce0f32f99eb4f157ca91809eab37ced1630
|
[
"WTFPL"
] | null | null | null |
import re
class Statement:
def kind(self):
return re.sub(r'(?<!^)(?=[A-Z])', '_', self.__class__.__name__).lower()
def set_line(self, line):
self.line = line
| 20.444444
| 79
| 0.570652
| 25
| 184
| 3.8
| 0.64
| 0.168421
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228261
| 184
| 8
| 80
| 23
| 0.669014
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
28209eb44b84ee3777a3145c726e8abaa5ff544c
| 47
|
py
|
Python
|
declare_qtquick/properties/prop_sheet/__init__.py
|
likianta/declare-qtquick
|
93c2ce49d841ccdeb0272085c5f731139927f0d7
|
[
"MIT"
] | 3
|
2021-11-02T03:45:27.000Z
|
2022-03-27T05:33:36.000Z
|
declare_qtquick/properties/prop_sheet/__init__.py
|
likianta/declare-qtquick
|
93c2ce49d841ccdeb0272085c5f731139927f0d7
|
[
"MIT"
] | null | null | null |
declare_qtquick/properties/prop_sheet/__init__.py
|
likianta/declare-qtquick
|
93c2ce49d841ccdeb0272085c5f731139927f0d7
|
[
"MIT"
] | null | null | null |
from .base import PropSheet
from .api import *
| 15.666667
| 27
| 0.765957
| 7
| 47
| 5.142857
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 28
| 23.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2873be6b9e48712a7daaaf96c24f87f780b2234f
| 52
|
py
|
Python
|
calc.py
|
TakeshiKishita/python_unittest
|
f56f47deedaef1d757de305fa3ba3ab1aacd5ae8
|
[
"MIT"
] | null | null | null |
calc.py
|
TakeshiKishita/python_unittest
|
f56f47deedaef1d757de305fa3ba3ab1aacd5ae8
|
[
"MIT"
] | null | null | null |
calc.py
|
TakeshiKishita/python_unittest
|
f56f47deedaef1d757de305fa3ba3ab1aacd5ae8
|
[
"MIT"
] | null | null | null |
import numpy as np
def sum(a, b):
return a + b
| 10.4
| 18
| 0.596154
| 11
| 52
| 2.818182
| 0.818182
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 52
| 5
| 19
| 10.4
| 0.861111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
2876d58897208467a12ed5b722c16ff5afaa4fa5
| 46
|
py
|
Python
|
buzzni/ai/reco/mlserving/webframeworks/falcon/__init__.py
|
BuzzniAILab/mlserving
|
8b8add9dbe5cdd6392e0c87ee789492de0a1c70e
|
[
"MIT"
] | 13
|
2020-08-23T17:35:53.000Z
|
2022-02-10T14:14:03.000Z
|
mlserving/webframeworks/falcon/__init__.py
|
orlevi111/ganesha
|
137cc388806fc98f7768298da01ebeddf03f9464
|
[
"MIT"
] | 3
|
2020-08-20T21:09:01.000Z
|
2021-06-25T15:33:54.000Z
|
mlserving/webframeworks/falcon/__init__.py
|
orlevi111/ganesha
|
137cc388806fc98f7768298da01ebeddf03f9464
|
[
"MIT"
] | 3
|
2021-04-12T01:56:22.000Z
|
2021-10-05T12:50:12.000Z
|
from .falcon_framework import FalconFramework
| 23
| 45
| 0.891304
| 5
| 46
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
953ae8228973a8c300036c2d71d528a116379cb5
| 27,127
|
py
|
Python
|
tests/integration/api_test.py
|
cruz-f/protrend
|
b72c17fa1606b4cf5ca6d60c51737b43ba3fdbc1
|
[
"MIT"
] | null | null | null |
tests/integration/api_test.py
|
cruz-f/protrend
|
b72c17fa1606b4cf5ca6d60c51737b43ba3fdbc1
|
[
"MIT"
] | 1
|
2022-02-11T18:38:39.000Z
|
2022-02-11T18:38:39.000Z
|
tests/integration/api_test.py
|
cruz-f/protrend
|
b72c17fa1606b4cf5ca6d60c51737b43ba3fdbc1
|
[
"MIT"
] | null | null | null |
import unittest
from django.contrib.auth.models import User
from django.test import TestCase
from neomodel import clear_neo4j_database, db
from data import *
import domain.dpi as dpi
from interfaces.api.urls import router
from ..utils_test_db import disable_throttling
class ApiTest(TestCase):
def setUp(self) -> None:
clear_neo4j_database(db)
disable_throttling(router)
test_user = User.objects.get_or_create(username='user_test',
is_superuser=True)[0]
self.client.force_login(test_user)
def test_api_index(self):
"""
Test if all api end-points are available.
"""
response = self.client.get('/api/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 14)
self.assertContains(response, 'interactions')
self.assertNotContains(response, 'promoters')
def test_organism(self):
"""
Test the organism API.
"""
clear_neo4j_database(db)
obj = dict(name='Escherichia coli str. K-12 substr. MG1655',
ncbi_taxonomy=511145,
species='Escherichia coli')
post = self.client.post('/api/organisms/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dpi.get_object(Organism,
fields=['protrend_id', 'name', 'ncbi_taxonomy'],
protrend_id='PRT.ORG.0000001').data[0]
self.assertEqual(post.data['protrend_id'], obj.protrend_id)
self.assertEqual(post.data['name'], obj.name)
self.assertEqual(post.data['ncbi_taxonomy'], obj.ncbi_taxonomy)
get = self.client.get('/api/organisms/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_obj = get.data[0]
self.assertEqual(get_obj['protrend_id'], obj.protrend_id)
get_detail = self.client.get('/api/organisms/PRT.ORG.0000001/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(get_detail.data['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data['name'], obj.name)
self.assertEqual(get_detail.data['ncbi_taxonomy'], obj.ncbi_taxonomy)
self.assertIsNone(get_detail.data['ncbi_assembly'])
def test_regulator(self):
"""
Test the regulator API.
"""
clear_neo4j_database(db)
obj = dict(locus_tag='b0001',
uniprot_accession='P0AD86',
name='thrL',
function='Threonine operon leader',
mechanism='transcription factor')
post = self.client.post('/api/regulators/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dpi.get_object(Regulator,
fields=['protrend_id',
'locus_tag',
'uniprot_accession',
'name',
'mechanism'],
protrend_id='PRT.REG.0000001').data[0]
self.assertEqual(post.data['protrend_id'], obj.protrend_id)
self.assertEqual(post.data['locus_tag'], obj.locus_tag)
self.assertEqual(post.data['uniprot_accession'], obj.uniprot_accession)
self.assertEqual(post.data['name'], obj.name)
self.assertEqual(post.data['mechanism'], obj.mechanism)
get = self.client.get('/api/regulators/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_obj = get.data[0]
self.assertEqual(get_obj['protrend_id'], obj.protrend_id)
get_detail = self.client.get('/api/regulators/PRT.REG.0000001/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(get_detail.data['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data['locus_tag'], obj.locus_tag)
self.assertEqual(get_detail.data['uniprot_accession'], obj.uniprot_accession)
self.assertEqual(get_detail.data['name'], obj.name)
self.assertEqual(get_detail.data['mechanism'], obj.mechanism)
self.assertIsNone(get_detail.data['ncbi_gene'])
def test_gene(self):
"""
Test the gene API.
"""
clear_neo4j_database(db)
obj = dict(locus_tag='b0001',
uniprot_accession='P0AD86',
name='thrL',
function='Threonine operon leader')
post = self.client.post('/api/genes/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(locus_tag='b0002',
uniprot_accession='P00561',
name='thrA',
function='Threonine kinase')
post = self.client.post('/api/genes/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dpi.get_object(Gene,
fields=['protrend_id',
'locus_tag',
'uniprot_accession',
'name'],
protrend_id='PRT.GEN.0000002').data[0]
self.assertEqual(post.data['protrend_id'], obj.protrend_id)
self.assertEqual(post.data['locus_tag'], obj.locus_tag)
self.assertEqual(post.data['uniprot_accession'], obj.uniprot_accession)
self.assertEqual(post.data['name'], obj.name)
get = self.client.get('/api/genes/')
self.assertEqual(get.status_code, 200)
self.assertGreater(len(get.data), 1)
get_detail = self.client.get('/api/genes/PRT.GEN.0000002/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(get_detail.data['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data['locus_tag'], obj.locus_tag)
self.assertEqual(get_detail.data['uniprot_accession'], obj.uniprot_accession)
self.assertEqual(get_detail.data['name'], obj.name)
self.assertIsNone(get_detail.data['ncbi_gene'])
def test_tfbs(self):
"""
Test the tfbs API.
"""
clear_neo4j_database(db)
organism = dict(name='Escherichia coli str. K-12 substr. MG1655',
ncbi_taxonomy=511145,
species='Escherichia coli')
organism_post = self.client.post('/api/organisms/', data=organism)
self.assertEqual(organism_post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
sequence='AAACCATTTTGCGAT',
strand='forward',
start=100100,
stop=100115,
length=15)
post = self.client.post('/api/binding-sites/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dpi.get_object(TFBS,
fields=['protrend_id',
'sequence',
'strand'],
protrend_id='PRT.TBS.0000001').data[0]
self.assertEqual(post.data['protrend_id'], obj.protrend_id)
self.assertEqual(post.data['organism'], organism_post.data['protrend_id'])
self.assertEqual(post.data['sequence'], obj.sequence)
self.assertEqual(post.data['strand'], obj.strand)
get = self.client.get('/api/binding-sites/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_obj = get.data[0]
self.assertEqual(get_obj['protrend_id'], obj.protrend_id)
get_detail = self.client.get('/api/binding-sites/PRT.TBS.0000001/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(get_detail.data['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data['sequence'], obj.sequence)
self.assertEqual(get_detail.data['strand'], obj.strand)
# TODO: some binding sites and interactions DPI operations are still failing
self.assertNotIn(organism_post.data['protrend_id'], get_detail.data['organism'])
self.assertNotIn(organism_post.data['protrend_id'], str(get_detail.data['data_organism']))
def test_effector(self):
"""
Test the effector API.
"""
clear_neo4j_database(db)
obj = dict(name='Threonine')
post = self.client.post('/api/effectors/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dpi.get_object(Effector,
fields=['protrend_id', 'name'],
protrend_id='PRT.EFC.0000001').data[0]
self.assertEqual(post.data['protrend_id'], obj.protrend_id)
self.assertEqual(post.data['name'], obj.name)
get = self.client.get('/api/effectors/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_obj = get.data[0]
self.assertEqual(get_obj['protrend_id'], obj.protrend_id)
get_detail = self.client.get('/api/effectors/PRT.EFC.0000001/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(get_detail.data['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data['name'], obj.name)
self.assertIsNone(get_detail.data['kegg_compounds'])
patch = self.client.patch('/api/effectors/PRT.EFC.0000001/',
data={'name': 'threonine'},
content_type='application/json')
self.assertEqual(patch.status_code, 400)
self.assertEqual(patch.data['code'], 'create or update error')
patch = self.client.patch('/api/effectors/PRT.EFC.0000001/',
data={'kegg_compounds': ['CO00001']},
content_type='application/json')
self.assertEqual(patch.status_code, 200)
get_detail = self.client.get('/api/effectors/PRT.EFC.0000001/')
self.assertEqual(len(get_detail.data['kegg_compounds']), 1)
delete = self.client.delete('/api/effectors/PRT.EFC.0000001/')
self.assertEqual(delete.status_code, 204)
get = self.client.get('/api/effectors/')
self.assertEqual(get.status_code, 204)
self.assertLess(len(get.data[0]), 1)
def test_interaction(self):
"""
Test the operon API.
"""
clear_neo4j_database(db)
organism = dict(name='Escherichia coli str. K-12 substr. MG1655',
ncbi_taxonomy=511145,
species='Escherichia coli')
organism_post = self.client.post('/api/organisms/', data=organism)
self.assertEqual(organism_post.status_code, 201)
regulator = dict(locus_tag='b0001',
uniprot_accession='P0AD86',
name='thrL',
function='Threonine operon leader',
mechanism='transcription factor')
regulator_post = self.client.post('/api/regulators/', data=regulator)
self.assertEqual(regulator_post.status_code, 201)
gene = dict(locus_tag='b0002',
uniprot_accession='P00561',
name='thrA',
function='Threonine kinase')
gene_post = self.client.post('/api/genes/', data=gene)
self.assertEqual(gene_post.status_code, 201)
tfbs = dict(organism='PRT.ORG.0000001',
sequence='AAACCATTTTGCGAT',
strand='forward',
start=100100,
stop=100115,
length=15)
tfbs_post = self.client.post('/api/binding-sites/', data=tfbs)
self.assertEqual(tfbs_post.status_code, 201)
effector = dict(name='Threonine')
effector_post = self.client.post('/api/effectors/', data=effector)
self.assertEqual(effector_post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
regulatory_effect='repression')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
tfbs='PRT.TBS.0000001',
regulatory_effect='dual')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
tfbs='PRT.TBS.0000001',
effector='PRT.EFC.0000001',
regulatory_effect='activation')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dpi.get_object(RegulatoryInteraction,
fields=['protrend_id', 'regulatory_effect'],
protrend_id='PRT.RIN.0000003').data[0]
self.assertEqual(post.data['protrend_id'], obj.protrend_id)
self.assertEqual(post.data['regulatory_effect'], obj.regulatory_effect)
self.assertEqual(post.data['organism'], organism_post.data['protrend_id'])
self.assertEqual(post.data['regulator'], regulator_post.data['protrend_id'])
self.assertEqual(post.data['gene'], gene_post.data['protrend_id'])
self.assertEqual(post.data['tfbs'], tfbs_post.data['protrend_id'])
self.assertEqual(post.data['effector'], effector_post.data['protrend_id'])
get = self.client.get('/api/interactions/')
self.assertEqual(get.status_code, 200)
self.assertGreater(len(get.data), 2)
get_detail = self.client.get('/api/interactions/PRT.RIN.0000003/')
self.assertEqual(get_detail.status_code, 200)
obj = dpi.get_object(RegulatoryInteraction,
fields=['protrend_id', 'regulatory_effect'],
protrend_id='PRT.RIN.0000003').data[0]
self.assertEqual(get_detail.data['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data['regulatory_effect'], obj.regulatory_effect)
self.assertEqual(get_detail.data['organism']['protrend_id'], organism_post.data['protrend_id'])
self.assertEqual(get_detail.data['organism']['name'], organism_post.data['name'])
self.assertEqual(get_detail.data['regulator']['protrend_id'], regulator_post.data['protrend_id'])
self.assertEqual(get_detail.data['regulator']['locus_tag'], regulator_post.data['locus_tag'])
self.assertEqual(get_detail.data['gene']['protrend_id'], gene_post.data['protrend_id'])
self.assertEqual(get_detail.data['gene']['locus_tag'], gene_post.data['locus_tag'])
self.assertEqual(get_detail.data['tfbs']['protrend_id'], tfbs_post.data['protrend_id'])
self.assertEqual(get_detail.data['tfbs']['sequence'], tfbs_post.data['sequence'])
self.assertEqual(get_detail.data['effector']['protrend_id'], effector_post.data['protrend_id'])
self.assertEqual(get_detail.data['effector']['name'], effector_post.data['name'])
self.assertIn(organism_post.data['protrend_id'], str(get_detail.data['data_organism'][0]))
self.assertIn(regulator_post.data['protrend_id'], str(get_detail.data['data_regulator'][0]))
self.assertIn(gene_post.data['protrend_id'], str(get_detail.data['data_gene'][0]))
self.assertIn(tfbs_post.data['protrend_id'], str(get_detail.data['data_tfbs'][0]))
self.assertIn(effector_post.data['protrend_id'], str(get_detail.data['data_effector'][0]))
organism_get = self.client.get('/api/organisms/PRT.ORG.0000001/')
regulator_get = self.client.get('/api/regulators/PRT.REG.0000001/')
gene_get = self.client.get('/api/genes/PRT.GEN.0000001/')
self.assertIn(organism_get.data['protrend_id'], str(regulator_get.data['organism'][0]))
self.assertIn(regulator_get.data['protrend_id'], str(organism_get.data['regulator'][0]))
self.assertIn(regulator_get.data['protrend_id'], str(gene_get.data['regulator'][0]))
self.assertIn(gene_get.data['protrend_id'], str(regulator_get.data['gene'][0]))
source = dpi.create_objects(Source, (dict(name='curation', type='curation'),))[0]
dpi.create_unique_reverse_relationship(source=source, forward_rel='regulatory_interaction',
backward_rel='data_source', target=obj,
url='https://protrend.bio.di.uminho.pt')
evidence = dict(name='RNA-seq')
self.client.post('/api/evidences/', data=evidence)
evidence = Evidence.nodes.get(protrend_id='PRT.EVI.0000001')
dpi.create_unique_reverse_relationship(source=evidence, forward_rel='regulatory_interaction',
backward_rel='evidence', target=obj)
publication = dict(pmid=1005053)
self.client.post('/api/publications/', data=publication)
publication = Publication.nodes.get(protrend_id='PRT.PUB.0000001')
dpi.create_unique_reverse_relationship(source=publication, forward_rel='regulatory_interaction',
backward_rel='publication', target=obj)
get_detail = self.client.get('/api/interactions/PRT.RIN.0000003/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(source.name, get_detail.data['data_source'][0]['name'])
self.assertEqual('https://protrend.bio.di.uminho.pt', get_detail.data['data_source'][0]['url'])
self.assertIn(evidence.protrend_id, str(get_detail.data['evidence'][0]))
self.assertIn(publication.protrend_id, str(get_detail.data['publication'][0]))
delete = self.client.delete('/api/interactions/PRT.RIN.0000003/')
self.assertEqual(delete.status_code, 204)
get = self.client.get('/api/interactions/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 2)
# TODO: These relationships are maintained as there are more interactions.
# But if there are no other interactions, these relationships should be deleted as well.
organism_get = self.client.get('/api/organisms/PRT.ORG.0000001/')
regulator_get = self.client.get('/api/regulators/PRT.REG.0000001/')
gene_get = self.client.get('/api/genes/PRT.GEN.0000001/')
self.assertIn(organism_get.data['protrend_id'], str(regulator_get.data['organism'][0]))
self.assertIn(regulator_get.data['protrend_id'], str(organism_get.data['regulator'][0]))
self.assertIn(regulator_get.data['protrend_id'], str(gene_get.data['regulator'][0]))
self.assertIn(gene_get.data['protrend_id'], str(regulator_get.data['gene'][0]))
def test_trn(self):
"""
Test the trn API.
"""
clear_neo4j_database(db)
organism = dict(name='Escherichia coli str. K-12 substr. MG1655',
ncbi_taxonomy=511145,
species='Escherichia coli')
organism_post = self.client.post('/api/organisms/', data=organism)
self.assertEqual(organism_post.status_code, 201)
regulator = dict(locus_tag='b0001',
uniprot_accession='P0AD86',
name='thrL',
function='Threonine operon leader',
mechanism='transcription factor')
regulator_post = self.client.post('/api/regulators/', data=regulator)
self.assertEqual(regulator_post.status_code, 201)
gene = dict(locus_tag='b0002',
uniprot_accession='P00561',
name='thrA',
function='Threonine kinase')
gene_post = self.client.post('/api/genes/', data=gene)
self.assertEqual(gene_post.status_code, 201)
tfbs = dict(organism='PRT.ORG.0000001',
sequence='AAACCATTTTGCGAT',
strand='forward',
start=100100,
stop=100115,
length=15)
tfbs_post = self.client.post('/api/binding-sites/', data=tfbs)
self.assertEqual(tfbs_post.status_code, 201)
effector = dict(name='Threonine')
effector_post = self.client.post('/api/effectors/', data=effector)
self.assertEqual(effector_post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
regulatory_effect='repression')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
tfbs='PRT.TBS.0000001',
regulatory_effect='dual')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
tfbs='PRT.TBS.0000001',
effector='PRT.EFC.0000001',
regulatory_effect='activation')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
get = self.client.get('/api/trns/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_detail = self.client.get('/api/trns/PRT.ORG.0000001/')
self.assertEqual(get_detail.status_code, 200)
def test_organism_binding_sites(self):
"""
Test the organisms-binding-sites API.
"""
clear_neo4j_database(db)
organism = dict(name='Escherichia coli str. K-12 substr. MG1655',
ncbi_taxonomy=511145,
species='Escherichia coli')
organism_post = self.client.post('/api/organisms/', data=organism)
self.assertEqual(organism_post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
sequence='AAACCATTTTGCGAT',
strand='forward',
start=100100,
stop=100115,
length=15)
post = self.client.post('/api/binding-sites/', data=obj)
self.assertEqual(post.status_code, 201)
get = self.client.get('/api/organisms-binding-sites/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_detail = self.client.get('/api/organisms-binding-sites/PRT.ORG.0000001/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(len(get_detail.data), 1)
queryset = dpi.get_object(TFBS, fields=['protrend_id', 'sequence'], protrend_id='PRT.TBS.0000001')
obj = queryset.data[0]
self.assertEqual(get_detail.data[0]['protrend_id'], obj.protrend_id)
self.assertEqual(get_detail.data[0]['sequence'], obj.sequence)
def test_regulator_binding_sites(self):
"""
Test the regulators-binding-sites API.
"""
clear_neo4j_database(db)
organism = dict(name='Escherichia coli str. K-12 substr. MG1655',
ncbi_taxonomy=511145,
species='Escherichia coli')
organism_post = self.client.post('/api/organisms/', data=organism)
self.assertEqual(organism_post.status_code, 201)
regulator = dict(locus_tag='b0001',
uniprot_accession='P0AD86',
name='thrL',
function='Threonine operon leader',
mechanism='transcription factor')
regulator_post = self.client.post('/api/regulators/', data=regulator)
self.assertEqual(regulator_post.status_code, 201)
gene = dict(locus_tag='b0002',
uniprot_accession='P00561',
name='thrA',
function='Threonine kinase')
gene_post = self.client.post('/api/genes/', data=gene)
self.assertEqual(gene_post.status_code, 201)
tfbs = dict(organism='PRT.ORG.0000001',
sequence='AAACCATTTTGCGAT',
strand='forward',
start=100100,
stop=100115,
length=15)
tfbs_post = self.client.post('/api/binding-sites/', data=tfbs)
self.assertEqual(tfbs_post.status_code, 201)
effector = dict(name='Threonine')
effector_post = self.client.post('/api/effectors/', data=effector)
self.assertEqual(effector_post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
regulatory_effect='repression')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
tfbs='PRT.TBS.0000001',
regulatory_effect='dual')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
obj = dict(organism='PRT.ORG.0000001',
regulator='PRT.REG.0000001',
gene='PRT.GEN.0000001',
tfbs='PRT.TBS.0000001',
effector='PRT.EFC.0000001',
regulatory_effect='activation')
post = self.client.post('/api/interactions/', data=obj)
self.assertEqual(post.status_code, 201)
get = self.client.get('/api/regulators-binding-sites/')
self.assertEqual(get.status_code, 200)
self.assertEqual(len(get.data), 1)
get_detail = self.client.get('/api/regulators-binding-sites/PRT.REG.0000001/')
self.assertEqual(get_detail.status_code, 200)
self.assertEqual(len(get_detail.data), 1)
queryset = dpi.get_object(Regulator, fields=['protrend_id', 'locus_tag'], protrend_id='PRT.REG.0000001')
obj_reg = queryset.data[0]
queryset = dpi.get_object(TFBS, fields=['protrend_id', 'sequence'], protrend_id='PRT.TBS.0000001')
tfbs_reg = queryset.data[0]
self.assertEqual(get_detail.data[0]['regulator']['protrend_id'], obj_reg.protrend_id)
self.assertEqual(get_detail.data[0]['regulator']['locus_tag'], obj_reg.locus_tag)
self.assertEqual(get_detail.data[0]['tfbs']['protrend_id'], tfbs_reg.protrend_id)
self.assertEqual(get_detail.data[0]['tfbs']['sequence'], tfbs_reg.sequence)
if __name__ == '__main__':
unittest.main()
| 45.438861
| 112
| 0.601062
| 3,050
| 27,127
| 5.18459
| 0.068197
| 0.130905
| 0.068298
| 0.068298
| 0.862012
| 0.844748
| 0.793018
| 0.76317
| 0.739139
| 0.682856
| 0
| 0.049513
| 0.26516
| 27,127
| 596
| 113
| 45.515101
| 0.743754
| 0.018506
| 0
| 0.689805
| 0
| 0
| 0.190967
| 0.030464
| 0
| 0
| 0
| 0.003356
| 0.355748
| 1
| 0.023861
| false
| 0
| 0.017354
| 0
| 0.043384
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9547fec456068c032ec26891ac317db4a533d17a
| 14,708
|
py
|
Python
|
envi/tests/middleware/test_base.py
|
TeaPow/django-envi
|
77f241fd511af510c181cd6b3941447a2065c05c
|
[
"MIT"
] | 1
|
2018-11-20T23:16:22.000Z
|
2018-11-20T23:16:22.000Z
|
envi/tests/middleware/test_base.py
|
teapow/django-envi
|
77f241fd511af510c181cd6b3941447a2065c05c
|
[
"MIT"
] | 6
|
2018-01-23T09:43:40.000Z
|
2019-06-18T07:06:02.000Z
|
envi/tests/middleware/test_base.py
|
TeaPow/django-envi
|
77f241fd511af510c181cd6b3941447a2065c05c
|
[
"MIT"
] | 2
|
2017-12-22T14:16:36.000Z
|
2018-11-20T23:16:22.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from mock import MagicMock
from .base import BaseMiddlewareTestCase
from ... import constants
from ...conf import ENVIRONMENT
from ...middleware.base import EnviBaseMiddleware, EnviBaseTemplateMiddleware
KEY_ERROR_FORMAT = "Key: {k} not present in current environment"
class EnviBaseMiddlewareTestCase(BaseMiddlewareTestCase):
"""Tests for EnviBaseMiddleware."""
middleware_class = EnviBaseMiddleware
def test_init(self):
# Tests that __init__ pulls the environment from settings.
middleware = self.middleware_class()
self.assertEqual(middleware.environment, ENVIRONMENT)
def test_init_override(self):
# Tests that you can override the environment through __init__.
environment = {
constants.ENVI_KEY_SHOW_IN_SITE: True,
constants.ENVI_KEY_SHOW_IN_ADMIN: True,
constants.ENVI_KEY_CONTEXT: {},
}
middleware = self.middleware_class(environment=environment)
self.assertEqual(middleware.environment, environment)
def test_required_keys(self):
# Tests that the required keys are present.
environment = {
constants.ENVI_KEY_SHOW_IN_SITE: True,
constants.ENVI_KEY_SHOW_IN_ADMIN: True,
}
# Should not raise an exception when run.
_ = self.middleware_class(environment=environment)
def test_required_keys_exception_a(self):
# Tests that a KeyError is raised if a required key is not present.
# constants.ENVI_KEY_SHOW_IN_ADMIN missing.
environment = {constants.ENVI_KEY_SHOW_IN_SITE: True}
with self.assertRaisesMessage(KeyError, KEY_ERROR_FORMAT.format(k=constants.ENVI_KEY_SHOW_IN_ADMIN)):
_ = self.middleware_class(environment=environment)
def test_required_keys_exception_b(self):
# Tests that a KeyError is raised if a required key is not present.
# constants.ENVI_KEY_SHOW_IN_SITE missing.
environment = {constants.ENVI_KEY_SHOW_IN_ADMIN: True}
with self.assertRaisesMessage(KeyError, KEY_ERROR_FORMAT.format(k=constants.ENVI_KEY_SHOW_IN_SITE)):
_ = self.middleware_class(environment=environment)
def test_update_response_called(self):
# Test that update_response() is called if response_needs_updating
# returns True.
middleware = self.middleware_class()
middleware.response_needs_updating = MagicMock(return_value=True)
middleware.update_response = MagicMock()
request = MagicMock()
response = MagicMock()
middleware.process_response(request, response)
middleware.update_response.assert_called()
def test_update_response_not_called(self):
# Test that update_response() is not called if response_needs_updating
# returns False.
middleware = self.middleware_class()
middleware.response_needs_updating = MagicMock(return_value=False)
middleware.update_response = MagicMock()
request = MagicMock()
response = MagicMock()
middleware.process_response(request, response)
middleware.update_response.assert_not_called()
def test_update_response_raises_exception(self):
# Test that update_response() needs to be overridden in subclasses.
middleware = self.middleware_class()
middleware.response_needs_updating = MagicMock(return_value=True)
request = MagicMock()
response = MagicMock()
with self.assertRaises(NotImplementedError):
middleware.process_response(request, response)
def test_response_needs_updating_returns_false_for_ajax(self):
# Tests that response_needs_updating() returns False if the request
# is ajax.
middleware = self.middleware_class()
request = MagicMock()
request.is_ajax = MagicMock(return_value=True)
response_dict = {
"Content-Encoding": "",
"Content-Type": "text/html",
}
response = MagicMock(streaming=False)
response.__getitem__.side_effect = lambda k: response_dict[k]
response.get.side_effect = lambda k, d=None: response_dict.get(k, d)
self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_false_for_streaming(self):
# Tests that response_needs_updating() returns False if the response
# is streaming.
middleware = self.middleware_class()
request = MagicMock()
request.is_ajax = MagicMock(return_value=False)
response_dict = {
"Content-Encoding": "",
"Content-Type": "text/html",
}
response = MagicMock(streaming=True)
response.__getitem__.side_effect = lambda k: response_dict[k]
response.get.side_effect = lambda k, d=None: response_dict.get(k, d)
self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_false_for_gzip(self):
# Tests that response_needs_updating() returns False if the response
# is streaming.
middleware = self.middleware_class()
request = MagicMock()
request.is_ajax = MagicMock(return_value=False)
response_dict = {
"Content-Encoding": "gzip",
"Content-Type": "text/html",
}
response = MagicMock(streaming=False)
response.__getitem__.side_effect = lambda k: response_dict[k]
response.get.side_effect = lambda k, d=None: response_dict.get(k, d)
self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_false_for_content_type(self):
    """response_needs_updating() returns False for an unsupported
    Content-Type (anything other than text/html or
    application/xhtml+xml).
    """
    middleware = self.middleware_class()
    request = MagicMock()
    request.is_ajax = MagicMock(return_value=False)
    headers = {"Content-Encoding": "", "Content-Type": ""}
    response = MagicMock(streaming=False)
    response.__getitem__.side_effect = headers.__getitem__
    response.get.side_effect = lambda k, d=None: headers.get(k, d)
    self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_false_if_cant_infer_admin(self):
    """response_needs_updating() returns False when the request path
    cannot be resolved.
    """
    middleware = self.middleware_class()
    # Bare mock: request.path is an auto-created MagicMock, not a real
    # URL, so path resolution fails inside the middleware.
    request = MagicMock()
    request.is_ajax = MagicMock(return_value=False)
    headers = {"Content-Encoding": "", "Content-Type": "text/html"}
    response = MagicMock(streaming=False)
    response.__getitem__.side_effect = headers.__getitem__
    response.get.side_effect = lambda k, d=None: headers.get(k, d)
    self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_false_is_admin(self):
    """response_needs_updating() returns False for an admin page when
    constants.ENVI_KEY_SHOW_IN_ADMIN is False.
    """
    env = {
        constants.ENVI_KEY_SHOW_IN_ADMIN: False,
        constants.ENVI_KEY_SHOW_IN_SITE: True,
    }
    middleware = self.middleware_class(environment=env)
    request = MagicMock()
    request.path = "/admin/"
    request.is_ajax = MagicMock(return_value=False)
    headers = {"Content-Encoding": "", "Content-Type": "text/html"}
    response = MagicMock(streaming=False)
    response.__getitem__.side_effect = headers.__getitem__
    response.get.side_effect = lambda k, d=None: headers.get(k, d)
    self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_false_is_site(self):
    """response_needs_updating() returns False for a site page when
    constants.ENVI_KEY_SHOW_IN_SITE is False.
    """
    env = {
        constants.ENVI_KEY_SHOW_IN_ADMIN: True,
        constants.ENVI_KEY_SHOW_IN_SITE: False,
    }
    middleware = self.middleware_class(environment=env)
    request = MagicMock()
    request.path = "/"
    request.is_ajax = MagicMock(return_value=False)
    headers = {"Content-Encoding": "", "Content-Type": "text/html"}
    response = MagicMock(streaming=False)
    response.__getitem__.side_effect = headers.__getitem__
    response.get.side_effect = lambda k, d=None: headers.get(k, d)
    self.assertFalse(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_true_is_admin(self):
    """response_needs_updating() returns True for an admin page when
    constants.ENVI_KEY_SHOW_IN_ADMIN is also True.
    """
    env = {
        constants.ENVI_KEY_SHOW_IN_ADMIN: True,
        constants.ENVI_KEY_SHOW_IN_SITE: False,
    }
    middleware = self.middleware_class(environment=env)
    request = MagicMock()
    request.path = "/admin/"
    request.is_ajax = MagicMock(return_value=False)
    headers = {"Content-Encoding": "", "Content-Type": "text/html"}
    response = MagicMock(streaming=False)
    response.__getitem__.side_effect = headers.__getitem__
    response.get.side_effect = lambda k, d=None: headers.get(k, d)
    self.assertTrue(middleware.response_needs_updating(request, response))
def test_response_needs_updating_returns_true_is_site(self):
    """response_needs_updating() returns True for a site page when
    constants.ENVI_KEY_SHOW_IN_SITE is also True.

    (The original comment said "admin page" — a copy-paste leftover.)
    """
    env = {
        constants.ENVI_KEY_SHOW_IN_ADMIN: False,
        constants.ENVI_KEY_SHOW_IN_SITE: True,
    }
    middleware = self.middleware_class(environment=env)
    request = MagicMock()
    request.path = "/"
    request.is_ajax = MagicMock(return_value=False)
    headers = {"Content-Encoding": "", "Content-Type": "text/html"}
    response = MagicMock(streaming=False)
    response.__getitem__.side_effect = headers.__getitem__
    response.get.side_effect = lambda k, d=None: headers.get(k, d)
    self.assertTrue(middleware.response_needs_updating(request, response))
class EnviBaseTemplateMiddlewareTestCase(BaseMiddlewareTestCase):
    """Tests for EnviBaseTemplateMiddleware."""

    middleware_class = EnviBaseTemplateMiddleware

    def test_required_keys(self):
        """Construction succeeds when all required keys are supplied."""
        env = {
            constants.ENVI_KEY_SHOW_IN_SITE: True,
            constants.ENVI_KEY_SHOW_IN_ADMIN: True,
            constants.ENVI_KEY_CONTEXT: {},
        }
        # Must not raise.
        self.middleware_class(environment=env)

    def test_required_keys_exception_a(self):
        """KeyError is raised when constants.ENVI_KEY_CONTEXT is absent."""
        env = {
            constants.ENVI_KEY_SHOW_IN_SITE: True,
            constants.ENVI_KEY_SHOW_IN_ADMIN: True,
        }
        message = KEY_ERROR_FORMAT.format(k=constants.ENVI_KEY_CONTEXT)
        with self.assertRaisesMessage(KeyError, message):
            self.middleware_class(environment=env)

    def test_required_keys_exception_b(self):
        """KeyError is raised when constants.ENVI_KEY_SHOW_IN_ADMIN is absent."""
        env = {
            constants.ENVI_KEY_SHOW_IN_SITE: True,
            constants.ENVI_KEY_CONTEXT: {},
        }
        message = KEY_ERROR_FORMAT.format(k=constants.ENVI_KEY_SHOW_IN_ADMIN)
        with self.assertRaisesMessage(KeyError, message):
            self.middleware_class(environment=env)

    def test_required_keys_exception_c(self):
        """KeyError is raised when constants.ENVI_KEY_SHOW_IN_SITE is absent."""
        env = {
            constants.ENVI_KEY_SHOW_IN_ADMIN: True,
            constants.ENVI_KEY_CONTEXT: {},
        }
        message = KEY_ERROR_FORMAT.format(k=constants.ENVI_KEY_SHOW_IN_SITE)
        with self.assertRaisesMessage(KeyError, message):
            self.middleware_class(environment=env)

    def test_get_context_data(self):
        """get_context_data() surfaces the environment's
        constants.ENVI_KEY_CONTEXT mapping under the template accessor key.
        """
        env = {
            constants.ENVI_KEY_SHOW_IN_ADMIN: True,
            constants.ENVI_KEY_SHOW_IN_SITE: True,
            constants.ENVI_KEY_CONTEXT: {"hello": "world"},
        }
        middleware = self.middleware_class(environment=env)
        context = middleware.get_context_data()
        self.assertEqual(
            context[constants.ENVI_TEMPLATE_CONTEXT_ACCESSOR],
            {"hello": "world"},
        )

    def test_update_response_returns_unmodified_response(self):
        """Content without a closing </head> tag is passed through untouched."""
        markup = "<html></html>"
        response = MagicMock(content=markup)
        updated = self.middleware_class().update_response(response)
        self.assertEqual(response, updated)
        self.assertEqual(response.content, updated.content)

    def test_update_response_modifies_response_custom_head_html(self):
        """Head HTML is injected when the content has a closing </head> tag."""
        markup = "<html><head></head></html>"
        head_html = "<test></test>"
        middleware = self.middleware_class()
        middleware.get_head_html = MagicMock(return_value=head_html)
        response = MagicMock(content=markup)
        updated = middleware.update_response(response)
        self.assertEqual(response, updated)
        # Injection grows the content by exactly the injected snippet's length.
        self.assertEqual(len(response.content), len(markup) + len(head_html))
| 39.326203
| 109
| 0.678882
| 1,671
| 14,708
| 5.669659
| 0.08857
| 0.031032
| 0.065864
| 0.046654
| 0.836605
| 0.815073
| 0.796496
| 0.773169
| 0.751319
| 0.74847
| 0
| 0.000089
| 0.238306
| 14,708
| 373
| 110
| 39.431635
| 0.845577
| 0.168888
| 0
| 0.695473
| 0
| 0
| 0.037737
| 0.002138
| 0
| 0
| 0
| 0
| 0.098765
| 1
| 0.098765
| false
| 0
| 0.024691
| 0
| 0.139918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
955e310a15f067c907496423b142c6fc53ef1979
| 203
|
py
|
Python
|
tccli/services/cam/__init__.py
|
hapsyou/tencentcloud-cli-intl-en
|
fa8ba71164484f9a2be4b983080a1de08606c0b0
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/cam/__init__.py
|
hapsyou/tencentcloud-cli-intl-en
|
fa8ba71164484f9a2be4b983080a1de08606c0b0
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/cam/__init__.py
|
hapsyou/tencentcloud-cli-intl-en
|
fa8ba71164484f9a2be4b983080a1de08606c0b0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from tccli.services.cam.cam_client import register_arg
from tccli.services.cam.cam_client import get_actions_info
from tccli.services.cam.cam_client import AVAILABLE_VERSION_LIST
| 40.6
| 64
| 0.827586
| 32
| 203
| 5
| 0.53125
| 0.16875
| 0.31875
| 0.375
| 0.65625
| 0.65625
| 0.65625
| 0
| 0
| 0
| 0
| 0.005376
| 0.083744
| 203
| 4
| 65
| 50.75
| 0.854839
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
95e4c7f8a161f752da1e35bc0731078e567bf475
| 1,062
|
py
|
Python
|
tests/logic/test_token.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 5
|
2020-02-13T15:25:37.000Z
|
2021-05-06T21:05:14.000Z
|
tests/logic/test_token.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 28
|
2019-11-12T14:14:08.000Z
|
2022-03-11T16:29:27.000Z
|
tests/logic/test_token.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 8
|
2019-12-10T15:46:02.000Z
|
2021-11-02T12:24:52.000Z
|
import sampledb
import sampledb.models
def test_generate_token(app):
    """Round-trip security tokens through generate_token()/verify_token().

    Verification must fail for a mismatched salt and must return the
    original payload for a matching salt.

    NOTE(review): the original assertions compared against literals with
    ``is not``, which tests object identity rather than equality and so
    always passed regardless of the verification result; they are
    rewritten here as equality checks matching the comments' intent.
    """
    secret = app.config['SECRET_KEY']

    # Wrong salt: the payload must not verify.
    token = sampledb.logic.security_tokens.generate_token(
        ['example@example.com'], 'invitation', secret)
    result = sampledb.logic.security_tokens.verify_token(
        token, 'add_login', secret)
    assert result != ['example@example.com']

    # Matching salt: the payload round-trips.
    token = sampledb.logic.security_tokens.generate_token(
        ['example@example.com'], 'invitation', secret)
    result = sampledb.logic.security_tokens.verify_token(
        token, 'invitation', secret)
    assert result[0] == 'example@example.com'

    # Multi-element payload round-trips as well.
    token = sampledb.logic.security_tokens.generate_token(
        ['xxx@example.com', 3], 'add_login', secret)
    result = sampledb.logic.security_tokens.verify_token(
        token, 'add_login', secret)
    assert result[0] == 'xxx@example.com' and result[1] == 3
| 37.928571
| 118
| 0.660075
| 133
| 1,062
| 5.097744
| 0.240602
| 0.115044
| 0.185841
| 0.238938
| 0.818584
| 0.758112
| 0.699115
| 0.699115
| 0.699115
| 0.699115
| 0
| 0.005995
| 0.214689
| 1,062
| 27
| 119
| 39.333333
| 0.806954
| 0.02919
| 0
| 0.285714
| 1
| 0
| 0.217137
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
251ecc568d58fcc6867797d37f39d0dbd352a75a
| 144
|
py
|
Python
|
CybORG/CybORG/Shared/Actions/ShellActionsFolder/ShellPrivilegeEscalationFolder/__init__.py
|
rafvasq/cage-challenge-1
|
95affdfa38afc1124f1a1a09c92fbc0ed5b96318
|
[
"MIT"
] | 18
|
2021-08-20T15:07:55.000Z
|
2022-03-11T12:05:15.000Z
|
CybORG/CybORG/Shared/Actions/ShellActionsFolder/ShellPrivilegeEscalationFolder/__init__.py
|
rafvasq/cage-challenge-1
|
95affdfa38afc1124f1a1a09c92fbc0ed5b96318
|
[
"MIT"
] | 7
|
2021-11-09T06:46:58.000Z
|
2022-03-31T12:35:06.000Z
|
CybORG/CybORG/Shared/Actions/ShellActionsFolder/ShellPrivilegeEscalationFolder/__init__.py
|
rafvasq/cage-challenge-1
|
95affdfa38afc1124f1a1a09c92fbc0ed5b96318
|
[
"MIT"
] | 13
|
2021-08-17T00:26:31.000Z
|
2022-03-29T20:06:45.000Z
|
from .LinuxKernelPrivilegeEscalation import LinuxKernelPrivilegeEscalation
from .DirtyCowPrivilegeEscalation import DirtyCowPrivilegeEscalation
| 48
| 74
| 0.930556
| 8
| 144
| 16.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 144
| 2
| 75
| 72
| 0.985294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
25339e9c382060a2b48f1465faa8e2cd0dedda64
| 2,807
|
py
|
Python
|
epytope/Data/pssms/smmpmbec/mat/A_29_02_11.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 7
|
2021-02-01T18:11:28.000Z
|
2022-01-31T19:14:07.000Z
|
epytope/Data/pssms/smmpmbec/mat/A_29_02_11.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 22
|
2021-01-02T15:25:23.000Z
|
2022-03-14T11:32:53.000Z
|
epytope/Data/pssms/smmpmbec/mat/A_29_02_11.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 4
|
2021-05-28T08:50:38.000Z
|
2022-03-14T11:45:32.000Z
|
# Position-specific scoring matrix: keys 0-10 map each peptide position to a
# {one-letter amino acid: score} dict; the -1 entry holds a model constant
# under 'con'. Presumably the SMM-PMBEC 11-mer matrix for HLA-A*29:02 —
# inferred from the file path in the surrounding metadata, TODO confirm.
A_29_02_11 = {0: {'A': 0.026, 'C': -0.126, 'E': 0.051, 'D': 0.039, 'G': -0.098, 'F': -0.404, 'I': 0.043, 'H': 0.092, 'K': 0.193, 'M': -0.09, 'L': -0.078, 'N': -0.041, 'Q': 0.219, 'P': 0.219, 'S': 0.081, 'R': 0.25, 'T': 0.097, 'W': -0.184, 'V': 0.021, 'Y': -0.311}, 1: {'A': -0.478, 'C': -0.057, 'E': 0.141, 'D': 0.149, 'G': -0.109, 'F': -0.244, 'I': -0.48, 'H': 0.269, 'K': 0.237, 'M': -0.016, 'L': -0.197, 'N': 0.138, 'Q': 0.349, 'P': 0.175, 'S': -0.197, 'R': 0.615, 'T': -0.134, 'W': 0.113, 'V': -0.398, 'Y': 0.127}, 2: {'A': 0.051, 'C': -0.011, 'E': 0.03, 'D': 0.05, 'G': -0.002, 'F': -0.118, 'I': 0.023, 'H': -0.044, 'K': -0.006, 'M': -0.035, 'L': -0.026, 'N': 0.009, 'Q': 0.049, 'P': 0.105, 'S': 0.02, 'R': -0.074, 'T': 0.048, 'W': -0.026, 'V': 0.045, 'Y': -0.089}, 3: {'A': -0.056, 'C': 0.133, 'E': 0.154, 'D': -0.138, 'G': -0.111, 'F': -0.024, 'I': -0.147, 'H': 0.22, 'K': -0.049, 'M': 0.129, 'L': 0.068, 'N': 0.115, 'Q': 0.118, 'P': -0.599, 'S': 0.192, 'R': 0.116, 'T': 0.086, 'W': 0.04, 'V': -0.245, 'Y': -0.003}, 4: {'A': 0.128, 'C': 0.048, 'E': 0.106, 'D': 0.126, 'G': 0.022, 'F': -0.216, 'I': -0.126, 'H': -0.06, 'K': -0.087, 'M': -0.144, 'L': -0.18, 'N': 0.048, 'Q': 0.077, 'P': 0.287, 'S': 0.075, 'R': -0.097, 'T': 0.101, 'W': -0.035, 'V': 0.034, 'Y': -0.105}, 5: {'A': -0.084, 'C': -0.026, 'E': -0.038, 'D': -0.009, 'G': -0.014, 'F': -0.028, 'I': -0.078, 'H': 0.054, 'K': 0.076, 'M': -0.013, 'L': -0.018, 'N': 0.016, 'Q': 0.023, 'P': 0.08, 'S': -0.05, 'R': 0.157, 'T': -0.054, 'W': 0.013, 'V': -0.052, 'Y': 0.044}, 6: {'A': -0.043, 'C': -0.039, 'E': 0.006, 'D': 0.0, 'G': -0.004, 'F': -0.054, 'I': -0.009, 'H': 0.02, 'K': 0.045, 'M': 0.032, 'L': 0.036, 'N': 0.036, 'Q': 0.059, 'P': -0.029, 'S': 0.053, 'R': 0.023, 'T': 0.009, 'W': -0.037, 'V': -0.068, 'Y': -0.036}, 7: {'A': 0.135, 'C': 0.013, 'E': 0.109, 'D': 0.048, 'G': 0.05, 'F': -0.018, 'I': -0.117, 'H': -0.248, 'K': -0.036, 'M': -0.007, 'L': 0.052, 'N': -0.009, 'Q': 0.049, 'P': 0.013, 'S': 0.068, 'R': -0.068, 'T': 0.006, 'W': -0.016, 'V': 0.011, 'Y': -0.034}, 8: {'A': 0.015, 'C': -0.005, 'E': 0.028, 'D': 0.023, 'G': 0.048, 'F': -0.116, 'I': -0.101, 'H': 0.022, 'K': 0.0, 'M': -0.036, 'L': -0.079, 'N': 0.051, 'Q': 0.07, 'P': 0.003, 'S': 0.086, 'R': 0.037, 'T': 0.061, 'W': -0.051, 'V': -0.032, 'Y': -0.026}, 9: {'A': -0.128, 'C': -0.03, 'E': -0.189, 'D': -0.015, 'G': 0.061, 'F': -0.105, 'I': -0.081, 'H': 0.097, 'K': 0.124, 'M': -0.053, 'L': -0.158, 'N': 0.146, 'Q': 0.088, 'P': 0.139, 'S': 0.025, 'R': 0.276, 'T': -0.03, 'W': 0.033, 'V': -0.109, 'Y': -0.091}, 10: {'A': 0.108, 'C': 0.028, 'E': -0.008, 'D': 0.01, 'G': 0.03, 'F': -0.615, 'I': 0.054, 'H': -0.069, 'K': 0.475, 'M': 0.244, 'L': 0.22, 'N': 0.113, 'Q': 0.387, 'P': 0.403, 'S': 0.34, 'R': 0.449, 'T': -0.207, 'W': 0.007, 'V': -0.134, 'Y': -1.836}, -1: {'con': 4.36087}}
| 2,807
| 2,807
| 0.394371
| 679
| 2,807
| 1.62592
| 0.247423
| 0.019928
| 0.009058
| 0.01087
| 0.03442
| 0.021739
| 0.021739
| 0.021739
| 0
| 0
| 0
| 0.37415
| 0.162095
| 2,807
| 1
| 2,807
| 2,807
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0.079416
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2568b5e1e7f625fd3fabd5ae182198507592f981
| 34
|
py
|
Python
|
preprocess/utils/__init__.py
|
ishine/EmotionControllableTextToSpeech
|
5dcf8afe6a0c1b8d612d6f1d8de315cf419fe594
|
[
"MIT"
] | 12
|
2021-07-10T05:18:31.000Z
|
2022-03-22T01:04:41.000Z
|
preprocess/utils/__init__.py
|
ishine/EmotionControllableTextToSpeech
|
5dcf8afe6a0c1b8d612d6f1d8de315cf419fe594
|
[
"MIT"
] | null | null | null |
preprocess/utils/__init__.py
|
ishine/EmotionControllableTextToSpeech
|
5dcf8afe6a0c1b8d612d6f1d8de315cf419fe594
|
[
"MIT"
] | 3
|
2021-06-12T05:34:41.000Z
|
2022-03-15T06:44:55.000Z
|
from utils.utils import * # NOQA
| 17
| 33
| 0.705882
| 5
| 34
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 34
| 1
| 34
| 34
| 0.888889
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c2ac8b228827fc94b094364229a8fc2bc89f932a
| 15,316
|
py
|
Python
|
backend/api/fixtures/operational/0018_add_2013-2016_credit_calculation_values.py
|
amichard/tfrs
|
ed3973016cc5c2ae48999d550a23b41a5ddad807
|
[
"Apache-2.0"
] | 18
|
2017-05-10T21:55:11.000Z
|
2021-03-01T16:41:32.000Z
|
backend/api/fixtures/operational/0018_add_2013-2016_credit_calculation_values.py
|
amichard/tfrs
|
ed3973016cc5c2ae48999d550a23b41a5ddad807
|
[
"Apache-2.0"
] | 1,167
|
2017-03-04T00:18:43.000Z
|
2022-03-03T22:31:51.000Z
|
backend/api/fixtures/operational/0018_add_2013-2016_credit_calculation_values.py
|
amichard/tfrs
|
ed3973016cc5c2ae48999d550a23b41a5ddad807
|
[
"Apache-2.0"
] | 48
|
2017-03-09T17:19:39.000Z
|
2022-02-24T16:38:17.000Z
|
from django.db import transaction
from api.management.data_script import OperationalDataScript
from api.models.CarbonIntensityLimit import CarbonIntensityLimit
from api.models.CompliancePeriod import CompliancePeriod
from api.models.DefaultCarbonIntensity import DefaultCarbonIntensity
from api.models.DefaultCarbonIntensityCategory import \
DefaultCarbonIntensityCategory
from api.models.EnergyDensity import EnergyDensity
from api.models.EnergyDensityCategory import EnergyDensityCategory
from api.models.EnergyEffectivenessRatio import EnergyEffectivenessRatio
from api.models.EnergyEffectivenessRatioCategory import \
EnergyEffectivenessRatioCategory
from api.models.FuelClass import FuelClass
from api.models.PetroleumCarbonIntensity import PetroleumCarbonIntensity
from api.models.PetroleumCarbonIntensityCategory import \
PetroleumCarbonIntensityCategory
class AddCreditCalculationValues(OperationalDataScript):
    """
    Adds the Credit Calculation Values for 2013 to 2016.

    Refactored from one create-call per row to data-driven loops; the
    rows, strings, and creation order are identical to the original.
    """
    is_revertable = False
    comment = 'Adds Credit Calculation Values for 2013-2016'

    def check_run_preconditions(self):
        # Nothing to verify; the script is always allowed to run.
        return True

    @transaction.atomic
    def run(self):
        # (effective_date, expiration_date) ranges shared by the ratio,
        # intensity, and density tables below.
        dates = [
            ("2013-07-01", "2014-12-31"),
            ("2015-01-01", "2015-12-31"),
            ("2016-01-01", "2016-12-31")
        ]

        AddCreditCalculationValues.add_carbon_intensity_limits()
        AddCreditCalculationValues.add_energy_effectiveness_ratios(dates)
        AddCreditCalculationValues.add_default_carbon_intensities(dates)
        AddCreditCalculationValues.add_energy_densities(dates)

    @staticmethod
    def add_carbon_intensity_limits():
        """Create the per-period carbon intensity limits for both fuel
        classes (Diesel first, then Gasoline, per period)."""
        # (period description, effective, expiration, Diesel, Gasoline)
        limits = [
            ("2013-14", "2013-07-01", "2014-12-31", "92.38", "86.20"),
            ("2015", "2015-01-01", "2015-12-31", "91.21", "85.11"),
            ("2016", "2016-01-01", "2016-12-31", "90.28", "84.23"),
        ]
        for description, effective, expiration, diesel, gasoline in limits:
            compliance_period = CompliancePeriod.objects.get(
                description=description
            )
            for fuel_class, density in (("Diesel", diesel),
                                        ("Gasoline", gasoline)):
                CarbonIntensityLimit.objects.create(
                    compliance_period=compliance_period,
                    effective_date=effective,
                    expiration_date=expiration,
                    density=density,
                    fuel_class=FuelClass.objects.get(fuel_class=fuel_class)
                )

    @staticmethod
    def add_energy_effectiveness_ratios(dates):
        """Create the energy effectiveness ratios for every date range."""
        # (category name, ratio, fuel class) — original creation order.
        ratios = [
            ("CNG", "0.9", "Diesel"),
            ("CNG", "1.0", "Gasoline"),
            ("Electricity", "2.7", "Diesel"),
            ("Electricity", "3.4", "Gasoline"),
            ("Hydrogen", "1.9", "Diesel"),
            ("Hydrogen", "2.5", "Gasoline"),
            ("LNG", "1.0", "Diesel"),
            ("Petroleum-based diesel fuel or renewable fuel in "
             "relation to diesel class fuel", "1.0", "Diesel"),
            ("Petroleum-based gasoline, natural gas-based "
             "gasoline or renewable fuel in relation to gasoline "
             "class fuel", "1.0", "Gasoline"),
            ("Propane", "1.0", "Diesel"),
            ("Propane", "1.0", "Gasoline"),
        ]
        for effective_date, expiration_date in dates:
            for name, ratio, fuel_class in ratios:
                EnergyEffectivenessRatio.objects.create(
                    category=EnergyEffectivenessRatioCategory.objects.get(
                        name=name
                    ),
                    effective_date=effective_date,
                    expiration_date=expiration_date,
                    ratio=ratio,
                    fuel_class=FuelClass.objects.get(fuel_class=fuel_class)
                )

    @staticmethod
    def add_default_carbon_intensities(dates):
        """Create the default and petroleum carbon intensities for every
        date range."""
        # (model, category model, category lookup kwargs, density);
        # the petroleum rows are interleaved exactly as in the original,
        # and the original's lookup styles (name vs name__iexact) are kept.
        intensities = [
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "CNG"}, "62.14"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "Electricity"}, "11.00"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "Hydrogen"}, "95.51"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "LNG"}, "63.26"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "Natural gas-based gasoline"}, "90.07"),
            (PetroleumCarbonIntensity, PetroleumCarbonIntensityCategory,
             {"name": "Petroleum-based diesel"}, "93.55"),
            (PetroleumCarbonIntensity, PetroleumCarbonIntensityCategory,
             {"name": "Petroleum-based gasoline"}, "87.29"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "Propane"}, "75.35"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "Renewable Fuel in relation to diesel class "
                              "fuel"}, "93.55"),
            (DefaultCarbonIntensity, DefaultCarbonIntensityCategory,
             {"name__iexact": "Renewable Fuel in relation to gasoline "
                              "class fuel"}, "87.29"),
        ]
        for effective_date, expiration_date in dates:
            for model, category_model, lookup, density in intensities:
                model.objects.create(
                    category=category_model.objects.get(**lookup),
                    effective_date=effective_date,
                    expiration_date=expiration_date,
                    density=density
                )

    @staticmethod
    def add_energy_densities(dates):
        """Create the energy densities for every date range."""
        # (category name, density) — original creation order.
        densities = [
            ("Biodiesel", "36.94"),
            ("CNG", "38.26"),
            ("Electricity", "3.60"),
            ("Ethanol", "23.58"),
            ("Hydrogen", "120.00"),
            ("Hydrogenation-derived renewable diesel fuel", "36.51"),
            ("LNG", "52.87"),
            ("Petroleum-based diesel fuel or diesel fuel produced "
             "from biomass", "38.65"),
            ("Petroleum-based gasoline, natural gas-based "
             "gasoline or gasoline produced from biomass", "34.69"),
            ("Propane", "25.59"),
        ]
        for effective_date, expiration_date in dates:
            for name, density in densities:
                EnergyDensity.objects.create(
                    category=EnergyDensityCategory.objects.get(name=name),
                    effective_date=effective_date,
                    expiration_date=expiration_date,
                    density=density
                )
# Module-level hook: presumably the operational-script loader imports this
# module and looks up ``script_class`` — verify against the runner.
script_class = AddCreditCalculationValues
| 39.989556
| 79
| 0.565814
| 1,190
| 15,316
| 7.093277
| 0.120168
| 0.109347
| 0.138609
| 0.108755
| 0.83047
| 0.809857
| 0.809146
| 0.809146
| 0.738538
| 0.665206
| 0
| 0.033463
| 0.360016
| 15,316
| 382
| 80
| 40.094241
| 0.827688
| 0.004766
| 0
| 0.662953
| 0
| 0
| 0.080131
| 0.001379
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016713
| false
| 0
| 0.036212
| 0.002786
| 0.064067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c2be75367bdb6e902af7dbbd23092ca7ba155d43
| 2,300
|
py
|
Python
|
trunk/tools/rm_ip_src.py
|
HESUPING/JmeshBLE-StaticLib
|
cf0900f004026c7e2e3448ffde07e21d4af8e387
|
[
"Apache-2.0"
] | null | null | null |
trunk/tools/rm_ip_src.py
|
HESUPING/JmeshBLE-StaticLib
|
cf0900f004026c7e2e3448ffde07e21d4af8e387
|
[
"Apache-2.0"
] | null | null | null |
trunk/tools/rm_ip_src.py
|
HESUPING/JmeshBLE-StaticLib
|
cf0900f004026c7e2e3448ffde07e21d4af8e387
|
[
"Apache-2.0"
] | 3
|
2019-08-27T17:11:42.000Z
|
2021-02-04T06:38:35.000Z
|
# One-shot cleanup script: deletes a fixed list of BLE/IP-stack C sources and
# the mesh application directory from the source tree. Paths are relative to
# the current working directory; os.remove raises if a file is already gone,
# so this is intended to be run exactly once from the expected directory.
import os
import shutil

# Individual C source files to delete (AHI, EA, HCI, BLE host layer — GAP,
# GATT, L2CAP — and BLE link layer — LLC, LLD, LLM — plus exchange memory
# and rwble glue).
path_list = [
    '../ip/ahi/src/ahi.c',
    '../ip/ahi/src/ahi_task.c',
    '../ip/ea/src/ea.c',
    '../ip/hci/src/hci.c',
    '../ip/hci/src/hci_fc.c',
    '../ip/hci/src/hci_msg.c',
    '../ip/hci/src/hci_tl.c',
    '../ip/hci/src/hci_util.c',
    '../ip/ble/hl/src/gap/gapc/gapc.c',
    '../ip/ble/hl/src/gap/gapc/gapc_hci.c',
    '../ip/ble/hl/src/gap/gapc/gapc_sig.c',
    '../ip/ble/hl/src/gap/gapc/gapc_task.c',
    '../ip/ble/hl/src/gap/gapm/gapm.c',
    '../ip/ble/hl/src/gap/gapm/gapm_hci.c',
    '../ip/ble/hl/src/gap/gapm/gapm_task.c',
    '../ip/ble/hl/src/gap/gapm/gapm_util.c',
    '../ip/ble/hl/src/gap/smpc/smpc.c',
    '../ip/ble/hl/src/gap/smpc/smpc_api.c',
    '../ip/ble/hl/src/gap/smpc/smpc_crypto.c',
    '../ip/ble/hl/src/gap/smpc/smpc_util.c',
    '../ip/ble/hl/src/gap/smpm/smpm_api.c',
    '../ip/ble/hl/src/gatt/attc/attc.c',
    '../ip/ble/hl/src/gatt/attm/attm.c',
    '../ip/ble/hl/src/gatt/attm/attm_db.c',
    '../ip/ble/hl/src/gatt/atts/atts.c',
    '../ip/ble/hl/src/gatt/gattc/gattc.c',
    '../ip/ble/hl/src/gatt/gattc/gattc_task.c',
    '../ip/ble/hl/src/gatt/gattm/gattm.c',
    '../ip/ble/hl/src/gatt/gattm/gattm_task.c',
    '../ip/ble/hl/src/l2c/l2cc/l2cc.c',
    '../ip/ble/hl/src/l2c/l2cc/l2cc_lecb.c',
    '../ip/ble/hl/src/l2c/l2cc/l2cc_pdu.c',
    '../ip/ble/hl/src/l2c/l2cc/l2cc_sig.c',
    '../ip/ble/hl/src/l2c/l2cc/l2cc_task.c',
    '../ip/ble/hl/src/l2c/l2cm/l2cm.c',
    '../ip/ble/hl/src/rwble_hl/rwble_hl.c',
    '../ip/ble/ll/src/llc/llc.c',
    '../ip/ble/ll/src/llc/llc_ch_asses.c',
    '../ip/ble/ll/src/llc/llc_hci.c',
    '../ip/ble/ll/src/llc/llc_llcp.c',
    '../ip/ble/ll/src/llc/llc_task.c',
    '../ip/ble/ll/src/llc/llc_util.c',
    '../ip/ble/ll/src/lld/lld.c',
    '../ip/ble/ll/src/lld/lld_evt.c',
    '../ip/ble/ll/src/lld/lld_pdu.c',
    '../ip/ble/ll/src/lld/lld_util.c',
    '../ip/ble/ll/src/lld/lld_wlcoex.c',
    '../ip/ble/ll/src/llm/llm.c',
    '../ip/ble/ll/src/llm/llm_hci.c',
    '../ip/ble/ll/src/llm/llm_task.c',
    '../ip/ble/ll/src/llm/llm_util.c',
    '../ip/ble/ll/src/em/em_buf.c',
    '../ip/ble/ll/src/rwble/rwble.c',
]
# Mesh application directory, removed wholesale below.
mesh_path = '../freertos/app/mesh'
for path in path_list:
    os.remove(os.path.join(os.getcwd(),path))
shutil.rmtree(os.path.join(os.getcwd(),mesh_path))
| 35.384615
| 50
| 0.576522
| 453
| 2,300
| 2.834437
| 0.130243
| 0.121495
| 0.21028
| 0.174455
| 0.808411
| 0.716511
| 0.6581
| 0.443925
| 0.040498
| 0
| 0
| 0.008969
| 0.127391
| 2,300
| 65
| 50
| 35.384615
| 0.630792
| 0
| 0
| 0
| 0
| 0
| 0.736202
| 0.703607
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032787
| 0
| 0.032787
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c2dc69b2ec8fd9d3e974a603c7091a19ef916750
| 174
|
py
|
Python
|
tests/conftest.py
|
tom-boyes-park/ballchaser
|
82be5fa2c14029baf7be7e5cc8e6972ff2b2e9db
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
tom-boyes-park/ballchaser
|
82be5fa2c14029baf7be7e5cc8e6972ff2b2e9db
|
[
"MIT"
] | 1
|
2022-03-28T12:07:36.000Z
|
2022-03-28T12:07:36.000Z
|
tests/conftest.py
|
tom-boyes-park/ballchaser
|
82be5fa2c14029baf7be7e5cc8e6972ff2b2e9db
|
[
"MIT"
] | null | null | null |
""" Contains fixtures used in unit tests. """
import pytest
from ballchaser.client import BallChaser
@pytest.fixture()
def ball_chaser():
    """Return a BallChaser client constructed with a dummy API token."""
    token = "abc-123"
    return BallChaser(token)
| 17.4
| 45
| 0.735632
| 22
| 174
| 5.772727
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02027
| 0.149425
| 174
| 9
| 46
| 19.333333
| 0.837838
| 0.212644
| 0
| 0
| 0
| 0
| 0.054264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
6c7701484482bc04cb9551a8673780abb48592b4
| 20
|
py
|
Python
|
baidu/__init__.py
|
Panda-Home/Baidu-Translator
|
34b734829e6f7465e28d5c9f9d4bec10cb2b8d84
|
[
"MIT"
] | 7
|
2015-07-10T05:10:45.000Z
|
2017-02-15T08:46:37.000Z
|
baidu/__init__.py
|
Panda-Home/Baidu-Translator
|
34b734829e6f7465e28d5c9f9d4bec10cb2b8d84
|
[
"MIT"
] | 1
|
2017-05-19T22:52:46.000Z
|
2017-05-19T22:52:46.000Z
|
baidu/__init__.py
|
OldPanda/Baidu-Translator
|
34b734829e6f7465e28d5c9f9d4bec10cb2b8d84
|
[
"MIT"
] | 3
|
2015-12-14T12:12:42.000Z
|
2017-05-18T06:00:12.000Z
|
from . import baidu
| 10
| 19
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
66cd164c5042dd5ec7a55f0d648e6be40e41cd17
| 87
|
py
|
Python
|
cmd/expandybird/test/templates/no_properties.py
|
sparkprime/helm
|
4d7c681ba0aa43b45d122faa998c243422019be4
|
[
"Apache-2.0"
] | null | null | null |
cmd/expandybird/test/templates/no_properties.py
|
sparkprime/helm
|
4d7c681ba0aa43b45d122faa998c243422019be4
|
[
"Apache-2.0"
] | null | null | null |
cmd/expandybird/test/templates/no_properties.py
|
sparkprime/helm
|
4d7c681ba0aa43b45d122faa998c243422019be4
|
[
"Apache-2.0"
] | null | null | null |
"""Return empty resources block."""
def GenerateConfig(_):
    """Return the minimal template expansion: a bare ``resources:`` block.

    The single argument (the expansion context) is deliberately ignored.
    """
    return "resources:"
| 14.5
| 35
| 0.666667
| 8
| 87
| 7.125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 5
| 36
| 17.4
| 0.76
| 0.333333
| 0
| 0
| 1
| 0
| 0.192308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
dd0cea130ecacbd998af3c7ca9f34de42ae5c20d
| 300
|
py
|
Python
|
konlpy/konlpy/tag/__init__.py
|
ChoiSeEun/Korean-NLP-Visual
|
adee5e45f3472b969f45e6e2e991d88df0e49dcd
|
[
"Apache-2.0"
] | 1
|
2021-11-05T10:18:43.000Z
|
2021-11-05T10:18:43.000Z
|
konlpy/konlpy/tag/__init__.py
|
ChoiSeEun/Korean-NLP-Visual
|
adee5e45f3472b969f45e6e2e991d88df0e49dcd
|
[
"Apache-2.0"
] | 5
|
2021-11-04T10:23:39.000Z
|
2021-12-13T13:03:31.000Z
|
konlpy/konlpy/tag/__init__.py
|
ChoiSeEun/Korean-NLP-Visual
|
adee5e45f3472b969f45e6e2e991d88df0e49dcd
|
[
"Apache-2.0"
] | 2
|
2021-09-29T10:52:05.000Z
|
2021-09-29T11:05:33.000Z
|
from __future__ import absolute_import
import sys
import warnings
from konlpy.tag._hannanum import Hannanum
from konlpy.tag._kkma import Kkma
from konlpy.tag._komoran import Komoran
try:
from konlpy.tag._mecab import Mecab
except ImportError:
pass
from konlpy.tag._twitter import Twitter
| 18.75
| 41
| 0.813333
| 43
| 300
| 5.44186
| 0.395349
| 0.213675
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146667
| 300
| 15
| 42
| 20
| 0.914063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.090909
| 0.818182
| 0
| 0.818182
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
660c834baa65dbb1203c9cad56f77de2c68010a7
| 27
|
py
|
Python
|
devel/lib/python2.7/dist-packages/rrt_exploration/msg/__init__.py
|
pplankton/MRSLAM
|
0a16489a2cbd0c2d1511b506c540446cc670bde8
|
[
"MIT"
] | 1
|
2021-05-17T11:13:01.000Z
|
2021-05-17T11:13:01.000Z
|
devel/lib/python2.7/dist-packages/rrt_exploration/msg/__init__.py
|
pplankton/MRSLAM
|
0a16489a2cbd0c2d1511b506c540446cc670bde8
|
[
"MIT"
] | null | null | null |
devel/lib/python2.7/dist-packages/rrt_exploration/msg/__init__.py
|
pplankton/MRSLAM
|
0a16489a2cbd0c2d1511b506c540446cc670bde8
|
[
"MIT"
] | null | null | null |
from ._PointArray import *
| 13.5
| 26
| 0.777778
| 3
| 27
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
663582017e45ebc25559ce63cde60fefc1b77662
| 5,588
|
py
|
Python
|
tests/key_params/test_dss.py
|
FloLie/openssh_key_parser
|
44cdc6c2085069e7c2612841de38f581f8ef226c
|
[
"MIT"
] | null | null | null |
tests/key_params/test_dss.py
|
FloLie/openssh_key_parser
|
44cdc6c2085069e7c2612841de38f581f8ef226c
|
[
"MIT"
] | null | null | null |
tests/key_params/test_dss.py
|
FloLie/openssh_key_parser
|
44cdc6c2085069e7c2612841de38f581f8ef226c
|
[
"MIT"
] | null | null | null |
import pytest
from cryptography.hazmat.primitives.asymmetric import dsa
from openssh_key.key_params import DSSPrivateKeyParams, DSSPublicKeyParams
from openssh_key.pascal_style_byte_stream import PascalStyleFormatInstruction
# Shared parametrization data for DSS key-param tests: each entry names a params
# class, the PascalStyle format instructions it must expose, and example value
# dicts that are valid for that class (the private variant adds the secret 'x').
PARAMS_TEST_CASES = [
    {
        'cls': DSSPublicKeyParams,
        'format_instructions_dict': {
            'p': PascalStyleFormatInstruction.MPINT,
            'q': PascalStyleFormatInstruction.MPINT,
            'g': PascalStyleFormatInstruction.MPINT,
            'y': PascalStyleFormatInstruction.MPINT,
        },
        'valid_values': [{
            'p': 1,
            'q': 2,
            'g': 3,
            'y': 4,
        }]
    },
    {
        'cls': DSSPrivateKeyParams,
        'format_instructions_dict': {
            'p': PascalStyleFormatInstruction.MPINT,
            'q': PascalStyleFormatInstruction.MPINT,
            'g': PascalStyleFormatInstruction.MPINT,
            'y': PascalStyleFormatInstruction.MPINT,
            'x': PascalStyleFormatInstruction.MPINT,
        },
        'valid_values': [{
            'p': 1,
            'q': 2,
            'g': 3,
            'y': 4,
            'x': 5,
        }]
    }
]
def test_dss_public_convert_from_unknown():
    """convert_from must reject values it does not recognize."""
    unsupported_value = 'random'
    with pytest.raises(NotImplementedError):
        DSSPublicKeyParams.convert_from(unsupported_value)
def test_dss_public_convert_from_cryptography_public():
    """Converting a cryptography DSA public key yields matching DSS public params."""
    private_key = dsa.generate_private_key(
        DSSPrivateKeyParams.KEY_SIZE
    ).public_key()
    public_numbers = private_key.public_numbers()
    parameter_numbers = public_numbers.parameter_numbers
    converted = DSSPublicKeyParams.convert_from(private_key)
    # Exact-type check is intentional (no subclass allowed); `is` is the
    # idiomatic way to compare type objects, not `==`.
    assert type(converted) is DSSPublicKeyParams
    assert converted == {
        'p': parameter_numbers.p,
        'q': parameter_numbers.q,
        'g': parameter_numbers.g,
        'y': public_numbers.y,
    }
def test_dss_public_convert_from_cryptography_private():
    """Converting a cryptography DSA *private* key still yields public params only."""
    private_key = dsa.generate_private_key(
        DSSPrivateKeyParams.KEY_SIZE
    )
    private_numbers = private_key.private_numbers()
    public_numbers = private_numbers.public_numbers
    parameter_numbers = public_numbers.parameter_numbers
    converted = DSSPublicKeyParams.convert_from(private_key)
    # Exact-type check is intentional; `is` is the idiomatic type comparison.
    assert type(converted) is DSSPublicKeyParams
    assert converted == {
        'p': parameter_numbers.p,
        'q': parameter_numbers.q,
        'g': parameter_numbers.g,
        'y': public_numbers.y,
    }
def test_dss_public_convert_to_cryptography_public():
    """Public params built from generated values convert to a cryptography key."""
    generated = DSSPrivateKeyParams.generate_private_params()
    public_params = DSSPublicKeyParams({
        'p': generated['p'],
        'q': generated['q'],
        'g': generated['g'],
        'y': generated['y'],
    })
    result = public_params.convert_to(dsa.DSAPublicKey)
    assert isinstance(result, dsa.DSAPublicKey)
    expected_numbers = dsa.DSAPublicNumbers(
        public_params['y'],
        dsa.DSAParameterNumbers(
            public_params['p'],
            public_params['q'],
            public_params['g']
        )
    )
    assert result.public_numbers() == expected_numbers
def test_dss_private_convert_from_unknown():
    """DSSPrivateKeyParams.convert_from must reject unrecognized inputs."""
    unsupported_value = 'random'
    with pytest.raises(NotImplementedError):
        DSSPrivateKeyParams.convert_from(unsupported_value)
def test_dss_private_convert_from_cryptography_private():
    """Converting a cryptography DSA private key yields full private params."""
    private_key = dsa.generate_private_key(
        DSSPrivateKeyParams.KEY_SIZE
    )
    private_numbers = private_key.private_numbers()
    public_numbers = private_numbers.public_numbers
    parameter_numbers = public_numbers.parameter_numbers
    converted = DSSPrivateKeyParams.convert_from(private_key)
    # Exact-type check is intentional; `is` is the idiomatic type comparison.
    assert type(converted) is DSSPrivateKeyParams
    assert converted == {
        'p': parameter_numbers.p,
        'q': parameter_numbers.q,
        'g': parameter_numbers.g,
        'y': public_numbers.y,
        'x': private_numbers.x,
    }
def test_dss_private_convert_to_cryptography_private():
    """Generated private params convert to an equivalent cryptography private key."""
    params = DSSPrivateKeyParams.generate_private_params()
    result = params.convert_to(dsa.DSAPrivateKey)
    assert isinstance(result, dsa.DSAPrivateKey)
    expected_numbers = dsa.DSAPrivateNumbers(
        params['x'],
        dsa.DSAPublicNumbers(
            params['y'],
            dsa.DSAParameterNumbers(
                params['p'],
                params['q'],
                params['g']
            )
        )
    )
    assert result.private_numbers() == expected_numbers
def test_dss_private_convert_to_cryptography_dssprivatekey():
    # NOTE(review): this test is an exact duplicate of
    # test_dss_private_convert_to_cryptography_private above. The name suggests
    # it was meant to exercise a different target type — confirm intent and
    # either deduplicate or point it at the intended conversion.
    dss_private = DSSPrivateKeyParams.generate_private_params()
    converted = dss_private.convert_to(dsa.DSAPrivateKey)
    assert isinstance(converted, dsa.DSAPrivateKey)
    assert converted.private_numbers() == dsa.DSAPrivateNumbers(
        dss_private['x'],
        dsa.DSAPublicNumbers(
            dss_private['y'],
            dsa.DSAParameterNumbers(
                dss_private['p'],
                dss_private['q'],
                dss_private['g']
            )
        )
    )
def test_dss_private_convert_to_cryptography_public():
    """Private params can be converted down to a cryptography *public* key."""
    params = DSSPrivateKeyParams.generate_private_params()
    result = params.convert_to(dsa.DSAPublicKey)
    assert isinstance(result, dsa.DSAPublicKey)
    expected_numbers = dsa.DSAPublicNumbers(
        params['y'],
        dsa.DSAParameterNumbers(
            params['p'],
            params['q'],
            params['g']
        )
    )
    assert result.public_numbers() == expected_numbers
def test_dss_public_convert_to_not_implemented():
    """convert_to with an unsupported target type raises NotImplementedError."""
    dss_private = DSSPrivateKeyParams.generate_private_params()
    with pytest.raises(NotImplementedError):
        # A bare call suffices: pytest.raises fails the test if nothing is
        # raised. The original wrapped this call in `assert`, which could
        # never actually be evaluated (the call raises first) and was
        # therefore misleading.
        dss_private.convert_to(type)
| 31.931429
| 77
| 0.656049
| 544
| 5,588
| 6.404412
| 0.117647
| 0.091848
| 0.028703
| 0.038175
| 0.817164
| 0.817164
| 0.77411
| 0.710103
| 0.710103
| 0.693743
| 0
| 0.002133
| 0.24481
| 5,588
| 174
| 78
| 32.114943
| 0.82346
| 0
| 0
| 0.601307
| 0
| 0
| 0.026306
| 0.00859
| 0
| 0
| 0
| 0
| 0.098039
| 1
| 0.065359
| false
| 0
| 0.026144
| 0
| 0.091503
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b0dc10c62a849f2ff0293b6432ed1a1496b9b6a0
| 20,794
|
py
|
Python
|
src/modeler/inceptionnetmodel.py
|
liuweiping2020/pyml
|
0b9a7a307b93f9313d7e1bb92b33ae330d681c73
|
[
"Apache-2.0"
] | null | null | null |
src/modeler/inceptionnetmodel.py
|
liuweiping2020/pyml
|
0b9a7a307b93f9313d7e1bb92b33ae330d681c73
|
[
"Apache-2.0"
] | null | null | null |
src/modeler/inceptionnetmodel.py
|
liuweiping2020/pyml
|
0b9a7a307b93f9313d7e1bb92b33ae330d681c73
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
from modeler.tfmodel import TFModel
class InceptionNetModel(TFModel):
    """Inception V3 network built with the TF-Slim (TensorFlow 1.x contrib) API.

    Graph construction mirrors the reference slim implementation: a stem of
    plain conv/pool layers followed by the Mixed_5* / Mixed_6* / Mixed_7*
    inception blocks, plus an auxiliary classifier head on 'Mixed_6e'.
    """

    def __init__(self):
        """Cache the slim module handle and a truncated-normal initializer factory."""
        self.slim = tf.contrib.slim
        # Factory: returns a zero-mean truncated-normal initializer with the
        # given stddev; used by the arg-scope and the auxiliary head below.
        self.trunc_normal = lambda stddev: tf.truncated_normal_initializer(0.0, stddev)
        pass

    def add_placeholder(self):
        """Create a random NHWC input batch of shape (32, 299, 299, 3).

        NOTE(review): despite the name, this uses tf.random_uniform rather than
        a tf.placeholder — presumably for benchmarking; confirm against callers.
        """
        batch_size = 32
        height, width = 299, 299
        self.inputs = tf.random_uniform((batch_size, height, width, 3))
        pass

    def build(self):
        """Build the inference graph, storing logits and endpoint tensors on self."""
        with self.slim.arg_scope(self.inception_v3_arg_scope()):
            self.logits, self.end_points = self.inception_v3(self.inputs, is_training=False)
        pass

    def inception_v3_base(self, inputs, scope=None):
        """Build the convolutional trunk (stem + inception blocks).

        Args:
            inputs: 4-D input tensor; the spatial-size comments below assume
                299x299x3 input.
            scope: optional variable scope name (defaults to 'InceptionV3').

        Returns:
            (net, end_points): the final 8x8x2048 feature map and a dict of
            named intermediate tensors (only 'Mixed_6e' is recorded here).
        """
        end_points = {}
        with tf.variable_scope(scope, 'InceptionV3', [inputs]):
            # Stem: plain convolutions and poolings, default VALID padding.
            with self.slim.arg_scope([self.slim.conv2d, self.slim.max_pool2d, self.slim.avg_pool2d],
                                     stride=1, padding='VALID'):
                # 299 x 299 x 3
                net = self.slim.conv2d(inputs, 32, [3, 3], stride=2, scope='Conv2d_1a_3x3')
                # 149 x 149 x 32
                net = self.slim.conv2d(net, 32, [3, 3], scope='Conv2d_2a_3x3')
                # 147 x 147 x 32
                net = self.slim.conv2d(net, 64, [3, 3], padding='SAME', scope='Conv2d_2b_3x3')
                # 147 x 147 x 64
                net = self.slim.max_pool2d(net, [3, 3], stride=2, scope='MaxPool_3a_3x3')
                # 73 x 73 x 64
                net = self.slim.conv2d(net, 80, [1, 1], scope='Conv2d_3b_1x1')
                # 73 x 73 x 80.
                net = self.slim.conv2d(net, 192, [3, 3], scope='Conv2d_4a_3x3')
                # 71 x 71 x 192.
                net = self.slim.max_pool2d(net, [3, 3], stride=2, scope='MaxPool_5a_3x3')
                # 35 x 35 x 192.
            # Inception blocks
            with self.slim.arg_scope([self.slim.conv2d, self.slim.max_pool2d, self.slim.avg_pool2d],
                                     stride=1, padding='SAME'):
                # mixed: 35 x 35 x 256.
                with tf.variable_scope('Mixed_5b'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 48, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 64, [5, 5], scope='Conv2d_0b_5x5')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0b_3x3')
                        branch_2 = self.slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0c_3x3')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 32, [1, 1], scope='Conv2d_0b_1x1')
                    # Concatenate the four branches along the channel axis (3).
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_1: 35 x 35 x 288.
                with tf.variable_scope('Mixed_5c'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 48, [1, 1], scope='Conv2d_0b_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 64, [5, 5], scope='Conv_1_0c_5x5')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0b_3x3')
                        branch_2 = self.slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0c_3x3')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_2: 35 x 35 x 288.
                with tf.variable_scope('Mixed_5d'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 48, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 64, [5, 5], scope='Conv2d_0b_5x5')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0b_3x3')
                        branch_2 = self.slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0c_3x3')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_3: 17 x 17 x 768.
                with tf.variable_scope('Mixed_6a'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 384, [3, 3], stride=2,
                                                    padding='VALID', scope='Conv2d_1a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 96, [3, 3], scope='Conv2d_0b_3x3')
                        branch_1 = self.slim.conv2d(branch_1, 96, [3, 3], stride=2,
                                                    padding='VALID', scope='Conv2d_1a_1x1')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
                                                        scope='MaxPool_1a_3x3')
                    net = tf.concat([branch_0, branch_1, branch_2], 3)
                # mixed4: 17 x 17 x 768.
                with tf.variable_scope('Mixed_6b'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        # Factorized 7x7 convolution: 1x7 followed by 7x1.
                        branch_1 = self.slim.conv2d(net, 128, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 128, [1, 7], scope='Conv2d_0b_1x7')
                        branch_1 = self.slim.conv2d(branch_1, 192, [7, 1], scope='Conv2d_0c_7x1')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 128, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 128, [7, 1], scope='Conv2d_0b_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 128, [1, 7], scope='Conv2d_0c_1x7')
                        branch_2 = self.slim.conv2d(branch_2, 128, [7, 1], scope='Conv2d_0d_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 192, [1, 7], scope='Conv2d_0e_1x7')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 192, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_5: 17 x 17 x 768.
                with tf.variable_scope('Mixed_6c'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 160, [1, 7], scope='Conv2d_0b_1x7')
                        branch_1 = self.slim.conv2d(branch_1, 192, [7, 1], scope='Conv2d_0c_7x1')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 160, [7, 1], scope='Conv2d_0b_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 160, [1, 7], scope='Conv2d_0c_1x7')
                        branch_2 = self.slim.conv2d(branch_2, 160, [7, 1], scope='Conv2d_0d_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 192, [1, 7], scope='Conv2d_0e_1x7')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 192, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_6: 17 x 17 x 768.
                with tf.variable_scope('Mixed_6d'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 160, [1, 7], scope='Conv2d_0b_1x7')
                        branch_1 = self.slim.conv2d(branch_1, 192, [7, 1], scope='Conv2d_0c_7x1')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 160, [7, 1], scope='Conv2d_0b_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 160, [1, 7], scope='Conv2d_0c_1x7')
                        branch_2 = self.slim.conv2d(branch_2, 160, [7, 1], scope='Conv2d_0d_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 192, [1, 7], scope='Conv2d_0e_1x7')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 192, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_7: 17 x 17 x 768.
                with tf.variable_scope('Mixed_6e'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 192, [1, 7], scope='Conv2d_0b_1x7')
                        branch_1 = self.slim.conv2d(branch_1, 192, [7, 1], scope='Conv2d_0c_7x1')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(branch_2, 192, [7, 1], scope='Conv2d_0b_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 192, [1, 7], scope='Conv2d_0c_1x7')
                        branch_2 = self.slim.conv2d(branch_2, 192, [7, 1], scope='Conv2d_0d_7x1')
                        branch_2 = self.slim.conv2d(branch_2, 192, [1, 7], scope='Conv2d_0e_1x7')
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(branch_3, 192, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # Recorded so the auxiliary classifier head can branch off here.
                end_points['Mixed_6e'] = net
                # mixed_8: 8 x 8 x 1280.
                with tf.variable_scope('Mixed_7a'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                        branch_0 = self.slim.conv2d(branch_0, 320, [3, 3], stride=2,
                                                    padding='VALID', scope='Conv2d_1a_3x3')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = self.slim.conv2d(branch_1, 192, [1, 7], scope='Conv2d_0b_1x7')
                        branch_1 = self.slim.conv2d(branch_1, 192, [7, 1], scope='Conv2d_0c_7x1')
                        branch_1 = self.slim.conv2d(branch_1, 192, [3, 3], stride=2,
                                                    padding='VALID', scope='Conv2d_1a_3x3')
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
                                                        scope='MaxPool_1a_3x3')
                    net = tf.concat([branch_0, branch_1, branch_2], 3)
                # mixed_9: 8 x 8 x 2048.
                with tf.variable_scope('Mixed_7b'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 320, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 384, [1, 1], scope='Conv2d_0a_1x1')
                        # Expanded-filter-bank: parallel 1x3 and 3x1 outputs concatenated.
                        branch_1 = tf.concat([
                            self.slim.conv2d(branch_1, 384, [1, 3], scope='Conv2d_0b_1x3'),
                            self.slim.conv2d(branch_1, 384, [3, 1], scope='Conv2d_0b_3x1')], 3)
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 448, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(
                            branch_2, 384, [3, 3], scope='Conv2d_0b_3x3')
                        branch_2 = tf.concat([
                            self.slim.conv2d(branch_2, 384, [1, 3], scope='Conv2d_0c_1x3'),
                            self.slim.conv2d(branch_2, 384, [3, 1], scope='Conv2d_0d_3x1')], 3)
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(
                            branch_3, 192, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
                # mixed_10: 8 x 8 x 2048.
                with tf.variable_scope('Mixed_7c'):
                    with tf.variable_scope('Branch_0'):
                        branch_0 = self.slim.conv2d(net, 320, [1, 1], scope='Conv2d_0a_1x1')
                    with tf.variable_scope('Branch_1'):
                        branch_1 = self.slim.conv2d(net, 384, [1, 1], scope='Conv2d_0a_1x1')
                        branch_1 = tf.concat([
                            self.slim.conv2d(branch_1, 384, [1, 3], scope='Conv2d_0b_1x3'),
                            self.slim.conv2d(branch_1, 384, [3, 1], scope='Conv2d_0c_3x1')], 3)
                    with tf.variable_scope('Branch_2'):
                        branch_2 = self.slim.conv2d(net, 448, [1, 1], scope='Conv2d_0a_1x1')
                        branch_2 = self.slim.conv2d(
                            branch_2, 384, [3, 3], scope='Conv2d_0b_3x3')
                        branch_2 = tf.concat([
                            self.slim.conv2d(branch_2, 384, [1, 3], scope='Conv2d_0c_1x3'),
                            self.slim.conv2d(branch_2, 384, [3, 1], scope='Conv2d_0d_3x1')], 3)
                    with tf.variable_scope('Branch_3'):
                        branch_3 = self.slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
                        branch_3 = self.slim.conv2d(
                            branch_3, 192, [1, 1], scope='Conv2d_0b_1x1')
                    net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
            return net, end_points

    def inception_v3(self, inputs,
                     num_classes=1000,
                     is_training=True,
                     dropout_keep_prob=0.8,
                     prediction_fn=tf.contrib.slim.softmax,
                     spatial_squeeze=True,
                     reuse=None,
                     scope='InceptionV3'):
        """Build the full model: trunk, auxiliary head, and classifier.

        Args:
            inputs: 4-D input tensor.
            num_classes: number of output classes for both heads.
            is_training: forwarded to batch_norm/dropout arg scopes.
            dropout_keep_prob: keep probability for the pre-logits dropout.
            prediction_fn: function mapping logits to predictions (softmax).
            spatial_squeeze: if True, squeeze the 1x1 spatial dims from logits.
            reuse: variable reuse flag for the enclosing scope.
            scope: variable scope name.

        Returns:
            (logits, end_points): classifier logits and a dict of named
            tensors including 'AuxLogits', 'PreLogits', 'Logits', 'Predictions'.
        """
        with tf.variable_scope(scope, 'InceptionV3', [inputs, num_classes],
                               reuse=reuse) as scope:
            with self.slim.arg_scope([self.slim.batch_norm, self.slim.dropout],
                                     is_training=is_training):
                net, end_points = self.inception_v3_base(inputs, scope=scope)
                # Auxiliary Head logits
                with self.slim.arg_scope([self.slim.conv2d, self.slim.max_pool2d, self.slim.avg_pool2d],
                                         stride=1, padding='SAME'):
                    aux_logits = end_points['Mixed_6e']
                    with tf.variable_scope('AuxLogits'):
                        aux_logits = self.slim.avg_pool2d(
                            aux_logits, [5, 5], stride=3, padding='VALID',
                            scope='AvgPool_1a_5x5')
                        aux_logits = self.slim.conv2d(aux_logits, 128, [1, 1],
                                                      scope='Conv2d_1b_1x1')
                        # Shape of feature map before the final layer.
                        aux_logits = self.slim.conv2d(
                            aux_logits, 768, [5, 5],
                            weights_initializer=self.trunc_normal(0.01),
                            padding='VALID', scope='Conv2d_2a_5x5')
                        aux_logits = self.slim.conv2d(
                            aux_logits, num_classes, [1, 1], activation_fn=None,
                            normalizer_fn=None, weights_initializer=self.trunc_normal(0.001),
                            scope='Conv2d_2b_1x1')
                        if spatial_squeeze:
                            aux_logits = tf.squeeze(aux_logits, [1, 2], name='SpatialSqueeze')
                        end_points['AuxLogits'] = aux_logits
                # Final pooling and prediction
                with tf.variable_scope('Logits'):
                    net = self.slim.avg_pool2d(net, [8, 8], padding='VALID',
                                               scope='AvgPool_1a_8x8')
                    # 1 x 1 x 2048
                    net = self.slim.dropout(net, keep_prob=dropout_keep_prob, scope='Dropout_1b')
                    end_points['PreLogits'] = net
                    # 2048
                    logits = self.slim.conv2d(net, num_classes, [1, 1], activation_fn=None,
                                              normalizer_fn=None, scope='Conv2d_1c_1x1')
                    if spatial_squeeze:
                        logits = tf.squeeze(logits, [1, 2], name='SpatialSqueeze')
                    # 1000
                    end_points['Logits'] = logits
                    end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
        return logits, end_points

    def inception_v3_arg_scope(self, weight_decay=0.00004,
                               stddev=0.1,
                               batch_norm_var_collection='moving_vars'):
        """Return the default arg scope: L2 regularization, truncated-normal
        weight init, ReLU activations, and batch-norm on every conv layer.

        Args:
            weight_decay: L2 regularization strength for conv/FC weights.
            stddev: stddev of the truncated-normal weight initializer.
            batch_norm_var_collection: collection name for batch-norm moving stats.
        """
        batch_norm_params = {
            'decay': 0.9997,
            'epsilon': 0.001,
            'updates_collections': tf.GraphKeys.UPDATE_OPS,
            'variables_collections': {
                'beta': None,
                'gamma': None,
                'moving_mean': [batch_norm_var_collection],
                'moving_variance': [batch_norm_var_collection],
            }
        }
        with self.slim.arg_scope([self.slim.conv2d, self.slim.fully_connected],
                                 weights_regularizer=self.slim.l2_regularizer(weight_decay)):
            with self.slim.arg_scope(
                    [self.slim.conv2d],
                    weights_initializer=self.trunc_normal(stddev),
                    activation_fn=tf.nn.relu,
                    normalizer_fn=self.slim.batch_norm,
                    normalizer_params=batch_norm_params) as sc:
                return sc
| 62.63253
| 104
| 0.506204
| 2,600
| 20,794
| 3.788462
| 0.075385
| 0.112081
| 0.146396
| 0.117767
| 0.816244
| 0.764264
| 0.751878
| 0.732792
| 0.717157
| 0.717157
| 0
| 0.117782
| 0.37203
| 20,794
| 332
| 105
| 62.63253
| 0.636545
| 0.024526
| 0
| 0.568345
| 0
| 0
| 0.109389
| 0.001037
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021583
| false
| 0.010791
| 0.007194
| 0
| 0.043165
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b0f2574cffd9be886cb958a4c195295adcfeb5dc
| 4,572
|
py
|
Python
|
main.py
|
njpd57/Proyecto-Ricoloso
|
cf423460bd21933afa5b761c084d5fcf71b21780
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
njpd57/Proyecto-Ricoloso
|
cf423460bd21933afa5b761c084d5fcf71b21780
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
njpd57/Proyecto-Ricoloso
|
cf423460bd21933afa5b761c084d5fcf71b21780
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3.9
"""
____ _
| _ \ _ __ ___ _ _ ___ ___| |_ ___
| |_) | '__/ _ \| | | |/ _ \/ __| __/ _ \
| __/| | | (_) | |_| | __/ (__| || (_) |
|_| |_| \___/ \__, |\___|\___|\__\___/
|___/
____ _ _
| _ \(_) ___ ___ | | ___ ___ ___
| |_) | |/ __/ _ \| |/ _ \/ __|/ _ \
| _ <| | (_| (_) | | (_) \__ \ (_) |
|_| \_\_|\___\___/|_|\___/|___/\___/
[ 23 Septiembre 2021 ] - Team AHORASIPYTHON
All Copyrights Reserved © 2021
/* ************************************************************ */
/* ______ __ _ _____ __ __ _____ ______ ______ */
/* |___ | | |_| | / \ | |/ / / \ |___ | |___ | */
/* .-` .-` | _ | | | | \ | | | ___| |___ | */
/* |______| |__| |_| \_____/ |__|\__\ \_____/ |______| |______| */
/* */
/* [ Septiembre 2021 ] - Team TuMamáEsMarakaYEra */
/* All Copyrights Reserved © 2021 */
/* */
/* ************************************************************ */
_/ _/ _/ _/ _/_/_/
_/_/_/_/ _/_/_/_/ _/_/ _/_/_/ _/ _/_/ _/_/ _/
_/ _/ _/ _/ _/ _/ _/ _/_/ _/ _/_/
_/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/_/ _/ _/_/ _/_/ _/_/_/_/_/ _/_/_/_/ _/ _/ _/_/_/ _/_/_/_/_/
[ 23 Septiembre 2021 ] - Team #Free_Ricoloso
All Copyrights Reserved © 2021
_ __ _ _ _____ _ _ ___
| |/ /__ _| |_ _| |__|___ / _ __| |_(_)/ _ \ _ __
| ' / \ \/ / | | | | '_ \ |_ \| '__| __| | | | | '_ \
| . \ > <| | |_| | |_) |__) | | | |_| | |_| | | | |
|_|\_\/_/\_\_|\__, |_.__/____/|_| \__|_|\___/|_| |_|
|___/
[ 23 Septiembre 2021 ] - Team CuraoNoVale
All Copyrights Reserved © 2021
⠀⠀⠀⠀⠀⣠⣶⣄⠀⠀⠀ ⠀⣠⣶⣄⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠈⠻⣿⣿⣷⡄⢠⣾⣿⣿⠟⠁⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠈⠻⣿⣿⣿⣿⠟⠁⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⢀⣴⣿⣿⣿⣿⣦⡀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⢀⣴⣿⣿⡿⠃⠘⢿⣿⣿⣦⡀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠙⡿⠋⠀⠀⠀⠀⠙⢿⠋⠀ Elm0
[ 23 Septiembre 2021 ] - Team N.Lagos
All Copyrights Reserved © 2021
Interfaz ( grafica o no )
Ingrese url
ingresar ( ESCRIBA FIN PARA EMPEZAR A DESCARGAR )
ELMO:
for ( cantidad de links )
pipes 1
iniciar
with youtube_dl.YoutubeDL() as ydl:
ydl.download(['https://www.youtube.com/watch?v=eB3wlW96CQs'])
"""
import sys
from youtube import DescargarVideo as DVL
from cli import interfaz
if __name__ == "__main__":
    # Print the ASCII-art banner, then hand control to the CLI front end.
    # The banner strings must be kept exactly as-is (they are user-facing output).
    print(" ### ## ## ### ### ### ")
    print(" ## ## ## ##")
    print(" ## ### #### ##### ## ## #### ##### ### ## #### #### ###### #### ###### ##### ## ## ## ##")
    print(" ##### ## ## ## ## ## ## ## ## ## ## ## ## ##### ## ## ## ## ## ## ## ## ## ## ## ##### ## ## #####")
    print(" ## ## ## ###### ## ## ## ## ###### ## ## ## ## ## ## ## ##### ## ## ## ## ## ## ## ## ## ## ## ## ##")
    print(" ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ##### ## ## ## ## ## ## ## ## ## ## ##")
    print(" ###### #### ##### ## ## ## ##### ## ## #### ###### #### ##### ## #### #### ## ## ### ## ###### ######")
    interfaz()
| 50.8
| 193
| 0.209974
| 122
| 4,572
| 5.868852
| 0.581967
| 0.083799
| 0.125698
| 0.153631
| 0.286313
| 0.177374
| 0.128492
| 0.128492
| 0.128492
| 0
| 0
| 0.026908
| 0.552931
| 4,572
| 89
| 194
| 51.370787
| 0.264188
| 0.619204
| 0
| 0
| 0
| 0.333333
| 0.847251
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.583333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
b0f92973ac5b228d853c6dc53d5b6bb8bbc50be3
| 120
|
py
|
Python
|
GPopt/utils/__init__.py
|
Techtonique/GPopt
|
37eb7cbd55679b67b0f0f39dddb310309531e5ca
|
[
"BSD-3-Clause-Clear"
] | 1
|
2021-07-14T11:56:32.000Z
|
2021-07-14T11:56:32.000Z
|
GPopt/utils/__init__.py
|
Techtonique/GPopt
|
37eb7cbd55679b67b0f0f39dddb310309531e5ca
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
GPopt/utils/__init__.py
|
Techtonique/GPopt
|
37eb7cbd55679b67b0f0f39dddb310309531e5ca
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
from .progress_bar import Progbar
from .nodesimulation import generate_sobol2
# Public API of the utils subpackage: the progress bar and the Sobol generator.
__all__ = ["Progbar", "generate_sobol2"]
| 24
| 43
| 0.808333
| 14
| 120
| 6.428571
| 0.642857
| 0.311111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018692
| 0.108333
| 120
| 4
| 44
| 30
| 0.82243
| 0
| 0
| 0
| 1
| 0
| 0.183333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b040d9104205424c5158e808096d5c47161a331c
| 39
|
py
|
Python
|
src/compas/datastructures/graph/__init__.py
|
funkchaser/compas
|
b58de8771484aa0c6068d43df78b1679503215de
|
[
"MIT"
] | 235
|
2017-11-07T07:33:22.000Z
|
2022-03-25T16:20:00.000Z
|
src/compas/datastructures/graph/__init__.py
|
funkchaser/compas
|
b58de8771484aa0c6068d43df78b1679503215de
|
[
"MIT"
] | 770
|
2017-09-22T13:42:06.000Z
|
2022-03-31T21:26:45.000Z
|
src/compas/datastructures/graph/__init__.py
|
funkchaser/compas
|
b58de8771484aa0c6068d43df78b1679503215de
|
[
"MIT"
] | 99
|
2017-11-06T23:15:28.000Z
|
2022-03-25T16:05:36.000Z
|
from .graph import Graph # noqa: F401
| 19.5
| 38
| 0.717949
| 6
| 39
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0.205128
| 39
| 1
| 39
| 39
| 0.806452
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c67c17dae772365779c1d0f21a3813ec49f7d982
| 118
|
py
|
Python
|
tests/context.py
|
Sergey190309/211019-JF
|
70e6198447181b45f444b3e8a84205e44e4af018
|
[
"MIT"
] | null | null | null |
tests/context.py
|
Sergey190309/211019-JF
|
70e6198447181b45f444b3e8a84205e44e4af018
|
[
"MIT"
] | null | null | null |
tests/context.py
|
Sergey190309/211019-JF
|
70e6198447181b45f444b3e8a84205e44e4af018
|
[
"MIT"
] | null | null | null |
import sys
import os

# Make the project root importable when tests are run directly from tests/.
# Fixes two bugs in the original bootstrap line:
#   * `sys.path.index(0, ...)` called list.index where `insert` was intended,
#     which raises TypeError at import time (the start argument must be an int).
#   * `os.path.join((dirname, '..'))` passed a single tuple argument instead of
#     two separate path components, so no join was actually performed.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import src
| 16.857143
| 83
| 0.70339
| 20
| 118
| 3.95
| 0.55
| 0.227848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.09322
| 118
| 6
| 84
| 19.666667
| 0.728972
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c68915543d403e05b5ebd5f855336910b2ff5224
| 44
|
py
|
Python
|
editorcontainer/rightclickmenu/__init__.py
|
AutomataRaven/azaharTEA
|
2d5a7d96b37bca9b3a914e305e493824a0f60207
|
[
"MIT"
] | 5
|
2019-03-10T16:33:21.000Z
|
2021-04-07T17:24:32.000Z
|
editorcontainer/rightclickmenu/__init__.py
|
Errantgod/azaharTEA
|
2d5a7d96b37bca9b3a914e305e493824a0f60207
|
[
"MIT"
] | 8
|
2017-02-11T06:21:28.000Z
|
2017-02-22T05:50:35.000Z
|
editorcontainer/rightclickmenu/__init__.py
|
Errantgod/azaharTEA
|
2d5a7d96b37bca9b3a914e305e493824a0f60207
|
[
"MIT"
] | 2
|
2019-10-05T20:20:15.000Z
|
2020-06-28T18:46:58.000Z
|
__all__ = ['rightclickmenu.RightClickMenu']
| 22
| 43
| 0.795455
| 3
| 44
| 10.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 44
| 1
| 44
| 44
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0.659091
| 0.659091
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c6cc7a05fbf040623558bcea004a995a21e68f35
| 2,685
|
py
|
Python
|
tests/test_vx_or_vy=0.py
|
leanderkirkeland/2d_propagating_blobs
|
1857b951a603c2bc21f84c78ae6ccf64eb58a469
|
[
"MIT"
] | null | null | null |
tests/test_vx_or_vy=0.py
|
leanderkirkeland/2d_propagating_blobs
|
1857b951a603c2bc21f84c78ae6ccf64eb58a469
|
[
"MIT"
] | null | null | null |
tests/test_vx_or_vy=0.py
|
leanderkirkeland/2d_propagating_blobs
|
1857b951a603c2bc21f84c78ae6ccf64eb58a469
|
[
"MIT"
] | null | null | null |
from blobmodel import Model, BlobFactory, Blob
import numpy as np
class CustomBlobFactoryVy0(BlobFactory):
def __init__(self) -> None:
pass
def sample_blobs(
self, Ly: float, T: float, num_blobs: int, blob_shape: str, t_drain: float
) -> list[Blob]:
# set custom parameter distributions
__amp = np.ones(num_blobs)
__width = np.ones(num_blobs)
__vx = np.ones(num_blobs)
__vy = np.zeros(num_blobs)
__posx = np.zeros(num_blobs)
__posy = np.ones(num_blobs) * Ly / 2
__t_init = np.ones(num_blobs) * 0
return [
Blob(
id=i,
blob_shape=blob_shape,
amplitude=__amp[i],
width_prop=__width[i],
width_perp=__width[i],
v_x=__vx[i],
v_y=__vy[i],
pos_x=__posx[i],
pos_y=__posy[i],
t_init=__t_init[i],
t_drain=t_drain,
)
for i in range(num_blobs)
]
class CustomBlobFactoryVx0(BlobFactory):
def __init__(self) -> None:
pass
def sample_blobs(
self, Ly: float, T: float, num_blobs: int, blob_shape: str, t_drain: float
) -> list[Blob]:
# set custom parameter distributions
__amp = np.ones(num_blobs)
__width = np.ones(num_blobs)
__vx = np.zeros(num_blobs)
__vy = np.ones(num_blobs)
__posx = np.zeros(num_blobs)
__posy = np.ones(num_blobs) * Ly / 2
__t_init = np.ones(num_blobs) * 0
return [
Blob(
id=i,
blob_shape=blob_shape,
amplitude=__amp[i],
width_prop=__width[i],
width_perp=__width[i],
v_x=__vx[i],
v_y=__vy[i],
pos_x=__posx[i],
pos_y=__posy[i],
t_init=__t_init[i],
t_drain=t_drain,
)
for i in range(num_blobs)
]
bf_vy_0 = CustomBlobFactoryVy0()
bm_vy_0 = Model(
Nx=10,
Ny=10,
Lx=10,
Ly=10,
dt=1,
T=1,
periodic_y=True,
blob_shape="exp",
num_blobs=1,
blob_factory=bf_vy_0,
t_drain=1e10,
)
bf_vx_0 = CustomBlobFactoryVx0()
bm_vx_0 = Model(
Nx=10,
Ny=10,
Lx=10,
Ly=10,
dt=1,
T=1,
periodic_y=True,
blob_shape="exp",
num_blobs=1,
blob_factory=bf_vx_0,
t_drain=1e10,
)
def test_vy_0():
assert bm_vy_0.make_realization(speed_up=True, error=1e-2)
def test_vx_0():
assert bm_vx_0.make_realization(speed_up=True, error=1e-2)
test_vx_0()
test_vy_0()
| 22.375
| 82
| 0.532588
| 362
| 2,685
| 3.519337
| 0.209945
| 0.125589
| 0.070644
| 0.10989
| 0.77708
| 0.77708
| 0.77708
| 0.77708
| 0.77708
| 0.722135
| 0
| 0.030268
| 0.360149
| 2,685
| 119
| 83
| 22.563025
| 0.711292
| 0.025698
| 0
| 0.723404
| 0
| 0
| 0.002296
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 1
| 0.06383
| false
| 0.021277
| 0.021277
| 0
| 0.12766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c6e60b0ef37a415c9652bd72fb7d581a064ad1a0
| 21
|
py
|
Python
|
blendcollection/blendcollection/__init__.py
|
train-your-deblender/cutout-evaluation
|
79009552d1c9072696034fa31f71273975f35749
|
[
"BSD-3-Clause"
] | null | null | null |
blendcollection/blendcollection/__init__.py
|
train-your-deblender/cutout-evaluation
|
79009552d1c9072696034fa31f71273975f35749
|
[
"BSD-3-Clause"
] | null | null | null |
blendcollection/blendcollection/__init__.py
|
train-your-deblender/cutout-evaluation
|
79009552d1c9072696034fa31f71273975f35749
|
[
"BSD-3-Clause"
] | null | null | null |
from .blends import *
| 21
| 21
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05a864cba2e065764976e0104e18d8899df62434
| 119
|
py
|
Python
|
test_CaesarCode.py
|
an-dreaa/SIM-Homework-1
|
aec1451b84c391506ef33764121898813f72ace3
|
[
"Apache-2.0"
] | null | null | null |
test_CaesarCode.py
|
an-dreaa/SIM-Homework-1
|
aec1451b84c391506ef33764121898813f72ace3
|
[
"Apache-2.0"
] | null | null | null |
test_CaesarCode.py
|
an-dreaa/SIM-Homework-1
|
aec1451b84c391506ef33764121898813f72ace3
|
[
"Apache-2.0"
] | null | null | null |
from function_CaesarCode import caesar
def test_caesar():
assert caesar('Hslh qhjah lza', 7) == 'alea jacta est'
| 19.833333
| 58
| 0.722689
| 17
| 119
| 4.941176
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.176471
| 119
| 5
| 59
| 23.8
| 0.846939
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05e54d29558bf3b3ae4babc24b9a1fb09cb134b4
| 187
|
py
|
Python
|
tests/_testsite/importerror_app/forum/dummy.py
|
OneRainbowDev/django-machina
|
7354cc50f58dcbe49eecce7e1f019f6fff21d690
|
[
"BSD-3-Clause"
] | 1
|
2021-10-08T03:31:24.000Z
|
2021-10-08T03:31:24.000Z
|
tests/_testsite/importerror_app/forum/dummy.py
|
OneRainbowDev/django-machina
|
7354cc50f58dcbe49eecce7e1f019f6fff21d690
|
[
"BSD-3-Clause"
] | null | null | null |
tests/_testsite/importerror_app/forum/dummy.py
|
OneRainbowDev/django-machina
|
7354cc50f58dcbe49eecce7e1f019f6fff21d690
|
[
"BSD-3-Clause"
] | 1
|
2019-04-20T05:26:27.000Z
|
2019-04-20T05:26:27.000Z
|
# -*- coding: utf-8 -*-
# Standard library imports
# Third party imports
from x import bad_import # noqa
# Local application / specific library imports
class Dummy(object):
pass
| 15.583333
| 46
| 0.705882
| 24
| 187
| 5.458333
| 0.833333
| 0.21374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006711
| 0.203209
| 187
| 11
| 47
| 17
| 0.872483
| 0.620321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
af268153ce3eb5628d4af8654240f1d7ab29b05d
| 141
|
py
|
Python
|
chapter04/test/test_module01.py
|
codingEzio/code_python_book_001
|
d9dd7a3d40c4c34c2ae8d08222aba989631abd88
|
[
"Unlicense"
] | null | null | null |
chapter04/test/test_module01.py
|
codingEzio/code_python_book_001
|
d9dd7a3d40c4c34c2ae8d08222aba989631abd88
|
[
"Unlicense"
] | null | null | null |
chapter04/test/test_module01.py
|
codingEzio/code_python_book_001
|
d9dd7a3d40c4c34c2ae8d08222aba989631abd88
|
[
"Unlicense"
] | null | null | null |
# type this to run tests
# 'python -m nose FILENAME -v'
# 'nosetests FILENAME -v'
def test_case01():
assert 'aaa'.upper() == 'AAA'
| 17.625
| 33
| 0.617021
| 20
| 141
| 4.3
| 0.85
| 0.209302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018349
| 0.22695
| 141
| 7
| 34
| 20.142857
| 0.770642
| 0.560284
| 0
| 0
| 0
| 0
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
af57a83f0eb088afcab7e1110461e42ea2c0d52e
| 136
|
py
|
Python
|
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/transformations/comparisonUnification/data/in_1_comparison.py
|
JetBrains-Research/ast-transformations
|
0ab408af3275b520cc87a473f418c4b4dfcb0284
|
[
"MIT"
] | 8
|
2021-01-19T21:15:54.000Z
|
2022-02-23T19:16:25.000Z
|
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/transformations/comparisonUnification/data/in_1_comparison.py
|
JetBrains-Research/ast-transformations
|
0ab408af3275b520cc87a473f418c4b4dfcb0284
|
[
"MIT"
] | 4
|
2020-11-17T14:28:25.000Z
|
2022-02-24T07:54:28.000Z
|
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/transformations/comparisonUnification/data/in_1_comparison.py
|
nbirillo/ast-transformations
|
717706765a2da29087a0de768fc851698886dd65
|
[
"MIT"
] | 1
|
2022-02-23T19:16:30.000Z
|
2022-02-23T19:16:30.000Z
|
_ = 1 < 3
_ = 1 <= 3
_ = 1 < input() and input() > 3
_ = 1 < input() and input() >= 3
_ = 1 < input() and 1 <= input() and input() >= 3
| 22.666667
| 49
| 0.463235
| 22
| 136
| 2.636364
| 0.181818
| 0.137931
| 0.62069
| 0.517241
| 0.948276
| 0.689655
| 0.689655
| 0.689655
| 0.689655
| 0
| 0
| 0.115789
| 0.301471
| 136
| 5
| 50
| 27.2
| 0.494737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
af7db0103645a1926e46acb65be4224b274784b2
| 1,486
|
py
|
Python
|
content/wagtail_hooks.py
|
bikramtuladhar/covid-19-procurement-explorer-admin
|
9bba473c8b83c8651e3178b6fba01af74d8b27dc
|
[
"BSD-3-Clause"
] | null | null | null |
content/wagtail_hooks.py
|
bikramtuladhar/covid-19-procurement-explorer-admin
|
9bba473c8b83c8651e3178b6fba01af74d8b27dc
|
[
"BSD-3-Clause"
] | null | null | null |
content/wagtail_hooks.py
|
bikramtuladhar/covid-19-procurement-explorer-admin
|
9bba473c8b83c8651e3178b6fba01af74d8b27dc
|
[
"BSD-3-Clause"
] | null | null | null |
from wagtail.core import hooks
@hooks.register("construct_main_menu")
def hide_reports_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != "reports"]
@hooks.register("construct_main_menu")
def hide_settings_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != "settings"]
@hooks.register("construct_main_menu")
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != "explorer"]
@hooks.register("construct_page_action_menu")
def remove_submit_to_moderator_option(menu_items, request, context):
menu_items[:] = [item for item in menu_items if item.name != "action-submit"]
@hooks.register("construct_page_action_menu")
def remove_delete_option(menu_items, request, context):
menu_items[:] = [item for item in menu_items if item.name != "action-delete"]
@hooks.register("construct_page_action_menu")
def remove_lock_option(menu_items, request, context):
menu_items[:] = [item for item in menu_items if item.name != "action-lock"]
@hooks.register("construct_page_action_menu")
def remove_unpublish_option(menu_items, request, context):
menu_items[:] = [item for item in menu_items if item.name != "action-unpublish"]
@hooks.register("construct_page_action_menu")
def remove_save_draft_option(menu_items, request, context):
menu_items[:] = [item for item in menu_items if item.name != "action-save-draft"]
| 35.380952
| 85
| 0.759758
| 219
| 1,486
| 4.849315
| 0.155251
| 0.20339
| 0.165725
| 0.120527
| 0.857815
| 0.857815
| 0.857815
| 0.753296
| 0.541431
| 0.541431
| 0
| 0
| 0.119112
| 1,486
| 41
| 86
| 36.243902
| 0.811306
| 0
| 0
| 0.32
| 0
| 0
| 0.188425
| 0.087483
| 0
| 0
| 0
| 0
| 0
| 1
| 0.32
| false
| 0
| 0.04
| 0
| 0.36
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
afa7c4d432faa321fef3cc83b5707a5bc4b3b72c
| 36
|
py
|
Python
|
pgsql_connector/__init__.py
|
Ilwapoi/pgsql_connector
|
b9db727485dcb6bd4dd5634ae0d69e286743fc7c
|
[
"MIT"
] | null | null | null |
pgsql_connector/__init__.py
|
Ilwapoi/pgsql_connector
|
b9db727485dcb6bd4dd5634ae0d69e286743fc7c
|
[
"MIT"
] | null | null | null |
pgsql_connector/__init__.py
|
Ilwapoi/pgsql_connector
|
b9db727485dcb6bd4dd5634ae0d69e286743fc7c
|
[
"MIT"
] | null | null | null |
from .POSTGRESQL_connection import *
| 36
| 36
| 0.861111
| 4
| 36
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
afb933a9a1b11a300168f54f6dbc5105c8b491c6
| 1,806
|
py
|
Python
|
tests/utils/test_encrypt_decrypt.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | 18
|
2017-01-09T22:17:49.000Z
|
2022-01-24T20:46:42.000Z
|
tests/utils/test_encrypt_decrypt.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | 84
|
2017-04-11T13:47:49.000Z
|
2022-03-21T20:12:57.000Z
|
tests/utils/test_encrypt_decrypt.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | 43
|
2017-01-05T20:40:26.000Z
|
2022-03-31T19:18:02.000Z
|
"""Test the TcEx Utils Module."""
# pylint: disable=no-self-use
class TestBool:
"""Test the TcEx Utils Module."""
def test_utils_encrypt(self, tcex):
"""Test writing a temp file to disk.
Args:
tcex (TcEx, fixture): An instantiated instance of TcEx object.
"""
key = 'ajfmuyodhscwegea'
plaintext = 'blah'
encrypted_data = tcex.utils.encrypt_aes_cbc(key, plaintext)
assert encrypted_data == b'0\x8e`\x8d%\x9f\x8c\xdf\x004\xc1\x1a\x82\xbd\x89\n'
def test_utils_encrypt_iv_string(self, tcex):
"""Test writing a temp file to disk.
Args:
tcex (TcEx, fixture): An instantiated instance of TcEx object.
"""
key = 'ajfmuyodhscwegea'
plaintext = 'blah'
encrypted_data = tcex.utils.encrypt_aes_cbc(key, plaintext, iv='\0' * 16)
assert encrypted_data == b'0\x8e`\x8d%\x9f\x8c\xdf\x004\xc1\x1a\x82\xbd\x89\n'
def test_utils_decrypt(self, tcex):
"""Test writing a temp file to disk.
Args:
tcex (TcEx, fixture): An instantiated instance of TcEx object.
"""
key = 'ajfmuyodhscwegea'
ciphertext = b'0\x8e`\x8d%\x9f\x8c\xdf\x004\xc1\x1a\x82\xbd\x89\n'
decrypted_data = tcex.utils.decrypt_aes_cbc(key, ciphertext)
assert decrypted_data.decode() == 'blah'
def test_utils_decrypt_iv_string(self, tcex):
"""Test writing a temp file to disk.
Args:
tcex (TcEx, fixture): An instantiated instance of TcEx object.
"""
key = 'ajfmuyodhscwegea'
ciphertext = b'0\x8e`\x8d%\x9f\x8c\xdf\x004\xc1\x1a\x82\xbd\x89\n'
decrypted_data = tcex.utils.decrypt_aes_cbc(key, ciphertext, iv='\0' * 16)
assert decrypted_data.decode() == 'blah'
| 32.836364
| 86
| 0.616279
| 241
| 1,806
| 4.502075
| 0.236515
| 0.04977
| 0.04424
| 0.070046
| 0.910599
| 0.81659
| 0.81659
| 0.81659
| 0.81659
| 0.81659
| 0
| 0.046303
| 0.258583
| 1,806
| 54
| 87
| 33.444444
| 0.764003
| 0.285714
| 0
| 0.571429
| 0
| 0.190476
| 0.245039
| 0.172563
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.190476
| false
| 0
| 0
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bb981e8b64818ad0fa1689a93c7582162a9771aa
| 2,326
|
py
|
Python
|
cauldron/test/cli/commands/test_steps_insert.py
|
JohnnyPeng18/cauldron
|
09120c2a4cef65df46f8c0c94f5d79395b3298cd
|
[
"MIT"
] | 90
|
2016-09-02T15:11:10.000Z
|
2022-01-02T11:37:57.000Z
|
cauldron/test/cli/commands/test_steps_insert.py
|
JohnnyPeng18/cauldron
|
09120c2a4cef65df46f8c0c94f5d79395b3298cd
|
[
"MIT"
] | 86
|
2016-09-23T16:52:22.000Z
|
2022-03-31T21:39:56.000Z
|
cauldron/test/cli/commands/test_steps_insert.py
|
JohnnyPeng18/cauldron
|
09120c2a4cef65df46f8c0c94f5d79395b3298cd
|
[
"MIT"
] | 261
|
2016-12-22T05:36:48.000Z
|
2021-11-26T12:40:42.000Z
|
import cauldron
from cauldron.test import support
from cauldron.test.support import scaffolds
class TestStepsInsert(scaffolds.ResultsTest):
"""..."""
def test_before(self):
"""Should properly rename default filenames."""
support.create_project(self, 'candice')
support.add_step(self)
support.add_step(self, position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01'))
self.assertTrue(steps[1].filename.startswith('S02'))
def test_multiple_file_types(self):
"""Should properly rename default filenames."""
support.create_project(self, 'candy')
support.add_step(self)
support.add_step(self, name='.md', position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01'))
self.assertTrue(steps[1].filename.startswith('S02'))
def test_multiple_file_types_many(self):
"""Should properly rename default filenames."""
support.create_project(self, 'candy')
support.add_step(self)
support.add_step(self)
support.add_step(self)
support.add_step(self, name='.md', position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01'))
self.assertTrue(steps[1].filename.startswith('S02'))
self.assertTrue(steps[2].filename.startswith('S03'))
self.assertTrue(steps[3].filename.startswith('S04'))
def test_multiple_file_types_named(self):
"""Should properly rename customized filenames."""
support.create_project(self, 'candera')
support.add_step(self, name='A')
support.add_step(self, name='B')
support.add_step(self, name='C')
support.add_step(self, name='D.md', position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01-D'))
self.assertTrue(steps[1].filename.startswith('S02'))
self.assertTrue(steps[2].filename.startswith('S03'))
self.assertTrue(steps[3].filename.startswith('S04'))
| 34.205882
| 62
| 0.662511
| 271
| 2,326
| 5.553506
| 0.188192
| 0.079734
| 0.111628
| 0.143522
| 0.869767
| 0.773422
| 0.773422
| 0.773422
| 0.749502
| 0.749502
| 0
| 0.021494
| 0.199914
| 2,326
| 67
| 63
| 34.716418
| 0.787211
| 0.074807
| 0
| 0.636364
| 0
| 0
| 0.037159
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.090909
| false
| 0
| 0.068182
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bbb2cf340e9d1926e2694d66d74a42efdecdfc9c
| 167
|
py
|
Python
|
digdag-cli/src/main/resources/digdag/cli/tasks/__init__.py
|
komamitsu/digdag
|
e7de984d92d834d61a0f7a292d4a1a0b8c7456e5
|
[
"Apache-2.0"
] | null | null | null |
digdag-cli/src/main/resources/digdag/cli/tasks/__init__.py
|
komamitsu/digdag
|
e7de984d92d834d61a0f7a292d4a1a0b8c7456e5
|
[
"Apache-2.0"
] | null | null | null |
digdag-cli/src/main/resources/digdag/cli/tasks/__init__.py
|
komamitsu/digdag
|
e7de984d92d834d61a0f7a292d4a1a0b8c7456e5
|
[
"Apache-2.0"
] | null | null | null |
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print("Step3 of session {0}".format(session_time))
| 18.555556
| 58
| 0.634731
| 21
| 167
| 4.761905
| 0.714286
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.245509
| 167
| 8
| 59
| 20.875
| 0.769841
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.2
| 0
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
bbc980b88d31cf8f88256beb6a7f3f2befa65e32
| 269
|
py
|
Python
|
amc_dl/torch_plus/__init__.py
|
wiaderwek/musebert
|
64fb8cf06b0a723a19c26e8bcc44deb6cfe69568
|
[
"MIT"
] | 25
|
2020-10-12T06:01:23.000Z
|
2022-03-27T11:37:16.000Z
|
amc_dl/torch_plus/__init__.py
|
wiaderwek/musebert
|
64fb8cf06b0a723a19c26e8bcc44deb6cfe69568
|
[
"MIT"
] | 2
|
2021-11-11T01:08:54.000Z
|
2022-03-27T11:56:19.000Z
|
amc_dl/torch_plus/__init__.py
|
wiaderwek/musebert
|
64fb8cf06b0a723a19c26e8bcc44deb6cfe69568
|
[
"MIT"
] | 5
|
2020-08-17T06:20:14.000Z
|
2022-02-24T04:32:28.000Z
|
from .module import PytorchModel, TrainingInterface
from .scheduler import ConstantScheduler, TeacherForcingScheduler, \
OptimizerScheduler, ParameterScheduler
from .manager import LogPathManager, DataLoaders, SummaryWriters
from .example import MinExponentialLR
| 33.625
| 68
| 0.851301
| 22
| 269
| 10.409091
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107807
| 269
| 7
| 69
| 38.428571
| 0.954167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.