| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
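Several of the simpler derived columns can be recomputed from `content` alone. The sketch below shows definitions that reproduce the values seen in the small records that follow (for example, `alphanum_fraction` counts characters matching `\w`, i.e. letters, digits, and underscores). These definitions are inferred from the data itself, not taken from the dataset's published pipeline, so treat the exact recipes as assumptions.

```python
import re

# Inferred definitions for a few derived columns. They match the small
# records below (e.g. alphanum_fraction 0.840909 and avg_line_length
# 29.333333 for the first record), but the dataset's real pipeline is
# an assumption here.

def alphanum_fraction(content: str) -> float:
    # Fraction of characters matching \w (letters, digits, underscore).
    return len(re.findall(r"\w", content)) / len(content) if content else 0.0

def avg_line_length(content: str) -> float:
    # Total characters divided by the number of newline-separated segments.
    return len(content) / len(content.split("\n"))

def max_line_length(content: str) -> int:
    return max(len(line) for line in content.split("\n"))

def frac_chars_whitespace(content: str) -> float:
    return sum(c.isspace() for c in content) / len(content) if content else 0.0

record_1 = (
    "# from . import sql_entry_repository, sql_search_service\n"
    "from . import sql_unit_of_work\n"
)
assert round(alphanum_fraction(record_1), 6) == 0.840909
assert round(avg_line_length(record_1), 6) == 29.333333
assert max_line_length(record_1) == 56
assert round(frac_chars_whitespace(record_1), 6) == 0.113636
```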
**Record 1**

- hexsha: 013642c4408fb08df7ea0fdc7d6b57d74283b155
- size: 88 | ext: py | lang: Python
- max_stars: karp/infrastructure/sql/__init__.py @ spraakbanken/karp-backend-v6-tmp, head e5b78157bd999df18c188973ae2a337015b6f35d, licenses ["MIT"], count: 1, events: 2021-12-08T15:33:42.000Z to 2021-12-08T15:33:42.000Z
- max_issues: karp/infrastructure/sql/__init__.py @ spraakbanken/karp-backend-v6-tmp, head e5b78157bd999df18c188973ae2a337015b6f35d, licenses ["MIT"], count: null, events: null to null
- max_forks: karp/infrastructure/sql/__init__.py @ spraakbanken/karp-backend-v6-tmp, head e5b78157bd999df18c188973ae2a337015b6f35d, licenses ["MIT"], count: null, events: null to null
- content:

```python
# from . import sql_entry_repository, sql_search_service
from . import sql_unit_of_work
```

- avg_line_length: 29.333333 | max_line_length: 56 | alphanum_fraction: 0.840909
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 14 | 88 | 4.785714 | 0.714286 | 0.298507 | 0.38806 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113636 | 88 | 2 | 57 | 44 | 0.858974 | 0.613636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 7
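As a quick check on the derived columns for this record: the file is 88 bytes across three newline-separated segments (88 / 3 = 29.333333 = avg_line_length), 74 of its 88 characters match `\w` (74 / 88 = 0.840909 = alphanum_fraction), and 10 are whitespace (10 / 88 = 0.113636).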
**Record 2**

- hexsha: 096e7b3cea85789603ac97c540b3fe55db8d98f4
- size: 19,199 | ext: py | lang: Python
- max_stars: tests/engine/routers/test_plan.py @ pronovic/vplan, head aee40c5f9ed72c11cd0d24631b8530af65961bc9, licenses ["Apache-2.0"], count: null, events: null to null
- max_issues: tests/engine/routers/test_plan.py @ pronovic/vplan, head aee40c5f9ed72c11cd0d24631b8530af65961bc9, licenses ["Apache-2.0"], count: null, events: null to null
- max_forks: tests/engine/routers/test_plan.py @ pronovic/vplan, head aee40c5f9ed72c11cd0d24631b8530af65961bc9, licenses ["Apache-2.0"], count: null, events: null to null
- content:

```python
# -*- coding: utf-8 -*-
# vim: set ft=python ts=4 sw=4 expandtab:
# pylint: disable=too-many-public-methods:
from unittest.mock import MagicMock, patch
import pytest
from fastapi.testclient import TestClient
from sqlalchemy.exc import IntegrityError, NoResultFound
from vplan.engine.exception import InvalidPlanError
from vplan.engine.server import API
from vplan.interface import Device, Plan, PlanSchema, Status, SwitchState
CLIENT = TestClient(API)
PLAN_URL = "/plan"
class TestRoutes:
@patch("vplan.engine.routers.plan.db_retrieve_all_plans")
def test_retrieve_all_plans(self, db_retrieve_all_plans):
plans = ["a"]
db_retrieve_all_plans.return_value = plans
response = CLIENT.get(url="/plan")
assert response.status_code == 200
assert response.json() == plans
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_retrieve_plan(self, db_retrieve_plan):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
db_retrieve_plan.return_value = schema
response = CLIENT.get(url="/plan/xxx")
assert response.status_code == 200
assert PlanSchema.parse_raw(response.text) == schema
db_retrieve_plan.assert_called_once_with(plan_name="xxx")
@patch("vplan.engine.routers.plan.schedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_create_plan")
@patch("vplan.engine.routers.plan.validate_plan")
def test_create_plan(self, validate_plan, db_create_plan, schedule_immediate_refresh, schedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
response = CLIENT.post(url="/plan", data=schema.json())
assert response.status_code == 201
assert not response.text
validate_plan.assert_called_once_with(schema=schema)
db_create_plan.assert_called_once_with(schema=schema)
schedule_immediate_refresh.assert_called_once_with(plan_name="name", location="location")
schedule_daily_refresh.assert_called_once_with(plan_name="name", location="location", refresh_time="00:30", time_zone="UTC")
@patch("vplan.engine.routers.plan.schedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_create_plan")
@patch("vplan.engine.routers.plan.validate_plan")
def test_create_plan_invalid(self, validate_plan, db_create_plan, schedule_immediate_refresh, schedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
validate_plan.side_effect = InvalidPlanError("error")
response = CLIENT.post(url="/plan", data=schema.json())
assert response.status_code == 422
assert not response.text
validate_plan.assert_called_once_with(schema=schema)
db_create_plan.assert_not_called()
schedule_immediate_refresh.assert_not_called()
schedule_daily_refresh.assert_not_called()
@patch("vplan.engine.routers.plan.schedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_create_plan")
@patch("vplan.engine.routers.plan.validate_plan")
def test_create_plan_duplicate(self, validate_plan, db_create_plan, schedule_immediate_refresh, schedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
db_create_plan.side_effect = IntegrityError("x", "y", "z")
response = CLIENT.post(url="/plan", data=schema.json())
assert response.status_code == 409
assert not response.text
validate_plan.assert_called_once_with(schema=schema)
db_create_plan.assert_called_once_with(schema=schema)
schedule_immediate_refresh.assert_not_called()
schedule_daily_refresh.assert_not_called()
@patch("vplan.engine.routers.plan.schedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_update_plan")
@patch("vplan.engine.routers.plan.validate_plan")
def test_update_plan(self, validate_plan, db_update_plan, schedule_immediate_refresh, schedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
response = CLIENT.put(url="/plan", data=schema.json())
assert response.status_code == 204
assert not response.text
validate_plan.assert_called_once_with(schema=schema)
db_update_plan.assert_called_once_with(schema=schema)
schedule_immediate_refresh.assert_called_once_with(plan_name="name", location="location")
schedule_daily_refresh.assert_called_once_with(plan_name="name", location="location", refresh_time="00:30", time_zone="UTC")
@patch("vplan.engine.routers.plan.schedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_update_plan")
@patch("vplan.engine.routers.plan.validate_plan")
def test_update_plan_invalid(self, validate_plan, db_update_plan, schedule_immediate_refresh, schedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
validate_plan.side_effect = InvalidPlanError("error")
response = CLIENT.put(url="/plan", data=schema.json())
assert response.status_code == 422
assert not response.text
validate_plan.assert_called_once_with(schema=schema)
db_update_plan.assert_not_called()
schedule_immediate_refresh.assert_not_called()
schedule_daily_refresh.assert_not_called()
@patch("vplan.engine.routers.plan.schedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_update_plan")
@patch("vplan.engine.routers.plan.validate_plan")
def test_update_plan_not_found(self, validate_plan, db_update_plan, schedule_immediate_refresh, schedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
db_update_plan.side_effect = NoResultFound("hello")
response = CLIENT.put(url="/plan", data=schema.json())
assert response.status_code == 404
assert not response.text
validate_plan.assert_called_once_with(schema=schema)
db_update_plan.assert_called_once_with(schema=schema)
schedule_immediate_refresh.assert_not_called()
schedule_daily_refresh.assert_not_called()
@patch("vplan.engine.routers.plan.unschedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
@patch("vplan.engine.routers.plan.db_delete_plan")
def test_delete_plan(self, db_delete_plan, db_retrieve_plan, schedule_immediate_refresh, unschedule_daily_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
db_retrieve_plan.return_value = schema
response = CLIENT.delete(url="/plan/thename")
assert response.status_code == 204
assert not response.text
db_retrieve_plan.assert_called_once_with("thename")
db_delete_plan.assert_called_once_with(plan_name="name")
schedule_immediate_refresh.assert_called_once_with(plan_name="name", location="location")
unschedule_daily_refresh.assert_called_once_with(plan_name="name")
@patch("vplan.engine.routers.plan.unschedule_daily_refresh")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
@patch("vplan.engine.routers.plan.db_delete_plan")
def test_delete_plan_not_found(self, db_delete_plan, db_retrieve_plan, schedule_immediate_refresh, unschedule_daily_refresh):
db_retrieve_plan.side_effect = NoResultFound("hello")
response = CLIENT.delete(url="/plan/thename")
assert response.status_code == 404
assert not response.text
db_retrieve_plan.assert_called_once_with("thename")
db_delete_plan.assert_not_called()
schedule_immediate_refresh.assert_not_called()
unschedule_daily_refresh.assert_not_called()
@pytest.mark.parametrize("enabled", [True, False])
@patch("vplan.engine.routers.plan.db_retrieve_plan_enabled")
def test_retrieve_status(self, db_retrieve_plan_enabled, enabled):
db_retrieve_plan_enabled.return_value = enabled
response = CLIENT.get(url="/plan/name/status")
assert response.status_code == 200
assert Status.parse_raw(response.text) == Status(enabled=enabled)
db_retrieve_plan_enabled.assert_called_once_with(plan_name="name")
@patch("vplan.engine.routers.plan.db_retrieve_plan_enabled")
def test_retrieve_status_not_found(self, db_retrieve_plan_enabled):
db_retrieve_plan_enabled.side_effect = NoResultFound("hello")
response = CLIENT.get(url="/plan/name/status")
assert response.status_code == 404
assert not response.text
db_retrieve_plan_enabled.assert_called_once_with(plan_name="name")
@pytest.mark.parametrize("enabled", [True, False])
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
@patch("vplan.engine.routers.plan.db_update_plan_enabled")
def test_update_status(self, db_update_plan_enabled, db_retrieve_plan, schedule_immediate_refresh, enabled):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
db_retrieve_plan.return_value = schema
status = Status(enabled=enabled)
response = CLIENT.put(url="/plan/thename/status", data=status.json())
assert response.status_code == 204
assert not response.text
db_retrieve_plan.assert_called_once_with(plan_name="thename")
db_update_plan_enabled.assert_called_once_with(plan_name="name", enabled=enabled)
schedule_immediate_refresh.assert_called_once_with(plan_name="name", location="location")
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
@patch("vplan.engine.routers.plan.db_update_plan_enabled")
def test_update_status_not_found(self, db_update_plan_enabled, db_retrieve_plan, schedule_immediate_refresh):
db_retrieve_plan.side_effect = NoResultFound("hello")
status = Status(enabled=True)
response = CLIENT.put(url="/plan/thename/status", data=status.json())
assert response.status_code == 404
assert not response.text
db_retrieve_plan.assert_called_once_with(plan_name="thename")
db_update_plan_enabled.assert_not_called()
schedule_immediate_refresh.assert_not_called()
@patch("vplan.engine.routers.plan.schedule_immediate_refresh")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_refresh_plan(self, db_retrieve_plan, schedule_immediate_refresh):
schema = PlanSchema(version="1.0.0", plan=Plan(name="name", location="location", refresh_time="00:30"))
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/thename/refresh")
assert response.status_code == 204
assert not response.text
db_retrieve_plan.assert_called_once_with(plan_name="thename")
schedule_immediate_refresh.assert_called_once_with(plan_name="name", location="location")
@patch("vplan.engine.routers.plan.toggle_devices")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_toggle_group(
self,
db_retrieve_plan,
toggle_devices,
):
device = Device(room="yyy", device="zzz")
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[device])
db_retrieve_plan.return_value = schema
params = {
"toggles": 4,
"delay_sec": 10,
}
response = CLIENT.post(url="/plan/xxx/test/group/yyy", params=params)
assert response.status_code == 204
assert not response.text
schema.devices.assert_called_once_with(group_name="yyy")
toggle_devices.assert_called_once_with(location="bbb", devices=[device], toggles=4, delay_sec=10)
@patch("vplan.engine.routers.plan.toggle_devices")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_toggle_group_not_found(self, db_retrieve_plan, toggle_devices):
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[])
db_retrieve_plan.return_value = schema
params = {
"toggles": 4,
"delay_sec": 10,
}
response = CLIENT.post(url="/plan/xxx/test/group/yyy", params=params)
assert response.status_code == 404
assert not response.text
schema.devices.assert_called_once_with(group_name="yyy")
toggle_devices.assert_not_called()
@patch("vplan.engine.routers.plan.toggle_devices")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_toggle_device(self, db_retrieve_plan, toggle_devices):
device = Device(room="yyy", device="zzz")
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[device])
db_retrieve_plan.return_value = schema
params = {
"toggles": 4,
"delay_sec": 10,
}
response = CLIENT.post(url="/plan/xxx/test/device/yyy/zzz", params=params)
assert response.status_code == 204
assert not response.text
toggle_devices.assert_called_once_with(location="bbb", devices=[device], toggles=4, delay_sec=10)
@patch("vplan.engine.routers.plan.toggle_devices")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_toggle_device_not_found(self, db_retrieve_plan, toggle_devices):
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[]) # our device is not in this list, by definition
db_retrieve_plan.return_value = schema
params = {
"toggles": 4,
"delay_sec": 10,
}
response = CLIENT.post(url="/plan/xxx/test/device/yyy/zzz", params=params)
assert response.status_code == 404
assert not response.text
toggle_devices.assert_not_called()
@pytest.mark.parametrize("state", ["on", "off"])
@patch("vplan.engine.routers.plan.set_device_state")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_switch_group(self, db_retrieve_plan, set_device_state, state):
device = Device(room="yyy", device="zzz")
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[device])
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/xxx/%s/group/yyy" % state)
assert response.status_code == 204
assert not response.text
schema.devices.assert_called_once_with(group_name="yyy")
set_device_state.assert_called_once_with(location="bbb", devices=[device], state=SwitchState(state))
@pytest.mark.parametrize("state", ["on", "off"])
@patch("vplan.engine.routers.plan.set_device_state")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_switch_group_not_found(self, db_retrieve_plan, set_device_state, state):
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[])
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/xxx/%s/group/yyy" % state)
assert response.status_code == 404
assert not response.text
schema.devices.assert_called_once_with(group_name="yyy")
set_device_state.assert_not_called()
@pytest.mark.parametrize("state", ["ON", "OFF", "bad"])
@patch("vplan.engine.routers.plan.set_device_state")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_switch_group_bad_state(self, db_retrieve_plan, set_device_state, state):
device = Device(room="yyy", device="zzz")
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[device])
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/xxx/%s/group/yyy" % state)
assert response.status_code == 400
assert not response.text
schema.devices.assert_called_once_with(group_name="yyy")
set_device_state.assert_not_called()
@pytest.mark.parametrize("state", ["on", "off"])
@patch("vplan.engine.routers.plan.set_device_state")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_switch_device(self, db_retrieve_plan, set_device_state, state):
device = Device(room="yyy", device="zzz")
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[device])
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/xxx/%s/device/yyy/zzz" % state)
assert response.status_code == 204
assert not response.text
set_device_state.assert_called_once_with(location="bbb", devices=[device], state=SwitchState(state))
@pytest.mark.parametrize("state", ["on", "off"])
@patch("vplan.engine.routers.plan.set_device_state")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_switch_device_not_found(self, db_retrieve_plan, set_device_state, state):
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[]) # our device is not in this list, by definition
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/xxx/%s/device/yyy/zzz" % state)
assert response.status_code == 404
assert not response.text
set_device_state.assert_not_called()
@pytest.mark.parametrize("state", ["ON", "OFF", "bad"])
@patch("vplan.engine.routers.plan.set_device_state")
@patch("vplan.engine.routers.plan.db_retrieve_plan")
def test_switch_device_bad_state(self, db_retrieve_plan, set_device_state, state):
device = Device(room="yyy", device="zzz")
plan = MagicMock(location="bbb")
schema = MagicMock(plan=plan)
schema.devices = MagicMock(return_value=[device])
db_retrieve_plan.return_value = schema
response = CLIENT.post(url="/plan/xxx/%s/device/yyy/zzz" % state)
assert response.status_code == 400
assert not response.text
set_device_state.assert_not_called()
```

- avg_line_length: 52.313351 | max_line_length: 132 | alphanum_fraction: 0.715558
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 2,474 | 19,199 | 5.261116 | 0.056993 | 0.055778 | 0.078672 | 0.113092 | 0.939766 | 0.929779 | 0.911494 | 0.903196 | 0.896051 | 0.887907 | 0 | 0.010863 | 0.165686 | 19,199 | 366 | 133 | 52.456284 | 0.801723 | 0.010105 | 0 | 0.792169 | 0 | 0 | 0.205958 | 0.163956 | 0 | 0 | 0 | 0 | 0.325301 | 1 | 0.075301 | false | 0 | 0.021084 | 0 | 0.099398 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 7
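The dupe-n-gram signals for this record sit around 0.89–0.94, which fits a pytest file whose test methods repeat the same mock/assert boilerplate. One plausible reading of `qsc_code_frac_chars_dupe_Ngrams_quality_signal`, in the style of Gopher/RedPajama repetition filters, is the fraction of characters covered by word n-grams that occur more than once; the tokenization and counting details below are assumptions, not the dataset's confirmed recipe.

```python
from collections import Counter
import re

def frac_chars_dupe_ngrams(content: str, n: int) -> float:
    """Fraction of word characters covered by word n-grams that occur more
    than once. Assumed (Gopher/RedPajama-style) definition; the dataset's
    actual tokenization and counting may differ."""
    words = re.findall(r"\S+", content)
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    # Mark every word position covered by any duplicated n-gram.
    covered = [False] * len(words)
    for i, gram in enumerate(ngrams):
        if counts[gram] > 1:
            covered[i:i + n] = [True] * n
    total = sum(len(w) for w in words)
    dup = sum(len(w) for w, hit in zip(words, covered) if hit)
    return dup / total if total else 0.0
```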
**Record 3**

- hexsha: 09d14850ee74271e56961c33bf17e84e75ef2467
- size: 33 | ext: py | lang: Python
- max_stars: Beer2U_Calculator/app.py @ dih-ves/exam, head 5ba111afb31c55f38bf775d5a83c1b96d5af4baf, licenses ["CC0-1.0"], count: null, events: null to null
- max_issues: Beer2U_Calculator/app.py @ dih-ves/exam, head 5ba111afb31c55f38bf775d5a83c1b96d5af4baf, licenses ["CC0-1.0"], count: null, events: null to null
- max_forks: Beer2U_Calculator/app.py @ dih-ves/exam, head 5ba111afb31c55f38bf775d5a83c1b96d5af4baf, licenses ["CC0-1.0"], count: null, events: null to null
- content:

```python
def config_app():
return True
```

- avg_line_length: 16.5 | max_line_length: 17 | alphanum_fraction: 0.69697
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 5 | 33 | 4.4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.212121 | 33 | 2 | 18 | 16.5 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0
- effective: 0 | hits: 7
**Record 4**

- hexsha: 09dccf423b679c6589b9a6d9e351dd7580c775d8
- size: 6,058 | ext: py | lang: Python
- max_stars: examples/data_proc.py @ mikedwhite/microstructural-fingerprinting-tools, head 969ac9d032f82ca002846ac39017b7de04f50e85, licenses ["BSD-3-Clause"], count: null, events: null to null
- max_issues: examples/data_proc.py @ mikedwhite/microstructural-fingerprinting-tools, head 969ac9d032f82ca002846ac39017b7de04f50e85, licenses ["BSD-3-Clause"], count: null, events: null to null
- max_forks: examples/data_proc.py @ mikedwhite/microstructural-fingerprinting-tools, head 969ac9d032f82ca002846ac39017b7de04f50e85, licenses ["BSD-3-Clause"], count: null, events: null to null
- content:

```python
import numpy as np
def cross_validation_split_dataset1(micro_list, label_list, niter):
r"""Split :math:`N` micrographs into train/test sets for :math:`k`-fold cross-validation, where :math:`k` is
equivalent to the parameter **niter**.
Parameters
----------
micro_list : list
List of micrograph file names for the whole dataset.
label_list : list
List of labels corresponding to micrograph class labels.
niter : int
Number of train/test split iterations to perform.
Returns
-------
micro_list_train_stack : ndarray
Lists of micrograph filenames to comprise training sets stacked into an array of shape
(**niter**, :math:`N`(**niter** - 1)/**niter**)
micro_list_test_stack : ndarray
Lists of micrograph filenames to comprise test sets stacked into an array of shape
(**niter**, :math:`N`/**niter**)
label_list_train_stack : ndarray
Array of class labels corresponding to **micro_list_train_stack**
label_list_test_stack : ndarray
Array of class labels corresponding to **micro_list_test_stack**
"""
micro_list = np.array(micro_list)
label_list = np.array(label_list)
nimage = label_list.size
ntest = int(np.round(nimage / niter))
ntrain = int(nimage - ntest)
shuffle_order_bimod = np.random.choice(20, 20, replace=False)
shuffle_order_lamel = np.random.choice(20, 20, replace=False) + 20
micro_list_train_stack = np.zeros(([niter, ntrain]))
label_list_train_stack = np.zeros(([niter, ntrain]))
micro_list_test_stack = np.zeros(([niter, ntest]))
label_list_test_stack = np.zeros(([niter, ntest]))
for n in range(niter):
test_ind = np.concatenate((shuffle_order_bimod[n*2:(n+1)*2], shuffle_order_lamel[n*2:(n+1)*2]))
label_list_test_stack[n, :] = label_list[test_ind]
all_ind = range(nimage)
train_ind = np.array([element for element in all_ind if element not in test_ind])
label_list_train_stack[n, :] = label_list[train_ind]
if n == 0:
micro_list_test_stack = test_ind
micro_list_train_stack = train_ind
else:
micro_list_test_stack = np.vstack((micro_list_test_stack, test_ind))
micro_list_train_stack = np.vstack((micro_list_train_stack, train_ind))
for n in range(niter):
shuffle_order_train = np.random.choice(ntrain, ntrain, replace=False)
shuffle_order_test = np.random.choice(ntest, ntest, replace=False)
micro_list_train_stack[n, :] = micro_list_train_stack[n, shuffle_order_train]
label_list_train_stack[n, :] = label_list_train_stack[n, shuffle_order_train]
micro_list_test_stack[n, :] = micro_list_test_stack[n, shuffle_order_test]
label_list_test_stack[n, :] = label_list_test_stack[n, shuffle_order_test]
return micro_list_train_stack, micro_list_test_stack, label_list_train_stack, label_list_test_stack
def cross_validation_split_dataset2(micro_list, label_list, niter):
r"""Split :math:`N` micrographs into train/test sets for :math:`k`-fold cross-validation, where :math:`k` is
equivalent to the parameter **niter**.
Parameters
----------
micro_list : list
List of micrograph file names for the whole dataset.
label_list : list
List of labels corresponding to micrograph class labels.
niter : int
Number of train/test split iterations to perform.
Returns
-------
micro_list_train_stack : ndarray
Lists of micrograph filenames to comprise training sets stacked into an array of shape
(**niter**, :math:`N`(**niter** - 1)/**niter**)
micro_list_test_stack : ndarray
Lists of micrograph filenames to comprise test sets stacked into an array of shape
(**niter**, :math:`N`/**niter**)
label_list_train_stack : ndarray
Array of class labels corresponding to **micro_list_train_stack**
label_list_test_stack : ndarray
Array of class labels corresponding to **micro_list_test_stack**
"""
micro_list = np.array(micro_list)
label_list = np.array(label_list)
nimage = label_list.size
ntest = int(np.round(nimage / niter))
ntrain = int(nimage - ntest)
shuffle_order_carbn = np.random.choice(200, 200, replace=False)
shuffle_order_pearl = np.random.choice(200, 200, replace=False) + 200
shuffle_order_spher = np.random.choice(200, 200, replace=False) + 400
micro_list_train_stack = np.zeros(([niter, ntrain]))
label_list_train_stack = np.zeros(([niter, ntrain]))
micro_list_test_stack = np.zeros(([niter, ntest]))
label_list_test_stack = np.zeros(([niter, ntest]))
for n in range(niter):
test_ind = np.concatenate((shuffle_order_carbn[n*20:(n+1)*20], shuffle_order_pearl[n*20:(n+1)*20],
shuffle_order_spher[n*20:(n+1)*20]))
label_list_test_stack[n, :] = label_list[test_ind]
all_ind = range(nimage)
train_ind = np.array([element for element in all_ind if element not in test_ind])
label_list_train_stack[n, :] = label_list[train_ind]
if n == 0:
micro_list_test_stack = test_ind
micro_list_train_stack = train_ind
else:
micro_list_test_stack = np.vstack((micro_list_test_stack, test_ind))
micro_list_train_stack = np.vstack((micro_list_train_stack, train_ind))
for n in range(niter):
shuffle_order_train = np.random.choice(ntrain, ntrain, replace=False)
shuffle_order_test = np.random.choice(ntest, ntest, replace=False)
micro_list_train_stack[n, :] = micro_list_train_stack[n, shuffle_order_train]
label_list_train_stack[n, :] = label_list_train_stack[n, shuffle_order_train]
micro_list_test_stack[n, :] = micro_list_test_stack[n, shuffle_order_test]
label_list_test_stack[n, :] = label_list_test_stack[n, shuffle_order_test]
return micro_list_train_stack, micro_list_test_stack, label_list_train_stack, label_list_test_stack
```

- avg_line_length: 45.208955 | max_line_length: 112 | alphanum_fraction: 0.689667
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 869 | 6,058 | 4.49252 | 0.103567 | 0.101434 | 0.107582 | 0.087602 | 0.953125 | 0.94877 | 0.94877 | 0.899078 | 0.899078 | 0.899078 | 0 | 0.012698 | 0.206999 | 6,058 | 133 | 113 | 45.548872 | 0.799958 | 0.310168 | 0 | 0.811594 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.028986 | false | 0 | 0.014493 | 0 | 0.072464 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 7
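The dupe-n-gram signals here (0.899–0.953 for 5- through 10-grams) are again what one would expect: apart from the hard-coded shuffle sizes, `cross_validation_split_dataset2` is a near-verbatim copy of `cross_validation_split_dataset1`, docstring included.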
**Record 5**

- hexsha: 09e68a08330d838d0dc05790d4b55a15224851a6
- size: 160 | ext: py | lang: Python
- max_stars: gym_electric_motor/envs/gym_synrm/__init__.py @ magic-alt/gym-electric-motor, head 39b63e2de79840528c24515703777a92e95edd40, licenses ["MIT"], count: 1, events: 2021-03-29T07:47:32.000Z to 2021-03-29T07:47:32.000Z
- max_issues: gym_electric_motor/envs/gym_synrm/__init__.py @ magic-alt/gym-electric-motor, head 39b63e2de79840528c24515703777a92e95edd40, licenses ["MIT"], count: null, events: null to null
- max_forks: gym_electric_motor/envs/gym_synrm/__init__.py @ magic-alt/gym-electric-motor, head 39b63e2de79840528c24515703777a92e95edd40, licenses ["MIT"], count: null, events: null to null
- content:

```python
from .syn_reluctance_motor_env import DiscSynchronousReluctanceMotorEnvironment
from .syn_reluctance_motor_env import ContSynchronousReluctanceMotorEnvironment
```

- avg_line_length: 53.333333 | max_line_length: 79 | alphanum_fraction: 0.9375
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 14 | 160 | 10.285714 | 0.571429 | 0.097222 | 0.236111 | 0.305556 | 0.430556 | 0.430556 | 0 | 0 | 0 | 0 | 0 | 0 | 0.05 | 160 | 2 | 80 | 80 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 1 | 0 | 1 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 8
**Record 6**

- hexsha: 09f28f823b013c5f418858210dbbe1bfaf919b89
- size: 15,913 | ext: py | lang: Python
- max_stars: test/pytest/test_match_condition_fun.py @ showipintbri/ttp, head 10b8767e67ec39ed4e30769d36e6fb6e5b0ed265, licenses ["MIT"], count: 254, events: 2019-09-23T15:37:13.000Z to 2022-03-24T18:56:56.000Z
- max_issues: test/pytest/test_match_condition_fun.py @ showipintbri/ttp, head 10b8767e67ec39ed4e30769d36e6fb6e5b0ed265, licenses ["MIT"], count: 71, events: 2019-09-26T16:32:55.000Z to 2022-03-31T15:57:12.000Z
- max_forks: test/pytest/test_match_condition_fun.py @ showipintbri/ttp, head 10b8767e67ec39ed4e30769d36e6fb6e5b0ed265, licenses ["MIT"], count: 38, events: 2019-10-18T03:43:42.000Z to 2022-01-19T20:03:33.000Z
- content:

```python
import sys
sys.path.insert(0, "../..")
import pprint
from ttp import ttp
def test_contains_re_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | contains_re("Port-Channel") }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
pprint.pprint(res)
assert res == [
[[{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False}]]
]
# test_contains_re_inline()
def test_contains_re_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management_2
interface Vlan777
description Management
</input>
<vars>
var_1 = "Port-.+"
</vars>
<group>
interface {{ interface | contains_re(var_1) }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{
"description": "Storage",
"interface": "Port-Channel11",
"is_lag": True,
"is_loopback": False,
},
{
"description": "Management_2",
"interface": "Port-Channel12",
"is_lag": True,
"is_loopback": False,
},
]
]
]
# test_contains_re_from_vars()
def test_startswith_re_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | startswith_re(r"Por\\S") }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res)
assert res == [
[[{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False}]]
]
# test_startswith_re_inline()
def test_startswith_re_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management_2
interface Vlan777
description Management
</input>
<vars>
var_1 = "Port-.+"
</vars>
<group>
interface {{ interface | startswith_re(var_1) }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{
"description": "Storage",
"interface": "Port-Channel11",
"is_lag": True,
"is_loopback": False,
},
{
"description": "Management_2",
"interface": "Port-Channel12",
"is_lag": True,
"is_loopback": False,
},
]
]
]
# test_startswith_re_from_vars()
def test_endswith_re_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | endswith_re(r"Channel\\d+") }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res)
assert res == [
[[{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False}]]
]
def test_endswith_re_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<vars>
var_1 = r"Channel\\d+"
</vars>
<group>
interface {{ interface | endswith_re(var_1) }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res)
assert res == [
[[{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False}]]
]
def test_notstartswith_re_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | notstartswith_re(r"Loop\\S+") }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False},
{
"description": "Management",
"interface": "Vlan777",
"is_lag": True,
"is_loopback": False,
},
]
]
]
# test_notstartswith_re_inline()
def test_notstartswith_re_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<vars>
var_1 = r"Loop\\S+"
</vars>
<group>
interface {{ interface | notstartswith_re(var_1) }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False},
{
"description": "Management",
"interface": "Vlan777",
"is_lag": True,
"is_loopback": False,
},
]
]
]
def test_notendswith_re_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | notendswith_re(r"back\\d+") }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False},
{
"description": "Management",
"interface": "Vlan777",
"is_lag": True,
"is_loopback": False,
},
]
]
]
def test_notendswith_re_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<vars>
var_1 = r"back\\d+|lan\\d+"
</vars>
<group>
interface {{ interface | notendswith_re(var_1) }}
description {{ description }}
{{ is_lag | set(True) }}
{{ is_loopback| set(False) }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[[{"interface": "Port-Channel11", "is_lag": True, "is_loopback": False}]]
]
# test_notendswith_re_from_vars()
def test_exclude_re_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | exclude_re(r"back.+") }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"interface": "Port-Channel11"},
{"description": "Management", "interface": "Vlan777"},
]
]
]
# test_exclude_re_inline()
def test_exclude_re_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<vars>
var_1 = r"back\\d+|lan\\d+"
</vars>
<group>
interface {{ interface | exclude_re(var_1) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [[[{"description": "Storage", "interface": "Port-Channel11"}]]]
# test_exclude_re_from_vars()
def test_exclude_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<group>
interface {{ interface | exclude(Loop) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"interface": "Port-Channel11"},
{"description": "Management", "interface": "Vlan777"},
]
]
]
def test_exclude_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage Management
interface Loopback0
description RID
interface Vlan777
description Management
</input>
<vars>
var_1 = "Loop"
</vars>
<group>
interface {{ interface | exclude(var_1) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"interface": "Port-Channel11"},
{"description": "Management", "interface": "Vlan777"},
]
]
]
def test_contains_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<group>
interface {{ interface | contains(Port) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"description": "Storage", "interface": "Port-Channel11"},
{"description": "Management", "interface": "Port-Channel12"},
]
]
]
# test_contains_inline()
def test_contains_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<vars>
var_1 = "Port"
</vars>
<group>
interface {{ interface | contains(var_1) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"description": "Storage", "interface": "Port-Channel11"},
{"description": "Management", "interface": "Port-Channel12"},
]
]
]
def test_contains_multi():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<vars>
var_1 = "Port"
</vars>
<group>
interface {{ interface | contains(var_1, Vlan) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"description": "Storage", "interface": "Port-Channel11"},
{"description": "Management", "interface": "Port-Channel12"},
{"description": "Management", "interface": "Vlan777"},
]
]
]
# test_contains_multi()
def test_equal_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<group>
interface {{ interface | equal("Port-Channel12") }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
pprint.pprint(res, width=150)
assert res == [[[{"description": "Management", "interface": "Port-Channel12"}]]]
# test_equal_inline()
def test_equal_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<vars>
var_1 = "Port-Channel12"
</vars>
<group>
interface {{ interface | equal(var_1) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [[[{"description": "Management", "interface": "Port-Channel12"}]]]
def test_notequal_inline():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<group>
interface {{ interface | notequal("Port-Channel12") }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"description": "Storage", "interface": "Port-Channel11"},
{"description": "RID", "interface": "Loopback0"},
{"description": "Management", "interface": "Vlan777"},
]
]
]
# test_notequal_inline()
def test_notequal_from_vars():
template = """
<input load="text">
interface Port-Channel11
description Storage
interface Loopback0
description RID
interface Port-Channel12
description Management
interface Vlan777
description Management
</input>
<vars>
var_1 = "Port-Channel12"
</vars>
<group>
interface {{ interface | notequal(var_1) }}
description {{ description }}
</group>
"""
parser = ttp(template=template)
parser.parse()
res = parser.result()
# pprint.pprint(res, width=150)
assert res == [
[
[
{"description": "Storage", "interface": "Port-Channel11"},
{"description": "RID", "interface": "Loopback0"},
{"description": "Management", "interface": "Vlan777"},
]
]
]
# test_notequal_inline()
```

- avg_line_length: 21.217333 | max_line_length: 86 | alphanum_fraction: 0.600452
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 1,530 | 15,913 | 6.120915 | 0.040523 | 0.077736 | 0.093967 | 0.102189 | 0.956647 | 0.926001 | 0.920021 | 0.914148 | 0.914148 | 0.907742 | 0 | 0.026466 | 0.261547 | 15,913 | 749 | 87 | 21.245661 | 0.770488 | 0.055112 | 0 | 0.778862 | 0 | 0 | 0.573752 | 0.01653 | 0 | 0 | 0 | 0 | 0.034146 | 1 | 0.034146 | false | 0 | 0.004878 | 0 | 0.039024 | 0.004878
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 7
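More than half of this file's lines recur verbatim (`qsc_code_frac_lines_dupe_lines_quality_signal` of 0.573752), which fits the repeated `parser = ttp(template=template)` / `parser.parse()` / `res = parser.result()` boilerplate and the near-identical `<input load="text">` blocks shared across the tests.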
**Record 7**

- hexsha: 1141076a59dcfeea144bb669a6786e898829fedd
- size: 151 | ext: py | lang: Python
- max_stars: alfredcmd/cloud/__init__.py @ GustavoKatel/alfred, head f64b59747d235ad232c1869cc819f46a0d5d4d49, licenses ["MIT"], count: null, events: null to null
- max_issues: alfredcmd/cloud/__init__.py @ GustavoKatel/alfred, head f64b59747d235ad232c1869cc819f46a0d5d4d49, licenses ["MIT"], count: 2, events: 2019-12-13T05:25:42.000Z to 2019-12-13T06:24:37.000Z
- max_forks: alfredcmd/cloud/__init__.py @ GustavoKatel/alfredcmd-legacy, head f64b59747d235ad232c1869cc819f46a0d5d4d49, licenses ["MIT"], count: null, events: null to null
- content:

```python
# -*- coding: utf-8 -*-
from alfredcmd.cloud.cloud import *
from alfredcmd.cloud.cloud_exception import *
from alfredcmd.cloud.cloud_provider import *
```

- avg_line_length: 30.2 | max_line_length: 45 | alphanum_fraction: 0.768212
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 20 | 151 | 5.7 | 0.45 | 0.342105 | 0.473684 | 0.605263 | 0.508772 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007463 | 0.112583 | 151 | 4 | 46 | 37.75 | 0.843284 | 0.139073 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 8
**Record 8**

- hexsha: 3a216c09341b0b8a45692c24ec2c43612b5fa0ae
- size: 46 | ext: py | lang: Python
- max_stars: mla/perceptron/__init__.py @ qianlv/MachineLearningAlgorithm, head c66d37bc9c0c1bebf97cdc142213b96cb6ceb989, licenses ["MIT"], count: null, events: null to null
- max_issues: mla/perceptron/__init__.py @ qianlv/MachineLearningAlgorithm, head c66d37bc9c0c1bebf97cdc142213b96cb6ceb989, licenses ["MIT"], count: null, events: null to null
- max_forks: mla/perceptron/__init__.py @ qianlv/MachineLearningAlgorithm, head c66d37bc9c0c1bebf97cdc142213b96cb6ceb989, licenses ["MIT"], count: null, events: null to null
- content:

```python
from .PLA import DualPLA
from .PLA import PLA
```

- avg_line_length: 15.333333 | max_line_length: 24 | alphanum_fraction: 0.782609
- qsc_code_num_words_quality_signal … qsc_codepython_frac_lines_print_quality_signal (schema order): 8 | 46 | 4.5 | 0.5 | 0.388889 | 0.722222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173913 | 46 | 2 | 25 | 23 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- qsc_code_num_words … qsc_codepython_frac_lines_print (schema order): 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 8
**Record 9**

- hexsha: 3a3deb4ea6cccf3ff5953da192b41de903f794c6
- size: 12,525 | ext: py | lang: Python
- max_stars: loss.py @ mangye16/ReID-Label-Noise, head 89aa11f68c275a0bcff232d9a5c3ae152c9276af, licenses ["MIT"], count: 11, events: 2020-04-03T09:01:36.000Z to 2022-03-11T08:12:16.000Z
- max_issues: loss.py @ mangye16/ReID-Label-Noise, head 89aa11f68c275a0bcff232d9a5c3ae152c9276af, licenses ["MIT"], count: null, events: null to null
- max_forks: loss.py @ mangye16/ReID-Label-Noise, head 89aa11f68c275a0bcff232d9a5c3ae152c9276af, licenses ["MIT"], count: 3, events: 2020-12-18T11:53:05.000Z to 2022-01-12T16:35:45.000Z
- content:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.autograd as autograd
import math
from torch.nn.modules import loss
def class_select(logits, target):
batch_size, num_classes = logits.size()
if target.is_cuda:
device = target.data.get_device()
one_hot_mask = autograd.Variable(torch.arange(0, num_classes)
.long()
.repeat(batch_size, 1)
.cuda(device)
.eq(target.data.repeat(num_classes, 1).t()))
else:
one_hot_mask = autograd.Variable(torch.arange(0, num_classes)
.long()
.repeat(batch_size, 1)
.eq(target.data.repeat(num_classes, 1).t()))
return logits.masked_select(one_hot_mask)
class FocalLoss(nn.Module):
def __init__(self, num_classes, gamma=2, alpha=0.25, aggregate='mean'):
super(FocalLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.alpha = alpha
# self.alpha = Variable(torch.ones(num_classes)*alpha)
self.gamma = gamma
self.num_classes = num_classes
print('Initializing FocalLoss for training: alpha={}, gamma={}'.format(self.alpha, self.gamma))
def forward(self, input, target, weights=None):
assert input.dim() == 2
assert not target.requires_grad
target = target.squeeze(1) if target.dim() == 2 else target
assert target.dim() == 1
logpt = F.log_softmax(input, dim=1)
logpt_gt = logpt.gather(1,target.unsqueeze(1))
logpt_gt = logpt_gt.view(-1)
pt_gt = logpt_gt.exp()
assert logpt_gt.size() == pt_gt.size()
loss = -self.alpha*(torch.pow((1-pt_gt), self.gamma))*logpt_gt
if self.aggregate == 'sum':
return loss.sum()
elif self.aggregate == 'mean':
return loss.mean()
elif self.aggregate is None:
return loss
class InstanceCrossEntropyLoss(nn.Module):
"""
Cross entropy with instance-wise weights. Leave `aggregate` to None to obtain a loss
vector of shape (batch_size,).
"""
def __init__(self, aggregate='mean', weighted=0):
super(InstanceCrossEntropyLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.weighted = weighted
print('Initializing InstanceCrossEntropyLoss for training: with weights{}'.format(self.weighted))
if self.weighted == 1:
print('Weighted loss is used...')
def forward(self, logits, target, weights=None):
assert logits.dim() == 2
assert not target.requires_grad
target = target.squeeze(1) if target.dim() == 2 else target
assert target.dim() == 1
softmax_result = F.log_softmax(logits, dim=1)
loss = class_select(-softmax_result, target)
if self.weighted == 1 or self.weighted == 2:
assert list(loss.size()) == list(weights.size())
loss = weights * loss
if self.aggregate == 'sum':
return loss.sum()
elif self.aggregate == 'mean':
return loss.mean()
elif self.aggregate is None:
return loss
class SmoothlabelCrossEntropyLoss(nn.Module):
def __init__(self, beta=1.0, aggregate='mean', weighted=0):
super(SmoothlabelCrossEntropyLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.weighted = weighted
self.beta = beta
print('Initializing SmoothlabelCrossEntropyLoss for training: beta={}, weights={}'.format(self.beta, self.weighted))
if self.weighted == 1:
print('Weighted loss is used...')
def forward(self, input, target, weights=None):
assert input.dim() == 2
assert not target.requires_grad
target = target.squeeze(1) if target.dim() == 2 else target
assert target.dim() == 1
logpt = F.log_softmax(input, dim=1)
logpt_gt = logpt.gather(1,target.unsqueeze(1))
logpt_gt = logpt_gt.view(-1)
logpt_pred,_ = torch.max(logpt,1)
logpt_pred = logpt_pred.view(-1)
assert logpt_gt.size() == logpt_pred.size()
loss = - logpt_gt - self.beta* logpt_pred
if self.weighted == 1 or self.weighted == 2:
assert list(loss.size()) == list(weights.size())
loss = loss * weights
if self.aggregate == 'sum':
return loss.sum()
elif self.aggregate == 'mean':
return loss.mean()
elif self.aggregate is None:
return loss
class SmoothlabelClassCrossEntropyLoss(nn.Module):
def __init__(self, beta=0.0, aggregate='mean', weighted=0):
super(SmoothlabelClassCrossEntropyLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.weighted = weighted
self.beta = beta
print('Initializing SmoothlabelClassCrossEntropyLoss for training: beta={}, weights={}'.format(self.beta, self.weighted))
if self.weighted == 1:
print('Weighted loss is used...')
def forward(self, input, target, weights=None):
assert input.dim() == 2
assert not target.requires_grad
target = target.squeeze(1) if target.dim() == 2 else target
assert target.dim() == 1
logpt = F.log_softmax(input, dim=1)
logpt_gt = logpt.gather(1,target.unsqueeze(1))
logpt_gt = logpt_gt.view(-1)
logpt_pred,_ = torch.max(logpt,1)
logpt_pred = logpt_pred.view(-1)
assert logpt_gt.size() == logpt_pred.size()
loss = - (1-self.beta)*logpt_gt - self.beta* logpt_pred
if self.weighted == 1:
assert list(loss.size()) == list(weights.size())
loss = loss * weights.exp()
if self.aggregate == 'sum':
return loss.sum()
elif self.aggregate == 'mean':
return loss.mean()
elif self.aggregate is None:
return loss
class LabelRefineLoss(nn.Module):
def __init__(self, lambda1=0.0, aggregate='mean'):
super(LabelRefineLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.lambda1 = lambda1
print('Initializing LabelRefineLoss for training: lambda1={}'.format(self.lambda1))
def forward(self, input, target, lambda1):
assert input.dim() == 2
assert not target.requires_grad
target = target.squeeze(1) if target.dim() == 2 else target
assert target.dim() == 1
logpt = F.log_softmax(input, dim=1)
logpt_gt = logpt.gather(1,target.unsqueeze(1))
logpt_gt = logpt_gt.view(-1)
logpt_pred,_ = torch.max(logpt,1)
logpt_pred = logpt_pred.view(-1)
assert logpt_gt.size() == logpt_pred.size()
loss = - (1-lambda1)*logpt_gt - lambda1* logpt_pred
if self.aggregate == 'sum':
return loss.sum()
elif self.aggregate == 'mean':
return loss.mean()
elif self.aggregate is None:
return loss
class InstanceWeightLoss(nn.Module):
"""
Cross entropy with instance-wise weights. Leave `aggregate` to None to obtain a loss
vector of shape (batch_size,).
"""
def __init__(self, aggregate='mean', weighted=0):
super(InstanceWeightLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.weighted = weighted
print('Initializing Instance Weight for training: with weights{}'.format(self.weighted))
if self.weighted == 1:
print('Weighted loss is used...')
def forward(self, logits, target, weights=None):
assert logits.dim() == 2
assert not target.requires_grad
target = target.squeeze(1) if target.dim() == 2 else target
assert target.dim() == 1
softmax_result = F.log_softmax(logits, dim=1)
loss = class_select(-softmax_result, target)
if self.weighted == 1 or self.weighted == 2:
assert list(loss.size()) == list(weights.size())
# pdb.set_trace()
loss = weights * loss
if self.aggregate == 'sum':
return loss.sum()
elif self.aggregate == 'mean':
return loss.mean()
elif self.aggregate is None:
return loss
class CoRefineLoss(loss._Loss):
def __init__(self, lambda1=0.0, aggregate='mean'):
super(CoRefineLoss, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.lambda1 = lambda1
"""The KL-Divergence loss for the model and refined labels output.
output must be a pair of (model_output, refined_labels), both NxC tensors.
The rows of refined_labels must all add up to one (probability scores);
however, model_output must be the pre-softmax output of the network."""
def forward(self, output1, output2, target, lambdaKL=0):
# Target is ignored at training time. Loss is defined as KL divergence
# between the model output and the refined labels.
if output2.requires_grad:
raise ValueError("Refined labels should not require gradients.")
output1_log_prob = F.log_softmax(output1, dim=1)
output2_prob = F.softmax(output2, dim=1)
_, pred_label = output2_prob.max(1)
# Loss is normal cross entropy loss
base_loss = F.cross_entropy(output1, pred_label)
# Loss is -dot(model_output_log_prob, refined_labels). Prepare tensors
# for batch matrix multiplication
model_output1_log_prob = output1_log_prob.unsqueeze(2)
model_output2_prob = output2_prob.unsqueeze(1)
# Compute the loss, and average/sum for the batch.
kl_loss = -torch.bmm(model_output2_prob, model_output1_log_prob)
if self.aggregate == 'mean':
loss_co = base_loss.mean() + lambdaKL * kl_loss.mean()
else:
loss_co = base_loss.sum() + lambdaKL * kl_loss.sum()
return loss_co
class CoRefineLossPLus(loss._Loss):
def __init__(self, lambda1=0.0, aggregate='mean'):
super(CoRefineLossPLus, self).__init__()
assert aggregate in ['sum', 'mean', None]
self.aggregate = aggregate
self.lambda1 = lambda1
"""The KL-Divergence loss for the model and refined labels output.
output must be a pair of (model_output, refined_labels), both NxC tensors.
The rows of refined_labels must all add up to one (probability scores);
however, model_output must be the pre-softmax output of the network."""
def forward(self, output1, output2, target, lambdaKL=0):
# Target is ignored at training time. Loss is defined as KL divergence
# between the model output and the refined labels.
if output2.requires_grad:
raise ValueError("Refined labels should not require gradients.")
output1_log_prob = F.log_softmax(output1, dim=1)
output2_prob = F.softmax(output2, dim=1)
_, pred_label2 = output2_prob.max(1)
_, pred_label1 = output1_log_prob.max(1)
# compute the mask
mask = pred_label2.eq(pred_label1)
# Per-sample cross entropy on the peer's predicted labels, masked to samples where
# the two branches agree (reduction='none' keeps the per-sample vector so the mask
# is applied element-wise rather than to an already-reduced scalar)
base_loss = F.cross_entropy(output1, pred_label2, reduction='none')
base_loss = base_loss * mask.float()
# Loss is -dot(model_output_log_prob, refined_labels). Prepare tensors
# for batch matrix multiplication
model_output1_log_prob = output1_log_prob.unsqueeze(2)
model_output2_prob = output2_prob.unsqueeze(1)
# Compute the loss, and average/sum for the batch.
kl_loss = -torch.bmm(model_output2_prob, model_output1_log_prob)
if self.aggregate == 'mean':
loss_co = base_loss.mean() + lambdaKL * kl_loss.mean()
else:
loss_co = base_loss.sum() + lambdaKL * kl_loss.sum()
return loss_co
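# A minimal co-refinement sketch (hypothetical models/tensors; `model_a`, `model_b`,
# `images`, and `target` are placeholders): output2 plays the role of the refined
# labels and must not carry gradients, which is why it is detached.
# criterion = CoRefineLoss(aggregate='mean')
# output1 = model_a(images)                  # trainable branch, pre-softmax logits
# output2 = model_b(images).detach()         # refined-label branch, no grad
# loss = criterion(output1, output2, target, lambdaKL=0.1)
# loss.backward()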
| 39.888535
| 129
| 0.600719
| 1,515
| 12,525
| 4.808581
| 0.107591
| 0.053535
| 0.028003
| 0.025257
| 0.830885
| 0.823747
| 0.809746
| 0.809746
| 0.80151
| 0.796294
| 0
| 0.016829
| 0.293094
| 12,525
| 314
| 130
| 39.888535
| 0.805963
| 0.073453
| 0
| 0.755459
| 0
| 0
| 0.064634
| 0.007599
| 0
| 0
| 0
| 0
| 0.148472
| 1
| 0.074236
| false
| 0
| 0.026201
| 0
| 0.227074
| 0.043668
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
28c39fddad729eda9fd7a1d08de6b30abe101aaf
| 34,212
|
py
|
Python
|
v0/aia_eis_v0/ml_sl/rf/dt_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | 1
|
2022-03-02T12:57:19.000Z
|
2022-03-02T12:57:19.000Z
|
v0/aia_eis_v0/ml_sl/rf/dt_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
v0/aia_eis_v0/ml_sl/rf/dt_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
import sys
import copy
import os
import pickle
import random
import math
from ml_sl.ml_data_wrapper import pack_list_2_list, reform_labeled_dataset_list
def cal_entropy(reformed_labeled_dataset_list):
"""
:param
reformed_labeled_dataset_list:
[[label(int), [num, num, ...]], ...]; labels are tallied into label_count_dict, e.g. {'label 0': 8, 'label 1': 3, ...}
:return:
the entropy of this node (before any split) and label_count_dict
"""
label_count_dict = {}
for reformed_labeled_data_list in reformed_labeled_dataset_list:
if reformed_labeled_data_list[0] not in label_count_dict.keys():
label_count_dict[reformed_labeled_data_list[0]] = 1
else:
label_count_dict[reformed_labeled_data_list[0]] += 1
entropy = 0.0
data_amount = len(reformed_labeled_dataset_list)
for value in label_count_dict.values():
p = value / data_amount
entropy += - p * math.log(p, 2)
return entropy, label_count_dict
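# Worked example (hypothetical data): for a node holding labels [1, 1, 2, 2],
# label_count_dict is {1: 2, 2: 2}, each p = 0.5, and the entropy is
# -0.5*log2(0.5) - 0.5*log2(0.5) = 1.0 bit, so cal_entropy returns (1.0, {1: 2, 2: 2}).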
def cal_node_accuracy(col_index, T, left_label, right_label, reformed_vali_data_list):
accuracy = 0
for v_d_list in reformed_vali_data_list:
x = v_d_list[1][col_index]
if (x <= T) and (left_label == v_d_list[0]):
accuracy += 1
elif (x > T) and (right_label == v_d_list[0]):
accuracy += 1
return accuracy
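# Worked example (hypothetical values): with col_index=0, T=5.0, left_label=1,
# right_label=2, the validation row [1, [3.0, ...]] goes left (3.0 <= 5.0) and matches
# left_label, adding 1. Note the return value is a raw count of correct rows, not a rate.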
class Node:
def __init__(self, reformed_labeled_dataset_list, level, leaf_label=None):
"""
:param
reformed_labeled_dataset_list
[
[label1, [x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]]
[label3, [x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]]
[label4, [x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]]
...
]
level
int
records this node's level in the tree, starting from the root (level 0)
prune_flag
boolean
marks the pruning status of this node: True if it has been pruned, False otherwise
T
because the attribute values are continuous, a threshold T must be found to split each attribute's value range
child_left_node
data whose attribute value is smaller than the threshold T belongs to the left child branch
child_right_node
data whose attribute value is greater than or equal to the threshold T belongs to the right child branch
"""
self.reformed_labeled_dataset_list = reformed_labeled_dataset_list
self.prune_flag = False
self.level = level
self.leaf_label = leaf_label
self.child_left_node = None
self.child_right_node = None
self.col_index = None
self.T = None
self.gain = None
self.entropy, self.label_count_dict = cal_entropy(self.reformed_labeled_dataset_list)
# 1- When this node is a leaf, there is no need to compute the gain from splitting its data.
# 2- When this node is a leaf holding a single sample, computing the split gain would fail because there is nothing left to split.
if type(self.leaf_label) != int:
self.cal_gain()
def cal_gain(self):
# all_col_max_gain_list = [(column_index, threshold T in this column, maximum Gain with this T), ...]
all_col_max_gain_list = []
# Iterate over every attribute (column) of the samples
for col_index in range(len(self.reformed_labeled_dataset_list[0][1])):
col_list = [data_list[1][col_index] for data_list in self.reformed_labeled_dataset_list]
# Sort the attribute values (reverse=False ==> ascending) and take the midpoint of each adjacent pair as a candidate threshold T
col_list.sort(reverse=False)
# Enumerate every candidate split point (threshold) T for this continuous attribute
T_candidate_list = [(col_list[i] + col_list[i+1]) / 2 for i in range(len(col_list) - 1)]
gain_list = []
# Try every candidate threshold T
for T in T_candidate_list:
left_dataset_list = [data_list for data_list in self.reformed_labeled_dataset_list\
if data_list[1][col_index] < T]
right_dataset_list = [data_list for data_list in self.reformed_labeled_dataset_list\
if data_list[1][col_index] >= T]
left_entropy, left_label_count_dict = cal_entropy(left_dataset_list)
right_entropy, right_label_count_dict = cal_entropy(right_dataset_list)
# Compute the information gain produced by splitting at threshold T
gain = self.entropy \
- len(left_dataset_list) * left_entropy / len(self.reformed_labeled_dataset_list) \
- len(right_dataset_list) * right_entropy / len(self.reformed_labeled_dataset_list)
gain_list.append(gain)
max_gain_index = gain_list.index(max(gain_list))
all_col_max_gain_list.append((col_index, T_candidate_list[max_gain_index], max(gain_list)))
# Sort the per-column maximum gains to find the largest gain and its attribute (column)
# reverse=True: descending; reverse=False: ascending (default)
all_col_max_gain_list.sort(key=lambda data: data[2], reverse=True)
self.col_index, self.T, self.gain = all_col_max_gain_list[0]
def create_child_node(self):
# Split the data at threshold T
# x < T
left_dataset_list = [data for data in self.reformed_labeled_dataset_list if data[1][self.col_index] < self.T]
# x >= T
right_dataset_list = [data for data in self.reformed_labeled_dataset_list if data[1][self.col_index] >= self.T]
"""
For each subset produced by the split, check two things: 1- whether all labels in the subset are identical; 2- the subset's sample count. These criteria give four cases and two outcomes:
labels identical?       sample count > 1 (>= 2)    result
yes (label_num = 1)     yes (data_amount > 1)      leaf node
yes                     no                         leaf node
-------------------------------------------
no (label_num > 1)      yes                        root of a subtree
no                      no (data_amount = 1)       [this case cannot occur]
"""
left_label_num = len(set([data[0] for data in left_dataset_list]))
if left_label_num == 1:
self.child_left_node = Node(left_dataset_list, self.level+1, leaf_label = left_dataset_list[0][0]) # an int
elif left_label_num > 1:
self.child_left_node = Node(left_dataset_list, self.level+1)
self.child_left_node.create_child_node()
right_label_num = len(set([data[0] for data in right_dataset_list]))
if right_label_num == 1:
self.child_right_node = Node(right_dataset_list, self.level+1, leaf_label = right_dataset_list[0][0])
elif right_label_num > 1:
"""
When erroneous data occur (identical feature vectors carrying different labels), they all end up in right_dataset_list; in that case they are grouped into a single leaf node.
Example from the training set: different labels, identical data
[4, [0.010820171959196951, 1.0, 0.01630376465973397, 0.9887005649439885, 0.02175795455276722, 0.9830508474904379, 0.04348650644694115, 0.9774011298879769, 0.07609403546858824, 0.9661016948319653, 0.10327677740470584, 0.9548022599248643, 0.1413090938097033, 0.9435028248688527, 0.17391662298026075, 0.9322033898128411, 0.1793708128732941, 0.9265536723592906, 0.18489850941281064, 0.9067796610484979, 0.21208125120001783, 0.8954802259924864, 0.27738451751691284, 0.8559322033709013, 0.23924929180683874, 0.8870056497377053, 0.28279460342205615, 0.8587570621721318, 0.3046995711188796, 0.8192090395505465, 0.31571086035893314, 0.7881355931837424, 0.33211753417483336, 0.7627118644193992, 0.3376011267264599, 0.7514124293633876, 0.26106605152788226, 0.8644067796256824, 0.3594472891060968, 0.723163841797814, 0.36513670026787665, 0.6723163841202172, 0.40340423779271034, 0.6158192089890699, 0.40356595226606323, 0.5847457627711762, 0.4363204947295874, 0.545197740149591, 0.4364822092029403, 0.5141242937827868, 0.46928085550544396, 0.46610169490642056, 0.4748673573621472, 0.4350282485396164, 0.4966988184124874, 0.40960451977527323, 0.49142104432210365, 0.3813559322096996, 0.5024323334132468, 0.35028248584289545, 0.5188831112170371, 0.3163841808237712, 0.5244549117444437, 0.28813559325819754, 0.5245725223788169, 0.2655367231461745, 0.5356426167871468, 0.22316384187226915, 0.54665390587829, 0.19209039550546497, 0.54674211385407, 0.17514124299590292, 0.5577681042745097, 0.14124293782786826, 0.5579298187478627, 0.11016949160997452, 0.5526961486453689, 0.07344632778961978, 0.5582973518313689, 0.03954802262158512, 0.569352744910402, 0.0, 0.585568301594356, 0.011299435056011466, 0.5355838114699601, 0.23446327692828073, 0.5079012247844873, 0.34180790958811436, 0.46935436215192733, 0.45197740119808893, 0.4365557158494236, 0.5000000000744552, 0.4253974134653138, 0.5593220338579226, 0.4143567217155773, 0.5960451976782773, 0.3870122653061068, 0.6384180791010929, 0.37603037887355695, 0.6638418078654361, 0.37578045642442404, 0.7118644067418025, 0.34839189602706355, 0.7627118644193992, 0.32642812301305346, 0.8135593219480854, 0.2882193908054065, 0.8587570621721318, 0.2991571732500663, 0.8418079095136592, 0.2719156261456725, 0.8644067796256824, 0.25012826908322233, 0.8813559321352443, 0.20661235997768793, 0.9039548022472674, 0.1631258535307468, 0.9209039547568296, 0.14676328370273656, 0.9378531072663917, 0.0870024154035652, 0.9548022599248643, 0.07612343812718159, 0.9604519773784149, 0.010937782593570287, 0.9774011298879769, 2.9402658593325923e-05, 0.9887005649439885, 0.0, 0.994350282397539, 0.0326222304998542, 0.9802259886892074, 0.07610873679788491, 0.9632768361796452, 0.14127969115110997, 0.9491525423224033, 0.19022038749108433, 0.9265536723592906, 0.2174325319368848, 0.909604519700818, 0.2610366488692889, 0.8700564972281432, 0.2938499965010893, 0.8192090395505465, 0.3429377061340303, 0.7683615818729498, 0.35935908127922717, 0.7401129943073761, 0.3702527597359971, 0.731638418052595, 0.38673294019838056, 0.6920903954310099, 0.4362175854245107, 0.5649717514603836, 0.4691779462003674, 0.48587570621721315, 0.5023882295742672, 0.35875706209767655, 0.547271361410929, 0.07344632778961978]]
[8, [0.010820171959196951, 1.0, 0.01630376465973397, 0.9887005649439885, 0.02175795455276722, 0.9830508474904379, 0.04348650644694115, 0.9774011298879769, 0.07609403546858824, 0.9661016948319653, 0.10327677740470584, 0.9548022599248643, 0.1413090938097033, 0.9435028248688527, 0.17391662298026075, 0.9322033898128411, 0.1793708128732941, 0.9265536723592906, 0.18489850941281064, 0.9067796610484979, 0.21208125120001783, 0.8954802259924864, 0.27738451751691284, 0.8559322033709013, 0.23924929180683874, 0.8870056497377053, 0.28279460342205615, 0.8587570621721318, 0.3046995711188796, 0.8192090395505465, 0.31571086035893314, 0.7881355931837424, 0.33211753417483336, 0.7627118644193992, 0.3376011267264599, 0.7514124293633876, 0.26106605152788226, 0.8644067796256824, 0.3594472891060968, 0.723163841797814, 0.36513670026787665, 0.6723163841202172, 0.40340423779271034, 0.6158192089890699, 0.40356595226606323, 0.5847457627711762, 0.4363204947295874, 0.545197740149591, 0.4364822092029403, 0.5141242937827868, 0.46928085550544396, 0.46610169490642056, 0.4748673573621472, 0.4350282485396164, 0.4966988184124874, 0.40960451977527323, 0.49142104432210365, 0.3813559322096996, 0.5024323334132468, 0.35028248584289545, 0.5188831112170371, 0.3163841808237712, 0.5244549117444437, 0.28813559325819754, 0.5245725223788169, 0.2655367231461745, 0.5356426167871468, 0.22316384187226915, 0.54665390587829, 0.19209039550546497, 0.54674211385407, 0.17514124299590292, 0.5577681042745097, 0.14124293782786826, 0.5579298187478627, 0.11016949160997452, 0.5526961486453689, 0.07344632778961978, 0.5582973518313689, 0.03954802262158512, 0.569352744910402, 0.0, 0.585568301594356, 0.011299435056011466, 0.5355838114699601, 0.23446327692828073, 0.5079012247844873, 0.34180790958811436, 0.46935436215192733, 0.45197740119808893, 0.4365557158494236, 0.5000000000744552, 0.4253974134653138, 0.5593220338579226, 0.4143567217155773, 0.5960451976782773, 0.3870122653061068, 0.6384180791010929, 0.37603037887355695, 0.6638418078654361, 0.37578045642442404, 0.7118644067418025, 0.34839189602706355, 0.7627118644193992, 0.32642812301305346, 0.8135593219480854, 0.2882193908054065, 0.8587570621721318, 0.2991571732500663, 0.8418079095136592, 0.2719156261456725, 0.8644067796256824, 0.25012826908322233, 0.8813559321352443, 0.20661235997768793, 0.9039548022472674, 0.1631258535307468, 0.9209039547568296, 0.14676328370273656, 0.9378531072663917, 0.0870024154035652, 0.9548022599248643, 0.07612343812718159, 0.9604519773784149, 0.010937782593570287, 0.9774011298879769, 2.9402658593325923e-05, 0.9887005649439885, 0.0, 0.994350282397539, 0.0326222304998542, 0.9802259886892074, 0.07610873679788491, 0.9632768361796452, 0.14127969115110997, 0.9491525423224033, 0.19022038749108433, 0.9265536723592906, 0.2174325319368848, 0.909604519700818, 0.2610366488692889, 0.8700564972281432, 0.2938499965010893, 0.8192090395505465, 0.3429377061340303, 0.7683615818729498, 0.35935908127922717, 0.7401129943073761, 0.3702527597359971, 0.731638418052595, 0.38673294019838056, 0.6920903954310099, 0.4362175854245107, 0.5649717514603836, 0.4691779462003674, 0.48587570621721315, 0.5023882295742672, 0.35875706209767655, 0.547271361410929, 0.07344632778961978]]
"""
if left_label_num == 0:
leaf_label_set = set([d[0] for d in right_dataset_list])
left_leaf_label_list = [i for i in leaf_label_set if i != right_dataset_list[0][0]]
self.child_right_node = Node(right_dataset_list, self.level + 1, leaf_label =right_dataset_list[0][0])
self.child_left_node = Node(left_dataset_list, self.level + 1, leaf_label = left_leaf_label_list[0])
else:
self.child_right_node = Node(right_dataset_list, self.level+1)
self.child_right_node.create_child_node()
def get_tree_depth(self, max_level=0):
# Walk both subtrees and record the deepest level reached. (The original version
# returned as soon as it had handled the left child, which made the right-subtree
# checks unreachable; both children are inspected here.)
self.max_level = max_level
if self.level > self.max_level:
self.max_level = self.level
for child in (self.child_left_node, self.child_right_node):
if child is not None and child.leaf_label is None:
tmp_max_level = child.get_tree_depth(max_level=self.max_level)
if self.max_level < tmp_max_level:
self.max_level = tmp_max_level
return self.max_level
def root_post_pruning(self, reformed_validation_dataset_list):
self.get_tree_depth(max_level=0)
prune_loop_time = pow(2, self.max_level) - 1
for i in range(prune_loop_time):
self.post_pruning_1(reformed_validation_dataset_list)
def post_pruning_1(self, reformed_validation_dataset_list):
vali_left_dataset_list = [data for data in reformed_validation_dataset_list if data[1][self.col_index] < self.T]
vali_right_dataset_list = [data for data in reformed_validation_dataset_list if data[1][self.col_index] >= self.T]
"""
Possible node configurations and the corresponding handling:
leaf node                nothing to do
left leaf, right leaf    try pruning the current node
left leaf, right node    recurse into the right child
left node, right leaf    recurse into the left child
left node, right node    recurse into both children
"""
# Guard: a leaf node has no children, so touching child_left_node would raise AttributeError: 'NoneType' object has no attribute 'leaf_label'
try:
# left leaf, right leaf: try pruning the current node (conditions: 1- left child is a leaf; 2- right child is a leaf; 3- this node has not been pruned yet)
if (not self.prune_flag) and isinstance(self.child_left_node.leaf_label, int) and isinstance(self.child_right_node.leaf_label, int):
# no pruning: count of validation samples classified correctly by the two leaf branches
old_accuracy = cal_node_accuracy(col_index=self.col_index, T=self.T,\
left_label=self.child_left_node.leaf_label, \
right_label=self.child_right_node.leaf_label,\
reformed_vali_data_list=reformed_validation_dataset_list)
# pruning: take the majority label of this node's own dataset as the final label and count correct validation samples
most_label = max(self.label_count_dict, key=self.label_count_dict.get)
new_accuracy = sum([1 for data in reformed_validation_dataset_list if data[0] == most_label])
if new_accuracy >= old_accuracy:
self.child_left_node = None
self.child_right_node = None
self.leaf_label = most_label
self.prune_flag = True
return
else:
self.prune_flag = True
except AttributeError as e:
print('Current Node: prune_flag {0}, leaf_label {1}'.format(self.prune_flag, self.leaf_label))
print('left node: type {0}, content {1}'.format(type(self.child_left_node), self.child_left_node))
print('right node: type {0}, content {1}'.format(type(self.child_right_node), self.child_right_node))
print(e)
# left leaf, right node: recurse into the right child
if (not self.prune_flag) and isinstance(self.child_left_node.leaf_label, int) and isinstance(self.child_right_node, Node):
self.child_right_node.post_pruning_1(vali_right_dataset_list)
return
# left node, right leaf: recurse into the left child
if (not self.prune_flag) and isinstance(self.child_left_node, Node) and isinstance(self.child_right_node.leaf_label, int):
self.child_left_node.post_pruning_1(vali_left_dataset_list)
return
# left node, right node: recurse into both children
if (not self.prune_flag) and isinstance(self.child_left_node, Node) and isinstance(self.child_right_node, Node):
if not self.child_left_node.prune_flag:
self.child_left_node.post_pruning_1(vali_left_dataset_list)
if not self.child_right_node.prune_flag:
self.child_right_node.post_pruning_1(vali_right_dataset_list)
return
def classify(self, unlabeled_data_list):
"""
:param
unlabeled_data_list:
[x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]
The format below is not used: every recursive call would have to convert it, wasting time
[(x0, y0), (x1, y1), (x2, y2), ..., (xn-2, yn-2), (xn-1, yn-1)]
:return:
label
"""
# Check whether the relevant child of the current node is a leaf
x = unlabeled_data_list[self.col_index]
# subtree branch + value below threshold
if x < self.T:
# leaf node
if isinstance(self.child_left_node.leaf_label, int):
return self.child_left_node.leaf_label
elif isinstance(self.child_left_node, Node):
return self.child_left_node.classify(unlabeled_data_list)
# subtree branch + value at or above threshold
elif x >= self.T:
# leaf node
if isinstance(self.child_right_node.leaf_label, int):
return self.child_right_node.leaf_label
elif isinstance(self.child_right_node, Node):
return self.child_right_node.classify(unlabeled_data_list)
def save_node(node, file_name='node_pickle.file', file_path='./'):
file_abs_path = os.path.join(file_path, file_name)
with open(file_abs_path, 'wb') as file:
pickle.dump(node, file)
def load_node(file_name='node_pickle.file', file_path='./'):
file_abs_path = os.path.join(file_path, file_name)
with open(file_abs_path, 'rb') as file:
node = pickle.load(file)
return node
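# A minimal round-trip sketch (hypothetical file name; any picklable Node works):
# save_node(node, file_name='dt_root.pickle', file_path='./')
# restored = load_node(file_name='dt_root.pickle', file_path='./')
# assert restored.classify(some_unlabeled_data) == node.classify(some_unlabeled_data)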
# ---------------------------- Test of decision tree ----------------------------
# if __name__ == '__main__':
# labeled_data_list = [
# # 5 rows with label 1
# [1, [(1,1) for i in range(4)]],
# [1, [(2,2) for i in range(4)]],
# [1, [(3,3) for i in range(4)]],
# [1, [(4,4) for i in range(4)]],
# [1, [(5,5) for i in range(4)]],
# # 6 rows with label 2
# [2, [(6,6) for i in range(4)]],
# [2, [(7,7) for i in range(4)]],
# [2, [(8,8) for i in range(4)]],
# [2, [(9,9) for i in range(4)]],
# [2, [(10,10) for i in range(4)]],
# [2, [(11,11) for i in range(4)]],
# # 7 rows with label 3
# [3, [(12,12) for i in range(4)]],
# [3, [(13,13) for i in range(4)]],
# [3, [(14,14) for i in range(4)]],
# [3, [(15,15) for i in range(4)]],
# [3, [(16,16) for i in range(4)]],
# [3, [(17,17) for i in range(4)]],
# [3, [(18,18) for i in range(4)]],
# ]
# reformed_labeled_data_list = reform_labeled_dataset_list(labeled_data_list)
# node = Node(reformed_labeled_data_list, level=0)
# node.create_child_node()
#
# # max_level = node.get_tree_depth(max_level=0)
# # print(max_level)
#
# # This validation set is deliberately constructed so that accuracy is 0 whether or not the tree is pruned; by Occam's razor, when results tie, the simpler (pruned) tree is preferred
# vali_data_list = [
# [3, [(8, 8) for i in range(4)]],
# [3, [(9, 9) for i in range(4)]],
# [3, [(10, 10) for i in range(4)]],
# [2, [(11, 11) for i in range(4)]],
# [2, [(12, 12) for i in range(4)]],
# [2, [(13, 13) for i in range(4)]],
# ]
# reformed_vali_data_list = reform_labeled_dataset_list(vali_data_list)
# node.post_pruning_1(reformed_vali_data_list)
#
# test_unlabeled_data = [6.5, 6.5, 6.5, 6.5, 6.5, 6.5, 6.5, 6.5]
# label = node.classify(test_unlabeled_data)
# print('label :',label)
# ---------------------------- Test of decision tree ----------------------------
def data_duplicate_checker(data_list):
"""
Function: when the dataset assigned to a subtree contains two or more kinds of labels, additionally check whether every data record is identical.
Different labels, identical data:
the labels are erroneous; put this batch of data into a single leaf node whose label is the majority label
returns False
Different labels, different data:
the normal case; keep splitting the dataset and creating the subtree
returns True
:param:
label_num, int, Number of label type
data_list = [
[label1(int), [num,num,num,num,]]
[label1(int), [num,num,num,num,]]
[label2(int), [num,num,num,num,]]
[label2(int), [num,num,num,num,]]
[label2(int), [num,num,num,num,]]
[label3(int), [num,num,num,num,]]
...
[label2(int), [num,num,num,num,]]
]
:return:
"""
label_count_dict = {}
existed_data_set = set()
for d in data_list:
label = d[0]
if label not in label_count_dict.keys():
label_count_dict[label] = 1
else:
label_count_dict[label] += 1
num_list = d[1]
num_tuple = tuple(num_list)
if num_tuple not in existed_data_set:
existed_data_set.add(num_tuple)
# Have duplication: labels are different, but data are the same
if len(existed_data_set) == 1:
selected_label = None
for k, v in label_count_dict.items():
if v == max(label_count_dict.values()):
selected_label = k
break
return False, selected_label
# No duplication: labels are different, but data are different too.
elif len(existed_data_set) > 1:
return True, None
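# Worked example (hypothetical rows): for [[4, [0.1, 0.2]], [8, [0.1, 0.2]]] the two
# feature vectors are identical, so existed_data_set has one element and the function
# returns (False, 4) -- label 4 wins the majority tie by dict insertion order. For
# [[4, [0.1, 0.2]], [8, [0.3, 0.4]]] it returns (True, None).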
class Random_Tree:
def __init__(self, reformed_labeled_dataset_list, leaf_label=None):
"""
:param
reformed_labeled_dataset_list
[
[label1, [x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]]
[label3, [x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]]
[label4, [x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]]
...
]
level
int
records this node's level in the tree, starting from the root (level 0)
prune_flag
boolean
marks the pruning status of this node: True if it has been pruned, False otherwise
T
because the attribute values are continuous, a threshold T must be found to split each attribute's value range
child_left_node
data whose attribute value is smaller than the threshold T belongs to the left child branch
child_right_node
data whose attribute value is greater than or equal to the threshold T belongs to the right child branch
"""
self.reformed_labeled_dataset_list = reformed_labeled_dataset_list
self.leaf_label = leaf_label
self.child_left_node = None
self.child_right_node = None
self.col_index = None
self.T = None
self.gain = None
# number of sample attributes
try:
# IndexError: list index out of range
self.attribute_num = len(self.reformed_labeled_dataset_list[0][1])
except IndexError as e:
print(e)
# sys.exit(1)
# At each node, compare the information gain of K randomly chosen attributes and split on the one with the largest gain; K = int(log2(d) + 1), e.g. d = 160 attributes gives K = 8
self.k = int(math.log(self.attribute_num, 2) + 1)
self.entropy, self.label_count_dict = cal_entropy(self.reformed_labeled_dataset_list)
if type(self.leaf_label) != int:
self.cal_gain()
def cal_gain(self):
# Randomly pick K attributes; a naive random-number generator may pick duplicates
self.random_attribute_index = []
# -------- This chunk of code selects duplicated attributes, and is wrong -----------
# while len(set(self.random_attribute_index)) < self.k:
# r_a_i = int(random.uniform(0, len(self.reformed_labeled_dataset_list[0][1])))
# self.random_attribute_index.append(r_a_i)
# -------- This chunk of code selects duplicated attributes, and is wrong -----------
# -------- This chunk of code selects unduplicated attributes, and is right --------
while len(self.random_attribute_index) < self.k:
r_a_i = int(random.uniform(0, len(self.reformed_labeled_dataset_list[0][1])))
if r_a_i not in self.random_attribute_index:
self.random_attribute_index.append(r_a_i)
# all_col_max_gain_list = [(column_index, threshold T in this column, maximum Gain with this T), ...]
all_col_max_gain_list = []
# Iterate over each of the randomly selected attributes (columns)
for col_index in self.random_attribute_index:
col_list = [data_list[1][col_index] for data_list in self.reformed_labeled_dataset_list]
"""
Duplicate values can appear within a column:
1- A random tree's data come from bootstrap resampling, so a batch may contain duplicated samples; a column may therefore contain repeated values, which would also surface among the candidate split thresholds. The repeated values are removed here.
2- Near the end of the EIS data, roughly where the values peak, a node has a fair chance of drawing the last few columns (158/159, etc.), whose values are likely all 1, or the first few columns, whose values are likely all 0.0;
in that case col_unique_list holds a single value (caught by the len < 2 check below)
"""
col_unique_list = list(set(col_list))
if len(col_unique_list) < 2:
continue
# Sort the unique attribute values (reverse=False ==> ascending) and take the midpoint of each adjacent pair as a candidate threshold T
col_unique_list.sort(reverse=False)
# Enumerate every candidate split point (threshold) T for this continuous attribute
T_candidate_list = [(col_unique_list[i] + col_unique_list[i+1]) / 2 for i in range(len(col_unique_list) - 1)]
gain_list = []
# Try every candidate threshold T
for T in T_candidate_list:
left_dataset_list = [data_list for data_list in self.reformed_labeled_dataset_list if data_list[1][col_index] <= T]
right_dataset_list = [data_list for data_list in self.reformed_labeled_dataset_list if data_list[1][col_index] > T]
left_entropy, left_label_count_dict = cal_entropy(left_dataset_list)
right_entropy, right_label_count_dict = cal_entropy(right_dataset_list)
# Compute the information gain produced by splitting at threshold T
gain = self.entropy - len(left_dataset_list) * left_entropy / len(self.reformed_labeled_dataset_list) - len(right_dataset_list) * right_entropy / len(self.reformed_labeled_dataset_list)
gain_list.append(gain)
try:
# ValueError: max() arg is an empty sequence
max_gain_index = gain_list.index(max(gain_list))
except ValueError as e:
print(e)
sys.exit(1)
all_col_max_gain_list.append((col_index, T_candidate_list[max_gain_index], max(gain_list)))
# Sort the per-column maximum gains to find the largest gain and its attribute (column)
all_col_max_gain_list.sort(key=lambda data: data[2], reverse=True)
try:
self.col_index, self.T, self.gain = all_col_max_gain_list[0]
except IndexError as e:
label_count_dict = {}
for d in self.reformed_labeled_dataset_list:
label = d[0]
if label not in label_count_dict.keys():
label_count_dict[label] = 1
else:
label_count_dict[label] += 1
for k, v in label_count_dict.items():
if v == max(label_count_dict.values()):
self.leaf_label = k
print(e)
# sys.exit(1)
def create_child_node(self):
# Split the data at threshold T
# x < T
# Guard: self.col_index stays None when cal_gain found no valid split (TypeError: list indices must be integers or slices, not NoneType)
try:
left_dataset_list = [data for data in self.reformed_labeled_dataset_list if data[1][self.col_index] < self.T]
except TypeError as e:
print(e)
sys.exit(1)
# x >= T
try:
right_dataset_list = [data for data in self.reformed_labeled_dataset_list if data[1][self.col_index] >= self.T]
except TypeError as e:
print(e)
sys.exit(1)
"""
如果分割所得的子集中:1-子集中数据标签种类是否相同;2-子集样本数量;按照这两条标准区分可得一下四种情况,两种结果:
label同异 样本数量>1(>=2) 结果
同(label_num=1) 是(data_amount>1) 叶节点
同 否 叶节点
-------------------------------------------
否(label_num>1) 是 子树的根节点
否 否(data_amount=1) 【不存在这种情况】
"""
left_label_num = len(set([data[0] for data in left_dataset_list]))
if left_label_num == 1:
self.child_left_node = Random_Tree(left_dataset_list, leaf_label=left_dataset_list[0][0]) # an int
elif left_label_num > 1:
checker, selected_label = data_duplicate_checker(left_dataset_list)
if checker:
self.child_left_node = Random_Tree(left_dataset_list)
self.child_left_node.create_child_node()
else:
self.child_left_node = Random_Tree(left_dataset_list, leaf_label=selected_label)
right_label_num = len(set([data[0] for data in right_dataset_list]))
if right_label_num == 1:
self.child_right_node = Random_Tree(right_dataset_list, leaf_label=right_dataset_list[0][0])
elif right_label_num > 1:
# if left_label_num == 0:
# leaf_label_set = set([d[0] for d in right_dataset_list])
# left_leaf_label_list = [i for i in leaf_label_set if i != right_dataset_list[0][0]]
# self.child_right_node = Random_Tree(right_dataset_list, leaf_label =right_dataset_list[0][0])
# self.child_left_node = Random_Tree(left_dataset_list, leaf_label = left_leaf_label_list[0])
# else:
# self.child_right_node = Random_Tree(right_dataset_list)
# if type(self.child_right_node.leaf_label) != int:
# self.child_right_node.create_child_node()
checker, selected_label = data_duplicate_checker(right_dataset_list)
if checker:
self.child_right_node = Random_Tree(right_dataset_list)
self.child_right_node.create_child_node()
else:
self.child_right_node = Random_Tree(right_dataset_list, leaf_label=selected_label)
def classify(self, unlabeled_data_list):
"""
:param
unlabeled_data_list:
[x0, y0, x1, y1, x2, y2, ..., xn-2, yn-2, xn-1, yn-1]
The format below is not used: every recursive call would have to convert it, wasting time
[(x0, y0), (x1, y1), (x2, y2), ..., (xn-2, yn-2), (xn-1, yn-1)]
:return:
label
"""
# Check whether the relevant child of the current node is a leaf
x = unlabeled_data_list[self.col_index]
# subtree branch + value below threshold
if x < self.T:
# leaf node
try:
# AttributeError: 'NoneType' object has no attribute 'leaf_label'
if isinstance(self.child_left_node.leaf_label, int):
return self.child_left_node.leaf_label
elif isinstance(self.child_left_node, Random_Tree):
return self.child_left_node.classify(unlabeled_data_list)
except AttributeError as e:
print('Leaf label:',self.leaf_label)
print('child_left_node', self.child_left_node)
print('child_right_node', self.child_right_node)
sys.exit(1)
# subtree branch + value at or above threshold
elif x >= self.T:
# leaf node
if isinstance(self.child_right_node.leaf_label, int):
return self.child_right_node.leaf_label
elif isinstance(self.child_right_node, Random_Tree):
return self.child_right_node.classify(unlabeled_data_list)
# ---------------------------- Test of Random decision tree ----------------------------
# if __name__ == '__main__':
# labeled_data_list = [
# # 5 rows with label 1
# [1, [(1,1) for i in range(4)]],
# [1, [(2,2) for i in range(4)]],
# [1, [(3,3) for i in range(4)]],
# [1, [(4,4) for i in range(4)]],
# [1, [(5,5) for i in range(4)]],
# # 6 rows with label 2
# [2, [(6,6) for i in range(4)]],
# [2, [(7,7) for i in range(4)]],
# [2, [(8,8) for i in range(4)]],
# [2, [(9,9) for i in range(4)]],
# [2, [(10,10) for i in range(4)]],
# [2, [(11,11) for i in range(4)]],
# # 7 rows with label 3
# [3, [(12,12) for i in range(4)]],
# [3, [(13,13) for i in range(4)]],
# [3, [(14,14) for i in range(4)]],
# [3, [(15,15) for i in range(4)]],
# [3, [(16,16) for i in range(4)]],
# [3, [(17,17) for i in range(4)]],
# [3, [(18,18) for i in range(4)]],
# ]
# reformed_labeled_data_list = reform_labeled_dataset_list(labeled_data_list)
# rt = Random_Tree(reformed_labeled_data_list)
# rt.create_child_node()
#
# test_unlabeled_data = [6.5, 6.5, 6.5, 6.5, 6.5, 6.5, 6.5, 6.5]
# label = rt.classify(test_unlabeled_data)
# print('label :',label)
# ---------------------------- Test of Random decision tree ----------------------------
| 55.359223
| 3,228
| 0.620484
| 4,299
| 34,212
| 4.697604
| 0.112352
| 0.052835
| 0.013964
| 0.024511
| 0.858133
| 0.826195
| 0.803565
| 0.772964
| 0.75489
| 0.71285
| 0
| 0.238557
| 0.267538
| 34,212
| 618
| 3,229
| 55.359223
| 0.567341
| 0.239507
| 0
| 0.551839
| 0
| 0
| 0.011081
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053512
| false
| 0
| 0.023411
| 0
| 0.153846
| 0.040134
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
28f74f51e608087a26c00af524dffbf2899fb446
| 7,315
|
py
|
Python
|
test/test_knee_registration.py
|
uncbiag/ICON
|
2c34a1e876726cf2de105157675213ffb2f640ba
|
[
"Apache-2.0"
] | 5
|
2022-01-22T16:29:53.000Z
|
2022-03-03T14:36:58.000Z
|
test/test_knee_registration.py
|
uncbiag/ICON
|
2c34a1e876726cf2de105157675213ffb2f640ba
|
[
"Apache-2.0"
] | 7
|
2021-10-13T14:36:35.000Z
|
2022-03-11T07:33:45.000Z
|
test/test_knee_registration.py
|
uncbiag/ICON
|
2c34a1e876726cf2de105157675213ffb2f640ba
|
[
"Apache-2.0"
] | null | null | null |
import unittest
class TestKneeRegistration(unittest.TestCase):
def test_knee_registration(self):
print("OAI ICON")
import icon_registration.pretrained_models
from icon_registration.mermaidlite import compute_warped_image_multiNC
from icon_registration.inverseConsistentNet import flips
import torch
import numpy as np
import subprocess
print("Downloading test data)")
import icon_registration.test_utils
icon_registration.test_utils.download_test_data()
t_ds = torch.load(icon_registration.test_utils.TEST_DATA_DIR / "icon_example_data")
batched_ds = list(zip(*[t_ds[i::2] for i in range(2)]))
net = icon_registration.pretrained_models.OAI_knees_registration_model(
pretrained=True
)
# Run on the four downloaded image pairs
with torch.no_grad():
dices = []
folds_list = []
for x in batched_ds[:]:
# Separate the image data used for registration from the segmentation used for evaluation,
# and shape it for passing to the network
x = list(zip(*x))
x = [torch.cat(r, 0).cuda().float() for r in x]
fixed_image, fixed_cartilage = x[0], x[2]
moving_image, moving_cartilage = x[1], x[3]
# Run the registration.
# Our network expects batches of two pairs,
# moving_image.size = torch.Size([2, 1, 80, 192, 192])
# fixed_image.size = torch.Size([2, 1, 80, 192, 192])
# intensity normalized to have min 0 and max 1.
net(moving_image, fixed_image)
# Once registration is run, net.phi_AB and net.phi_BA are functions that map
# tensors of coordinates from image B to A and A to B respectively.
# Evaluate the registration
# First, evaluate phi_AB on a tensor of coordinates to get an explicit map.
phi_AB_vectorfield = net.phi_AB(net.identityMap)
fat_phi = torch.nn.Upsample(
size=moving_cartilage.size()[2:], mode="trilinear",
align_corners=False
)(phi_AB_vectorfield[:, :3])
sz = np.array(fat_phi.size())
spacing = 1.0 / (sz[2::] - 1)
# Warp the cartilage of one image to match the other using the explicit map.
warped_moving_cartilage = compute_warped_image_multiNC(
moving_cartilage.float(), fat_phi, spacing, 1
)
# Binarize the segmentations
wmb = warped_moving_cartilage > 0.5
fb = fixed_cartilage > 0.5
# Compute the dice metric
intersection = wmb * fb
dice = (
2
* torch.sum(intersection, [1, 2, 3, 4]).float()
/ (torch.sum(wmb, [1, 2, 3, 4]) + torch.sum(fb, [1, 2, 3, 4]))
)
print("Batch DICE:", dice)
dices.append(dice)
# Compute the folds metric
f = [flips(phi[None]).item() for phi in phi_AB_vectorfield]
print("Batch folds per image:", f)
folds_list.append(f)
mean_dice = torch.mean(torch.cat(dices).cpu())
print("Mean DICE SCORE:", mean_dice)
self.assertTrue(mean_dice.item() > 0.68)
mean_folds = np.mean(folds_list)
print("Mean folds per image:", mean_folds)
self.assertTrue(mean_folds < 300)
def test_knee_registration_gradICON(self):
print("OAI gradICON")
import icon_registration.pretrained_models
from icon_registration.mermaidlite import compute_warped_image_multiNC
from icon_registration.inverseConsistentNet import flips
import torch
import numpy as np
import subprocess
print("Downloading test data)")
import icon_registration.test_utils
icon_registration.test_utils.download_test_data()
t_ds = torch.load(icon_registration.test_utils.TEST_DATA_DIR / "icon_example_data")
batched_ds = list(zip(*[t_ds[i::2] for i in range(2)]))
net = icon_registration.pretrained_models.OAI_knees_gradICON_model(
pretrained=True
)
# Run on the four downloaded image pairs
with torch.no_grad():
dices = []
folds_list = []
for x in batched_ds[:]:
# Separate the image data used for registration from the segmentation used for evaluation,
# and shape it for passing to the network
x = list(zip(*x))
x = [torch.cat(r, 0).cuda().float() for r in x]
fixed_image, fixed_cartilage = x[0], x[2]
moving_image, moving_cartilage = x[1], x[3]
# Run the registration.
# Our network expects batches of two pairs,
# moving_image.size = torch.Size([2, 1, 80, 192, 192])
# fixed_image.size = torch.Size([2, 1, 80, 192, 192])
# intensity normalized to have min 0 and max 1.
net(moving_image, fixed_image)
# Once registration is run, net.phi_AB and net.phi_BA are functions that map
# tensors of coordinates from image B to A and A to B respectively.
# Evaluate the registration
# First, evaluate phi_AB on a tensor of coordinates to get an explicit map.
phi_AB_vectorfield = net.phi_AB(net.identityMap)
fat_phi = torch.nn.Upsample(
size=moving_cartilage.size()[2:], mode="trilinear",
align_corners=False
)(phi_AB_vectorfield[:, :3])
sz = np.array(fat_phi.size())
spacing = 1.0 / (sz[2::] - 1)
# Warp the cartilage of one image to match the other using the explicit map.
warped_moving_cartilage = compute_warped_image_multiNC(
moving_cartilage.float(), fat_phi, spacing, 1
)
# Binarize the segmentations
wmb = warped_moving_cartilage > 0.5
fb = fixed_cartilage > 0.5
# Compute the dice metric
intersection = wmb * fb
dice = (
2
* torch.sum(intersection, [1, 2, 3, 4]).float()
/ (torch.sum(wmb, [1, 2, 3, 4]) + torch.sum(fb, [1, 2, 3, 4]))
)
print("Batch DICE:", dice)
dices.append(dice)
# Compute the folds metric
f = [flips(phi[None]).item() for phi in phi_AB_vectorfield]
print("Batch folds per image:", f)
folds_list.append(f)
mean_dice = torch.mean(torch.cat(dices).cpu())
print("Mean DICE SCORE:", mean_dice)
self.assertTrue(mean_dice.item() > 0.68)
mean_folds = np.mean(folds_list)
print("Mean folds per image:", mean_folds)
self.assertTrue(mean_folds < 300)
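# A minimal, self-contained sketch of the Dice computation used in both tests above,
# on toy binary masks (hypothetical data; no GPU, download, or pretrained model needed):
# import torch
# a = torch.zeros(1, 1, 4, 4, 4); a[..., :2] = 1    # stands in for the warped moving mask
# b = torch.zeros(1, 1, 4, 4, 4); b[..., 1:3] = 1   # stands in for the fixed mask
# inter = (a.bool() & b.bool()).float()
# dice = 2 * inter.sum([1, 2, 3, 4]) / (a.sum([1, 2, 3, 4]) + b.sum([1, 2, 3, 4]))
# # |A| = |B| = 32 voxels and |A ∩ B| = 16, so dice == 0.5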
| 41.5625
| 106
| 0.553383
| 873
| 7,315
| 4.47079
| 0.177549
| 0.057392
| 0.030746
| 0.038432
| 0.957725
| 0.957725
| 0.957725
| 0.957725
| 0.957725
| 0.957725
| 0
| 0.025192
| 0.359672
| 7,315
| 175
| 107
| 41.8
| 0.80807
| 0.211757
| 0
| 0.854545
| 0
| 0
| 0.044654
| 0
| 0
| 0
| 0
| 0
| 0.036364
| 1
| 0.018182
| false
| 0
| 0.136364
| 0
| 0.163636
| 0.109091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9074e80e9f88d2ac0d6d73585fc494be14fb590
| 118
|
py
|
Python
|
tensorboard/build_with_tf.py
|
ml7/tensorboard
|
6f3988ecdb3ae719585e6f278d875e381b616783
|
[
"Apache-2.0"
] | null | null | null |
tensorboard/build_with_tf.py
|
ml7/tensorboard
|
6f3988ecdb3ae719585e6f278d875e381b616783
|
[
"Apache-2.0"
] | null | null | null |
tensorboard/build_with_tf.py
|
ml7/tensorboard
|
6f3988ecdb3ae719585e6f278d875e381b616783
|
[
"Apache-2.0"
] | null | null | null |
try:
from tensorboard.tf_disabled import use_tf
except ImportError:
from tensorboard.tf_enabled import use_tf
| 23.6
| 46
| 0.805085
| 17
| 118
| 5.352941
| 0.588235
| 0.32967
| 0.373626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161017
| 118
| 4
| 47
| 29.5
| 0.919192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e9243c6a0ddd241226739ed51088a19c9ca0603a
| 10,594
|
py
|
Python
|
trainingjsonfile.py
|
abhayychoudhary/Dialogflow
|
1fcba155cc2c1ee9dc018e245373ce43b708fe10
|
[
"Apache-2.0"
] | 10
|
2020-05-15T10:15:25.000Z
|
2021-06-11T09:59:38.000Z
|
trainingjsonfile.py
|
abhayychoudhary/Dialogflow
|
1fcba155cc2c1ee9dc018e245373ce43b708fe10
|
[
"Apache-2.0"
] | 3
|
2021-01-27T17:24:55.000Z
|
2021-03-03T09:44:20.000Z
|
trainingjsonfile.py
|
abhayychoudhary/Dialogflow
|
1fcba155cc2c1ee9dc018e245373ce43b708fe10
|
[
"Apache-2.0"
] | 2
|
2021-01-29T09:56:43.000Z
|
2021-06-02T09:41:22.000Z
|
import json
from uuid import uuid4
def userSays(row):
userSays = []
for i in row[12:]:
if(i):
userSays.append({"id": "","data": [{"text": i,"userDefined": "false"}],"isTemplate": "false","count": 0,"lang":row[10] or "en","updated": 0})
return userSays
def noFollowup(row):
def webhook(row):
if(row=="" or row.lower()=="false"):
return False
else:
return True
noFollowup = {
"id": str(uuid4()),
"name": row[0],
"auto": True,
"contexts": [],
"responses": [
{
"resetContexts": "false",
"affectedContexts": [],
"parameters": [],
"messages": [
{
"type": 0,
"lang": row[10] or "en",
"condition": "",
"speech": row[1]
}
],
"defaultResponsePlatforms": {},
"speech": []
}
],
"priority": 500000,
"webhookUsed": webhook(row[8]),
"webhookForSlotFilling": "false",
"fallbackIntent": "false",
"events": [],
"conditionalResponses": [],
"condition": "",
"conditionalFollowupEvents": []
}
return noFollowup
def inputContext(row):
def webhook(row):
if(row=="" or row.lower()=="false"):
return False
else:
return True
inputContext = {
"id": "",
"name": row[0],
"auto": "true",
"contexts": [],
"responses": [
{
"resetContexts": "false",
"affectedContexts": [
{
"name": row[3],
"parameters": {},
"lifespan": 2
}
],
"parameters": [],
"messages": [
{
"type": 0,
"lang": row[10] or "en",
"condition": "",
"speech": row[1]
}
],
"defaultResponsePlatforms": {},
"speech": []
}
],
"priority": 500000,
"webhookUsed": webhook(row[8]),
"webhookForSlotFilling": "false",
"fallbackIntent": "false",
"events": [],
"conditionalResponses": [],
"condition": "",
"conditionalFollowupEvents": []
}
return inputContext
def outputContext(row):
def webhook(row):
if(row=="" or row.lower()=="false"):
return False
else:
return True
outputContext = {
"id": "",
"name": row[0],
"auto": "true",
"contexts": [
row[4]
],
"responses": [
{
"resetContexts": "false",
"affectedContexts": [],
"parameters": [],
"messages": [
{
"type": 0,
"lang": row[10] or "en",
"condition": "",
"speech": row[1]
}
],
"defaultResponsePlatforms": {},
"speech": []
}
],
"priority": 500000,
"webhookUsed": webhook(row[8]),
"webhookForSlotFilling": "false",
"fallbackIntent": "false",
"events": [],
"conditionalResponses": [],
"condition": "",
"conditionalFollowupEvents": []
}
return outputContext
def outputOutputContext(row):
def webhook(row):
if(row=="" or row.lower()=="false"):
return False
else:
return True
def chip(row):
chip = []
for i in row.split("/"):
chip.append({"text": i})
return chip
def chipgoogle(row):
chipgoogle = []
for i in row.split("/"):
chipgoogle.append({"title": i})
return chipgoogle
def outputnewcontext(row):
outputcontextadd = []
for i in row.split("/"):
outputcontextadd.append(
{"name": i.split("=")[0], "parameters": {}, "lifespan": i.split("=")[1]})
return outputcontextadd
def inputnewcontext(row):
inputnewcontext = []
for i in row.split("/"):
inputnewcontext.append(i)
return inputnewcontext
data = chip(row[6])[0].get("text", "")
if data:
outputOutputContext = {
"id": row[7] or "",
"name": row[0],
"auto": "true",
"contexts": inputnewcontext(row[4]),
"responses": [
{
"resetContexts": "false",
"affectedContexts": outputnewcontext(row[3]),
"parameters": [],
"messages": [
{
"type": "suggestion_chips",
"platform": "google",
"lang": row[10] or "en",
"condition": "",
"suggestions": chipgoogle(row[6])
},
{
"type": 0,
"lang": row[10] or "en",
"condition": "",
"speech": row[1]
},
{
"type": 4,
"lang": row[10] or "en",
"condition": "",
"payload": {
"richContent": [
[
{
"type": "chips",
"options": chip(row[6])
}
]
]
}
}
],
"defaultResponsePlatforms": {
"google": "true"
},
"speech": []
}
],
"priority": 500000,
"webhookUsed": webhook(row[8]),
"webhookForSlotFilling": "false",
"fallbackIntent": "false",
"events": [],
"conditionalResponses": [],
"condition": "",
"conditionalFollowupEvents": []
}
else:
outputOutputContext = {
"id": row[7] or "",
"name": row[0],
"auto": "true",
"contexts": inputnewcontext(row[4]),
"responses": [
{
"resetContexts": "false",
"affectedContexts": outputnewcontext(row[3]),
"parameters": [],
"messages": [
{
"type": 0,
"lang": row[10] or "en",
"condition": "",
"speech": row[1]
}
],
"defaultResponsePlatforms": {
"google": "true"
},
"speech": []
}
],
"priority": 500000,
"webhookUsed": webhook(row[8]),
"webhookForSlotFilling": "false",
"fallbackIntent": "false",
"events": [],
"conditionalResponses": [],
"condition": "",
"conditionalFollowupEvents": []
}
return outputOutputContext
def defaultcontext(row):
def webhook(row):
if(row=="" or row.lower()=="false"):
return False
else:
return True
def chip(row):
chip = []
for i in row.split("/"):
chip.append({"text": i})
return chip
def chipgoogle(row):
chipgoogle = []
for i in row.split("/"):
chipgoogle.append({"title": i})
return chipgoogle
def inputnewcontext(row):
inputnewcontext = []
for i in row.split("/"):
inputnewcontext.append(i)
return inputnewcontext
defaultcontext = {
"id": "",
"parentId": row[7] or "",
"rootParentId": row[7] or "",
"name": row[0],
"auto": "false",
"contexts": inputnewcontext(row[4]),
"responses": [
{
"resetContexts": "false",
"action": "",
"affectedContexts": [],
"parameters": [],
"messages": [
# {
# "type": "suggestion_chips",
# "platform": "google",
# "lang": row[10] or "en",
# "condition": "",
# "suggestions": chipgoogle(row[6])
# },
{
"type": 0,
"lang": row[10] or "en",
"condition": "",
"speech": row[1]
},
# {
# "type": 4,
# "lang": row[10] or "en",
# "condition": "",
# "payload": {
# "richContent": [
# [
# {
# "type": "chips",
# "options": chip(row[6])
# }
# ]
# ]
# }
# }
],
"defaultResponsePlatforms": {
"google": "true"
},
"speech": []
}
],
"priority": 500000,
"webhookUsed": webhook(row[8]),
"webhookForSlotFilling": "false",
"fallbackIntent": "true",
"events": [],
"conditionalResponses": [],
"condition": "",
"conditionalFollowupEvents": []
}
return defaultcontext
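# A minimal usage sketch (hypothetical row; indices follow the usage above:
# row[0]=intent name, row[1]=response text, row[8]=webhook flag, row[10]=language,
# row[12:]=training phrases):
# row = ['greet', 'Hello!', '', '', '', '', '', '', 'false', '', 'en', '', 'hi', 'hey there']
# print(json.dumps(noFollowup(row), indent=2))   # intent definition
# print(json.dumps(userSays(row), indent=2))     # training phrases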
| 29.842254
| 157
| 0.342741
| 607
| 10,594
| 5.978583
| 0.140033
| 0.021218
| 0.02728
| 0.033343
| 0.837696
| 0.815652
| 0.810692
| 0.783136
| 0.783136
| 0.783136
| 0
| 0.021611
| 0.519539
| 10,594
| 354
| 158
| 29.926554
| 0.691356
| 0.039834
| 0
| 0.690554
| 0
| 0
| 0.186816
| 0.041383
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058632
| false
| 0
| 0.006515
| 0
| 0.140065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a66194b1fbc9df02670dfcc5a622dc2c01ddd24
| 9,968
|
py
|
Python
|
scripts/hmm_lib_jax.py
|
agupta83/pyprobml
|
f1fc9a26fec8724462970a81712eeac480ad9263
|
[
"MIT"
] | null | null | null |
scripts/hmm_lib_jax.py
|
agupta83/pyprobml
|
f1fc9a26fec8724462970a81712eeac480ad9263
|
[
"MIT"
] | null | null | null |
scripts/hmm_lib_jax.py
|
agupta83/pyprobml
|
f1fc9a26fec8724462970a81712eeac480ad9263
|
[
"MIT"
] | null | null | null |
# Implementation of the Hidden Markov Model for discrete observations with Jax.
# This file is based on https://github.com/probml/pyprobml/blob/master/scripts/hmm_lib.py
# Author: Gerardo Duran-Martin (@gerdm), Aleyna Kara (@karalleyna)
from jax import lax
import jax
import jax.numpy as jnp
class HMMDiscrete:
def __init__(self, A, px, pi):
"""
This class simulates a Hidden Markov Model with
categorical distribution
Parameters
----------
A: array(state_size, state_size)
State transition matrix
px: array(state_size, observation_size)
Matrix of conditional categorical probabilities
of observing the ith category
pi: array(state_size)
Array of initial-state probabilities
"""
self.A = A
self.px = px
self.pi = pi
self.state_size, self.observation_size = px.shape
def sample(self, n_samples, rng_key):
rng_key, key_x, key_z = jax.random.split(rng_key, 3)
latent_states = jnp.arange(self.state_size)
obs_states = jnp.arange(self.observation_size)
zt = jax.random.choice(key_z, latent_states, p=self.pi)
xt = jax.random.choice(key_x, obs_states, p=self.px[zt])
z_hist = jnp.array([zt])
x_hist = jnp.array([xt])
for t in range(1, n_samples):
rng_key, key_x, key_z = jax.random.split(rng_key, 3)
zt = jax.random.choice(key_z, latent_states, p=self.A[zt])
xt = jax.random.choice(key_x, obs_states, p=self.px[zt])
z_hist = jnp.append(z_hist, jnp.array([zt]))
x_hist = jnp.append(x_hist, jnp.array([xt]))
return z_hist, x_hist
def forwards(self, x_hist):
"""
Calculates a belief state
Parameters
----------
x_hist: array(n_samples)
History of observed states
Returns
-------
* array(n_samples, n_hidden) :
All alpha values found for each sample
* float
The loglikelihood giving log(p(x|model))
"""
n_samples = len(x_hist)
alpha_hist = jnp.zeros((n_samples, self.state_size))
c_elements = jnp.zeros(n_samples)
alpha_n = self.pi * self.px[:, x_hist[0]]
cn = alpha_n.sum()
alpha_n = alpha_n / cn
alpha_hist = jax.ops.index_update(alpha_hist, jax.ops.index[0, :], alpha_n)
c_elements = jax.ops.index_update(c_elements, jax.ops.index[0], cn) # normalization constants
def scan_fn(alpha_with_norm_const, t):
alpha_hist, c_elements = alpha_with_norm_const
alpha_n = self.px[:, x_hist[t]] * (alpha_hist[t - 1, :].reshape((-1, 1)) * self.A).sum(axis=0)
cn = alpha_n.sum()
alpha_n = alpha_n / cn
alpha_hist = jax.ops.index_update(alpha_hist, jax.ops.index[t, : ], alpha_n)
c_elements = jax.ops.index_update(c_elements, jax.ops.index[t], cn)
return (alpha_hist, c_elements), jnp.zeros((0,))
(alpha_hist, c_elements), _ = lax.scan(scan_fn, (alpha_hist, c_elements), jnp.arange(1, n_samples))
return alpha_hist, jnp.sum(jnp.log(c_elements))
def backwards_filtering(self, x_hist):
n_samples = len(x_hist)
beta_next = jnp.ones(self.state_size)
beta_hist = jnp.zeros((n_samples, self.state_size))
beta_hist = jax.ops.index_update(beta_hist, jax.ops.index[-1, :], beta_next)
def scan_fn(beta_hist, t):
beta_next = (beta_hist[-t + 1] * self.px[:, x_hist[-t + 1]] * self.A).sum(axis=1)
beta_hist = jax.ops.index_update(beta_hist, jax.ops.index[-t, :], beta_next / beta_next.sum())
return beta_hist, jnp.zeros((0,))
beta_hist, _ = lax.scan(scan_fn, beta_hist, jnp.arange(2, n_samples + 1))
return beta_hist
def forwards_backwards(self, x_hist, alpha_hist=None, beta_hist=None):
if alpha_hist is None:
alpha_hist, _ = self.forwards(x_hist)
if beta_hist is None:
beta_hist = self.backwards_filtering(x_hist)
gamma = alpha_hist * beta_hist
return gamma / gamma.sum(axis=1).reshape((-1, 1))
def map_state(self, x_hist):
"""
Compute the most probable sequence of states
Parameters
----------
x_hist: array(n_samples)
History of observed states
Returns
-------
* array(n_samples)
The MAP (most probable) sequence of states
"""
n_samples = len(x_hist)
wn = jnp.log(self.A) + jnp.log(self.pi) + jnp.log(self.px[:, x_hist[0]])
wn = wn.max(axis=1)
logp_hist = jnp.array(wn)
for t in range(1, n_samples):
wn = jnp.log(self.A) + jnp.log(self.px[:, x_hist[t]]) + wn
wn = wn.max(axis=1)
logp_hist = jnp.vstack((logp_hist, jnp.array(wn)))
return logp_hist.argmax(axis=1)
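# A minimal usage sketch (hypothetical parameters; assumes one of the older JAX
# releases where jax.ops.index_update still exists, as the class above does):
# import jax
# import jax.numpy as jnp
# A = jnp.array([[0.9, 0.1], [0.2, 0.8]])             # state transition matrix
# px = jnp.array([[0.7, 0.2, 0.1], [0.1, 0.3, 0.6]])  # emission probabilities
# pi = jnp.array([0.5, 0.5])                          # initial-state distribution
# hmm = HMMDiscrete(A, px, pi)
# z_hist, x_hist = hmm.sample(n_samples=50, rng_key=jax.random.PRNGKey(0))
# alpha_hist, loglik = hmm.forwards(x_hist)           # filtered beliefs + log p(x)
# gamma = hmm.forwards_backwards(x_hist)              # smoothed posteriors
# map_path = hmm.map_state(x_hist)                    # most probable state sequence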
| 36.782288
| 118
| 0.597111
| 1,438
| 9,968
| 3.919332
| 0.099444
| 0.035486
| 0.046842
| 0.042583
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.007521
| 0.279695
| 9,968
| 271
| 119
| 36.782288
| 0.777437
| 0.212681
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.112676
| false
| 0
| 0.042254
| 0
| 0.267606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c91003edf2a6d8f761e3bb668245389c2fd60e93
| 2,046
|
py
|
Python
|
cpcctool/cpcc_code_docx.py
|
l2m2/cpcc-tool
|
48404e1c228f06edfde697069641d722823955f3
|
[
"MIT"
] | 1
|
2021-06-15T10:16:01.000Z
|
2021-06-15T10:16:01.000Z
|
cpcctool/cpcc_code_docx.py
|
l2m2/cpcc-tool
|
48404e1c228f06edfde697069641d722823955f3
|
[
"MIT"
] | null | null | null |
cpcctool/cpcc_code_docx.py
|
l2m2/cpcc-tool
|
48404e1c228f06edfde697069641d722823955f3
|
[
"MIT"
] | null | null | null |
'''
@File: cpcc_code_docx.py
@Description: Generate source code word document
@Author: leon.li(l2m2lq@gmail.com)
@Date: 2019-12-17 15:29:51
'''
import os
import uuid
import tempfile
import win32com.client as win32
from .source_tie import tie
from .txt2docx import txt2docx
def docx_first_n_pages(docx_file, dst_file, n):
app = win32.DispatchEx("Word.Application")
app.Visible = 0
app.DisplayAlerts = 0
app.Documents.Open(docx_file)
try:
doc = app.ActiveDocument
doc.Repaginate()
page_count = doc.ComputeStatistics(2)
app.Selection.GoTo(1, 1, n)
r = doc.Bookmarks("\\Page").Range
app.Selection.GoTo(1, 1, page_count)
r.End = doc.Bookmarks("\\Page").Range.End
r.Delete()
doc.SaveAs(dst_file, 16)
doc.Close(SaveChanges=0)
finally:
app.Quit()
def docx_last_n_pages(docx_file, dst_file, n):
app = win32.DispatchEx("Word.Application")
app.Visible = 0
app.DisplayAlerts = 0
app.Documents.Open(docx_file)
try:
doc = app.ActiveDocument
doc.Repaginate()
page_count = doc.ComputeStatistics(2)
app.Selection.GoTo(1, 1, 1)
r = doc.Bookmarks("\\Page").Range
app.Selection.GoTo(1, 1, page_count - n)
r.End = doc.Bookmarks("\\Page").Range.End
r.Delete()
doc.SaveAs(dst_file, 16)
doc.Close(SaveChanges=0)
finally:
app.Quit()
def docx_sandwich(docx_file, dst_file, first_n, last_n):
    """Keep the first first_n and last last_n pages, dropping the middle."""
    app = win32.DispatchEx("Word.Application")
    app.Visible = 0
    app.DisplayAlerts = 0
    app.Documents.Open(docx_file)
    try:
        doc = app.ActiveDocument
        doc.Repaginate()
        page_count = doc.ComputeStatistics(2)
        # Delete pages (first_n + 1)..(page_count - last_n).
        app.Selection.GoTo(1, 1, first_n + 1)
        r = doc.Bookmarks("\\Page").Range
        app.Selection.GoTo(1, 1, page_count - last_n)
        r.End = doc.Bookmarks("\\Page").Range.End
        r.Delete()
        doc.SaveAs(dst_file, 16)
        doc.Close(SaveChanges=0)
    finally:
        app.Quit()
def gen_code_docx(src_dirs, dst_file):
    """Concatenate the sources under src_dirs into a temp file, then convert it to .docx."""
    tmp_txt_file = tempfile.gettempdir() + os.sep + str(uuid.uuid4())
    tie(src_dirs, tmp_txt_file)
    txt2docx(tmp_txt_file, dst_file)
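A short driver shows how these pieces compose. This only runs on Windows with Microsoft Word installed (win32com drives Word over COM), and the paths below are purely illustrative:

from cpcctool.cpcc_code_docx import gen_code_docx, docx_sandwich

# Collect every source file under two directories into a single .docx.
gen_code_docx([r"C:\proj\src", r"C:\proj\include"], r"C:\out\code.docx")

# Keep only the first 30 and last 30 pages of the result.
docx_sandwich(r"C:\out\code.docx", r"C:\out\code_sandwich.docx", 30, 30)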
[misc_module/welcomes/commands/__init__.py · alentoghostflame/StupidAlentoBot @ c024bfb79a9ecb0d9fda5ddc4e361a0cb878baba · blob c9291c85e129b9fac101acac404ae725bf939532 · 150 bytes · Python · MIT · 1 star (2021-12-12), 17 issues (2020-02-07 to 2020-12-22)]
# from misc_module.welcomes.commands.welcome_control import welcome_control
from misc_module.welcomes.commands.welcome_on_join import welcome_on_join
[test/data.py · kaushikacharya/PyStanfordDependencies @ 43d8f38a19e40087f273330087918c87df6d4d8f · blob c93ee75f3e07ed6bc2aa8d8e2e9d8c15c9c1a478 · 58,776 bytes · Python · Apache-2.0 · 69 stars (2015-01-04 to 2021-09-04), 27 issues, 19 forks]
# this file contains all the string data (inputs and outputs) for tests
# the SD trees were originally produced on SD 3.4.1 but they work up
# to (at least) SD 3.5.2. the UD trees were produced using UD 3.5.2.
# tests now require SD/UD 3.5.2 (and thus Java 1.8). downside of this
# is that we can't test JPype on older versions of SD since it can only
# be (safely) initialized once.
class trees_sd:
    tree1 = '(S1 (NP (DT a) (NN cow)))'
    tree1_out = '''
Token(index=1, form='a', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='cow', cpos='NN', pos='NN', head=0, deprel='root')
'''.strip()
    tree2 = '(S1 (NP (NP (NP (DT A) (NN cat)) (CC and) (NP (DT a) ' \
            '(NN mouse))) (. .)))'
    tree2_out_basic = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='cat', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='a', cpos='DT', pos='DT', head=5, deprel='det')
Token(index=5, form='mouse', cpos='NN', pos='NN', head=2, deprel='conj')
Token(index=6, form='.', cpos='.', pos='.', head=2, deprel='punct')'''.strip()
    tree2_out_collapsed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='cat', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=4, form='a', cpos='DT', pos='DT', head=5, deprel='det')
Token(index=5, form='mouse', cpos='NN', pos='NN', head=2, deprel='conj_and')
Token(index=6, form='.', cpos='.', pos='.', head=2, deprel='punct')'''.strip()
    tree2_out_CCprocessed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='cat', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=4, form='a', cpos='DT', pos='DT', head=5, deprel='det')
Token(index=5, form='mouse', cpos='NN', pos='NN', head=2, deprel='conj_and')
Token(index=6, form='.', cpos='.', pos='.', head=2, deprel='punct')'''.strip()
    tree2_out_collapsedTree = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='cat', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=4, form='a', cpos='DT', pos='DT', head=5, deprel='det')
Token(index=5, form='mouse', cpos='NN', pos='NN', head=2, deprel='conj_and')
Token(index=6, form='.', cpos='.', pos='.', head=2, deprel='punct')'''.strip()
    tree3 = '(S1 (NP (DT some) (JJ blue) (NN moose)))'
    tree3_out = '''
Token(index=1, form='some', cpos='DT', pos='DT', head=3, deprel='det')
Token(index=2, form='blue', cpos='JJ', pos='JJ', head=3, deprel='amod')
Token(index=3, form='moose', cpos='NN', pos='NN', head=0, deprel='root')
'''.strip()
    tree4 = '(S1 (NP (NP (DT A) (NN burrito)) (PP (IN with) (NP (NP ' + \
            '(NNS beans)) (CONJP (CC but) (RB not)) (NP (NN chicken)))) (. .)))'
    tree4_out_basic = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=3, form='with', cpos='IN', pos='IN', head=2, deprel='prep')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=3, deprel='pobj')
Token(index=5, form='but', cpos='CC', pos='CC', head=6, deprel='cc')
Token(index=6, form='not', cpos='RB', pos='RB', head=4, deprel='cc')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree4_out_collapsed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=2, deprel='prep_with')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj_negcc')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree4_out_CCprocessed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=2, deprel='prep_with')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=2, deprel='prep_with')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj_negcc')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree4_out_collapsedTree = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=2, deprel='prep_with')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj_negcc')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5 = '''
(S1 (S (NP (NNP Ed))
(VP (VBZ cooks)
(CC and)
(VBZ sells)
(NP (NP (NNS burritos))
(PP (IN with)
(NP (NNS beans) (CONJP (CC but) (RB not)) (NN rice)))))
(. .)))
'''.strip()
    tree5_out_basic = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=5, deprel='prep')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=6, deprel='pobj')
Token(index=8, form='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_collapsed = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj_and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='prep_with')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj_negcc')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_CCprocessed = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=4, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj_and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='prep_with')
Token(index=10, form='rice', cpos='NN', pos='NN', head=5, deprel='prep_with')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj_negcc')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_collapsedTree = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj_and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='prep_with')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj_negcc')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_collapsedTree_no_punct = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj_and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='prep_with')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj_negcc')
'''.strip()
    tree5_out_collapsedTree_erased = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=0, deprel='erased')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj_and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=0, deprel='erased')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='prep_with')
Token(index=8, form='but', cpos='CC', pos='CC', head=0, deprel='erased')
Token(index=9, form='not', cpos='RB', pos='RB', head=0, deprel='erased')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj_negcc')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_collapsedTree_erased_no_punct = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=0, deprel='erased')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj_and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=0, deprel='erased')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='prep_with')
Token(index=8, form='but', cpos='CC', pos='CC', head=0, deprel='erased')
Token(index=9, form='not', cpos='RB', pos='RB', head=0, deprel='erased')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj_negcc')
'''.strip()
    tree5_out_basic_lemmas = '''
Token(index=1, form='Ed', lemma='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', lemma='cook', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', lemma='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', lemma='sell', cpos='VBZ', pos='VBZ', head=2, deprel='conj')
Token(index=5, form='burritos', lemma='burrito', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', lemma='with', cpos='IN', pos='IN', head=5, deprel='prep')
Token(index=7, form='beans', lemma='bean', cpos='NNS', pos='NNS', head=6, deprel='pobj')
Token(index=8, form='but', lemma='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', lemma='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', lemma='rice', cpos='NN', pos='NN', head=7, deprel='conj')
Token(index=11, form='.', lemma='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    # tests -NONE- handling
    tree6 = '''
( (S
(S-TPC-1
(NP-SBJ (PRP He) )
(ADVP (RB also) )
(VP (VBZ is)
(NP-PRD (DT a) (NN consensus) (NN manager) )))
(, ,)
(NP-SBJ (NNS insiders) )
(VP (VBP say)
(SBAR (-NONE- 0)
(S (-NONE- *T*-1) )))
(. .) ))
'''
    tree6_out = '''
Token(index=1, form='He', cpos='PRP', pos='PRP', head=6, deprel='nsubj')
Token(index=2, form='also', cpos='RB', pos='RB', head=6, deprel='advmod')
Token(index=3, form='is', cpos='VBZ', pos='VBZ', head=6, deprel='cop')
Token(index=4, form='a', cpos='DT', pos='DT', head=6, deprel='det')
Token(index=5, form='consensus', cpos='NN', pos='NN', head=6, deprel='nn')
Token(index=6, form='manager', cpos='NN', pos='NN', head=9, deprel='ccomp')
Token(index=7, form=',', cpos=',', pos=',', head=9, deprel='punct')
Token(index=8, form='insiders', cpos='NNS', pos='NNS', head=9, deprel='nsubj')
Token(index=9, form='say', cpos='VBP', pos='VBP', head=0, deprel='root')
Token(index=10, form='.', cpos='.', pos='.', head=9, deprel='punct')
'''.strip()
    # tests weird \/ handling
    tree7 = '''(S1 (NP
(NP (NNP PRIME) (NNP RATE) )
(: :)
(NP (CD 10) (CD 1\/2) (NN %) )
(. .) ))'''
    tree7_out = '''
Token(index=1, form='PRIME', cpos='NNP', pos='NNP', head=2, deprel='nn')
Token(index=2, form='RATE', cpos='NNP', pos='NNP', head=0, deprel='root')
Token(index=3, form=':', cpos=':', pos=':', head=2, deprel='punct')
Token(index=4, form='10', cpos='CD', pos='CD', head=6, deprel='num')
Token(index=5, form='1/2', cpos='CD', pos='CD', head=6, deprel='num')
Token(index=6, form='%', cpos='NN', pos='NN', head=2, deprel='dep')
Token(index=7, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree8 = '''
(ROOT (S (NP (NNS Visitors)) (VP (MD can) (VP (VB reach) (NP (PRP it)) (ADVP (RB only)) (PP (PP (IN under) (NP (JJ strict) (JJ military) (NN escort))) (CC and) (PP (IN with) (NP (NP (JJ prior) (NN permission)) (PP (IN from) (NP (DT the) (NNP Pentagon)))))) (, ,) (PP (IN aboard) (NP (NP (JJ special) (JJ small) (NN shuttle) (NNS flights)) (SBAR (WHNP (WDT that)) (S (VP (VBP reach) (NP (DT the) (NN base)) (PP (IN by) (NP (NP (DT a) (JJ circuitous) (NN flight)) (PP (IN from) (NP (DT the) (NNP United) (NNPS States)))))))))))) (. .)))
'''
    tree8_out = '''
Token(index=1, form='Visitors', cpos='NNS', pos='NNS', head=3, deprel='nsubj')
Token(index=2, form='can', cpos='MD', pos='MD', head=3, deprel='aux')
Token(index=3, form='reach', cpos='VB', pos='VB', head=0, deprel='root')
Token(index=4, form='it', cpos='PRP', pos='PRP', head=3, deprel='dobj')
Token(index=5, form='only', cpos='RB', pos='RB', head=3, deprel='advmod')
Token(index=6, form='under', cpos='IN', pos='IN', head=3, deprel='prep')
Token(index=7, form='strict', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=8, form='military', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=9, form='escort', cpos='NN', pos='NN', head=6, deprel='pobj')
Token(index=10, form='and', cpos='CC', pos='CC', head=6, deprel='cc')
Token(index=11, form='with', cpos='IN', pos='IN', head=6, deprel='conj')
Token(index=12, form='prior', cpos='JJ', pos='JJ', head=13, deprel='amod')
Token(index=13, form='permission', cpos='NN', pos='NN', head=11, deprel='pobj')
Token(index=14, form='from', cpos='IN', pos='IN', head=13, deprel='prep')
Token(index=15, form='the', cpos='DT', pos='DT', head=16, deprel='det')
Token(index=16, form='Pentagon', cpos='NNP', pos='NNP', head=14, deprel='pobj')
Token(index=17, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=18, form='aboard', cpos='IN', pos='IN', head=3, deprel='prep')
Token(index=19, form='special', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=20, form='small', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=21, form='shuttle', cpos='NN', pos='NN', head=22, deprel='nn')
Token(index=22, form='flights', cpos='NNS', pos='NNS', head=18, deprel='pobj')
Token(index=23, form='that', cpos='WDT', pos='WDT', head=24, deprel='nsubj')
Token(index=24, form='reach', cpos='VBP', pos='VBP', head=22, deprel='rcmod')
Token(index=25, form='the', cpos='DT', pos='DT', head=26, deprel='det')
Token(index=26, form='base', cpos='NN', pos='NN', head=24, deprel='dobj')
Token(index=27, form='by', cpos='IN', pos='IN', head=24, deprel='prep')
Token(index=28, form='a', cpos='DT', pos='DT', head=30, deprel='det')
Token(index=29, form='circuitous', cpos='JJ', pos='JJ', head=30, deprel='amod')
Token(index=30, form='flight', cpos='NN', pos='NN', head=27, deprel='pobj')
Token(index=31, form='from', cpos='IN', pos='IN', head=30, deprel='prep')
Token(index=32, form='the', cpos='DT', pos='DT', head=34, deprel='det')
Token(index=33, form='United', cpos='NNP', pos='NNP', head=34, deprel='nn')
Token(index=34, form='States', cpos='NNPS', pos='NNPS', head=31, deprel='pobj')
Token(index=35, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
    tree8_out_collapsed = '''
Token(index=1, form='Visitors', cpos='NNS', pos='NNS', head=3, deprel='nsubj')
Token(index=2, form='can', cpos='MD', pos='MD', head=3, deprel='aux')
Token(index=3, form='reach', cpos='VB', pos='VB', head=0, deprel='root')
Token(index=3, form='reach', cpos='VB', pos='VB', head=3, deprel='conj_and', extra={'dep_is_copy': 1})
Token(index=4, form='it', cpos='PRP', pos='PRP', head=3, deprel='dobj')
Token(index=5, form='only', cpos='RB', pos='RB', head=3, deprel='advmod')
Token(index=7, form='strict', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=8, form='military', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=9, form='escort', cpos='NN', pos='NN', head=3, deprel='prep_under')
Token(index=12, form='prior', cpos='JJ', pos='JJ', head=13, deprel='amod')
Token(index=13, form='permission', cpos='NN', pos='NN', head=3, deprel='prep_with', extra={'gov_is_copy': 1})
Token(index=15, form='the', cpos='DT', pos='DT', head=16, deprel='det')
Token(index=16, form='Pentagon', cpos='NNP', pos='NNP', head=13, deprel='prep_from')
Token(index=17, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=19, form='special', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=20, form='small', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=21, form='shuttle', cpos='NN', pos='NN', head=22, deprel='nn')
Token(index=22, form='flights', cpos='NNS', pos='NNS', head=3, deprel='prep_aboard')
Token(index=22, form='flights', cpos='NNS', pos='NNS', head=24, deprel='nsubj')
Token(index=24, form='reach', cpos='VBP', pos='VBP', head=22, deprel='rcmod')
Token(index=25, form='the', cpos='DT', pos='DT', head=26, deprel='det')
Token(index=26, form='base', cpos='NN', pos='NN', head=24, deprel='dobj')
Token(index=28, form='a', cpos='DT', pos='DT', head=30, deprel='det')
Token(index=29, form='circuitous', cpos='JJ', pos='JJ', head=30, deprel='amod')
Token(index=30, form='flight', cpos='NN', pos='NN', head=24, deprel='prep_by')
Token(index=32, form='the', cpos='DT', pos='DT', head=34, deprel='det')
Token(index=33, form='United', cpos='NNP', pos='NNP', head=34, deprel='nn')
Token(index=34, form='States', cpos='NNPS', pos='NNPS', head=30, deprel='prep_from')
Token(index=35, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
    tree9 = '''(ROOT (S (NP (NP (DT A) (NN total)) (PP (IN of) (NP (NP
(QP (CD 17) (CD million)) (JJ metric) (NNS tons)) (PP (IN of) (NP
(NNS potatoes)))))) (VP (VBD was) (VP (VBN produced) (, ,) (SBAR (WHNP
(WDT which)) (S (VP (VBD was) (ADJP (NP (CD 14) (NN %)) (JJR less)
(PP (PP (IN than) (NP (NP (NP (JJ last) (NN year)) (PRN (-LRB- -LRB-)
(NP (NP (CD 106) (NNS quintals)) (PP (IN per) (NP (NN hectare))))
(-RRB- -RRB-))) (, ,) (CC and) (NP (NP (QP (CD 5.4) (CD million))
(JJ metric) (NNS tons)) (PP (IN of) (NP (NNS vegetables)))))) (, ,)
(CC or) (ADVP (NP (CD 2.2) (NN %)) (RBR more)) (PP (IN than) (PP (IN
on) (NP (DT the) (JJ same) (NN date)) (NP (JJ last) (NN year))))))
(PRN (-LRB- -LRB-) (NP (NP (JJ 116) (NNS quintals)) (PP (IN per)
(NP (NN hectare)))) (-RRB- -RRB-))))))) (. .)))'''
    tree9_out = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='total', cpos='NN', pos='NN', head=11, deprel='nsubjpass')
Token(index=3, form='of', cpos='IN', pos='IN', head=2, deprel='prep')
Token(index=4, form='17', cpos='CD', pos='CD', head=5, deprel='number')
Token(index=5, form='million', cpos='CD', pos='CD', head=7, deprel='num')
Token(index=6, form='metric', cpos='JJ', pos='JJ', head=7, deprel='amod')
Token(index=7, form='tons', cpos='NNS', pos='NNS', head=3, deprel='pobj')
Token(index=8, form='of', cpos='IN', pos='IN', head=7, deprel='prep')
Token(index=9, form='potatoes', cpos='NNS', pos='NNS', head=8, deprel='pobj')
Token(index=10, form='was', cpos='VBD', pos='VBD', head=11, deprel='auxpass')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=0, deprel='root')
Token(index=12, form=',', cpos=',', pos=',', head=11, deprel='punct')
Token(index=13, form='which', cpos='WDT', pos='WDT', head=17, deprel='nsubj')
Token(index=14, form='was', cpos='VBD', pos='VBD', head=17, deprel='cop')
Token(index=15, form='14', cpos='CD', pos='CD', head=16, deprel='num')
Token(index=16, form='%', cpos='NN', pos='NN', head=17, deprel='npadvmod')
Token(index=17, form='less', cpos='JJR', pos='JJR', head=11, deprel='ccomp')
Token(index=18, form='than', cpos='IN', pos='IN', head=17, deprel='prep')
Token(index=19, form='last', cpos='JJ', pos='JJ', head=20, deprel='amod')
Token(index=20, form='year', cpos='NN', pos='NN', head=18, deprel='pobj')
Token(index=21, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=23, deprel='punct')
Token(index=22, form='106', cpos='CD', pos='CD', head=23, deprel='num')
Token(index=23, form='quintals', cpos='NNS', pos='NNS', head=20, deprel='dep')
Token(index=24, form='per', cpos='IN', pos='IN', head=23, deprel='prep')
Token(index=25, form='hectare', cpos='NN', pos='NN', head=24, deprel='pobj')
Token(index=26, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=23, deprel='punct')
Token(index=27, form=',', cpos=',', pos=',', head=20, deprel='punct')
Token(index=28, form='and', cpos='CC', pos='CC', head=20, deprel='cc')
Token(index=29, form='5.4', cpos='CD', pos='CD', head=30, deprel='number')
Token(index=30, form='million', cpos='CD', pos='CD', head=32, deprel='num')
Token(index=31, form='metric', cpos='JJ', pos='JJ', head=32, deprel='amod')
Token(index=32, form='tons', cpos='NNS', pos='NNS', head=20, deprel='conj')
Token(index=33, form='of', cpos='IN', pos='IN', head=32, deprel='prep')
Token(index=34, form='vegetables', cpos='NNS', pos='NNS', head=33, deprel='pobj')
Token(index=35, form=',', cpos=',', pos=',', head=18, deprel='punct')
Token(index=36, form='or', cpos='CC', pos='CC', head=18, deprel='cc')
Token(index=37, form='2.2', cpos='CD', pos='CD', head=38, deprel='num')
Token(index=38, form='%', cpos='NN', pos='NN', head=39, deprel='npadvmod')
Token(index=39, form='more', cpos='RBR', pos='RBR', head=18, deprel='conj')
Token(index=40, form='than', cpos='IN', pos='IN', head=18, deprel='conj')
Token(index=41, form='on', cpos='IN', pos='IN', head=40, deprel='pcomp')
Token(index=42, form='the', cpos='DT', pos='DT', head=44, deprel='det')
Token(index=43, form='same', cpos='JJ', pos='JJ', head=44, deprel='amod')
Token(index=44, form='date', cpos='NN', pos='NN', head=41, deprel='pobj')
Token(index=45, form='last', cpos='JJ', pos='JJ', head=46, deprel='amod')
Token(index=46, form='year', cpos='NN', pos='NN', head=41, deprel='tmod')
Token(index=47, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=49, deprel='punct')
Token(index=48, form='116', cpos='JJ', pos='JJ', head=49, deprel='amod')
Token(index=49, form='quintals', cpos='NNS', pos='NNS', head=17, deprel='dep')
Token(index=50, form='per', cpos='IN', pos='IN', head=49, deprel='prep')
Token(index=51, form='hectare', cpos='NN', pos='NN', head=50, deprel='pobj')
Token(index=52, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=49, deprel='punct')
Token(index=53, form='.', cpos='.', pos='.', head=11, deprel='punct')
'''.strip()
    tree9_out_collapsed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='total', cpos='NN', pos='NN', head=11, deprel='nsubjpass')
Token(index=4, form='17', cpos='CD', pos='CD', head=5, deprel='number')
Token(index=5, form='million', cpos='CD', pos='CD', head=7, deprel='num')
Token(index=6, form='metric', cpos='JJ', pos='JJ', head=7, deprel='amod')
Token(index=7, form='tons', cpos='NNS', pos='NNS', head=2, deprel='prep_of')
Token(index=9, form='potatoes', cpos='NNS', pos='NNS', head=7, deprel='prep_of')
Token(index=10, form='was', cpos='VBD', pos='VBD', head=11, deprel='auxpass')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=0, deprel='root')
Token(index=12, form=',', cpos=',', pos=',', head=11, deprel='punct')
Token(index=13, form='which', cpos='WDT', pos='WDT', head=17, deprel='nsubj')
Token(index=14, form='was', cpos='VBD', pos='VBD', head=17, deprel='cop')
Token(index=15, form='14', cpos='CD', pos='CD', head=16, deprel='num')
Token(index=16, form='%', cpos='NN', pos='NN', head=17, deprel='npadvmod')
Token(index=17, form='less', cpos='JJR', pos='JJR', head=11, deprel='ccomp')
Token(index=19, form='last', cpos='JJ', pos='JJ', head=20, deprel='amod')
Token(index=20, form='year', cpos='NN', pos='NN', head=17, deprel='prep_than')
Token(index=21, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=23, deprel='punct')
Token(index=22, form='106', cpos='CD', pos='CD', head=23, deprel='num')
Token(index=23, form='quintals', cpos='NNS', pos='NNS', head=20, deprel='dep')
Token(index=25, form='hectare', cpos='NN', pos='NN', head=23, deprel='prep_per')
Token(index=26, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=23, deprel='punct')
Token(index=27, form=',', cpos=',', pos=',', head=20, deprel='punct')
Token(index=29, form='5.4', cpos='CD', pos='CD', head=30, deprel='number')
Token(index=30, form='million', cpos='CD', pos='CD', head=32, deprel='num')
Token(index=31, form='metric', cpos='JJ', pos='JJ', head=32, deprel='amod')
Token(index=32, form='tons', cpos='NNS', pos='NNS', head=20, deprel='conj_and')
Token(index=34, form='vegetables', cpos='NNS', pos='NNS', head=32, deprel='prep_of')
Token(index=35, form=',', cpos=',', pos=',', head=17, deprel='punct')
Token(index=37, form='2.2', cpos='CD', pos='CD', head=38, deprel='num')
Token(index=38, form='%', cpos='NN', pos='NN', head=39, deprel='npadvmod')
Token(index=39, form='more', cpos='RBR', pos='RBR', head=17, deprel='conj')
Token(index=41, form='on', cpos='IN', pos='IN', head=17, deprel='pcomp')
Token(index=42, form='the', cpos='DT', pos='DT', head=44, deprel='det')
Token(index=43, form='same', cpos='JJ', pos='JJ', head=44, deprel='amod')
Token(index=44, form='date', cpos='NN', pos='NN', head=41, deprel='pobj')
Token(index=45, form='last', cpos='JJ', pos='JJ', head=46, deprel='amod')
Token(index=46, form='year', cpos='NN', pos='NN', head=41, deprel='tmod')
Token(index=47, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=49, deprel='punct')
Token(index=48, form='116', cpos='JJ', pos='JJ', head=49, deprel='amod')
Token(index=49, form='quintals', cpos='NNS', pos='NNS', head=17, deprel='dep')
Token(index=51, form='hectare', cpos='NN', pos='NN', head=49, deprel='prep_per')
Token(index=52, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=49, deprel='punct')
Token(index=53, form='.', cpos='.', pos='.', head=11, deprel='punct')
'''.strip()
    tree10 = r'''
(ROOT (NP (NP (NNP Hanoi) (, ,) (NNP May) (CD 13)) (PRN (-LRB- -LRB-) (NP (NNP VNA)) (-RRB- -RRB-)) (: --) (NP (NP (NNP Vietnam)) (SBAR (S (VP (VBZ has) (VP (VBN produced) (NP (NP (DT a) (NN variety)) (PP (IN of) (NP (NNS drugs)))) (S (VP (TO to) (VP (VB control) (NP (NNS HIV\/AIDS)) (PP (IN in) (NP (NP (NNS patients)) (VP (VBG suffering) (PP (IN with) (NP (DT the) (NN disease)))))))))))))) (. .)))
'''.strip()
    tree10_out = '''
Token(index=1, form='Hanoi', cpos='NNP', pos='NNP', head=3, deprel='nn')
Token(index=2, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=3, form='May', cpos='NNP', pos='NNP', head=0, deprel='root')
Token(index=4, form='13', cpos='CD', pos='CD', head=3, deprel='num')
Token(index=5, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=6, deprel='punct')
Token(index=6, form='VNA', cpos='NNP', pos='NNP', head=3, deprel='appos')
Token(index=7, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=6, deprel='punct')
Token(index=8, form='--', cpos=':', pos=':', head=3, deprel='punct')
Token(index=9, form='Vietnam', cpos='NNP', pos='NNP', head=3, deprel='dep')
Token(index=10, form='has', cpos='VBZ', pos='VBZ', head=11, deprel='aux')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=9, deprel='rcmod')
Token(index=12, form='a', cpos='DT', pos='DT', head=13, deprel='det')
Token(index=13, form='variety', cpos='NN', pos='NN', head=11, deprel='dobj')
Token(index=14, form='of', cpos='IN', pos='IN', head=13, deprel='prep')
Token(index=15, form='drugs', cpos='NNS', pos='NNS', head=14, deprel='pobj')
Token(index=16, form='to', cpos='TO', pos='TO', head=17, deprel='aux')
Token(index=17, form='control', cpos='VB', pos='VB', head=11, deprel='vmod')
Token(index=18, form='HIV/AIDS', cpos='NNS', pos='NNS', head=17, deprel='dobj')
Token(index=19, form='in', cpos='IN', pos='IN', head=17, deprel='prep')
Token(index=20, form='patients', cpos='NNS', pos='NNS', head=19, deprel='pobj')
Token(index=21, form='suffering', cpos='VBG', pos='VBG', head=20, deprel='vmod')
Token(index=22, form='with', cpos='IN', pos='IN', head=21, deprel='prep')
Token(index=23, form='the', cpos='DT', pos='DT', head=24, deprel='det')
Token(index=24, form='disease', cpos='NN', pos='NN', head=22, deprel='pobj')
Token(index=25, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
    tree10_out_collapsed = '''
Token(index=1, form='Hanoi', cpos='NNP', pos='NNP', head=3, deprel='nn')
Token(index=2, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=3, form='May', cpos='NNP', pos='NNP', head=0, deprel='root')
Token(index=4, form='13', cpos='CD', pos='CD', head=3, deprel='num')
Token(index=5, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=6, deprel='punct')
Token(index=6, form='VNA', cpos='NNP', pos='NNP', head=3, deprel='appos')
Token(index=7, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=6, deprel='punct')
Token(index=8, form='--', cpos=':', pos=':', head=3, deprel='punct')
Token(index=9, form='Vietnam', cpos='NNP', pos='NNP', head=3, deprel='dep')
Token(index=10, form='has', cpos='VBZ', pos='VBZ', head=11, deprel='aux')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=9, deprel='rcmod')
Token(index=12, form='a', cpos='DT', pos='DT', head=13, deprel='det')
Token(index=13, form='variety', cpos='NN', pos='NN', head=11, deprel='dobj')
Token(index=15, form='drugs', cpos='NNS', pos='NNS', head=13, deprel='prep_of')
Token(index=16, form='to', cpos='TO', pos='TO', head=17, deprel='aux')
Token(index=17, form='control', cpos='VB', pos='VB', head=11, deprel='vmod')
Token(index=18, form='HIV/AIDS', cpos='NNS', pos='NNS', head=17, deprel='dobj')
Token(index=20, form='patients', cpos='NNS', pos='NNS', head=17, deprel='prep_in')
Token(index=21, form='suffering', cpos='VBG', pos='VBG', head=20, deprel='vmod')
Token(index=23, form='the', cpos='DT', pos='DT', head=24, deprel='det')
Token(index=24, form='disease', cpos='NN', pos='NN', head=21, deprel='prep_with')
Token(index=25, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
    @classmethod
    def get_basic_test_trees(cls):
        return ((cls.tree1, cls.tree1_out),
                (cls.tree2, cls.tree2_out_basic),
                (cls.tree3, cls.tree3_out),
                (cls.tree4, cls.tree4_out_basic),
                (cls.tree5, cls.tree5_out_basic),
                (cls.tree6, cls.tree6_out),
                (cls.tree7, cls.tree7_out),
                (cls.tree8, cls.tree8_out),
                (cls.tree9, cls.tree9_out),
                (cls.tree10, cls.tree10_out))
    @classmethod
    def get_repr_test_trees(cls):
        return ((cls.tree2,
                 dict(basic=cls.tree2_out_basic,
                      collapsed=cls.tree2_out_collapsed,
                      CCprocessed=cls.tree2_out_CCprocessed,
                      collapsedTree=cls.tree2_out_collapsedTree)),
                (cls.tree4,
                 dict(basic=cls.tree4_out_basic,
                      collapsed=cls.tree4_out_collapsed,
                      CCprocessed=cls.tree4_out_CCprocessed,
                      collapsedTree=cls.tree4_out_collapsedTree)),
                (cls.tree5,
                 dict(basic=cls.tree5_out_basic,
                      collapsed=cls.tree5_out_collapsed,
                      CCprocessed=cls.tree5_out_CCprocessed,
                      collapsedTree=cls.tree5_out_collapsedTree)),
                (cls.tree8, dict(collapsed=cls.tree8_out_collapsed)),
                (cls.tree9, dict(collapsed=cls.tree9_out_collapsed)),
                (cls.tree10, dict(collapsed=cls.tree10_out_collapsed)))
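# Sketch of how a harness might consume the fixtures above; hedged: this
# assumes the package's documented top-level API (StanfordDependencies.
# get_instance / convert_tree) and that each converted Token's repr matches
# the expected lines. Guarded so importing this data module stays
# side-effect free.
if __name__ == '__main__':
    import StanfordDependencies
    sd = StanfordDependencies.get_instance(backend='subprocess')
    for tree, expected in trees_sd.get_basic_test_trees():
        tokens = sd.convert_tree(tree)  # PTB bracketing -> list of Tokens
        got = '\n'.join(repr(tok) for tok in tokens)
        assert got == expected, got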
# UD trees are similar to SD trees, but some parts are overridden
class trees_ud(trees_sd):
    tree2_out_collapsed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='cat', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='a', cpos='DT', pos='DT', head=5, deprel='det')
Token(index=5, form='mouse', cpos='NN', pos='NN', head=2, deprel='conj:and')
Token(index=6, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree2_out_collapsedTree = tree2_out_collapsed
    tree2_out_CCprocessed = tree2_out_collapsed
    tree4_out_basic = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=3, form='with', cpos='IN', pos='IN', head=4, deprel='case')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=2, deprel='nmod')
Token(index=5, form='but', cpos='CC', pos='CC', head=6, deprel='cc')
Token(index=6, form='not', cpos='RB', pos='RB', head=4, deprel='cc')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree4_out_collapsed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=3, form='with', cpos='IN', pos='IN', head=4, deprel='case')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=2, deprel='nmod:with')
Token(index=5, form='but', cpos='CC', pos='CC', head=6, deprel='cc')
Token(index=6, form='not', cpos='RB', pos='RB', head=4, deprel='cc')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj:negcc')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree4_out_CCprocessed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='burrito', cpos='NN', pos='NN', head=0, deprel='root')
Token(index=3, form='with', cpos='IN', pos='IN', head=4, deprel='case')
Token(index=4, form='beans', cpos='NNS', pos='NNS', head=2, deprel='nmod:with')
Token(index=5, form='but', cpos='CC', pos='CC', head=6, deprel='cc')
Token(index=6, form='not', cpos='RB', pos='RB', head=4, deprel='cc')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=2, deprel='nmod:with')
Token(index=7, form='chicken', cpos='NN', pos='NN', head=4, deprel='conj:negcc')
Token(index=8, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree4_out_collapsedTree = tree4_out_collapsed
    tree5_out_basic = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='nmod')
Token(index=8, form='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_collapsed = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj:and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='nmod:with')
Token(index=8, form='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj:negcc')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_CCprocessed = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=4, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj:and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='nmod:with')
Token(index=8, form='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', cpos='NN', pos='NN', head=5, deprel='nmod:with')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj:negcc')
Token(index=11, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree5_out_collapsedTree = tree5_out_collapsed
    tree5_out_collapsedTree_no_punct = '''
Token(index=1, form='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', cpos='VBZ', pos='VBZ', head=2, deprel='conj:and')
Token(index=5, form='burritos', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=7, form='beans', cpos='NNS', pos='NNS', head=5, deprel='nmod:with')
Token(index=8, form='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', cpos='NN', pos='NN', head=7, deprel='conj:negcc')
'''.strip()
    # nothing gets erased in UD
    tree5_out_collapsedTree_erased = tree5_out_collapsedTree
    tree5_out_collapsedTree_erased_no_punct = tree5_out_collapsedTree_no_punct
    tree5_out_basic_lemmas = '''
Token(index=1, form='Ed', lemma='Ed', cpos='NNP', pos='NNP', head=2, deprel='nsubj')
Token(index=2, form='cooks', lemma='cook', cpos='VBZ', pos='VBZ', head=0, deprel='root')
Token(index=3, form='and', lemma='and', cpos='CC', pos='CC', head=2, deprel='cc')
Token(index=4, form='sells', lemma='sell', cpos='VBZ', pos='VBZ', head=2, deprel='conj')
Token(index=5, form='burritos', lemma='burrito', cpos='NNS', pos='NNS', head=2, deprel='dobj')
Token(index=6, form='with', lemma='with', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=7, form='beans', lemma='bean', cpos='NNS', pos='NNS', head=5, deprel='nmod')
Token(index=8, form='but', lemma='but', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=9, form='not', lemma='not', cpos='RB', pos='RB', head=7, deprel='cc')
Token(index=10, form='rice', lemma='rice', cpos='NN', pos='NN', head=7, deprel='conj')
Token(index=11, form='.', lemma='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree6_out = '''
Token(index=1, form='He', cpos='PRP', pos='PRP', head=6, deprel='nsubj')
Token(index=2, form='also', cpos='RB', pos='RB', head=6, deprel='advmod')
Token(index=3, form='is', cpos='VBZ', pos='VBZ', head=6, deprel='cop')
Token(index=4, form='a', cpos='DT', pos='DT', head=6, deprel='det')
Token(index=5, form='consensus', cpos='NN', pos='NN', head=6, deprel='compound')
Token(index=6, form='manager', cpos='NN', pos='NN', head=9, deprel='ccomp')
Token(index=7, form=',', cpos=',', pos=',', head=9, deprel='punct')
Token(index=8, form='insiders', cpos='NNS', pos='NNS', head=9, deprel='nsubj')
Token(index=9, form='say', cpos='VBP', pos='VBP', head=0, deprel='root')
Token(index=10, form='.', cpos='.', pos='.', head=9, deprel='punct')
'''.strip()
    tree7_out = '''
Token(index=1, form='PRIME', cpos='NNP', pos='NNP', head=2, deprel='compound')
Token(index=2, form='RATE', cpos='NNP', pos='NNP', head=0, deprel='root')
Token(index=3, form=':', cpos=':', pos=':', head=2, deprel='punct')
Token(index=4, form='10', cpos='CD', pos='CD', head=6, deprel='nummod')
Token(index=5, form='1/2', cpos='CD', pos='CD', head=6, deprel='nummod')
Token(index=6, form='%', cpos='NN', pos='NN', head=2, deprel='dep')
Token(index=7, form='.', cpos='.', pos='.', head=2, deprel='punct')
'''.strip()
    tree8_out = '''
Token(index=1, form='Visitors', cpos='NNS', pos='NNS', head=3, deprel='nsubj')
Token(index=2, form='can', cpos='MD', pos='MD', head=3, deprel='aux')
Token(index=3, form='reach', cpos='VB', pos='VB', head=0, deprel='root')
Token(index=4, form='it', cpos='PRP', pos='PRP', head=3, deprel='dobj')
Token(index=5, form='only', cpos='RB', pos='RB', head=3, deprel='advmod')
Token(index=6, form='under', cpos='IN', pos='IN', head=9, deprel='case')
Token(index=7, form='strict', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=8, form='military', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=9, form='escort', cpos='NN', pos='NN', head=3, deprel='nmod')
Token(index=10, form='and', cpos='CC', pos='CC', head=9, deprel='cc')
Token(index=11, form='with', cpos='IN', pos='IN', head=13, deprel='case')
Token(index=12, form='prior', cpos='JJ', pos='JJ', head=13, deprel='amod')
Token(index=13, form='permission', cpos='NN', pos='NN', head=9, deprel='conj')
Token(index=14, form='from', cpos='IN', pos='IN', head=16, deprel='case')
Token(index=15, form='the', cpos='DT', pos='DT', head=16, deprel='det')
Token(index=16, form='Pentagon', cpos='NNP', pos='NNP', head=13, deprel='nmod')
Token(index=17, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=18, form='aboard', cpos='IN', pos='IN', head=22, deprel='case')
Token(index=19, form='special', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=20, form='small', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=21, form='shuttle', cpos='NN', pos='NN', head=22, deprel='compound')
Token(index=22, form='flights', cpos='NNS', pos='NNS', head=3, deprel='nmod')
Token(index=23, form='that', cpos='WDT', pos='WDT', head=24, deprel='nsubj')
Token(index=24, form='reach', cpos='VBP', pos='VBP', head=22, deprel='acl:relcl')
Token(index=25, form='the', cpos='DT', pos='DT', head=26, deprel='det')
Token(index=26, form='base', cpos='NN', pos='NN', head=24, deprel='dobj')
Token(index=27, form='by', cpos='IN', pos='IN', head=30, deprel='case')
Token(index=28, form='a', cpos='DT', pos='DT', head=30, deprel='det')
Token(index=29, form='circuitous', cpos='JJ', pos='JJ', head=30, deprel='amod')
Token(index=30, form='flight', cpos='NN', pos='NN', head=24, deprel='nmod')
Token(index=31, form='from', cpos='IN', pos='IN', head=34, deprel='case')
Token(index=32, form='the', cpos='DT', pos='DT', head=34, deprel='det')
Token(index=33, form='United', cpos='NNP', pos='NNP', head=34, deprel='compound')
Token(index=34, form='States', cpos='NNPS', pos='NNPS', head=30, deprel='nmod')
Token(index=35, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
    tree8_out_collapsed = '''
Token(index=1, form='Visitors', cpos='NNS', pos='NNS', head=3, deprel='nsubj')
Token(index=2, form='can', cpos='MD', pos='MD', head=3, deprel='aux')
Token(index=3, form='reach', cpos='VB', pos='VB', head=0, deprel='root')
Token(index=3, form='reach', cpos='VB', pos='VB', head=3, deprel='conj:and', extra={'dep_is_copy': 1})
Token(index=4, form='it', cpos='PRP', pos='PRP', head=3, deprel='dobj')
Token(index=5, form='only', cpos='RB', pos='RB', head=3, deprel='advmod')
Token(index=6, form='under', cpos='IN', pos='IN', head=9, deprel='case')
Token(index=7, form='strict', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=8, form='military', cpos='JJ', pos='JJ', head=9, deprel='amod')
Token(index=9, form='escort', cpos='NN', pos='NN', head=3, deprel='nmod:under')
Token(index=10, form='and', cpos='CC', pos='CC', head=3, deprel='cc')
Token(index=11, form='with', cpos='IN', pos='IN', head=13, deprel='case')
Token(index=12, form='prior', cpos='JJ', pos='JJ', head=13, deprel='amod')
Token(index=13, form='permission', cpos='NN', pos='NN', head=3, deprel='nmod:with', extra={'gov_is_copy': 1})
Token(index=14, form='from', cpos='IN', pos='IN', head=16, deprel='case')
Token(index=15, form='the', cpos='DT', pos='DT', head=16, deprel='det')
Token(index=16, form='Pentagon', cpos='NNP', pos='NNP', head=13, deprel='nmod:from')
Token(index=17, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=18, form='aboard', cpos='IN', pos='IN', head=22, deprel='case')
Token(index=19, form='special', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=20, form='small', cpos='JJ', pos='JJ', head=22, deprel='amod')
Token(index=21, form='shuttle', cpos='NN', pos='NN', head=22, deprel='compound')
Token(index=22, form='flights', cpos='NNS', pos='NNS', head=3, deprel='nmod:aboard')
Token(index=22, form='flights', cpos='NNS', pos='NNS', head=24, deprel='nsubj')
Token(index=23, form='that', cpos='WDT', pos='WDT', head=22, deprel='ref')
Token(index=24, form='reach', cpos='VBP', pos='VBP', head=22, deprel='acl:relcl')
Token(index=25, form='the', cpos='DT', pos='DT', head=26, deprel='det')
Token(index=26, form='base', cpos='NN', pos='NN', head=24, deprel='dobj')
Token(index=27, form='by', cpos='IN', pos='IN', head=30, deprel='case')
Token(index=28, form='a', cpos='DT', pos='DT', head=30, deprel='det')
Token(index=29, form='circuitous', cpos='JJ', pos='JJ', head=30, deprel='amod')
Token(index=30, form='flight', cpos='NN', pos='NN', head=24, deprel='nmod:by')
Token(index=31, form='from', cpos='IN', pos='IN', head=34, deprel='case')
Token(index=32, form='the', cpos='DT', pos='DT', head=34, deprel='det')
Token(index=33, form='United', cpos='NNP', pos='NNP', head=34, deprel='compound')
Token(index=34, form='States', cpos='NNPS', pos='NNPS', head=30, deprel='nmod:from')
Token(index=35, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
    tree9_out = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='total', cpos='NN', pos='NN', head=11, deprel='nsubjpass')
Token(index=3, form='of', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=4, form='17', cpos='CD', pos='CD', head=5, deprel='compound')
Token(index=5, form='million', cpos='CD', pos='CD', head=7, deprel='nummod')
Token(index=6, form='metric', cpos='JJ', pos='JJ', head=7, deprel='amod')
Token(index=7, form='tons', cpos='NNS', pos='NNS', head=2, deprel='nmod')
Token(index=8, form='of', cpos='IN', pos='IN', head=9, deprel='case')
Token(index=9, form='potatoes', cpos='NNS', pos='NNS', head=7, deprel='nmod')
Token(index=10, form='was', cpos='VBD', pos='VBD', head=11, deprel='auxpass')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=0, deprel='root')
Token(index=12, form=',', cpos=',', pos=',', head=11, deprel='punct')
Token(index=13, form='which', cpos='WDT', pos='WDT', head=17, deprel='nsubj')
Token(index=14, form='was', cpos='VBD', pos='VBD', head=17, deprel='cop')
Token(index=15, form='14', cpos='CD', pos='CD', head=16, deprel='nummod')
Token(index=16, form='%', cpos='NN', pos='NN', head=17, deprel='nmod:npmod')
Token(index=17, form='less', cpos='JJR', pos='JJR', head=11, deprel='ccomp')
Token(index=18, form='than', cpos='IN', pos='IN', head=20, deprel='case')
Token(index=19, form='last', cpos='JJ', pos='JJ', head=20, deprel='amod')
Token(index=20, form='year', cpos='NN', pos='NN', head=17, deprel='nmod')
Token(index=21, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=23, deprel='punct')
Token(index=22, form='106', cpos='CD', pos='CD', head=23, deprel='nummod')
Token(index=23, form='quintals', cpos='NNS', pos='NNS', head=20, deprel='dep')
Token(index=24, form='per', cpos='IN', pos='IN', head=25, deprel='case')
Token(index=25, form='hectare', cpos='NN', pos='NN', head=23, deprel='nmod')
Token(index=26, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=23, deprel='punct')
Token(index=27, form=',', cpos=',', pos=',', head=20, deprel='punct')
Token(index=28, form='and', cpos='CC', pos='CC', head=20, deprel='cc')
Token(index=29, form='5.4', cpos='CD', pos='CD', head=30, deprel='compound')
Token(index=30, form='million', cpos='CD', pos='CD', head=32, deprel='nummod')
Token(index=31, form='metric', cpos='JJ', pos='JJ', head=32, deprel='amod')
Token(index=32, form='tons', cpos='NNS', pos='NNS', head=20, deprel='conj')
Token(index=33, form='of', cpos='IN', pos='IN', head=34, deprel='case')
Token(index=34, form='vegetables', cpos='NNS', pos='NNS', head=32, deprel='nmod')
Token(index=35, form=',', cpos=',', pos=',', head=20, deprel='punct')
Token(index=36, form='or', cpos='CC', pos='CC', head=20, deprel='cc')
Token(index=37, form='2.2', cpos='CD', pos='CD', head=38, deprel='nummod')
Token(index=38, form='%', cpos='NN', pos='NN', head=39, deprel='nmod:npmod')
Token(index=39, form='more', cpos='RBR', pos='RBR', head=20, deprel='conj')
Token(index=40, form='than', cpos='IN', pos='IN', head=44, deprel='case')
Token(index=41, form='on', cpos='IN', pos='IN', head=44, deprel='case')
Token(index=42, form='the', cpos='DT', pos='DT', head=44, deprel='det')
Token(index=43, form='same', cpos='JJ', pos='JJ', head=44, deprel='amod')
Token(index=44, form='date', cpos='NN', pos='NN', head=20, deprel='conj')
Token(index=45, form='last', cpos='JJ', pos='JJ', head=46, deprel='amod')
Token(index=46, form='year', cpos='NN', pos='NN', head=44, deprel='nmod:tmod')
Token(index=47, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=49, deprel='punct')
Token(index=48, form='116', cpos='JJ', pos='JJ', head=49, deprel='amod')
Token(index=49, form='quintals', cpos='NNS', pos='NNS', head=17, deprel='dep')
Token(index=50, form='per', cpos='IN', pos='IN', head=51, deprel='case')
Token(index=51, form='hectare', cpos='NN', pos='NN', head=49, deprel='nmod')
Token(index=52, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=49, deprel='punct')
Token(index=53, form='.', cpos='.', pos='.', head=11, deprel='punct')
'''.strip()
    tree9_out_collapsed = '''
Token(index=1, form='A', cpos='DT', pos='DT', head=2, deprel='det')
Token(index=2, form='total', cpos='NN', pos='NN', head=11, deprel='nsubjpass')
Token(index=3, form='of', cpos='IN', pos='IN', head=7, deprel='case')
Token(index=4, form='17', cpos='CD', pos='CD', head=5, deprel='compound')
Token(index=5, form='million', cpos='CD', pos='CD', head=7, deprel='nummod')
Token(index=6, form='metric', cpos='JJ', pos='JJ', head=7, deprel='amod')
Token(index=7, form='tons', cpos='NNS', pos='NNS', head=2, deprel='nmod:of')
Token(index=8, form='of', cpos='IN', pos='IN', head=9, deprel='case')
Token(index=9, form='potatoes', cpos='NNS', pos='NNS', head=7, deprel='nmod:of')
Token(index=10, form='was', cpos='VBD', pos='VBD', head=11, deprel='auxpass')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=0, deprel='root')
Token(index=12, form=',', cpos=',', pos=',', head=11, deprel='punct')
Token(index=13, form='which', cpos='WDT', pos='WDT', head=17, deprel='nsubj')
Token(index=14, form='was', cpos='VBD', pos='VBD', head=17, deprel='cop')
Token(index=15, form='14', cpos='CD', pos='CD', head=16, deprel='nummod')
Token(index=16, form='%', cpos='NN', pos='NN', head=17, deprel='nmod:npmod')
Token(index=17, form='less', cpos='JJR', pos='JJR', head=11, deprel='ccomp')
Token(index=17, form='less', cpos='JJR', pos='JJR', head=17, deprel='conj:and', extra={'dep_is_copy': 3})
Token(index=17, form='less', cpos='JJR', pos='JJR', head=17, deprel='conj:and', extra={'dep_is_copy': 4})
Token(index=17, form='less', cpos='JJR', pos='JJR', head=17, deprel='conj:or', extra={'dep_is_copy': 1})
Token(index=17, form='less', cpos='JJR', pos='JJR', head=17, deprel='conj:or', extra={'dep_is_copy': 2})
Token(index=18, form='than', cpos='IN', pos='IN', head=20, deprel='case')
Token(index=19, form='last', cpos='JJ', pos='JJ', head=20, deprel='amod')
Token(index=20, form='year', cpos='NN', pos='NN', head=17, deprel='nmod:than')
Token(index=21, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=23, deprel='punct')
Token(index=22, form='106', cpos='CD', pos='CD', head=23, deprel='nummod')
Token(index=23, form='quintals', cpos='NNS', pos='NNS', head=20, deprel='dep')
Token(index=24, form='per', cpos='IN', pos='IN', head=25, deprel='case')
Token(index=25, form='hectare', cpos='NN', pos='NN', head=23, deprel='nmod:per')
Token(index=26, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=23, deprel='punct')
Token(index=27, form=',', cpos=',', pos=',', head=20, deprel='punct')
Token(index=28, form='and', cpos='CC', pos='CC', head=17, deprel='cc')
Token(index=29, form='5.4', cpos='CD', pos='CD', head=30, deprel='compound')
Token(index=30, form='million', cpos='CD', pos='CD', head=32, deprel='nummod')
Token(index=31, form='metric', cpos='JJ', pos='JJ', head=32, deprel='amod')
Token(index=32, form='tons', cpos='NNS', pos='NNS', head=20, deprel='conj')
Token(index=33, form='of', cpos='IN', pos='IN', head=34, deprel='case')
Token(index=34, form='vegetables', cpos='NNS', pos='NNS', head=32, deprel='nmod:of')
Token(index=35, form=',', cpos=',', pos=',', head=20, deprel='punct')
Token(index=36, form='or', cpos='CC', pos='CC', head=17, deprel='cc')
Token(index=37, form='2.2', cpos='CD', pos='CD', head=38, deprel='nummod')
Token(index=38, form='%', cpos='NN', pos='NN', head=39, deprel='nmod:npmod')
Token(index=39, form='more', cpos='RBR', pos='RBR', head=20, deprel='conj')
Token(index=40, form='than', cpos='IN', pos='IN', head=44, deprel='case')
Token(index=41, form='on', cpos='IN', pos='IN', head=44, deprel='case')
Token(index=42, form='the', cpos='DT', pos='DT', head=44, deprel='det')
Token(index=43, form='same', cpos='JJ', pos='JJ', head=44, deprel='amod')
Token(index=44, form='date', cpos='NN', pos='NN', head=17, deprel='nmod:on', extra={'gov_is_copy': 1})
Token(index=45, form='last', cpos='JJ', pos='JJ', head=46, deprel='amod')
Token(index=46, form='year', cpos='NN', pos='NN', head=44, deprel='nmod:tmod')
Token(index=47, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=49, deprel='punct')
Token(index=48, form='116', cpos='JJ', pos='JJ', head=49, deprel='amod')
Token(index=49, form='quintals', cpos='NNS', pos='NNS', head=17, deprel='dep')
Token(index=50, form='per', cpos='IN', pos='IN', head=51, deprel='case')
Token(index=51, form='hectare', cpos='NN', pos='NN', head=49, deprel='nmod:per')
Token(index=52, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=49, deprel='punct')
Token(index=53, form='.', cpos='.', pos='.', head=11, deprel='punct')
'''.strip()
    tree10_out = '''
Token(index=1, form='Hanoi', cpos='NNP', pos='NNP', head=3, deprel='compound')
Token(index=2, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=3, form='May', cpos='NNP', pos='NNP', head=0, deprel='root')
Token(index=4, form='13', cpos='CD', pos='CD', head=3, deprel='nummod')
Token(index=5, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=6, deprel='punct')
Token(index=6, form='VNA', cpos='NNP', pos='NNP', head=3, deprel='appos')
Token(index=7, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=6, deprel='punct')
Token(index=8, form='--', cpos=':', pos=':', head=3, deprel='punct')
Token(index=9, form='Vietnam', cpos='NNP', pos='NNP', head=3, deprel='dep')
Token(index=10, form='has', cpos='VBZ', pos='VBZ', head=11, deprel='aux')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=9, deprel='acl:relcl')
Token(index=12, form='a', cpos='DT', pos='DT', head=13, deprel='det')
Token(index=13, form='variety', cpos='NN', pos='NN', head=11, deprel='dobj')
Token(index=14, form='of', cpos='IN', pos='IN', head=15, deprel='case')
Token(index=15, form='drugs', cpos='NNS', pos='NNS', head=13, deprel='nmod')
Token(index=16, form='to', cpos='TO', pos='TO', head=17, deprel='mark')
Token(index=17, form='control', cpos='VB', pos='VB', head=11, deprel='advcl')
Token(index=18, form='HIV/AIDS', cpos='NNS', pos='NNS', head=17, deprel='dobj')
Token(index=19, form='in', cpos='IN', pos='IN', head=20, deprel='case')
Token(index=20, form='patients', cpos='NNS', pos='NNS', head=17, deprel='nmod')
Token(index=21, form='suffering', cpos='VBG', pos='VBG', head=20, deprel='acl')
Token(index=22, form='with', cpos='IN', pos='IN', head=24, deprel='case')
Token(index=23, form='the', cpos='DT', pos='DT', head=24, deprel='det')
Token(index=24, form='disease', cpos='NN', pos='NN', head=21, deprel='nmod')
Token(index=25, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
tree10_out_collapsed = '''
Token(index=1, form='Hanoi', cpos='NNP', pos='NNP', head=3, deprel='compound')
Token(index=2, form=',', cpos=',', pos=',', head=3, deprel='punct')
Token(index=3, form='May', cpos='NNP', pos='NNP', head=0, deprel='root')
Token(index=4, form='13', cpos='CD', pos='CD', head=3, deprel='nummod')
Token(index=5, form='-LRB-', cpos='-LRB-', pos='-LRB-', head=6, deprel='punct')
Token(index=6, form='VNA', cpos='NNP', pos='NNP', head=3, deprel='appos')
Token(index=7, form='-RRB-', cpos='-RRB-', pos='-RRB-', head=6, deprel='punct')
Token(index=8, form='--', cpos=':', pos=':', head=3, deprel='punct')
Token(index=9, form='Vietnam', cpos='NNP', pos='NNP', head=3, deprel='dep')
Token(index=10, form='has', cpos='VBZ', pos='VBZ', head=11, deprel='aux')
Token(index=11, form='produced', cpos='VBN', pos='VBN', head=9, deprel='acl:relcl')
Token(index=12, form='a', cpos='DT', pos='DT', head=13, deprel='det')
Token(index=13, form='variety', cpos='NN', pos='NN', head=11, deprel='dobj')
Token(index=14, form='of', cpos='IN', pos='IN', head=15, deprel='case')
Token(index=15, form='drugs', cpos='NNS', pos='NNS', head=13, deprel='nmod:of')
Token(index=16, form='to', cpos='TO', pos='TO', head=17, deprel='mark')
Token(index=17, form='control', cpos='VB', pos='VB', head=11, deprel='advcl')
Token(index=18, form='HIV/AIDS', cpos='NNS', pos='NNS', head=17, deprel='dobj')
Token(index=19, form='in', cpos='IN', pos='IN', head=20, deprel='case')
Token(index=20, form='patients', cpos='NNS', pos='NNS', head=17, deprel='nmod:in')
Token(index=21, form='suffering', cpos='VBG', pos='VBG', head=20, deprel='acl')
Token(index=22, form='with', cpos='IN', pos='IN', head=24, deprel='case')
Token(index=23, form='the', cpos='DT', pos='DT', head=24, deprel='det')
Token(index=24, form='disease', cpos='NN', pos='NN', head=21, deprel='nmod:with')
Token(index=25, form='.', cpos='.', pos='.', head=3, deprel='punct')
'''.strip()
| 65.96633
| 538
| 0.618535
| 9,799
| 58,776
| 3.686601
| 0.03133
| 0.189066
| 0.034408
| 0.03319
| 0.929522
| 0.921827
| 0.917343
| 0.914131
| 0.910062
| 0.906713
| 0
| 0.04314
| 0.104362
| 58,776
| 890
| 539
| 66.040449
| 0.643099
| 0.008626
| 0
| 0.740093
| 0
| 0.740093
| 0.931889
| 0.004326
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002331
| false
| 0.009324
| 0
| 0.002331
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a36e5e866ea32dfb59e174285397008dcf8a046e
| 31,238
|
py
|
Python
|
glance/tests/functional/v2/test_metadef_namespace_api_policy.py
|
Steap/glance
|
4ee7799aa7f6a7172e361392ebb8d3da03e0bf7f
|
[
"Apache-2.0"
] | 309
|
2015-01-01T17:49:09.000Z
|
2022-03-29T14:56:31.000Z
|
glance/tests/functional/v2/test_metadef_namespace_api_policy.py
|
Steap/glance
|
4ee7799aa7f6a7172e361392ebb8d3da03e0bf7f
|
[
"Apache-2.0"
] | 8
|
2015-11-04T21:53:48.000Z
|
2020-12-15T05:36:35.000Z
|
glance/tests/functional/v2/test_metadef_namespace_api_policy.py
|
Steap/glance
|
4ee7799aa7f6a7172e361392ebb8d3da03e0bf7f
|
[
"Apache-2.0"
] | 409
|
2015-01-01T11:28:26.000Z
|
2022-03-29T14:56:41.000Z
|
# Copyright 2021 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import oslo_policy.policy
from glance.api import policy
from glance.tests import functional
GLOBAL_NAMESPACE_DATA = {
"namespace": "MyNamespace",
"display_name": "My User Friendly Namespace",
"description": "My description",
"resource_type_associations": [{
"name": "MyResourceType",
"prefix": "prefix_",
"properties_target": "temp"
}],
"objects": [{
"name": "MyObject",
"description": "My object for My namespace",
"properties": {
"test_property": {
"title": "test_property",
"description": "Test property for My object",
"type": "string"
},
}
}],
"tags": [{
"name": "MyTag",
}],
"properties": {
"TestProperty": {
"title": "MyTestProperty",
"description": "Test Property for My namespace",
"type": "string"
},
},
}
NAME_SPACE1 = {
"namespace": "MyNamespace",
"display_name": "My User Friendly Namespace",
"description": "My description"
}
NAME_SPACE2 = {
"namespace": "MySecondNamespace",
"display_name": "My User Friendly Namespace",
"description": "My description"
}
class TestMetadefNamespacesPolicy(functional.SynchronousAPIBase):
def setUp(self):
super(TestMetadefNamespacesPolicy, self).setUp()
self.policy = policy.Enforcer(suppress_deprecation_warnings=True)
def set_policy_rules(self, rules):
self.policy.set_rules(
oslo_policy.policy.Rules.from_dict(rules),
overwrite=True)
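# A quick note on the rule strings used throughout these tests: in
# oslo.policy syntax, '@' is a rule that always passes and '!' is a
# rule that always fails, so individual metadef policies can be
# toggled on and off. An illustrative (hypothetical) call:
#
#     self.set_policy_rules({
#         'get_metadef_namespaces': '!',  # deny listing namespaces
#         'get_metadef_namespace': '@',   # still allow fetching one
#     })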
def start_server(self):
with mock.patch.object(policy, 'Enforcer') as mock_enf:
mock_enf.return_value = self.policy
super(TestMetadefNamespacesPolicy, self).start_server()
def _verify_forbidden_converted_to_not_found(self, path, method,
json=None):
# Note for reviewers: these tests run by default using the admin
# role; to test this scenario we need a private namespace of the
# current project to be accessed by another project's non-admin
# user.
headers = self._headers({
'X-Tenant-Id': 'fake-tenant-id',
'X-Roles': 'member',
})
resp = self.api_request(method, path, headers=headers, json=json)
self.assertEqual(404, resp.status_code)
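# A minimal sketch (not part of the suite) of the behaviour this
# helper verifies: when another project's non-admin user requests a
# private namespace, the API converts the 403 Forbidden into a 404
# Not Found so the namespace's existence is not leaked. Values here
# are illustrative only:
#
#     headers = self._headers({'X-Tenant-Id': 'other-project',
#                              'X-Roles': 'member'})
#     resp = self.api_request('GET',
#                             '/v2/metadefs/namespaces/MyNamespace',
#                             headers=headers)
#     assert resp.status_code == 404  # not 403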
def test_namespace_list_basic(self):
self.start_server()
# First make sure creating a private namespace works with default policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path=path,
data=NAME_SPACE1)
self.assertEqual('MyNamespace', md_resource['namespace'])
# Then make sure creating a public namespace works with default policy
path = '/v2/metadefs/namespaces'
NAME_SPACE2["visibility"] = 'public'
md_resource = self._create_metadef_resource(path=path,
data=NAME_SPACE2)
self.assertEqual('MySecondNamespace', md_resource['namespace'])
# Now make sure 'get_metadef_namespaces' allows the user to get all
# the namespaces
resp = self.api_get(path)
md_resource = resp.json
self.assertEqual(2, len(md_resource['namespaces']))
# Now disable get_metadef_namespaces permissions and make sure any
# other attempts fail
self.set_policy_rules({
'get_metadef_namespaces': '!',
'get_metadef_namespace': '@',
})
resp = self.api_get(path)
self.assertEqual(403, resp.status_code)
def test_namespace_list_with_resource_types(self):
self.start_server()
# First make sure creating a namespace works with default policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path=path,
data=GLOBAL_NAMESPACE_DATA)
self.assertEqual('MyNamespace', md_resource['namespace'])
# Now make sure 'get_metadef_namespaces' allows the user to get all
# the namespaces with associated resource types
resp = self.api_get(path)
md_resource = resp.json
self.assertEqual(1, len(md_resource['namespaces']))
# Verify that the response includes associated resource types as well
for namespace_obj in md_resource['namespaces']:
self.assertIn('resource_type_associations', namespace_obj)
# Now disable list_metadef_resource_types permissions and make sure
# you get a forbidden response
self.set_policy_rules({
'get_metadef_namespaces': '@',
'get_metadef_namespace': '@',
'list_metadef_resource_types': '!'
})
resp = self.api_get(path)
self.assertEqual(403, resp.status_code)
# Now enable list_metadef_resource_types and get_metadef_namespaces
# permissions and disable get_metadef_namespace permission to make
# sure you will get an empty list as a response
self.set_policy_rules({
'get_metadef_namespaces': '@',
'get_metadef_namespace': '!',
'list_metadef_resource_types': '@'
})
resp = self.api_get(path)
md_resource = resp.json
self.assertEqual(0, len(md_resource['namespaces']))
# Verify that the response does not include associated resource types
for namespace_obj in md_resource['namespaces']:
self.assertNotIn('resource_type_associations', namespace_obj)
def test_namespace_create_basic(self):
self.start_server()
# First make sure creating a namespace works with default policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path=path,
data=NAME_SPACE1)
self.assertEqual('MyNamespace', md_resource['namespace'])
# Now disable add_metadef_namespace permissions and make sure any other
# attempts fail
self.set_policy_rules({
'add_metadef_namespace': '!',
'get_metadef_namespace': '@'
})
resp = self.api_post(path, json=NAME_SPACE2)
self.assertEqual(403, resp.status_code)
def test_namespace_create_with_resource_type_associations(self):
self.start_server()
# First make sure you can create a namespace and resource type
# associations with default policy
path = '/v2/metadefs/namespaces'
data = {
"resource_type_associations": [{
"name": "MyResourceType",
"prefix": "prefix_",
"properties_target": "temp"
}],
}
data.update(NAME_SPACE1)
md_resource = self._create_metadef_resource(path=path,
data=data)
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual(
'MyResourceType',
md_resource['resource_type_associations'][0]['name'])
# Now disable add_metadef_resource_type_association permissions and
# make sure that even if you have permission to create the namespace
# the request will fail
self.set_policy_rules({
'add_metadef_resource_type_association': '!',
'get_metadef_namespace': '@'
})
data.update(NAME_SPACE2)
resp = self.api_post(path, json=data)
self.assertEqual(403, resp.status_code)
def test_namespace_create_with_objects(self):
self.start_server()
# First make sure you can create a namespace and objects
# with default policy
path = '/v2/metadefs/namespaces'
data = {
"objects": [{
"name": "MyObject",
"description": "My object for My namespace",
"properties": {
"test_property": {
"title": "test_property",
"description": "Test property for My object",
"type": "string"
},
}
}],
}
data.update(NAME_SPACE1)
md_resource = self._create_metadef_resource(path=path,
data=data)
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual(
'MyObject',
md_resource['objects'][0]['name'])
# Now disable add_metadef_object permissions and
# make sure that even if you have permission to create the namespace
# the request will fail
self.set_policy_rules({
'add_metadef_object': '!',
'get_metadef_namespace': '@'
})
data.update(NAME_SPACE2)
resp = self.api_post(path, json=data)
self.assertEqual(403, resp.status_code)
def test_namespace_create_with_tags(self):
self.start_server()
# First make sure you can create a namespace and tags
# with default policy
path = '/v2/metadefs/namespaces'
data = {
"tags": [{
"name": "MyTag",
}],
}
data.update(NAME_SPACE1)
md_resource = self._create_metadef_resource(path=path,
data=data)
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual(
'MyTag',
md_resource['tags'][0]['name'])
# Now disable add_metadef_tag permissions and
# make sure that even if you have permission to create the namespace
# the request will fail
data.update(NAME_SPACE2)
self.set_policy_rules({
'add_metadef_tag': '!',
'get_metadef_namespace': '@'
})
resp = self.api_post(path, json=data)
self.assertEqual(403, resp.status_code)
def test_namespace_create_with_properties(self):
self.start_server()
# First make sure you can create a namespace and properties
# with default policy
path = '/v2/metadefs/namespaces'
data = {
"properties": {
"TestProperty": {
"title": "MyTestProperty",
"description": "Test Property for My namespace",
"type": "string"
},
}
}
data.update(NAME_SPACE1)
md_resource = self._create_metadef_resource(path=path,
data=data)
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual(
'MyTestProperty',
md_resource['properties']['TestProperty']['title'])
# Now disable add_metadef_property permissions and
# make sure that even if you have permission to create the namespace
# the request will fail
data.update(NAME_SPACE2)
self.set_policy_rules({
'add_metadef_property': '!',
'get_metadef_namespace': '@'
})
resp = self.api_post(path, json=data)
self.assertEqual(403, resp.status_code)
def test_namespace_get_basic(self):
self.start_server()
# First make sure creating a namespace works with default policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path=path,
data=GLOBAL_NAMESPACE_DATA)
self.assertEqual('MyNamespace', md_resource['namespace'])
# Now make sure get_metadef_namespace will return all associated
# resources in the response as every policy is open.
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
md_resource = resp.json
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertIn('objects', md_resource)
self.assertIn('resource_type_associations', md_resource)
self.assertIn('tags', md_resource)
self.assertIn('properties', md_resource)
# Now disable the get_metadef_namespace policy to ensure that the
# request is forbidden and returns 404 Not Found
self.set_policy_rules({'get_metadef_namespace': '!'})
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
self.assertEqual(404, resp.status_code)
# Now try to get the same namespace as a different user
self.set_policy_rules({'get_metadef_namespace': '@'})
self._verify_forbidden_converted_to_not_found(path, 'GET')
# Now disable get_metadef_objects policy to ensure that you will
# get a forbidden response
self.set_policy_rules({
'get_metadef_objects': '!',
'get_metadef_namespace': '@',
'list_metadef_resource_types': '@',
'get_metadef_properties': '@',
'get_metadef_tags': '@'
})
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
self.assertEqual(403, resp.status_code)
# Now disable list_metadef_resource_types policy to ensure that you
# will get a forbidden response
self.set_policy_rules({
'get_metadef_objects': '@',
'get_metadef_namespace': '@',
'list_metadef_resource_types': '!',
'get_metadef_properties': '@',
'get_metadef_tags': '@'
})
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
self.assertEqual(403, resp.status_code)
# Now disable get_metadef_properties policy to ensure that you will
# get a forbidden response
self.set_policy_rules({
'get_metadef_objects': '@',
'get_metadef_namespace': '@',
'list_metadef_resource_types': '@',
'get_metadef_properties': '!',
'get_metadef_tags': '@'
})
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
self.assertEqual(403, resp.status_code)
# Now disable get_metadef_tags policy to ensure that you will
# get a forbidden response
self.set_policy_rules({
'get_metadef_objects': '@',
'get_metadef_namespace': '@',
'list_metadef_resource_types': '@',
'get_metadef_properties': '@',
'get_metadef_tags': '!'
})
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
self.assertEqual(403, resp.status_code)
def test_namespace_update_basic(self):
self.start_server()
# First make sure creating a namespace works with default policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path=path,
data=NAME_SPACE1)
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual('private', md_resource['visibility'])
# Now ensure you are able to update the namespace
path = '/v2/metadefs/namespaces/%s' % md_resource['namespace']
data = {
'visibility': 'public',
'namespace': md_resource['namespace'],
}
resp = self.api_put(path, json=data)
md_resource = resp.json
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual('public', md_resource['visibility'])
# Now disable modify_metadef_namespace permissions and make sure
# any other attempt results in 403 Forbidden
self.set_policy_rules({
'modify_metadef_namespace': '!',
'get_metadef_namespace': '@',
})
resp = self.api_put(path, json=data)
self.assertEqual(403, resp.status_code)
# Now enable modify_metadef_namespace and get_metadef_namespace
# permissions and make sure modifying a non-existing namespace
# results in 404 Not Found
self.set_policy_rules({
'modify_metadef_namespace': '@',
'get_metadef_namespace': '@',
})
path = '/v2/metadefs/namespaces/non-existing'
resp = self.api_put(path, json=data)
self.assertEqual(404, resp.status_code)
# Note for reviewers: this causes our "check get if modify fails"
# logic to return 404 as we expect, though it is not related to the
# latest rev that checks the namespace get operation first.
self.set_policy_rules({
'modify_metadef_namespace': '!',
'get_metadef_namespace': '!',
})
path = '/v2/metadefs/namespaces/%s' % md_resource['namespace']
resp = self.api_put(path, json=data)
self.assertEqual(404, resp.status_code)
# Ensure accessing a non-visible namespace will catch the 403 and
# return 404 to the user
self.set_policy_rules({
'modify_metadef_namespace': '@',
'get_metadef_namespace': '@',
})
# Reset the visibility to private by updating the namespace
path = '/v2/metadefs/namespaces/%s' % md_resource['namespace']
data = {
'visibility': 'private',
'namespace': md_resource['namespace'],
}
resp = self.api_put(path, json=data)
md_resource = resp.json
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertEqual('private', md_resource['visibility'])
# Now try to update the same namespace as a different user
self._verify_forbidden_converted_to_not_found(path, 'PUT',
json=data)
def test_namespace_delete_basic(self):
def _create_private_namespace(fn_call, data):
path = '/v2/metadefs/namespaces'
return fn_call(path=path, data=data)
self.start_server()
# First make sure creating a namespace works with default policy
md_resource = _create_private_namespace(
self._create_metadef_resource, NAME_SPACE1)
self.assertEqual('MyNamespace', md_resource['namespace'])
# Now ensure you are able to delete the namespace
path = '/v2/metadefs/namespaces/%s' % md_resource['namespace']
resp = self.api_delete(path)
self.assertEqual(204, resp.status_code)
# Verify that namespace is deleted
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
self.assertEqual(404, resp.status_code)
# Now create another namespace to check that deletion is not allowed
md_resource = _create_private_namespace(
self._create_metadef_resource, NAME_SPACE2)
self.assertEqual('MySecondNamespace', md_resource['namespace'])
# Now disable delete_metadef_namespace permissions and make sure
# any other attempts fail
path = '/v2/metadefs/namespaces/%s' % md_resource['namespace']
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '@'
})
resp = self.api_delete(path)
self.assertEqual(403, resp.status_code)
# Now enable both permissions and make sure deleting a non-existing
# namespace returns 404 Not Found
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@'
})
path = '/v2/metadefs/namespaces/non-existing'
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Note for reviewers: this causes our "check get if delete fails"
# logic to return 404 as we expect, though it is not related to the
# latest rev that checks the namespace get operation first.
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '!',
})
path = '/v2/metadefs/namespaces/%s' % md_resource['namespace']
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Ensure accessing a non-visible namespace will catch the 403 and
# return 404 to the user
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@',
})
self._verify_forbidden_converted_to_not_found(path, 'DELETE')
def test_namespace_delete_objects_basic(self):
self.start_server()
# First make sure creating a namespace and object works with default
# policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path,
data=GLOBAL_NAMESPACE_DATA)
self.assertEqual('MyNamespace', md_resource['namespace'])
self.assertIn('objects', md_resource)
# Now ensure you are able to delete the object(s) from namespace
path = '/v2/metadefs/namespaces/%s/objects' % md_resource['namespace']
resp = self.api_delete(path)
self.assertEqual(204, resp.status_code)
# Verify that the object is deleted from the namespace but the
# namespace is still available
path = "/v2/metadefs/namespaces/%s" % md_resource['namespace']
resp = self.api_get(path)
md_resource = resp.json
self.assertNotIn('objects', md_resource)
self.assertEqual('MyNamespace', md_resource['namespace'])
# Now add another object to the namespace
path = '/v2/metadefs/namespaces/%s/objects' % md_resource['namespace']
data = {
"name": "MyObject",
"description": "My object for My namespace",
"properties": {
"test_property": {
"title": "test_property",
"description": "Test property for My object",
"type": "string"
},
}
}
md_object = self._create_metadef_resource(path, data=data)
self.assertEqual('MyObject', md_object['name'])
# Now disable delete_metadef_namespace permissions and make sure
# any other attempts to delete objects fail
path = '/v2/metadefs/namespaces/%s/objects' % md_resource['namespace']
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '@'
})
resp = self.api_delete(path)
self.assertEqual(403, resp.status_code)
# Now enable both permissions and make sure deleting objects for a
# non-existing namespace returns 404 Not Found
path = '/v2/metadefs/namespaces/non-existing/objects'
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@'
})
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Note for reviewers: this causes our "check get if delete fails"
# logic to return 404 as we expect, though it is not related to the
# latest rev that checks the namespace get operation first.
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '!',
})
path = '/v2/metadefs/namespaces/%s/objects' % md_resource['namespace']
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Ensure accessing a non-visible namespace will catch the 403 and
# return 404 to the user
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@',
})
self._verify_forbidden_converted_to_not_found(path, 'DELETE')
def test_namespace_delete_properties_basic(self):
self.start_server()
# First make sure creating a namespace and properties works with
# default policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path,
data=GLOBAL_NAMESPACE_DATA)
namespace = md_resource['namespace']
self.assertEqual('MyNamespace', namespace)
self.assertIn('properties', md_resource)
# Now ensure you are able to delete all properties from namespace
path = '/v2/metadefs/namespaces/%s/properties' % namespace
resp = self.api_delete(path)
self.assertEqual(204, resp.status_code)
# Verify that the properties are deleted from the namespace but the
# namespace is still available
path = "/v2/metadefs/namespaces/%s" % namespace
resp = self.api_get(path)
md_resource = resp.json
self.assertNotIn('properties', md_resource)
self.assertEqual('MyNamespace', namespace)
# Now add another property to the namespace
path = '/v2/metadefs/namespaces/%s/properties' % namespace
data = {
"name": "MyProperty",
"title": "test_property",
"description": "Test property for My Namespace",
"type": "string"
}
md_resource = self._create_metadef_resource(path,
data=data)
self.assertEqual('MyProperty', md_resource['name'])
# Now disable delete_metadef_namespace permissions and make sure
# any other attempts to delete properties fail
path = '/v2/metadefs/namespaces/%s/properties' % namespace
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '@',
})
resp = self.api_delete(path)
self.assertEqual(403, resp.status_code)
# Now enable both permissions and make sure deleting properties
# for a non-existing namespace returns 404 Not Found
path = '/v2/metadefs/namespaces/non-existing/properties'
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@',
})
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Note for reviewers: this causes our "check get if delete fails"
# logic to return 404 as we expect, though it is not related to the
# latest rev that checks the namespace get operation first.
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '!',
})
path = '/v2/metadefs/namespaces/%s/properties' % namespace
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Ensure accessing a non-visible namespace will catch the 403 and
# return 404 to the user
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@',
})
self._verify_forbidden_converted_to_not_found(path, 'DELETE')
def test_namespace_delete_tags_basic(self):
self.start_server()
# First make sure creating a namespace and tags works with default
# policy
path = '/v2/metadefs/namespaces'
md_resource = self._create_metadef_resource(path,
data=GLOBAL_NAMESPACE_DATA)
namespace = md_resource['namespace']
self.assertEqual('MyNamespace', namespace)
self.assertIn('tags', md_resource)
# Now ensure you are able to delete all tags from the namespace
path = '/v2/metadefs/namespaces/%s/tags' % namespace
resp = self.api_delete(path)
self.assertEqual(204, resp.status_code)
# Verify that the tags are deleted from the namespace but the
# namespace is still available
path = "/v2/metadefs/namespaces/%s" % namespace
resp = self.api_get(path)
md_resource = resp.json
self.assertNotIn('tags', md_resource)
self.assertEqual('MyNamespace', namespace)
# Now add another tag to the namespace
tag_name = "MyTag"
path = '/v2/metadefs/namespaces/%s/tags/%s' % (namespace,
tag_name)
md_resource = self._create_metadef_resource(path)
self.assertEqual('MyTag', md_resource['name'])
# Now disable delete_metadef_namespace permissions and make sure
# any other attempts to delete tags fail
path = '/v2/metadefs/namespaces/%s/tags' % namespace
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '@'
})
resp = self.api_delete(path)
self.assertEqual(403, resp.status_code)
# Now enable delete_metadef_namespace permissions and disable
# delete_metadef_tags to make sure any other attempts to delete
# tags fail
path = '/v2/metadefs/namespaces/%s/tags' % namespace
self.set_policy_rules({
'delete_metadef_namespace': '@',
'delete_metadef_tags': '!',
'get_metadef_namespace': '@'
})
resp = self.api_delete(path)
self.assertEqual(403, resp.status_code)
# Now enable all permissions and make sure deleting tags for a
# non-existing namespace will return 404 Not Found
path = '/v2/metadefs/namespaces/non-existing/tags'
self.set_policy_rules({
'delete_metadef_namespace': '@',
'delete_metadef_tags': '@',
'get_metadef_namespace': '@'
})
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Note for reviewers: this causes our "check get if delete fails"
# logic to return 404 as we expect, though it is not related to the
# latest rev that checks the namespace get operation first.
self.set_policy_rules({
'delete_metadef_namespace': '!',
'get_metadef_namespace': '!',
'delete_metadef_tags': '!'
})
path = '/v2/metadefs/namespaces/%s/tags' % namespace
resp = self.api_delete(path)
self.assertEqual(404, resp.status_code)
# Ensure accessing a non-visible namespace will catch the 403 and
# return 404 to the user
self.set_policy_rules({
'delete_metadef_namespace': '@',
'get_metadef_namespace': '@',
'delete_metadef_tags': '@'
})
self._verify_forbidden_converted_to_not_found(path, 'DELETE')
| 40.568831
| 79
| 0.605577
| 3,356
| 31,238
| 5.421335
| 0.079261
| 0.048917
| 0.036935
| 0.063318
| 0.840057
| 0.818017
| 0.799439
| 0.780367
| 0.749533
| 0.704518
| 0
| 0.011296
| 0.297202
| 31,238
| 769
| 80
| 40.621586
| 0.817436
| 0.224214
| 0
| 0.774725
| 0
| 0
| 0.233289
| 0.130906
| 0
| 0
| 0
| 0
| 0.14652
| 1
| 0.032967
| false
| 0
| 0.007326
| 0
| 0.043956
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a375471e77ac148d25d4f71c344c69de42cf2a8f
| 222,966
|
py
|
Python
|
pyboto3/appstream.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 91
|
2016-12-31T11:38:37.000Z
|
2021-09-16T19:33:23.000Z
|
pyboto3/appstream.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 7
|
2017-01-02T18:54:23.000Z
|
2020-08-11T13:54:02.000Z
|
pyboto3/appstream.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 26
|
2016-12-31T13:11:00.000Z
|
2022-03-03T21:01:12.000Z
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def associate_fleet(FleetName=None, StackName=None):
"""
Associates the specified fleet with the specified stack.
See also: AWS API Documentation
Exceptions
:example: response = client.associate_fleet(
FleetName='string',
StackName='string'
)
:type FleetName: string
:param FleetName: [REQUIRED]\nThe name of the fleet.\n
:type StackName: string
:param StackName: [REQUIRED]\nThe name of the stack.\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.IncompatibleImageException
AppStream.Client.exceptions.OperationNotPermittedException
:return: {}
:returns:
(dict) --
"""
pass
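# These stubs exist for documentation and IDE completion only; the
# real call goes through a boto3 client. A hedged usage sketch
# (assumes boto3 is installed, AWS credentials are configured, and
# the named fleet and stack already exist):
#
#     import boto3
#     appstream = boto3.client('appstream')
#     appstream.associate_fleet(FleetName='ExampleFleet',
#                               StackName='ExampleStack')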
def batch_associate_user_stack(UserStackAssociations=None):
"""
Associates the specified users with the specified stacks. Users in a user pool cannot be assigned to stacks with fleets that are joined to an Active Directory domain.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_associate_user_stack(
UserStackAssociations=[
{
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
]
)
:type UserStackAssociations: list
:param UserStackAssociations: [REQUIRED]\nThe list of UserStackAssociation objects.\n\n(dict) --Describes a user in the user pool and the associated stack.\n\nStackName (string) -- [REQUIRED]The name of the stack that is associated with the user.\n\nUserName (string) -- [REQUIRED]The email address of the user who is associated with the stack.\n\nNote\nUsers\' email addresses are case-sensitive.\n\n\nAuthenticationType (string) -- [REQUIRED]The authentication type for the user.\n\nSendEmailNotification (boolean) --Specifies whether a welcome email is sent to a user after the user is created in the user pool.\n\n\n\n\n
:rtype: dict
Returns
Response Syntax
{
'errors': [
{
'UserStackAssociation': {
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
'ErrorCode': 'STACK_NOT_FOUND'|'USER_NAME_NOT_FOUND'|'INTERNAL_ERROR',
'ErrorMessage': 'string'
},
]
}
Response Structure
(dict) --
errors (list) --The list of UserStackAssociationError objects.
(dict) --Describes the error that is returned when a user can't be associated with or disassociated from a stack.
UserStackAssociation (dict) --Information about the user and associated stack.
StackName (string) --The name of the stack that is associated with the user.
UserName (string) --The email address of the user who is associated with the stack.
Note
Users\' email addresses are case-sensitive.
AuthenticationType (string) --The authentication type for the user.
SendEmailNotification (boolean) --Specifies whether a welcome email is sent to a user after the user is created in the user pool.
ErrorCode (string) --The error code for the error that is returned when a user can't be associated with or disassociated from a stack.
ErrorMessage (string) --The error message for the error that is returned when a user can't be associated with or disassociated from a stack.
Exceptions
AppStream.Client.exceptions.OperationNotPermittedException
:return: {
'errors': [
{
'UserStackAssociation': {
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
'ErrorCode': 'STACK_NOT_FOUND'|'USER_NAME_NOT_FOUND'|'INTERNAL_ERROR',
'ErrorMessage': 'string'
},
]
}
"""
pass
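# A hedged sketch of the equivalent boto3 call (illustrative values;
# the user must already exist in the user pool). Each failed
# association is reported in the 'errors' list rather than raised:
#
#     import boto3
#     client = boto3.client('appstream')
#     result = client.batch_associate_user_stack(UserStackAssociations=[{
#         'StackName': 'ExampleStack',
#         'UserName': 'user@example.com',
#         'AuthenticationType': 'USERPOOL',
#     }])
#     for error in result.get('errors', []):
#         print(error['ErrorCode'], error['ErrorMessage'])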
def batch_disassociate_user_stack(UserStackAssociations=None):
"""
Disassociates the specified users from the specified stacks.
See also: AWS API Documentation
:example: response = client.batch_disassociate_user_stack(
UserStackAssociations=[
{
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
]
)
:type UserStackAssociations: list
:param UserStackAssociations: [REQUIRED]\nThe list of UserStackAssociation objects.\n\n(dict) --Describes a user in the user pool and the associated stack.\n\nStackName (string) -- [REQUIRED]The name of the stack that is associated with the user.\n\nUserName (string) -- [REQUIRED]The email address of the user who is associated with the stack.\n\nNote\nUsers\' email addresses are case-sensitive.\n\n\nAuthenticationType (string) -- [REQUIRED]The authentication type for the user.\n\nSendEmailNotification (boolean) --Specifies whether a welcome email is sent to a user after the user is created in the user pool.\n\n\n\n\n
:rtype: dict
Returns
Response Syntax
{
'errors': [
{
'UserStackAssociation': {
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
'ErrorCode': 'STACK_NOT_FOUND'|'USER_NAME_NOT_FOUND'|'INTERNAL_ERROR',
'ErrorMessage': 'string'
},
]
}
Response Structure
(dict) --
errors (list) --The list of UserStackAssociationError objects.
(dict) --Describes the error that is returned when a user can't be associated with or disassociated from a stack.
UserStackAssociation (dict) --Information about the user and associated stack.
StackName (string) --The name of the stack that is associated with the user.
UserName (string) --The email address of the user who is associated with the stack.
Note
Users\' email addresses are case-sensitive.
AuthenticationType (string) --The authentication type for the user.
SendEmailNotification (boolean) --Specifies whether a welcome email is sent to a user after the user is created in the user pool.
ErrorCode (string) --The error code for the error that is returned when a user can't be associated with or disassociated from a stack.
ErrorMessage (string) --The error message for the error that is returned when a user can't be associated with or disassociated from a stack.
:return: {
'errors': [
{
'UserStackAssociation': {
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
'ErrorCode': 'STACK_NOT_FOUND'|'USER_NAME_NOT_FOUND'|'INTERNAL_ERROR',
'ErrorMessage': 'string'
},
]
}
"""
pass
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
"""
pass
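# A hedged sketch of how can_paginate is typically used with boto3
# ('describe_fleets' is an illustrative operation name; the guard
# keeps the code safe if that operation cannot be paginated):
#
#     import boto3
#     client = boto3.client('appstream')
#     if client.can_paginate('describe_fleets'):
#         for page in client.get_paginator('describe_fleets').paginate():
#             print(page['Fleets'])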
def copy_image(SourceImageName=None, DestinationImageName=None, DestinationRegion=None, DestinationImageDescription=None):
"""
Copies the image within the same region or to a new region within the same AWS account. Note that any tags you added to the image will not be copied.
See also: AWS API Documentation
Exceptions
:example: response = client.copy_image(
SourceImageName='string',
DestinationImageName='string',
DestinationRegion='string',
DestinationImageDescription='string'
)
:type SourceImageName: string
:param SourceImageName: [REQUIRED]\nThe name of the image to copy.\n
:type DestinationImageName: string
:param DestinationImageName: [REQUIRED]\nThe name that the image will have when it is copied to the destination.\n
:type DestinationRegion: string
:param DestinationRegion: [REQUIRED]\nThe destination region to which the image will be copied. This parameter is required, even if you are copying an image within the same region.\n
:type DestinationImageDescription: string
:param DestinationImageDescription: The description that the image will have when it is copied to the destination.
:rtype: dict
Returns
Response Syntax
{
'DestinationImageName': 'string'
}
Response Structure
(dict) --
DestinationImageName (string) --
The name of the destination image.
Exceptions
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.IncompatibleImageException
:return: {
'DestinationImageName': 'string'
}
:returns:
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.IncompatibleImageException
"""
pass
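# A hedged sketch of the corresponding boto3 call (illustrative
# names; note that DestinationRegion is required even when copying
# within the same region):
#
#     import boto3
#     client = boto3.client('appstream')
#     resp = client.copy_image(
#         SourceImageName='ExampleImage',
#         DestinationImageName='ExampleImageCopy',
#         DestinationRegion='us-east-1',
#         DestinationImageDescription='Copy of ExampleImage')
#     print(resp['DestinationImageName'])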
def create_directory_config(DirectoryName=None, OrganizationalUnitDistinguishedNames=None, ServiceAccountCredentials=None):
"""
Creates a Directory Config object in AppStream 2.0. This object includes the configuration information required to join fleets and image builders to Microsoft Active Directory domains.
See also: AWS API Documentation
Exceptions
:example: response = client.create_directory_config(
DirectoryName='string',
OrganizationalUnitDistinguishedNames=[
'string',
],
ServiceAccountCredentials={
'AccountName': 'string',
'AccountPassword': 'string'
}
)
:type DirectoryName: string
:param DirectoryName: [REQUIRED]\nThe fully qualified name of the directory (for example, corp.example.com).\n
:type OrganizationalUnitDistinguishedNames: list
:param OrganizationalUnitDistinguishedNames: [REQUIRED]\nThe distinguished names of the organizational units for computer accounts.\n\n(string) --\n\n
:type ServiceAccountCredentials: dict
:param ServiceAccountCredentials: [REQUIRED]\nThe credentials for the service account used by the fleet or image builder to connect to the directory.\n\nAccountName (string) -- [REQUIRED]The user name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified.\n\nAccountPassword (string) -- [REQUIRED]The password for the account.\n\n\n
:rtype: dict
Returns
Response Syntax
{
'DirectoryConfig': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedNames': [
'string',
],
'ServiceAccountCredentials': {
'AccountName': 'string',
'AccountPassword': 'string'
},
'CreatedTime': datetime(2015, 1, 1)
}
}
Response Structure
(dict) --
DirectoryConfig (dict) --
Information about the directory configuration.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedNames (list) --
The distinguished names of the organizational units for computer accounts.
(string) --
ServiceAccountCredentials (dict) --
The credentials for the service account used by the fleet or image builder to connect to the directory.
AccountName (string) --
The user name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified.
AccountPassword (string) --
The password for the account.
CreatedTime (datetime) --
The time the directory configuration was created.
Exceptions
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
:return: {
'DirectoryConfig': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedNames': [
'string',
],
'ServiceAccountCredentials': {
'AccountName': 'string',
'AccountPassword': 'string'
},
'CreatedTime': datetime(2015, 1, 1)
}
}
:returns:
(string) --
"""
pass
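# A hedged sketch of the corresponding boto3 call (illustrative
# values; the service account needs the privileges listed above):
#
#     import boto3
#     client = boto3.client('appstream')
#     client.create_directory_config(
#         DirectoryName='corp.example.com',
#         OrganizationalUnitDistinguishedNames=[
#             'OU=Computers,DC=corp,DC=example,DC=com'],
#         ServiceAccountCredentials={'AccountName': 'corp\\svc-account',
#                                    'AccountPassword': 'example-password'})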
def create_fleet(Name=None, ImageName=None, ImageArn=None, InstanceType=None, FleetType=None, ComputeCapacity=None, VpcConfig=None, MaxUserDurationInSeconds=None, DisconnectTimeoutInSeconds=None, Description=None, DisplayName=None, EnableDefaultInternetAccess=None, DomainJoinInfo=None, Tags=None, IdleDisconnectTimeoutInSeconds=None, IamRoleArn=None):
"""
Creates a fleet. A fleet consists of streaming instances that run a specified image.
See also: AWS API Documentation
Exceptions
:example: response = client.create_fleet(
Name='string',
ImageName='string',
ImageArn='string',
InstanceType='string',
FleetType='ALWAYS_ON'|'ON_DEMAND',
ComputeCapacity={
'DesiredInstances': 123
},
VpcConfig={
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
MaxUserDurationInSeconds=123,
DisconnectTimeoutInSeconds=123,
Description='string',
DisplayName='string',
EnableDefaultInternetAccess=True|False,
DomainJoinInfo={
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
Tags={
'string': 'string'
},
IdleDisconnectTimeoutInSeconds=123,
IamRoleArn='string'
)
:type Name: string
:param Name: [REQUIRED]\nA unique name for the fleet.\n
:type ImageName: string
:param ImageName: The name of the image used to create the fleet.
:type ImageArn: string
:param ImageArn: The ARN of the public, private, or shared image to use.
:type InstanceType: string
:param InstanceType: [REQUIRED]\nThe instance type to use when launching fleet instances. The following instance types are available:\n\nstream.standard.medium\nstream.standard.large\nstream.compute.large\nstream.compute.xlarge\nstream.compute.2xlarge\nstream.compute.4xlarge\nstream.compute.8xlarge\nstream.memory.large\nstream.memory.xlarge\nstream.memory.2xlarge\nstream.memory.4xlarge\nstream.memory.8xlarge\nstream.graphics-design.large\nstream.graphics-design.xlarge\nstream.graphics-design.2xlarge\nstream.graphics-design.4xlarge\nstream.graphics-desktop.2xlarge\nstream.graphics-pro.4xlarge\nstream.graphics-pro.8xlarge\nstream.graphics-pro.16xlarge\n\n
:type FleetType: string
:param FleetType: The fleet type.\n\nALWAYS_ON\nProvides users with instant-on access to their apps. You are charged for all running instances in your fleet, even if no users are streaming apps.\n\nON_DEMAND\nProvide users with access to applications after they connect, which takes one to two minutes. You are charged for instance streaming when users are connected and a small hourly fee for instances that are not streaming apps.\n
:type ComputeCapacity: dict
:param ComputeCapacity: [REQUIRED]\nThe desired capacity for the fleet.\n\nDesiredInstances (integer) -- [REQUIRED]The desired number of streaming instances.\n\n\n
:type VpcConfig: dict
:param VpcConfig: The VPC configuration for the fleet.\n\nSubnetIds (list) --The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.\n\n(string) --\n\n\nSecurityGroupIds (list) --The identifiers of the security groups for the fleet or image builder.\n\n(string) --\n\n\n\n
:type MaxUserDurationInSeconds: integer
:param MaxUserDurationInSeconds: The maximum amount of time that a streaming session can remain active, in seconds. If users are still connected to a streaming instance five minutes before this limit is reached, they are prompted to save any open documents before being disconnected. After this time elapses, the instance is terminated and replaced by a new instance.\nSpecify a value between 600 and 360000.\n
:type DisconnectTimeoutInSeconds: integer
:param DisconnectTimeoutInSeconds: The amount of time that a streaming session remains active after users disconnect. If users try to reconnect to the streaming session after a disconnection or network interruption within this time interval, they are connected to their previous session. Otherwise, they are connected to a new session with a new streaming instance.\nSpecify a value between 60 and 360000.\n
:type Description: string
:param Description: The description to display.
:type DisplayName: string
:param DisplayName: The fleet name to display.
:type EnableDefaultInternetAccess: boolean
:param EnableDefaultInternetAccess: Enables or disables default internet access for the fleet.
:type DomainJoinInfo: dict
:param DomainJoinInfo: The name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain.\n\nDirectoryName (string) --The fully qualified name of the directory (for example, corp.example.com).\n\nOrganizationalUnitDistinguishedName (string) --The distinguished name of the organizational unit for computer accounts.\n\n\n
:type Tags: dict
:param Tags: The tags to associate with the fleet. A tag is a key-value pair, and the value is optional. For example, Environment=Test. If you do not specify a value, Environment=.\nIf you do not specify a value, the value is set to an empty string.\nGenerally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following special characters:\n_ . : / = + - @\nFor more information, see Tagging Your Resources in the Amazon AppStream 2.0 Administration Guide .\n\n(string) --\n(string) --\n\n\n\n
:type IdleDisconnectTimeoutInSeconds: integer
:param IdleDisconnectTimeoutInSeconds: The amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the DisconnectTimeoutInSeconds time interval begins. Users are notified before they are disconnected due to inactivity. If they try to reconnect to the streaming session before the time interval specified in DisconnectTimeoutInSeconds elapses, they are connected to their previous session. Users are considered idle when they stop providing keyboard or mouse input during their streaming session. File uploads and downloads, audio in, audio out, and pixels changing do not qualify as user activity. If users continue to be idle after the time interval in IdleDisconnectTimeoutInSeconds elapses, they are disconnected.\nTo prevent users from being disconnected due to inactivity, specify a value of 0. Otherwise, specify a value between 60 and 3600. The default value is 0.\n\nNote\nIf you enable this feature, we recommend that you specify a value that corresponds exactly to a whole number of minutes (for example, 60, 120, and 180). If you don\'t do this, the value is rounded to the nearest minute. For example, if you specify a value of 70, users are disconnected after 1 minute of inactivity. If you specify a value that is at the midpoint between two different minutes, the value is rounded up. For example, if you specify a value of 90, users are disconnected after 2 minutes of inactivity.\n\n
:type IamRoleArn: string
:param IamRoleArn: The Amazon Resource Name (ARN) of the IAM role to apply to the fleet. To assume a role, a fleet instance calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.\nFor more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .\n
:rtype: dict
Returns
Response Syntax
{
'Fleet': {
'Arn': 'string',
'Name': 'string',
'DisplayName': 'string',
'Description': 'string',
'ImageName': 'string',
'ImageArn': 'string',
'InstanceType': 'string',
'FleetType': 'ALWAYS_ON'|'ON_DEMAND',
'ComputeCapacityStatus': {
'Desired': 123,
'Running': 123,
'InUse': 123,
'Available': 123
},
'MaxUserDurationInSeconds': 123,
'DisconnectTimeoutInSeconds': 123,
'State': 'STARTING'|'RUNNING'|'STOPPING'|'STOPPED',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'CreatedTime': datetime(2015, 1, 1),
'FleetErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'IdleDisconnectTimeoutInSeconds': 123,
'IamRoleArn': 'string'
}
}
Response Structure
(dict) --
Fleet (dict) --
Information about the fleet.
Arn (string) --
The Amazon Resource Name (ARN) for the fleet.
Name (string) --
The name of the fleet.
DisplayName (string) --
The fleet name to display.
Description (string) --
The description to display.
ImageName (string) --
The name of the image used to create the fleet.
ImageArn (string) --
The ARN for the public, private, or shared image.
InstanceType (string) --
The instance type to use when launching fleet instances. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
FleetType (string) --
The fleet type.
ALWAYS_ON
Provides users with instant-on access to their apps. You are charged for all running instances in your fleet, even if no users are streaming apps.
ON_DEMAND
Provide users with access to applications after they connect, which takes one to two minutes. You are charged for instance streaming when users are connected and a small hourly fee for instances that are not streaming apps.
ComputeCapacityStatus (dict) --
The capacity status for the fleet.
Desired (integer) --
The desired number of streaming instances.
Running (integer) --
The total number of simultaneous streaming instances that are running.
InUse (integer) --
The number of instances in use for streaming.
Available (integer) --
The number of currently available instances that can be used to stream sessions.
MaxUserDurationInSeconds (integer) --
The maximum amount of time that a streaming session can remain active, in seconds. If users are still connected to a streaming instance five minutes before this limit is reached, they are prompted to save any open documents before being disconnected. After this time elapses, the instance is terminated and replaced by a new instance.
Specify a value between 600 and 360000.
DisconnectTimeoutInSeconds (integer) --
The amount of time that a streaming session remains active after users disconnect. If they try to reconnect to the streaming session after a disconnection or network interruption within this time interval, they are connected to their previous session. Otherwise, they are connected to a new session with a new streaming instance.
Specify a value between 60 and 360000.
State (string) --
The current state for the fleet.
VpcConfig (dict) --
The VPC configuration for the fleet.
SubnetIds (list) --
The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --
The identifiers of the security groups for the fleet or image builder.
(string) --
CreatedTime (datetime) --
The time the fleet was created.
FleetErrors (list) --
The fleet errors.
(dict) --
Describes a fleet error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
EnableDefaultInternetAccess (boolean) --
Indicates whether default internet access is enabled for the fleet.
DomainJoinInfo (dict) --
The name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --
The distinguished name of the organizational unit for computer accounts.
IdleDisconnectTimeoutInSeconds (integer) --
The amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the DisconnectTimeoutInSeconds time interval begins. Users are notified before they are disconnected due to inactivity. If users try to reconnect to the streaming session before the time interval specified in DisconnectTimeoutInSeconds elapses, they are connected to their previous session. Users are considered idle when they stop providing keyboard or mouse input during their streaming session. File uploads and downloads, audio in, audio out, and pixels changing do not qualify as user activity. If users continue to be idle after the time interval in IdleDisconnectTimeoutInSeconds elapses, they are disconnected.
To prevent users from being disconnected due to inactivity, specify a value of 0. Otherwise, specify a value between 60 and 3600. The default value is 0.
Note
If you enable this feature, we recommend that you specify a value that corresponds exactly to a whole number of minutes (for example, 60, 120, and 180). If you don't do this, the value is rounded to the nearest minute. For example, if you specify a value of 70, users are disconnected after 1 minute of inactivity. If you specify a value that is at the midpoint between two different minutes, the value is rounded up. For example, if you specify a value of 90, users are disconnected after 2 minutes of inactivity.
IamRoleArn (string) --
The ARN of the IAM role that is applied to the fleet. To assume a role, the fleet instance calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
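A minimal usage sketch, assuming a boto3 AppStream client and hypothetical fleet and image names; it creates an on-demand fleet with each timeout expressed as a whole number of minutes, per the rounding note above:
import boto3

client = boto3.client('appstream')
response = client.create_fleet(
    Name='example-fleet',                    # hypothetical fleet name
    ImageName='example-image',               # hypothetical image name
    InstanceType='stream.standard.medium',
    FleetType='ON_DEMAND',
    ComputeCapacity={'DesiredInstances': 2},
    MaxUserDurationInSeconds=8 * 60 * 60,    # 8-hour session cap
    DisconnectTimeoutInSeconds=5 * 60,       # reconnect window after disconnect
    IdleDisconnectTimeoutInSeconds=10 * 60,  # whole-minute value avoids rounding
)
print(response['Fleet']['State'])            # the new fleet still needs start_fleet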
Exceptions
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.InvalidRoleException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.InvalidParameterCombinationException
AppStream.Client.exceptions.IncompatibleImageException
AppStream.Client.exceptions.OperationNotPermittedException
:return: {
'Fleet': {
'Arn': 'string',
'Name': 'string',
'DisplayName': 'string',
'Description': 'string',
'ImageName': 'string',
'ImageArn': 'string',
'InstanceType': 'string',
'FleetType': 'ALWAYS_ON'|'ON_DEMAND',
'ComputeCapacityStatus': {
'Desired': 123,
'Running': 123,
'InUse': 123,
'Available': 123
},
'MaxUserDurationInSeconds': 123,
'DisconnectTimeoutInSeconds': 123,
'State': 'STARTING'|'RUNNING'|'STOPPING'|'STOPPED',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'CreatedTime': datetime(2015, 1, 1),
'FleetErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'IdleDisconnectTimeoutInSeconds': 123,
'IamRoleArn': 'string'
}
}
:returns:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
"""
pass
def create_image_builder(Name=None, ImageName=None, ImageArn=None, InstanceType=None, Description=None, DisplayName=None, VpcConfig=None, IamRoleArn=None, EnableDefaultInternetAccess=None, DomainJoinInfo=None, AppstreamAgentVersion=None, Tags=None, AccessEndpoints=None):
"""
Creates an image builder. An image builder is a virtual machine that is used to create an image.
The initial state of the builder is PENDING . When it is ready, the state is RUNNING .
See also: AWS API Documentation
Exceptions
:example: response = client.create_image_builder(
Name='string',
ImageName='string',
ImageArn='string',
InstanceType='string',
Description='string',
DisplayName='string',
VpcConfig={
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
IamRoleArn='string',
EnableDefaultInternetAccess=True|False,
DomainJoinInfo={
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
AppstreamAgentVersion='string',
Tags={
'string': 'string'
},
AccessEndpoints=[
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
)
:type Name: string
:param Name: [REQUIRED]\nA unique name for the image builder.\n
:type ImageName: string
:param ImageName: The name of the image used to create the image builder.
:type ImageArn: string
:param ImageArn: The ARN of the public, private, or shared image to use.
:type InstanceType: string
:param InstanceType: [REQUIRED]\nThe instance type to use when launching the image builder. The following instance types are available:\n\nstream.standard.medium\nstream.standard.large\nstream.compute.large\nstream.compute.xlarge\nstream.compute.2xlarge\nstream.compute.4xlarge\nstream.compute.8xlarge\nstream.memory.large\nstream.memory.xlarge\nstream.memory.2xlarge\nstream.memory.4xlarge\nstream.memory.8xlarge\nstream.graphics-design.large\nstream.graphics-design.xlarge\nstream.graphics-design.2xlarge\nstream.graphics-design.4xlarge\nstream.graphics-desktop.2xlarge\nstream.graphics-pro.4xlarge\nstream.graphics-pro.8xlarge\nstream.graphics-pro.16xlarge\n\n
:type Description: string
:param Description: The description to display.
:type DisplayName: string
:param DisplayName: The image builder name to display.
:type VpcConfig: dict
:param VpcConfig: The VPC configuration for the image builder. You can specify only one subnet.\n\nSubnetIds (list) --The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.\n\n(string) --\n\n\nSecurityGroupIds (list) --The identifiers of the security groups for the fleet or image builder.\n\n(string) --\n\n\n\n
:type IamRoleArn: string
:param IamRoleArn: The Amazon Resource Name (ARN) of the IAM role to apply to the image builder. To assume a role, the image builder calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.\nFor more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .\n
:type EnableDefaultInternetAccess: boolean
:param EnableDefaultInternetAccess: Enables or disables default internet access for the image builder.
:type DomainJoinInfo: dict
:param DomainJoinInfo: The name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain.\n\nDirectoryName (string) --The fully qualified name of the directory (for example, corp.example.com).\n\nOrganizationalUnitDistinguishedName (string) --The distinguished name of the organizational unit for computer accounts.\n\n\n
:type AppstreamAgentVersion: string
:param AppstreamAgentVersion: The version of the AppStream 2.0 agent to use for this image builder. To use the latest version of the AppStream 2.0 agent, specify [LATEST].
:type Tags: dict
:param Tags: The tags to associate with the image builder. A tag is a key-value pair, and the value is optional. For example, Environment=Test. If you do not specify a value, Environment=.\nGenerally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following special characters:\n_ . : / = + - @\nIf you do not specify a value, the value is set to an empty string.\nFor more information about tags, see Tagging Your Resources in the Amazon AppStream 2.0 Administration Guide .\n\n(string) --\n(string) --\n\n\n\n
:type AccessEndpoints: list
:param AccessEndpoints: The list of interface VPC endpoint (interface endpoint) objects. Administrators can connect to the image builder only through the specified endpoints.\n\n(dict) --Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.\n\nEndpointType (string) -- [REQUIRED]The type of interface endpoint.\n\nVpceId (string) --The identifier (ID) of the VPC in which the interface endpoint is used.\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
Response Structure
(dict) --
ImageBuilder (dict) --
Information about the image builder.
Name (string) --
The name of the image builder.
Arn (string) --
The ARN for the image builder.
ImageArn (string) --
The ARN of the image from which this builder was created.
Description (string) --
The description to display.
DisplayName (string) --
The image builder name to display.
VpcConfig (dict) --
The VPC configuration of the image builder.
SubnetIds (list) --
The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --
The identifiers of the security groups for the fleet or image builder.
(string) --
InstanceType (string) --
The instance type for the image builder. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
Platform (string) --
The operating system platform of the image builder.
IamRoleArn (string) --
The ARN of the IAM role that is applied to the image builder. To assume a role, the image builder calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
State (string) --
The state of the image builder.
StateChangeReason (dict) --
The reason why the last state change occurred.
Code (string) --
The state change reason code.
Message (string) --
The state change reason message.
CreatedTime (datetime) --
The time stamp when the image builder was created.
EnableDefaultInternetAccess (boolean) --
Enables or disables default internet access for the image builder.
DomainJoinInfo (dict) --
The name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --
The distinguished name of the organizational unit for computer accounts.
NetworkAccessConfiguration (dict) --
Describes the network details of the fleet or image builder instance.
EniPrivateIpAddress (string) --
The private IP address of the elastic network interface that is attached to instances in your VPC.
EniId (string) --
The resource identifier of the elastic network interface that is attached to instances in your VPC. All network interfaces have the eni-xxxxxxxx resource identifier.
ImageBuilderErrors (list) --
The image builder errors.
(dict) --
Describes a resource error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
ErrorTimestamp (datetime) --
The time the error occurred.
AppstreamAgentVersion (string) --
The version of the AppStream 2.0 agent that is currently being used by the image builder.
AccessEndpoints (list) --
The list of virtual private cloud (VPC) interface endpoint objects. Administrators can connect to the image builder only through the specified endpoints.
(dict) --
Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --
The type of interface endpoint.
VpceId (string) --
The identifier (ID) of the VPC in which the interface endpoint is used.
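A minimal sketch, assuming a boto3 AppStream client and hypothetical resource names, that creates a builder and polls describe_image_builders until it leaves the PENDING state and reaches RUNNING:
import time
import boto3

client = boto3.client('appstream')
client.create_image_builder(
    Name='example-builder',                  # hypothetical builder name
    ImageName='example-image',               # hypothetical base image
    InstanceType='stream.standard.medium',
)
# The builder starts in PENDING; wait until it is RUNNING before streaming to it.
while True:
    builders = client.describe_image_builders(Names=['example-builder'])
    if builders['ImageBuilders'][0]['State'] == 'RUNNING':
        break
    time.sleep(30)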
Exceptions
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.InvalidRoleException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.InvalidParameterCombinationException
AppStream.Client.exceptions.IncompatibleImageException
AppStream.Client.exceptions.OperationNotPermittedException
:return: {
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
:returns:
(string) --
"""
pass
def create_image_builder_streaming_url(Name=None, Validity=None):
"""
Creates a URL to start an image builder streaming session.
See also: AWS API Documentation
Exceptions
:example: response = client.create_image_builder_streaming_url(
Name='string',
Validity=123
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the image builder.\n
:type Validity: integer
:param Validity: The time that the streaming URL will be valid, in seconds. Specify a value between 1 and 604800 seconds. The default is 3600 seconds.
:rtype: dict
ReturnsResponse Syntax
{
'StreamingURL': 'string',
'Expires': datetime(2015, 1, 1)
}
Response Structure
(dict) --
StreamingURL (string) --
The URL to start the AppStream 2.0 streaming session.
Expires (datetime) --
The elapsed time, in seconds after the Unix epoch, when this URL expires.
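A minimal sketch, assuming a boto3 AppStream client and a hypothetical builder named 'example-builder', that requests a two-hour URL and reports how long it remains valid (boto3 returns Expires as a timezone-aware datetime):
from datetime import datetime, timezone
import boto3

client = boto3.client('appstream')
response = client.create_image_builder_streaming_url(
    Name='example-builder',
    Validity=7200,  # two hours instead of the 3600-second default
)
remaining = response['Expires'] - datetime.now(timezone.utc)
print(response['StreamingURL'])
print('valid for', int(remaining.total_seconds()), 'seconds')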
Exceptions
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'StreamingURL': 'string',
'Expires': datetime(2015, 1, 1)
}
"""
pass
def create_stack(Name=None, Description=None, DisplayName=None, StorageConnectors=None, RedirectURL=None, FeedbackURL=None, UserSettings=None, ApplicationSettings=None, Tags=None, AccessEndpoints=None, EmbedHostDomains=None):
"""
Creates a stack to start streaming applications to users. A stack consists of an associated fleet, user access policies, and storage configurations.
See also: AWS API Documentation
Exceptions
:example: response = client.create_stack(
Name='string',
Description='string',
DisplayName='string',
StorageConnectors=[
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
RedirectURL='string',
FeedbackURL='string',
UserSettings=[
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
ApplicationSettings={
'Enabled': True|False,
'SettingsGroup': 'string'
},
Tags={
'string': 'string'
},
AccessEndpoints=[
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
EmbedHostDomains=[
'string',
]
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the stack.\n
:type Description: string
:param Description: The description to display.
:type DisplayName: string
:param DisplayName: The stack name to display.
:type StorageConnectors: list
:param StorageConnectors: The storage connectors to enable.\n\n(dict) --Describes a connector that enables persistent storage for users.\n\nConnectorType (string) -- [REQUIRED]The type of storage connector.\n\nResourceIdentifier (string) --The ARN of the storage connector.\n\nDomains (list) --The names of the domains for the account.\n\n(string) -- GSuite domain for GDrive integration.\n\n\n\n\n\n
:type RedirectURL: string
:param RedirectURL: The URL that users are redirected to after their streaming session ends.
:type FeedbackURL: string
:param FeedbackURL: The URL that users are redirected to after they click the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
:type UserSettings: list
:param UserSettings: The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are enabled.\n\n(dict) --Describes an action and whether the action is enabled or disabled for users during their streaming sessions.\n\nAction (string) -- [REQUIRED]The action that is enabled or disabled.\n\nPermission (string) -- [REQUIRED]Indicates whether the action is enabled or disabled.\n\n\n\n\n
:type ApplicationSettings: dict
:param ApplicationSettings: The persistent application settings for users of a stack. When these settings are enabled, changes that users make to applications and Windows settings are automatically saved after each session and applied to the next session.\n\nEnabled (boolean) -- [REQUIRED]Enables or disables persistent application settings for users during their streaming sessions.\n\nSettingsGroup (string) --The path prefix for the S3 bucket where users' persistent application settings are stored. You can allow the same persistent application settings to be used across multiple stacks by specifying the same settings group for each stack.\n\n\n
:type Tags: dict
:param Tags: The tags to associate with the stack. A tag is a key-value pair, and the value is optional. For example, Environment=Test. If you do not specify a value, Environment=.\nIf you do not specify a value, the value is set to an empty string.\nGenerally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following special characters:\n_ . : / = + - @\nFor more information about tags, see Tagging Your Resources in the Amazon AppStream 2.0 Administration Guide .\n\n(string) --\n(string) --\n\n\n\n
:type AccessEndpoints: list
:param AccessEndpoints: The list of interface VPC endpoint (interface endpoint) objects. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.\n\n(dict) --Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.\n\nEndpointType (string) -- [REQUIRED]The type of interface endpoint.\n\nVpceId (string) --The identifier (ID) of the VPC in which the interface endpoint is used.\n\n\n\n\n
:type EmbedHostDomains: list
:param EmbedHostDomains: The domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.\n\n(string) -- Specifies a valid domain that can embed AppStream. Valid examples include: ['testorigin.tt--com', 'testingorigin.com.us', 'test.com.us'] Invalid examples include: ['test,com', '.com', 'h*llo.com', '']\n\n
:rtype: dict
ReturnsResponse Syntax
{
'Stack': {
'Arn': 'string',
'Name': 'string',
'Description': 'string',
'DisplayName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'StorageConnectors': [
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
'RedirectURL': 'string',
'FeedbackURL': 'string',
'StackErrors': [
{
'ErrorCode': 'STORAGE_CONNECTOR_ERROR'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'UserSettings': [
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
'ApplicationSettings': {
'Enabled': True|False,
'SettingsGroup': 'string',
'S3BucketName': 'string'
},
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
'EmbedHostDomains': [
'string',
]
}
}
Response Structure
(dict) --
Stack (dict) --
Information about the stack.
Arn (string) --
The ARN of the stack.
Name (string) --
The name of the stack.
Description (string) --
The description to display.
DisplayName (string) --
The stack name to display.
CreatedTime (datetime) --
The time the stack was created.
StorageConnectors (list) --
The storage connectors to enable.
(dict) --
Describes a connector that enables persistent storage for users.
ConnectorType (string) --
The type of storage connector.
ResourceIdentifier (string) --
The ARN of the storage connector.
Domains (list) --
The names of the domains for the account.
(string) -- GSuite domain for GDrive integration.
RedirectURL (string) --
The URL that users are redirected to after their streaming session ends.
FeedbackURL (string) --
The URL that users are redirected to after they click the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
StackErrors (list) --
The errors for the stack.
(dict) --
Describes a stack error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
UserSettings (list) --
The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are enabled.
(dict) --
Describes an action and whether the action is enabled or disabled for users during their streaming sessions.
Action (string) --
The action that is enabled or disabled.
Permission (string) --
Indicates whether the action is enabled or disabled.
ApplicationSettings (dict) --
The persistent application settings for users of the stack.
Enabled (boolean) --
Specifies whether persistent application settings are enabled for users during their streaming sessions.
SettingsGroup (string) --
The path prefix for the S3 bucket where users' persistent application settings are stored.
S3BucketName (string) --
The S3 bucket where users' persistent application settings are stored. When persistent application settings are enabled for the first time for an account in an AWS Region, an S3 bucket is created. The bucket is unique to the AWS account and the Region.
AccessEndpoints (list) --
The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.
(dict) --
Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --
The type of interface endpoint.
VpceId (string) --
The identifier (ID) of the VPC in which the interface endpoint is used.
EmbedHostDomains (list) --
The domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.
(string) -- Specifies a valid domain that can embed AppStream. Valid examples include: ["testorigin.tt--com", "testingorigin.com.us", "test.com.us"] Invalid examples include: ["test,com", ".com", "h*llo.com", ""]
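A minimal sketch, assuming a boto3 AppStream client and a hypothetical stack name, that enables home folders and disables file downloads to local devices:
import boto3

client = boto3.client('appstream')
response = client.create_stack(
    Name='example-stack',                                  # hypothetical stack name
    StorageConnectors=[{'ConnectorType': 'HOMEFOLDERS'}],  # persistent home folders
    UserSettings=[
        {'Action': 'FILE_DOWNLOAD', 'Permission': 'DISABLED'},
    ],
)
print(response['Stack']['Arn'])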
Exceptions
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.InvalidRoleException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.InvalidParameterCombinationException
:return: {
'Stack': {
'Arn': 'string',
'Name': 'string',
'Description': 'string',
'DisplayName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'StorageConnectors': [
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
'RedirectURL': 'string',
'FeedbackURL': 'string',
'StackErrors': [
{
'ErrorCode': 'STORAGE_CONNECTOR_ERROR'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'UserSettings': [
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
'ApplicationSettings': {
'Enabled': True|False,
'SettingsGroup': 'string',
'S3BucketName': 'string'
},
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
'EmbedHostDomains': [
'string',
]
}
}
:returns:
(string) -- GSuite domain for GDrive integration.
"""
pass
def create_streaming_url(StackName=None, FleetName=None, UserId=None, ApplicationId=None, Validity=None, SessionContext=None):
"""
Creates a temporary URL to start an AppStream 2.0 streaming session for the specified user. A streaming URL enables application streaming to be tested without user setup.
See also: AWS API Documentation
Exceptions
:example: response = client.create_streaming_url(
StackName='string',
FleetName='string',
UserId='string',
ApplicationId='string',
Validity=123,
SessionContext='string'
)
:type StackName: string
:param StackName: [REQUIRED]\nThe name of the stack.\n
:type FleetName: string
:param FleetName: [REQUIRED]\nThe name of the fleet.\n
:type UserId: string
:param UserId: [REQUIRED]\nThe identifier of the user.\n
:type ApplicationId: string
:param ApplicationId: The name of the application to launch after the session starts. This is the name that you specified as Name in the Image Assistant.
:type Validity: integer
:param Validity: The time that the streaming URL will be valid, in seconds. Specify a value between 1 and 604800 seconds. The default is 60 seconds.
:type SessionContext: string
:param SessionContext: The session context. For more information, see Session Context in the Amazon AppStream 2.0 Administration Guide .
:rtype: dict
ReturnsResponse Syntax
{
'StreamingURL': 'string',
'Expires': datetime(2015, 1, 1)
}
Response Structure
(dict) --
StreamingURL (string) --
The URL to start the AppStream 2.0 streaming session.
Expires (datetime) --
The elapsed time, in seconds after the Unix epoch, when this URL expires.
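A minimal sketch, assuming a boto3 AppStream client and hypothetical stack and fleet names that are already associated, that issues a five-minute test URL for a given user:
import boto3

client = boto3.client('appstream')
response = client.create_streaming_url(
    StackName='example-stack',   # hypothetical stack name
    FleetName='example-fleet',   # hypothetical fleet; must be associated and running
    UserId='test-user',
    Validity=300,                # five minutes instead of the 60-second default
)
print(response['StreamingURL'])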
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.InvalidParameterCombinationException
:return: {
'StreamingURL': 'string',
'Expires': datetime(2015, 1, 1)
}
"""
pass
def create_usage_report_subscription():
"""
Creates a usage report subscription. Usage reports are generated daily.
See also: AWS API Documentation
Exceptions
:example: response = client.create_usage_report_subscription()
:rtype: dict
ReturnsResponse Syntax{
'S3BucketName': 'string',
'Schedule': 'DAILY'
}
Response Structure
(dict) --
S3BucketName (string) --The Amazon S3 bucket where generated reports are stored.
If you enabled on-instance session scripts and Amazon S3 logging for your session script configuration, AppStream 2.0 created an S3 bucket to store the script output. The bucket is unique to your account and Region. When you enable usage reporting in this case, AppStream 2.0 uses the same bucket to store your usage reports. If you haven't already enabled on-instance session scripts, when you enable usage reports, AppStream 2.0 creates a new S3 bucket.
Schedule (string) --The schedule for generating usage reports.
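A minimal sketch, assuming a boto3 AppStream client, that enables the subscription and prints where the daily reports will be stored:
import boto3

client = boto3.client('appstream')
response = client.create_usage_report_subscription()
print(response['S3BucketName'])  # bucket that will receive the reports
print(response['Schedule'])      # always DAILY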
Exceptions
AppStream.Client.exceptions.InvalidRoleException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.LimitExceededException
:return: {
'S3BucketName': 'string',
'Schedule': 'DAILY'
}
"""
pass
def create_user(UserName=None, MessageAction=None, FirstName=None, LastName=None, AuthenticationType=None):
"""
Creates a new user in the user pool.
See also: AWS API Documentation
Exceptions
:example: response = client.create_user(
UserName='string',
MessageAction='SUPPRESS'|'RESEND',
FirstName='string',
LastName='string',
AuthenticationType='API'|'SAML'|'USERPOOL'
)
:type UserName: string
:param UserName: [REQUIRED]\nThe email address of the user.\n\nNote\nUsers\' email addresses are case-sensitive. During login, if they specify an email address that doesn\'t use the same capitalization as the email address specified when their user pool account was created, a 'user does not exist' error message displays.\n\n
:type MessageAction: string
:param MessageAction: The action to take for the welcome email that is sent to a user after the user is created in the user pool. If you specify SUPPRESS, no email is sent. If you specify RESEND, do not specify the first name or last name of the user. If the value is null, the email is sent.\n\nNote\nThe temporary password in the welcome email is valid for only 7 days. If users don't set their passwords within 7 days, you must send them a new welcome email.\n\n
:type FirstName: string
:param FirstName: The first name, or given name, of the user.
:type LastName: string
:param LastName: The last name, or surname, of the user.
:type AuthenticationType: string
:param AuthenticationType: [REQUIRED]\nThe authentication type for the user. You must specify USERPOOL.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
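A minimal sketch, assuming a boto3 AppStream client and a hypothetical user, that creates a user pool account while suppressing the welcome email:
import boto3

client = boto3.client('appstream')
client.create_user(
    UserName='user@example.com',   # hypothetical email address (case-sensitive)
    FirstName='Jane',
    LastName='Doe',
    MessageAction='SUPPRESS',      # skip the welcome email for now
    AuthenticationType='USERPOOL',
)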
Exceptions
AppStream.Client.exceptions.ResourceAlreadyExistsException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.InvalidParameterCombinationException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.OperationNotPermittedException
:return: {}
:returns:
(dict) --
"""
pass
def delete_directory_config(DirectoryName=None):
"""
Deletes the specified Directory Config object from AppStream 2.0. This object includes the information required to join streaming instances to an Active Directory domain.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_directory_config(
DirectoryName='string'
)
:type DirectoryName: string
:param DirectoryName: [REQUIRED]\nThe name of the directory configuration.\n
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
"""
pass
def delete_fleet(Name=None):
"""
Deletes the specified fleet.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_fleet(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the fleet.\n
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
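A minimal sketch, assuming a boto3 AppStream client and a hypothetical fleet that has already been disassociated from its stacks; a fleet must be stopped before it can be deleted, otherwise ResourceInUseException is raised:
import boto3

client = boto3.client('appstream')
client.stop_fleet(Name='example-fleet')       # hypothetical fleet name
try:
    client.delete_fleet(Name='example-fleet')
except client.exceptions.ResourceInUseException:
    # stop_fleet is asynchronous; retry once the fleet has fully stopped
    print('Fleet is still stopping; retry shortly.')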
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {}
"""
pass
def delete_image(Name=None):
"""
Deletes the specified image. You cannot delete an image when it is in use. After you delete an image, you cannot provision new capacity using the image.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_image(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the image.\n
:rtype: dict
ReturnsResponse Syntax{
'Image': {
'Name': 'string',
'Arn': 'string',
'BaseImageArn': 'string',
'DisplayName': 'string',
'State': 'PENDING'|'AVAILABLE'|'FAILED'|'COPYING'|'DELETING',
'Visibility': 'PUBLIC'|'PRIVATE'|'SHARED',
'ImageBuilderSupported': True|False,
'ImageBuilderName': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'Description': 'string',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_BUILDER_NOT_AVAILABLE'|'IMAGE_COPY_FAILURE',
'Message': 'string'
},
'Applications': [
{
'Name': 'string',
'DisplayName': 'string',
'IconURL': 'string',
'LaunchPath': 'string',
'LaunchParameters': 'string',
'Enabled': True|False,
'Metadata': {
'string': 'string'
}
},
],
'CreatedTime': datetime(2015, 1, 1),
'PublicBaseImageReleasedDate': datetime(2015, 1, 1),
'AppstreamAgentVersion': 'string',
'ImagePermissions': {
'allowFleet': True|False,
'allowImageBuilder': True|False
}
}
}
Response Structure
(dict) --
Image (dict) --Information about the image.
Name (string) --The name of the image.
Arn (string) --The ARN of the image.
BaseImageArn (string) --The ARN of the image from which this image was created.
DisplayName (string) --The image name to display.
State (string) --The image starts in the PENDING state. If image creation succeeds, the state is AVAILABLE . If image creation fails, the state is FAILED .
Visibility (string) --Indicates whether the image is public or private.
ImageBuilderSupported (boolean) --Indicates whether an image builder can be launched from this image.
ImageBuilderName (string) --The name of the image builder that was used to create the private image. If the image is shared, this value is null.
Platform (string) --The operating system platform of the image.
Description (string) --The description to display.
StateChangeReason (dict) --The reason why the last state change occurred.
Code (string) --The state change reason code.
Message (string) --The state change reason message.
Applications (list) --The applications associated with the image.
(dict) --Describes an application in the application catalog.
Name (string) --The name of the application.
DisplayName (string) --The application name to display.
IconURL (string) --The URL for the application icon. This URL might be time-limited.
LaunchPath (string) --The path to the application executable in the instance.
LaunchParameters (string) --The arguments that are passed to the application at launch.
Enabled (boolean) --If there is a problem, the application can be disabled after image creation.
Metadata (dict) --Additional attributes that describe the application.
(string) --
(string) --
CreatedTime (datetime) --The time the image was created.
PublicBaseImageReleasedDate (datetime) --The release date of the public base image. For private images, this date is the release date of the base image from which the image was created.
AppstreamAgentVersion (string) --The version of the AppStream 2.0 agent to use for instances that are launched from this image.
ImagePermissions (dict) --The permissions to provide to the destination AWS account for the specified image.
allowFleet (boolean) --Indicates whether the image can be used for a fleet.
allowImageBuilder (boolean) --Indicates whether the image can be used for an image builder.
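A minimal sketch, assuming a boto3 AppStream client and a hypothetical image name, that deletes an image and handles the case where it is still in use:
import boto3

client = boto3.client('appstream')
try:
    image = client.delete_image(Name='example-image')['Image']
    print(image['State'])  # reflects the deletion in progress
except client.exceptions.ResourceInUseException:
    print('Delete the fleets and image builders that use this image first.')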
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {
'Image': {
'Name': 'string',
'Arn': 'string',
'BaseImageArn': 'string',
'DisplayName': 'string',
'State': 'PENDING'|'AVAILABLE'|'FAILED'|'COPYING'|'DELETING',
'Visibility': 'PUBLIC'|'PRIVATE'|'SHARED',
'ImageBuilderSupported': True|False,
'ImageBuilderName': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'Description': 'string',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_BUILDER_NOT_AVAILABLE'|'IMAGE_COPY_FAILURE',
'Message': 'string'
},
'Applications': [
{
'Name': 'string',
'DisplayName': 'string',
'IconURL': 'string',
'LaunchPath': 'string',
'LaunchParameters': 'string',
'Enabled': True|False,
'Metadata': {
'string': 'string'
}
},
],
'CreatedTime': datetime(2015, 1, 1),
'PublicBaseImageReleasedDate': datetime(2015, 1, 1),
'AppstreamAgentVersion': 'string',
'ImagePermissions': {
'allowFleet': True|False,
'allowImageBuilder': True|False
}
}
}
"""
pass
def delete_image_builder(Name=None):
"""
Deletes the specified image builder and releases the capacity.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_image_builder(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the image builder.\n
:rtype: dict
ReturnsResponse Syntax{
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
Response Structure
(dict) --
ImageBuilder (dict) --Information about the image builder.
Name (string) --The name of the image builder.
Arn (string) --The ARN for the image builder.
ImageArn (string) --The ARN of the image from which this builder was created.
Description (string) --The description to display.
DisplayName (string) --The image builder name to display.
VpcConfig (dict) --The VPC configuration of the image builder.
SubnetIds (list) --The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --The identifiers of the security groups for the fleet or image builder.
(string) --
InstanceType (string) --The instance type for the image builder. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
Platform (string) --The operating system platform of the image builder.
IamRoleArn (string) --The ARN of the IAM role that is applied to the image builder. To assume a role, the image builder calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
State (string) --The state of the image builder.
StateChangeReason (dict) --The reason why the last state change occurred.
Code (string) --The state change reason code.
Message (string) --The state change reason message.
CreatedTime (datetime) --The time stamp when the image builder was created.
EnableDefaultInternetAccess (boolean) --Enables or disables default internet access for the image builder.
DomainJoinInfo (dict) --The name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain.
DirectoryName (string) --The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --The distinguished name of the organizational unit for computer accounts.
NetworkAccessConfiguration (dict) --Describes the network details of the fleet or image builder instance.
EniPrivateIpAddress (string) --The private IP address of the elastic network interface that is attached to instances in your VPC.
EniId (string) --The resource identifier of the elastic network interface that is attached to instances in your VPC. All network interfaces have the eni-xxxxxxxx resource identifier.
ImageBuilderErrors (list) --The image builder errors.
(dict) --Describes a resource error.
ErrorCode (string) --The error code.
ErrorMessage (string) --The error message.
ErrorTimestamp (datetime) --The time the error occurred.
AppstreamAgentVersion (string) --The version of the AppStream 2.0 agent that is currently being used by the image builder.
AccessEndpoints (list) --The list of virtual private cloud (VPC) interface endpoint objects. Administrators can connect to the image builder only through the specified endpoints.
(dict) --Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --The type of interface endpoint.
VpceId (string) --The identifier (ID) of the VPC in which the interface endpoint is used.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
:returns:
(string) --
"""
pass
def delete_image_permissions(Name=None, SharedAccountId=None):
"""
Deletes permissions for the specified private image. After you delete permissions for an image, AWS accounts to which you previously granted these permissions can no longer use the image.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_image_permissions(
Name='string',
SharedAccountId='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the private image.\n
:type SharedAccountId: string
:param SharedAccountId: [REQUIRED]\nThe 12-digit identifier of the AWS account for which to delete image permissions.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
def delete_stack(Name=None):
"""
Deletes the specified stack. After the stack is deleted, the application streaming environment provided by the stack is no longer available to users. Also, any reservations made for application streaming sessions for the stack are released.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_stack(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the stack.\n
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {}
"""
pass
def delete_usage_report_subscription():
"""
Disables usage report generation.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_usage_report_subscription()
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
"""
pass
def delete_user(UserName=None, AuthenticationType=None):
"""
Deletes a user from the user pool.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_user(
UserName='string',
AuthenticationType='API'|'SAML'|'USERPOOL'
)
:type UserName: string
:param UserName: [REQUIRED]\nThe email address of the user.\n\nNote\nUsers\' email addresses are case-sensitive.\n\n
:type AuthenticationType: string
:param AuthenticationType: [REQUIRED]\nThe authentication type for the user. You must specify USERPOOL.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
def describe_directory_configs(DirectoryNames=None, MaxResults=None, NextToken=None):
"""
Retrieves a list that describes one or more specified Directory Config objects for AppStream 2.0, if the names for these objects are provided. Otherwise, all Directory Config objects in the account are described. These objects include the configuration information required to join fleets and image builders to Microsoft Active Directory domains.
Although the response syntax in this topic includes the account password, this password is not returned in the actual response.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_directory_configs(
DirectoryNames=[
'string',
],
MaxResults=123,
NextToken='string'
)
:type DirectoryNames: list
:param DirectoryNames: The directory names.\n\n(string) --\n\n
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'DirectoryConfigs': [
{
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedNames': [
'string',
],
'ServiceAccountCredentials': {
'AccountName': 'string',
'AccountPassword': 'string'
},
'CreatedTime': datetime(2015, 1, 1)
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
DirectoryConfigs (list) --
Information about the directory configurations. Note that although the response syntax in this topic includes the account password, this password is not returned in the actual response.
(dict) --
Describes the configuration information required to join fleets and image builders to Microsoft Active Directory domains.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedNames (list) --
The distinguished names of the organizational units for computer accounts.
(string) --
ServiceAccountCredentials (dict) --
The credentials for the service account used by the fleet or image builder to connect to the directory.
AccountName (string) --
The user name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified.
AccountPassword (string) --
The password for the account.
CreatedTime (datetime) --
The time the directory configuration was created.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
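A minimal pagination sketch, assuming a boto3 AppStream client, that follows NextToken until every Directory Config in the account has been collected:
import boto3

client = boto3.client('appstream')
configs, token = [], None
while True:
    kwargs = {'NextToken': token} if token else {}
    page = client.describe_directory_configs(**kwargs)
    configs.extend(page['DirectoryConfigs'])
    token = page.get('NextToken')
    if not token:
        break  # no more pages
print([c['DirectoryName'] for c in configs])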
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'DirectoryConfigs': [
{
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedNames': [
'string',
],
'ServiceAccountCredentials': {
'AccountName': 'string',
'AccountPassword': 'string'
},
'CreatedTime': datetime(2015, 1, 1)
},
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
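# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A minimal sketch of walking every page of describe_directory_configs by
# following NextToken, as described in the docstring above. Assumes boto3 is
# installed and AWS credentials are configured; the helper name is hypothetical.
def _example_list_all_directory_configs():
    import boto3
    client = boto3.client('appstream')
    configs, token = [], None
    while True:
        kwargs = {}
        if token:
            kwargs['NextToken'] = token
        page = client.describe_directory_configs(**kwargs)
        configs.extend(page.get('DirectoryConfigs', []))
        token = page.get('NextToken')
        # A null/absent NextToken means there are no more pages.
        if not token:
            break
    return configs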
def describe_fleets(Names=None, NextToken=None):
"""
Retrieves a list that describes one or more specified fleets, if the fleet names are provided. Otherwise, all fleets in the account are described.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_fleets(
Names=[
'string',
],
NextToken='string'
)
:type Names: list
:param Names: The names of the fleets to describe.\n\n(string) --\n\n
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'Fleets': [
{
'Arn': 'string',
'Name': 'string',
'DisplayName': 'string',
'Description': 'string',
'ImageName': 'string',
'ImageArn': 'string',
'InstanceType': 'string',
'FleetType': 'ALWAYS_ON'|'ON_DEMAND',
'ComputeCapacityStatus': {
'Desired': 123,
'Running': 123,
'InUse': 123,
'Available': 123
},
'MaxUserDurationInSeconds': 123,
'DisconnectTimeoutInSeconds': 123,
'State': 'STARTING'|'RUNNING'|'STOPPING'|'STOPPED',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'CreatedTime': datetime(2015, 1, 1),
'FleetErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'IdleDisconnectTimeoutInSeconds': 123,
'IamRoleArn': 'string'
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Fleets (list) --
Information about the fleets.
(dict) --
Describes a fleet.
Arn (string) --
The Amazon Resource Name (ARN) for the fleet.
Name (string) --
The name of the fleet.
DisplayName (string) --
The fleet name to display.
Description (string) --
The description to display.
ImageName (string) --
The name of the image used to create the fleet.
ImageArn (string) --
The ARN for the public, private, or shared image.
InstanceType (string) --
The instance type to use when launching fleet instances. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
FleetType (string) --
The fleet type.
ALWAYS_ON
Provides users with instant-on access to their apps. You are charged for all running instances in your fleet, even if no users are streaming apps.
ON_DEMAND
Provides users with access to applications after they connect, which takes one to two minutes. You are charged for instance streaming when users are connected and a small hourly fee for instances that are not streaming apps.
ComputeCapacityStatus (dict) --
The capacity status for the fleet.
Desired (integer) --
The desired number of streaming instances.
Running (integer) --
The total number of simultaneous streaming instances that are running.
InUse (integer) --
The number of instances in use for streaming.
Available (integer) --
The number of currently available instances that can be used to stream sessions.
MaxUserDurationInSeconds (integer) --
The maximum amount of time that a streaming session can remain active, in seconds. If users are still connected to a streaming instance five minutes before this limit is reached, they are prompted to save any open documents before being disconnected. After this time elapses, the instance is terminated and replaced by a new instance.
Specify a value between 600 and 360000.
DisconnectTimeoutInSeconds (integer) --
The amount of time that a streaming session remains active after users disconnect. If they try to reconnect to the streaming session after a disconnection or network interruption within this time interval, they are connected to their previous session. Otherwise, they are connected to a new session with a new streaming instance.
Specify a value between 60 and 360000.
State (string) --
The current state for the fleet.
VpcConfig (dict) --
The VPC configuration for the fleet.
SubnetIds (list) --
The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --
The identifiers of the security groups for the fleet or image builder.
(string) --
CreatedTime (datetime) --
The time the fleet was created.
FleetErrors (list) --
The fleet errors.
(dict) --
Describes a fleet error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
EnableDefaultInternetAccess (boolean) --
Indicates whether default internet access is enabled for the fleet.
DomainJoinInfo (dict) --
The name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --
The distinguished name of the organizational unit for computer accounts.
IdleDisconnectTimeoutInSeconds (integer) --
The amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the DisconnectTimeoutInSeconds time interval begins. Users are notified before they are disconnected due to inactivity. If users try to reconnect to the streaming session before the time interval specified in DisconnectTimeoutInSeconds elapses, they are connected to their previous session. Users are considered idle when they stop providing keyboard or mouse input during their streaming session. File uploads and downloads, audio in, audio out, and pixels changing do not qualify as user activity. If users continue to be idle after the time interval in IdleDisconnectTimeoutInSeconds elapses, they are disconnected.
To prevent users from being disconnected due to inactivity, specify a value of 0. Otherwise, specify a value between 60 and 3600. The default value is 0.
Note
If you enable this feature, we recommend that you specify a value that corresponds exactly to a whole number of minutes (for example, 60, 120, and 180). If you don\'t do this, the value is rounded to the nearest minute. For example, if you specify a value of 70, users are disconnected after 1 minute of inactivity. If you specify a value that is at the midpoint between two different minutes, the value is rounded up. For example, if you specify a value of 90, users are disconnected after 2 minutes of inactivity.
IamRoleArn (string) --
The ARN of the IAM role that is applied to the fleet. To assume a role, the fleet instance calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'Fleets': [
{
'Arn': 'string',
'Name': 'string',
'DisplayName': 'string',
'Description': 'string',
'ImageName': 'string',
'ImageArn': 'string',
'InstanceType': 'string',
'FleetType': 'ALWAYS_ON'|'ON_DEMAND',
'ComputeCapacityStatus': {
'Desired': 123,
'Running': 123,
'InUse': 123,
'Available': 123
},
'MaxUserDurationInSeconds': 123,
'DisconnectTimeoutInSeconds': 123,
'State': 'STARTING'|'RUNNING'|'STOPPING'|'STOPPED',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'CreatedTime': datetime(2015, 1, 1),
'FleetErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'IdleDisconnectTimeoutInSeconds': 123,
'IamRoleArn': 'string'
},
],
'NextToken': 'string'
}
"""
pass
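# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A small sketch that reports state and compute capacity for the named fleets,
# reading the ComputeCapacityStatus structure documented above. Fleet names are
# caller-supplied; boto3 and configured credentials are assumed.
def _example_report_fleet_capacity(fleet_names):
    import boto3
    client = boto3.client('appstream')
    response = client.describe_fleets(Names=fleet_names)
    for fleet in response.get('Fleets', []):
        capacity = fleet['ComputeCapacityStatus']
        print('%s state=%s desired=%d running=%d in_use=%d available=%d' % (
            fleet['Name'], fleet['State'], capacity['Desired'],
            capacity['Running'], capacity['InUse'], capacity['Available']))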
def describe_image_builders(Names=None, MaxResults=None, NextToken=None):
"""
Retrieves a list that describes one or more specified image builders, if the image builder names are provided. Otherwise, all image builders in the account are described.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_image_builders(
Names=[
'string',
],
MaxResults=123,
NextToken='string'
)
:type Names: list
:param Names: The names of the image builders to describe.\n\n(string) --\n\n
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'ImageBuilders': [
{
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
ImageBuilders (list) --
Information about the image builders.
(dict) --
Describes a virtual machine that is used to create an image.
Name (string) --
The name of the image builder.
Arn (string) --
The ARN for the image builder.
ImageArn (string) --
The ARN of the image from which this builder was created.
Description (string) --
The description to display.
DisplayName (string) --
The image builder name to display.
VpcConfig (dict) --
The VPC configuration of the image builder.
SubnetIds (list) --
The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --
The identifiers of the security groups for the fleet or image builder.
(string) --
InstanceType (string) --
The instance type for the image builder. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
Platform (string) --
The operating system platform of the image builder.
IamRoleArn (string) --
The ARN of the IAM role that is applied to the image builder. To assume a role, the image builder calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
State (string) --
The state of the image builder.
StateChangeReason (dict) --
The reason why the last state change occurred.
Code (string) --
The state change reason code.
Message (string) --
The state change reason message.
CreatedTime (datetime) --
The time stamp when the image builder was created.
EnableDefaultInternetAccess (boolean) --
Enables or disables default internet access for the image builder.
DomainJoinInfo (dict) --
The name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --
The distinguished name of the organizational unit for computer accounts.
NetworkAccessConfiguration (dict) --
Describes the network details of the fleet or image builder instance.
EniPrivateIpAddress (string) --
The private IP address of the elastic network interface that is attached to instances in your VPC.
EniId (string) --
The resource identifier of the elastic network interface that is attached to instances in your VPC. All network interfaces have the eni-xxxxxxxx resource identifier.
ImageBuilderErrors (list) --
The image builder errors.
(dict) --
Describes a resource error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
ErrorTimestamp (datetime) --
The time the error occurred.
AppstreamAgentVersion (string) --
The version of the AppStream 2.0 agent that is currently being used by the image builder.
AccessEndpoints (list) --
The list of virtual private cloud (VPC) interface endpoint objects. Administrators can connect to the image builder only through the specified endpoints.
(dict) --
Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --
The type of interface endpoint.
VpceId (string) --
The identifier (ID) of the VPC in which the interface endpoint is used.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'ImageBuilders': [
{
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
},
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
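# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that surfaces ImageBuilderErrors for builders stuck in the FAILED
# state, using the response shape documented above. Assumes boto3/credentials.
def _example_report_image_builder_errors():
    import boto3
    client = boto3.client('appstream')
    response = client.describe_image_builders()
    for builder in response.get('ImageBuilders', []):
        if builder['State'] != 'FAILED':
            continue
        for error in builder.get('ImageBuilderErrors', []):
            print(builder['Name'], error['ErrorCode'], error['ErrorMessage'])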
def describe_image_permissions(Name=None, MaxResults=None, SharedAwsAccountIds=None, NextToken=None):
"""
Retrieves a list that describes the permissions for shared AWS account IDs on a private image that you own.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_image_permissions(
Name='string',
MaxResults=123,
SharedAwsAccountIds=[
'string',
],
NextToken='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the private image for which to describe permissions. The image must be one that you own.\n
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:type SharedAwsAccountIds: list
:param SharedAwsAccountIds: The 12-digit identifier of one or more AWS accounts with which the image is shared.\n\n(string) --\n\n
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'Name': 'string',
'SharedImagePermissionsList': [
{
'sharedAccountId': 'string',
'imagePermissions': {
'allowFleet': True|False,
'allowImageBuilder': True|False
}
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Name (string) --
The name of the private image.
SharedImagePermissionsList (list) --
The permissions for a private image that you own.
(dict) --
Describes the permissions that are available to the specified AWS account for a shared image.
sharedAccountId (string) --
The 12-digit identifier of the AWS account with which the image is shared.
imagePermissions (dict) --
Describes the permissions for a shared image.
allowFleet (boolean) --
Indicates whether the image can be used for a fleet.
allowImageBuilder (boolean) --
Indicates whether the image can be used for an image builder.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'Name': 'string',
'SharedImagePermissionsList': [
{
'sharedAccountId': 'string',
'imagePermissions': {
'allowFleet': True|False,
'allowImageBuilder': True|False
}
},
],
'NextToken': 'string'
}
"""
pass
def describe_images(Names=None, Arns=None, Type=None, NextToken=None, MaxResults=None):
"""
Retrieves a list that describes one or more specified images, if the image names or image ARNs are provided. Otherwise, all images in the account are described.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_images(
Names=[
'string',
],
Arns=[
'string',
],
Type='PUBLIC'|'PRIVATE'|'SHARED',
NextToken='string',
MaxResults=123
)
:type Names: list
:param Names: The names of the public or private images to describe.\n\n(string) --\n\n
:type Arns: list
:param Arns: The ARNs of the public, private, and shared images to describe.\n\n(string) --\n\n
:type Type: string
:param Type: The type of image (public, private, or shared) to describe.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:rtype: dict
ReturnsResponse Syntax
{
'Images': [
{
'Name': 'string',
'Arn': 'string',
'BaseImageArn': 'string',
'DisplayName': 'string',
'State': 'PENDING'|'AVAILABLE'|'FAILED'|'COPYING'|'DELETING',
'Visibility': 'PUBLIC'|'PRIVATE'|'SHARED',
'ImageBuilderSupported': True|False,
'ImageBuilderName': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'Description': 'string',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_BUILDER_NOT_AVAILABLE'|'IMAGE_COPY_FAILURE',
'Message': 'string'
},
'Applications': [
{
'Name': 'string',
'DisplayName': 'string',
'IconURL': 'string',
'LaunchPath': 'string',
'LaunchParameters': 'string',
'Enabled': True|False,
'Metadata': {
'string': 'string'
}
},
],
'CreatedTime': datetime(2015, 1, 1),
'PublicBaseImageReleasedDate': datetime(2015, 1, 1),
'AppstreamAgentVersion': 'string',
'ImagePermissions': {
'allowFleet': True|False,
'allowImageBuilder': True|False
}
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Images (list) --
Information about the images.
(dict) --
Describes an image.
Name (string) --
The name of the image.
Arn (string) --
The ARN of the image.
BaseImageArn (string) --
The ARN of the image from which this image was created.
DisplayName (string) --
The image name to display.
State (string) --
The image starts in the PENDING state. If image creation succeeds, the state is AVAILABLE . If image creation fails, the state is FAILED .
Visibility (string) --
Indicates whether the image is public or private.
ImageBuilderSupported (boolean) --
Indicates whether an image builder can be launched from this image.
ImageBuilderName (string) --
The name of the image builder that was used to create the private image. If the image is shared, this value is null.
Platform (string) --
The operating system platform of the image.
Description (string) --
The description to display.
StateChangeReason (dict) --
The reason why the last state change occurred.
Code (string) --
The state change reason code.
Message (string) --
The state change reason message.
Applications (list) --
The applications associated with the image.
(dict) --
Describes an application in the application catalog.
Name (string) --
The name of the application.
DisplayName (string) --
The application name to display.
IconURL (string) --
The URL for the application icon. This URL might be time-limited.
LaunchPath (string) --
The path to the application executable in the instance.
LaunchParameters (string) --
The arguments that are passed to the application at launch.
Enabled (boolean) --
If there is a problem, the application can be disabled after image creation.
Metadata (dict) --
Additional attributes that describe the application.
(string) --
(string) --
CreatedTime (datetime) --
The time the image was created.
PublicBaseImageReleasedDate (datetime) --
The release date of the public base image. For private images, this date is the release date of the base image from which the image was created.
AppstreamAgentVersion (string) --
The version of the AppStream 2.0 agent to use for instances that are launched from this image.
ImagePermissions (dict) --
The permissions to provide to the destination AWS account for the specified image.
allowFleet (boolean) --
Indicates whether the image can be used for a fleet.
allowImageBuilder (boolean) --
Indicates whether the image can be used for an image builder.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.InvalidParameterCombinationException
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'Images': [
{
'Name': 'string',
'Arn': 'string',
'BaseImageArn': 'string',
'DisplayName': 'string',
'State': 'PENDING'|'AVAILABLE'|'FAILED'|'COPYING'|'DELETING',
'Visibility': 'PUBLIC'|'PRIVATE'|'SHARED',
'ImageBuilderSupported': True|False,
'ImageBuilderName': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'Description': 'string',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_BUILDER_NOT_AVAILABLE'|'IMAGE_COPY_FAILURE',
'Message': 'string'
},
'Applications': [
{
'Name': 'string',
'DisplayName': 'string',
'IconURL': 'string',
'LaunchPath': 'string',
'LaunchParameters': 'string',
'Enabled': True|False,
'Metadata': {
'string': 'string'
}
},
],
'CreatedTime': datetime(2015, 1, 1),
'PublicBaseImageReleasedDate': datetime(2015, 1, 1),
'AppstreamAgentVersion': 'string',
'ImagePermissions': {
'allowFleet': True|False,
'allowImageBuilder': True|False
}
},
],
'NextToken': 'string'
}
:returns:
(string) --
(string) --
"""
pass
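# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that pages through all PRIVATE images and returns the names of
# those in the AVAILABLE state. Assumes boto3 and configured credentials.
def _example_list_available_private_images():
    import boto3
    client = boto3.client('appstream')
    names, token = [], None
    while True:
        kwargs = {'Type': 'PRIVATE'}
        if token:
            kwargs['NextToken'] = token
        page = client.describe_images(**kwargs)
        names.extend(image['Name'] for image in page.get('Images', [])
                     if image.get('State') == 'AVAILABLE')
        token = page.get('NextToken')
        if not token:
            break
    return names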
def describe_sessions(StackName=None, FleetName=None, UserId=None, NextToken=None, Limit=None, AuthenticationType=None):
"""
Retrieves a list that describes the streaming sessions for a specified stack and fleet. If a UserId is provided for the stack and fleet, only streaming sessions for that user are described. If an authentication type is not provided, the default is to authenticate users using a streaming URL.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_sessions(
StackName='string',
FleetName='string',
UserId='string',
NextToken='string',
Limit=123,
AuthenticationType='API'|'SAML'|'USERPOOL'
)
:type StackName: string
:param StackName: [REQUIRED]\nThe name of the stack. This value is case-sensitive.\n
:type FleetName: string
:param FleetName: [REQUIRED]\nThe name of the fleet. This value is case-sensitive.\n
:type UserId: string
:param UserId: The user identifier.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:type Limit: integer
:param Limit: The size of each page of results. The default value is 20 and the maximum value is 50.
:type AuthenticationType: string
:param AuthenticationType: The authentication method. Specify API for a user authenticated using a streaming URL or SAML for a SAML federated user. The default is to authenticate users using a streaming URL.
:rtype: dict
ReturnsResponse Syntax
{
'Sessions': [
{
'Id': 'string',
'UserId': 'string',
'StackName': 'string',
'FleetName': 'string',
'State': 'ACTIVE'|'PENDING'|'EXPIRED',
'ConnectionState': 'CONNECTED'|'NOT_CONNECTED',
'StartTime': datetime(2015, 1, 1),
'MaxExpirationTime': datetime(2015, 1, 1),
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
}
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Sessions (list) --
Information about the streaming sessions.
(dict) --
Describes a streaming session.
Id (string) --
The identifier of the streaming session.
UserId (string) --
The identifier of the user for whom the session was created.
StackName (string) --
The name of the stack for the streaming session.
FleetName (string) --
The name of the fleet for the streaming session.
State (string) --
The current state of the streaming session.
ConnectionState (string) --
Specifies whether a user is connected to the streaming session.
StartTime (datetime) --
The time when a streaming instance is dedicated for the user.
MaxExpirationTime (datetime) --
The time when the streaming session is set to expire. This time is based on the MaxUserDurationInSeconds value, which determines the maximum length of time that a streaming session can run. A streaming session might end earlier than the time specified in SessionMaxExpirationTime, when the DisconnectTimeOutInSeconds elapses or the user chooses to end the session. In either case, the streaming instance is terminated and the streaming session ends.
AuthenticationType (string) --
The authentication method. The user is authenticated using a streaming URL (API ) or SAML 2.0 federation (SAML ).
NetworkAccessConfiguration (dict) --
The network details for the streaming session.
EniPrivateIpAddress (string) --
The private IP address of the elastic network interface that is attached to instances in your VPC.
EniId (string) --
The resource identifier of the elastic network interface that is attached to instances in your VPC. All network interfaces have the eni-xxxxxxxx resource identifier.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.InvalidParameterCombinationException
:return: {
'Sessions': [
{
'Id': 'string',
'UserId': 'string',
'StackName': 'string',
'FleetName': 'string',
'State': 'ACTIVE'|'PENDING'|'EXPIRED',
'ConnectionState': 'CONNECTED'|'NOT_CONNECTED',
'StartTime': datetime(2015, 1, 1),
'MaxExpirationTime': datetime(2015, 1, 1),
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
}
},
],
'NextToken': 'string'
}
"""
pass
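# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that finds a SAML-federated user's sessions on a stack/fleet pair
# and ends each one via expire_session (documented later in this module).
# Stack, fleet, and user identifiers are caller-supplied; boto3 is assumed.
def _example_expire_user_sessions(stack_name, fleet_name, user_id):
    import boto3
    client = boto3.client('appstream')
    response = client.describe_sessions(
        StackName=stack_name, FleetName=fleet_name,
        UserId=user_id, AuthenticationType='SAML')
    for session in response.get('Sessions', []):
        client.expire_session(SessionId=session['Id'])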
def describe_stacks(Names=None, NextToken=None):
"""
Retrieves a list that describes one or more specified stacks, if the stack names are provided. Otherwise, all stacks in the account are described.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_stacks(
Names=[
'string',
],
NextToken='string'
)
:type Names: list
:param Names: The names of the stacks to describe.\n\n(string) --\n\n
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'Stacks': [
{
'Arn': 'string',
'Name': 'string',
'Description': 'string',
'DisplayName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'StorageConnectors': [
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
'RedirectURL': 'string',
'FeedbackURL': 'string',
'StackErrors': [
{
'ErrorCode': 'STORAGE_CONNECTOR_ERROR'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'UserSettings': [
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
'ApplicationSettings': {
'Enabled': True|False,
'SettingsGroup': 'string',
'S3BucketName': 'string'
},
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
'EmbedHostDomains': [
'string',
]
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Stacks (list) --
Information about the stacks.
(dict) --
Describes a stack.
Arn (string) --
The ARN of the stack.
Name (string) --
The name of the stack.
Description (string) --
The description to display.
DisplayName (string) --
The stack name to display.
CreatedTime (datetime) --
The time the stack was created.
StorageConnectors (list) --
The storage connectors to enable.
(dict) --
Describes a connector that enables persistent storage for users.
ConnectorType (string) --
The type of storage connector.
ResourceIdentifier (string) --
The ARN of the storage connector.
Domains (list) --
The names of the domains for the account.
(string) -- GSuite domain for GDrive integration.
RedirectURL (string) --
The URL that users are redirected to after their streaming session ends.
FeedbackURL (string) --
The URL that users are redirected to after they click the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
StackErrors (list) --
The errors for the stack.
(dict) --
Describes a stack error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
UserSettings (list) --
The actions that are enabled or disabled for users during their streaming sessions. By default these actions are enabled.
(dict) --
Describes an action and whether the action is enabled or disabled for users during their streaming sessions.
Action (string) --
The action that is enabled or disabled.
Permission (string) --
Indicates whether the action is enabled or disabled.
ApplicationSettings (dict) --
The persistent application settings for users of the stack.
Enabled (boolean) --
Specifies whether persistent application settings are enabled for users during their streaming sessions.
SettingsGroup (string) --
The path prefix for the S3 bucket where users’ persistent application settings are stored.
S3BucketName (string) --
The S3 bucket where users’ persistent application settings are stored. When persistent application settings are enabled for the first time for an account in an AWS Region, an S3 bucket is created. The bucket is unique to the AWS account and the Region.
AccessEndpoints (list) --
The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.
(dict) --
Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --
The type of interface endpoint.
VpceId (string) --
The identifier (ID) of the VPC in which the interface endpoint is used.
EmbedHostDomains (list) --
The domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.
(string) -- Specifies a valid domain that can embed AppStream. Valid examples include: ["testorigin.tt--com", "testingorigin.com.us", "test.com.us"] Invalid examples include: ["test,com", ".com", "h*llo.com", ""]
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'Stacks': [
{
'Arn': 'string',
'Name': 'string',
'Description': 'string',
'DisplayName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'StorageConnectors': [
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
'RedirectURL': 'string',
'FeedbackURL': 'string',
'StackErrors': [
{
'ErrorCode': 'STORAGE_CONNECTOR_ERROR'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'UserSettings': [
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
'ApplicationSettings': {
'Enabled': True|False,
'SettingsGroup': 'string',
'S3BucketName': 'string'
},
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
'EmbedHostDomains': [
'string',
]
},
],
'NextToken': 'string'
}
:returns:
(string) -- GSuite domain for GDrive integration.
"""
pass
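# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that prints the per-action clipboard/file-transfer/printing
# permissions for one stack, using the UserSettings list documented above.
def _example_show_stack_user_settings(stack_name):
    import boto3
    client = boto3.client('appstream')
    stacks = client.describe_stacks(Names=[stack_name]).get('Stacks', [])
    for stack in stacks:
        for setting in stack.get('UserSettings', []):
            print(setting['Action'], setting['Permission'])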
def describe_usage_report_subscriptions(MaxResults=None, NextToken=None):
"""
Retrieves a list that describes one or more usage report subscriptions.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_usage_report_subscriptions(
MaxResults=123,
NextToken='string'
)
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'UsageReportSubscriptions': [
{
'S3BucketName': 'string',
'Schedule': 'DAILY',
'LastGeneratedReportDate': datetime(2015, 1, 1),
'SubscriptionErrors': [
{
'ErrorCode': 'RESOURCE_NOT_FOUND'|'ACCESS_DENIED'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
]
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
UsageReportSubscriptions (list) --
Information about the usage report subscription.
(dict) --
Describes information about the usage report subscription.
S3BucketName (string) --
The Amazon S3 bucket where generated reports are stored.
If you enabled on-instance session scripts and Amazon S3 logging for your session script configuration, AppStream 2.0 created an S3 bucket to store the script output. The bucket is unique to your account and Region. When you enable usage reporting in this case, AppStream 2.0 uses the same bucket to store your usage reports. If you haven\'t already enabled on-instance session scripts, when you enable usage reports, AppStream 2.0 creates a new S3 bucket.
Schedule (string) --
The schedule for generating usage reports.
LastGeneratedReportDate (datetime) --
The time when the last usage report was generated.
SubscriptionErrors (list) --
The errors that were returned if usage reports couldn\'t be generated.
(dict) --
Describes the error that is returned when a usage report can\'t be generated.
ErrorCode (string) --
The error code for the error that is returned when a usage report can\'t be generated.
ErrorMessage (string) --
The error message for the error that is returned when a usage report can\'t be generated.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.InvalidAccountStatusException
:return: {
'UsageReportSubscriptions': [
{
'S3BucketName': 'string',
'Schedule': 'DAILY',
'LastGeneratedReportDate': datetime(2015, 1, 1),
'SubscriptionErrors': [
{
'ErrorCode': 'RESOURCE_NOT_FOUND'|'ACCESS_DENIED'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
]
},
],
'NextToken': 'string'
}
"""
pass
def describe_user_stack_associations(StackName=None, UserName=None, AuthenticationType=None, MaxResults=None, NextToken=None):
"""
Retrieves a list that describes the UserStackAssociation objects. You must specify either or both of the following: the stack name, or the user name (the email address of the user who is associated with the stack) and the authentication type for the user.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_user_stack_associations(
StackName='string',
UserName='string',
AuthenticationType='API'|'SAML'|'USERPOOL',
MaxResults=123,
NextToken='string'
)
:type StackName: string
:param StackName: The name of the stack that is associated with the user.
:type UserName: string
:param UserName: The email address of the user who is associated with the stack.\n\nNote\nUsers\' email addresses are case-sensitive.\n\n
:type AuthenticationType: string
:param AuthenticationType: The authentication type for the user who is associated with the stack. You must specify USERPOOL.
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'UserStackAssociations': [
{
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
UserStackAssociations (list) --
The UserStackAssociation objects.
(dict) --
Describes a user in the user pool and the associated stack.
StackName (string) --
The name of the stack that is associated with the user.
UserName (string) --
The email address of the user who is associated with the stack.
Note
Users\' email addresses are case-sensitive.
AuthenticationType (string) --
The authentication type for the user.
SendEmailNotification (boolean) --
Specifies whether a welcome email is sent to a user after the user is created in the user pool.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.InvalidParameterCombinationException
:return: {
'UserStackAssociations': [
{
'StackName': 'string',
'UserName': 'string',
'AuthenticationType': 'API'|'SAML'|'USERPOOL',
'SendEmailNotification': True|False
},
],
'NextToken': 'string'
}
"""
pass
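# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch of the "stack name only" form of describe_user_stack_associations:
# it returns the email addresses of all users associated with one stack.
def _example_users_for_stack(stack_name):
    import boto3
    client = boto3.client('appstream')
    response = client.describe_user_stack_associations(StackName=stack_name)
    return [assoc['UserName']
            for assoc in response.get('UserStackAssociations', [])]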
def describe_users(AuthenticationType=None, MaxResults=None, NextToken=None):
"""
Retrieves a list that describes one or more specified users in the user pool.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_users(
AuthenticationType='API'|'SAML'|'USERPOOL',
MaxResults=123,
NextToken='string'
)
:type AuthenticationType: string
:param AuthenticationType: [REQUIRED]\nThe authentication type for the users in the user pool to describe. You must specify USERPOOL.\n
:type MaxResults: integer
:param MaxResults: The maximum size of each page of results.
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'Users': [
{
'Arn': 'string',
'UserName': 'string',
'Enabled': True|False,
'Status': 'string',
'FirstName': 'string',
'LastName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'AuthenticationType': 'API'|'SAML'|'USERPOOL'
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Users (list) --
Information about users in the user pool.
(dict) --
Describes a user in the user pool.
Arn (string) --
The ARN of the user.
UserName (string) --
The email address of the user.
Note
Users\' email addresses are case-sensitive.
Enabled (boolean) --
Specifies whether the user in the user pool is enabled.
Status (string) --
The status of the user in the user pool. The status can be one of the following:
UNCONFIRMED – The user is created but not confirmed.
CONFIRMED – The user is confirmed.
ARCHIVED – The user is no longer active.
COMPROMISED – The user is disabled because of a potential security threat.
UNKNOWN – The user status is not known.
FirstName (string) --
The first name, or given name, of the user.
LastName (string) --
The last name, or surname, of the user.
CreatedTime (datetime) --
The date and time the user was created in the user pool.
AuthenticationType (string) --
The authentication type for the user.
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.InvalidParameterCombinationException
:return: {
'Users': [
{
'Arn': 'string',
'UserName': 'string',
'Enabled': True|False,
'Status': 'string',
'FirstName': 'string',
'LastName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'AuthenticationType': 'API'|'SAML'|'USERPOOL'
},
],
'NextToken': 'string'
}
"""
pass
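# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that pages through the user pool (AuthenticationType is required
# and must be USERPOOL) and collects users whose Status is CONFIRMED.
def _example_list_confirmed_userpool_users():
    import boto3
    client = boto3.client('appstream')
    users, token = [], None
    while True:
        kwargs = {'AuthenticationType': 'USERPOOL'}
        if token:
            kwargs['NextToken'] = token
        page = client.describe_users(**kwargs)
        users.extend(u for u in page.get('Users', [])
                     if u.get('Status') == 'CONFIRMED')
        token = page.get('NextToken')
        if not token:
            break
    return users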
def disable_user(UserName=None, AuthenticationType=None):
"""
Disables the specified user in the user pool. Users can\'t sign in to AppStream 2.0 until they are re-enabled. This action does not delete the user.
See also: AWS API Documentation
Exceptions
:example: response = client.disable_user(
UserName='string',
AuthenticationType='API'|'SAML'|'USERPOOL'
)
:type UserName: string
:param UserName: [REQUIRED]\nThe email address of the user.\n\nNote\nUsers\' email addresses are case-sensitive.\n\n
:type AuthenticationType: string
:param AuthenticationType: [REQUIRED]\nThe authentication type for the user. You must specify USERPOOL.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
def disassociate_fleet(FleetName=None, StackName=None):
"""
Disassociates the specified fleet from the specified stack.
See also: AWS API Documentation
Exceptions
:example: response = client.disassociate_fleet(
FleetName='string',
StackName='string'
)
:type FleetName: string
:param FleetName: [REQUIRED]\nThe name of the fleet.\n
:type StackName: string
:param StackName: [REQUIRED]\nThe name of the stack.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {}
:returns:
(dict) --
"""
pass
def enable_user(UserName=None, AuthenticationType=None):
"""
Enables a user in the user pool. After being enabled, users can sign in to AppStream 2.0 and open applications from the stacks to which they are assigned.
See also: AWS API Documentation
Exceptions
:example: response = client.enable_user(
UserName='string',
AuthenticationType='API'|'SAML'|'USERPOOL'
)
:type UserName: string
:param UserName: [REQUIRED]\nThe email address of the user.\n\nNote\nUsers\' email addresses are case-sensitive. During login, if they specify an email address that doesn\'t use the same capitalization as the email address specified when their user pool account was created, a 'user does not exist' error message is displayed.\n\n
:type AuthenticationType: string
:param AuthenticationType: [REQUIRED]\nThe authentication type for the user. You must specify USERPOOL.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.InvalidAccountStatusException
:return: {}
:returns:
(dict) --
"""
pass
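# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that toggles a user pool account with enable_user/disable_user.
# Remember that UserName is the email address and is case-sensitive.
def _example_set_user_enabled(user_email, enabled):
    import boto3
    client = boto3.client('appstream')
    if enabled:
        client.enable_user(UserName=user_email, AuthenticationType='USERPOOL')
    else:
        client.disable_user(UserName=user_email, AuthenticationType='USERPOOL')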
def expire_session(SessionId=None):
"""
Immediately stops the specified streaming session.
See also: AWS API Documentation
:example: response = client.expire_session(
SessionId='string'
)
:type SessionId: string
:param SessionId: [REQUIRED]\nThe identifier of the streaming session.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
:return: {}
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to\nClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid\nfor. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By\ndefault, the http method is whatever is used in the method\'s model.
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), then, if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
Returns a paginator object.
"""
pass
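# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch using a botocore paginator instead of a manual NextToken loop.
# It assumes describe_images is a paginatable operation on this client; if
# unsure, check client.can_paginate('describe_images') first.
def _example_paginate_private_images():
    import boto3
    client = boto3.client('appstream')
    paginator = client.get_paginator('describe_images')
    names = []
    for page in paginator.paginate(Type='PRIVATE'):
        names.extend(image['Name'] for image in page.get('Images', []))
    return names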
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters\nsection of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
"""
pass
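# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that blocks until a fleet reaches the RUNNING state. 'fleet_started'
# is the AppStream waiter name at the time of writing; client.waiter_names
# lists what is actually available in your installed botocore version.
def _example_wait_until_fleet_started(fleet_name):
    import boto3
    client = boto3.client('appstream')
    waiter = client.get_waiter('fleet_started')
    waiter.wait(Names=[fleet_name])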
def list_associated_fleets(StackName=None, NextToken=None):
"""
Retrieves the name of the fleet that is associated with the specified stack.
See also: AWS API Documentation
:example: response = client.list_associated_fleets(
StackName='string',
NextToken='string'
)
:type StackName: string
:param StackName: [REQUIRED]\nThe name of the stack.\n
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'Names': [
'string',
],
'NextToken': 'string'
}
Response Structure
(dict) --
Names (list) --
The name of the fleet.
(string) --
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
:return: {
'Names': [
'string',
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
def list_associated_stacks(FleetName=None, NextToken=None):
"""
Retrieves the name of the stack with which the specified fleet is associated.
See also: AWS API Documentation
:example: response = client.list_associated_stacks(
FleetName='string',
NextToken='string'
)
:type FleetName: string
:param FleetName: [REQUIRED]\nThe name of the fleet.\n
:type NextToken: string
:param NextToken: The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
:rtype: dict
ReturnsResponse Syntax
{
'Names': [
'string',
],
'NextToken': 'string'
}
Response Structure
(dict) --
Names (list) --
The name of the stack.
(string) --
NextToken (string) --
The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
:return: {
'Names': [
'string',
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
def list_tags_for_resource(ResourceArn=None):
"""
Retrieves a list of all tags for the specified AppStream 2.0 resource. You can tag AppStream 2.0 image builders, images, fleets, and stacks.
For more information about tags, see Tagging Your Resources in the Amazon AppStream 2.0 Administration Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.list_tags_for_resource(
ResourceArn='string'
)
:type ResourceArn: string
:param ResourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource.\n
:rtype: dict
ReturnsResponse Syntax
{
'Tags': {
'string': 'string'
}
}
Response Structure
(dict) --
Tags (dict) -- The information about the tags.
(string) --
(string) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {
'Tags': {
'string': 'string'
}
}
"""
pass
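# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that prints the tags on any taggable AppStream 2.0 resource
# (image builder, image, fleet, or stack), given its ARN.
def _example_print_resource_tags(resource_arn):
    import boto3
    client = boto3.client('appstream')
    tags = client.list_tags_for_resource(ResourceArn=resource_arn).get('Tags', {})
    for key, value in sorted(tags.items()):
        print('%s=%s' % (key, value))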
def start_fleet(Name=None):
"""
Starts the specified fleet.
See also: AWS API Documentation
Exceptions
:example: response = client.start_fleet(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the fleet.\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.InvalidRoleException
:return: {}
"""
pass
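# --- Editor's note: illustrative example, not part of the generated stubs. ---
# A sketch that starts a fleet and then blocks until it is running, reusing
# the assumed 'fleet_started' waiter shown earlier.
def _example_start_fleet_and_wait(fleet_name):
    import boto3
    client = boto3.client('appstream')
    client.start_fleet(Name=fleet_name)
    client.get_waiter('fleet_started').wait(Names=[fleet_name])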
def start_image_builder(Name=None, AppstreamAgentVersion=None):
"""
Starts the specified image builder.
See also: AWS API Documentation
Exceptions
:example: response = client.start_image_builder(
Name='string',
AppstreamAgentVersion='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the image builder.\n
:type AppstreamAgentVersion: string
:param AppstreamAgentVersion: The version of the AppStream 2.0 agent to use for this image builder. To use the latest version of the AppStream 2.0 agent, specify [LATEST].
:rtype: dict
Returns
Response Syntax
{
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
Response Structure
(dict) --
ImageBuilder (dict) --
Information about the image builder.
Name (string) --
The name of the image builder.
Arn (string) --
The ARN for the image builder.
ImageArn (string) --
The ARN of the image from which this builder was created.
Description (string) --
The description to display.
DisplayName (string) --
The image builder name to display.
VpcConfig (dict) --
The VPC configuration of the image builder.
SubnetIds (list) --
The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --
The identifiers of the security groups for the fleet or image builder.
(string) --
InstanceType (string) --
The instance type for the image builder. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
Platform (string) --
The operating system platform of the image builder.
IamRoleArn (string) --
The ARN of the IAM role that is applied to the image builder. To assume a role, the image builder calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
State (string) --
The state of the image builder.
StateChangeReason (dict) --
The reason why the last state change occurred.
Code (string) --
The state change reason code.
Message (string) --
The state change reason message.
CreatedTime (datetime) --
The time stamp when the image builder was created.
EnableDefaultInternetAccess (boolean) --
Enables or disables default internet access for the image builder.
DomainJoinInfo (dict) --
The name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --
The distinguished name of the organizational unit for computer accounts.
NetworkAccessConfiguration (dict) --
Describes the network details of the fleet or image builder instance.
EniPrivateIpAddress (string) --
The private IP address of the elastic network interface that is attached to instances in your VPC.
EniId (string) --
The resource identifier of the elastic network interface that is attached to instances in your VPC. All network interfaces have the eni-xxxxxxxx resource identifier.
ImageBuilderErrors (list) --
The image builder errors.
(dict) --
Describes a resource error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
ErrorTimestamp (datetime) --
The time the error occurred.
AppstreamAgentVersion (string) --
The version of the AppStream 2.0 agent that is currently being used by the image builder.
AccessEndpoints (list) --
The list of virtual private cloud (VPC) interface endpoint objects. Administrators can connect to the image builder only through the specified endpoints.
(dict) --
Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --
The type of interface endpoint.
VpceId (string) --
The identifier (ID) of the VPC in which the interface endpoint is used.
Exceptions
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.IncompatibleImageException
:return: {
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
:returns:
(string) --
"""
pass
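# Illustrative sketch: start an image builder on the newest agent and report
# its state. Per the AppstreamAgentVersion note above, the literal string
# 'LATEST' selects the latest agent; the builder name is a placeholder.
def _example_start_image_builder(client, builder_name):
    response = client.start_image_builder(
        Name=builder_name,
        AppstreamAgentVersion='LATEST',
    )
    builder = response['ImageBuilder']
    print(f"{builder['Name']} -> {builder['State']}")  # typically PENDING at first
    return builder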
def stop_fleet(Name=None):
"""
Stops the specified fleet.
See also: AWS API Documentation
Exceptions
:example: response = client.stop_fleet(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the fleet.\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {}
:returns:
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
"""
pass
def stop_image_builder(Name=None):
"""
Stops the specified image builder.
See also: AWS API Documentation
Exceptions
:example: response = client.stop_image_builder(
Name='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the image builder.\n
:rtype: dict
Returns
Response Syntax
{
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
Response Structure
(dict) --
ImageBuilder (dict) --Information about the image builder.
Name (string) --The name of the image builder.
Arn (string) --The ARN for the image builder.
ImageArn (string) --The ARN of the image from which this builder was created.
Description (string) --The description to display.
DisplayName (string) --The image builder name to display.
VpcConfig (dict) --The VPC configuration of the image builder.
SubnetIds (list) --The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --The identifiers of the security groups for the fleet or image builder.
(string) --
InstanceType (string) --The instance type for the image builder. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
Platform (string) --The operating system platform of the image builder.
IamRoleArn (string) --The ARN of the IAM role that is applied to the image builder. To assume a role, the image builder calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
State (string) --The state of the image builder.
StateChangeReason (dict) --The reason why the last state change occurred.
Code (string) --The state change reason code.
Message (string) --The state change reason message.
CreatedTime (datetime) --The time stamp when the image builder was created.
EnableDefaultInternetAccess (boolean) --Enables or disables default internet access for the image builder.
DomainJoinInfo (dict) --The name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain.
DirectoryName (string) --The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --The distinguished name of the organizational unit for computer accounts.
NetworkAccessConfiguration (dict) --Describes the network details of the fleet or image builder instance.
EniPrivateIpAddress (string) --The private IP address of the elastic network interface that is attached to instances in your VPC.
EniId (string) --The resource identifier of the elastic network interface that is attached to instances in your VPC. All network interfaces have the eni-xxxxxxxx resource identifier.
ImageBuilderErrors (list) --The image builder errors.
(dict) --Describes a resource error.
ErrorCode (string) --The error code.
ErrorMessage (string) --The error message.
ErrorTimestamp (datetime) --The time the error occurred.
AppstreamAgentVersion (string) --The version of the AppStream 2.0 agent that is currently being used by the image builder.
AccessEndpoints (list) --The list of virtual private cloud (VPC) interface endpoint objects. Administrators can connect to the image builder only through the specified endpoints.
(dict) --Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --The type of interface endpoint.
VpceId (string) --The identifier (ID) of the VPC in which the interface endpoint is used.
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {
'ImageBuilder': {
'Name': 'string',
'Arn': 'string',
'ImageArn': 'string',
'Description': 'string',
'DisplayName': 'string',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'InstanceType': 'string',
'Platform': 'WINDOWS'|'WINDOWS_SERVER_2016'|'WINDOWS_SERVER_2019',
'IamRoleArn': 'string',
'State': 'PENDING'|'UPDATING_AGENT'|'RUNNING'|'STOPPING'|'STOPPED'|'REBOOTING'|'SNAPSHOTTING'|'DELETING'|'FAILED',
'StateChangeReason': {
'Code': 'INTERNAL_ERROR'|'IMAGE_UNAVAILABLE',
'Message': 'string'
},
'CreatedTime': datetime(2015, 1, 1),
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'NetworkAccessConfiguration': {
'EniPrivateIpAddress': 'string',
'EniId': 'string'
},
'ImageBuilderErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string',
'ErrorTimestamp': datetime(2015, 1, 1)
},
],
'AppstreamAgentVersion': 'string',
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
]
}
}
:returns:
(string) --
"""
pass
def tag_resource(ResourceArn=None, Tags=None):
"""
Adds or overwrites one or more tags for the specified AppStream 2.0 resource. You can tag AppStream 2.0 image builders, images, fleets, and stacks.
Each tag consists of a key and an optional value. If a resource already has a tag with the same key, this operation updates its value.
To list the current tags for your resources, use ListTagsForResource . To disassociate tags from your resources, use UntagResource .
For more information about tags, see Tagging Your Resources in the Amazon AppStream 2.0 Administration Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.tag_resource(
ResourceArn='string',
Tags={
'string': 'string'
}
)
:type ResourceArn: string
:param ResourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource.\n
:type Tags: dict
:param Tags: [REQUIRED]\nThe tags to associate. A tag is a key-value pair, and the value is optional. For example, Environment=Test. If you do not specify a value, the tag is stored as Environment= and the value is set to an empty string.\nGenerally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following special characters:\n_ . : / = + - @\n\n(string) --\n(string) --\n\n\n\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
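# Illustrative sketch: attach two tags to a resource. Re-tagging an existing
# key overwrites its value, as described above; the key/value strings are
# placeholders.
def _example_tag_resource(client, resource_arn):
    client.tag_resource(
        ResourceArn=resource_arn,
        Tags={'Environment': 'Test', 'Owner': 'platform-team'},
    )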
def untag_resource(ResourceArn=None, TagKeys=None):
"""
Disassociates one or more specified tags from the specified AppStream 2.0 resource.
To list the current tags for your resources, use ListTagsForResource .
For more information about tags, see Tagging Your Resources in the Amazon AppStream 2.0 Administration Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.untag_resource(
ResourceArn='string',
TagKeys=[
'string',
]
)
:type ResourceArn: string
:param ResourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource.\n
:type TagKeys: list
:param TagKeys: [REQUIRED]\nThe tag keys for the tags to disassociate.\n\n(string) --\n\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
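# Illustrative sketch: disassociate tags by key. Only the keys are passed;
# tag values are never needed for untagging.
def _example_untag_resource(client, resource_arn, keys=('Environment',)):
    client.untag_resource(ResourceArn=resource_arn, TagKeys=list(keys))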
def update_directory_config(DirectoryName=None, OrganizationalUnitDistinguishedNames=None, ServiceAccountCredentials=None):
"""
Updates the specified Directory Config object in AppStream 2.0. This object includes the configuration information required to join fleets and image builders to Microsoft Active Directory domains.
See also: AWS API Documentation
Exceptions
:example: response = client.update_directory_config(
DirectoryName='string',
OrganizationalUnitDistinguishedNames=[
'string',
],
ServiceAccountCredentials={
'AccountName': 'string',
'AccountPassword': 'string'
}
)
:type DirectoryName: string
:param DirectoryName: [REQUIRED]\nThe name of the Directory Config object.\n
:type OrganizationalUnitDistinguishedNames: list
:param OrganizationalUnitDistinguishedNames: The distinguished names of the organizational units for computer accounts.\n\n(string) --\n\n
:type ServiceAccountCredentials: dict
:param ServiceAccountCredentials: The credentials for the service account used by the fleet or image builder to connect to the directory.\n\nAccountName (string) -- [REQUIRED]The user name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified.\n\nAccountPassword (string) -- [REQUIRED]The password for the account.\n\n\n
:rtype: dict
Returns
Response Syntax
{
'DirectoryConfig': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedNames': [
'string',
],
'ServiceAccountCredentials': {
'AccountName': 'string',
'AccountPassword': 'string'
},
'CreatedTime': datetime(2015, 1, 1)
}
}
Response Structure
(dict) --
DirectoryConfig (dict) --
Information about the Directory Config object.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedNames (list) --
The distinguished names of the organizational units for computer accounts.
(string) --
ServiceAccountCredentials (dict) --
The credentials for the service account used by the fleet or image builder to connect to the directory.
AccountName (string) --
The user name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified.
AccountPassword (string) --
The password for the account.
CreatedTime (datetime) --
The time the directory configuration was created.
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {
'DirectoryConfig': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedNames': [
'string',
],
'ServiceAccountCredentials': {
'AccountName': 'string',
'AccountPassword': 'string'
},
'CreatedTime': datetime(2015, 1, 1)
}
}
:returns:
(string) --
"""
pass
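# Illustrative sketch: rotate the service-account credentials on a Directory
# Config. The directory and OU names are placeholders, and the password is
# assumed to come from a secrets store rather than source code.
def _example_update_directory_config(client, password):
    response = client.update_directory_config(
        DirectoryName='corp.example.com',
        OrganizationalUnitDistinguishedNames=[
            'OU=AppStream,DC=corp,DC=example,DC=com',
        ],
        ServiceAccountCredentials={
            'AccountName': 'CORP\\appstream-svc',
            'AccountPassword': password,
        },
    )
    return response['DirectoryConfig']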
def update_fleet(ImageName=None, ImageArn=None, Name=None, InstanceType=None, ComputeCapacity=None, VpcConfig=None, MaxUserDurationInSeconds=None, DisconnectTimeoutInSeconds=None, DeleteVpcConfig=None, Description=None, DisplayName=None, EnableDefaultInternetAccess=None, DomainJoinInfo=None, IdleDisconnectTimeoutInSeconds=None, AttributesToDelete=None, IamRoleArn=None):
"""
Updates the specified fleet.
If the fleet is in the STOPPED state, you can update any attribute except the fleet name. If the fleet is in the RUNNING state, you can update the DisplayName , ComputeCapacity , ImageARN , ImageName , IdleDisconnectTimeoutInSeconds , and DisconnectTimeoutInSeconds attributes. If the fleet is in the STARTING or STOPPING state, you can\'t update it.
See also: AWS API Documentation
Exceptions
:example: response = client.update_fleet(
ImageName='string',
ImageArn='string',
Name='string',
InstanceType='string',
ComputeCapacity={
'DesiredInstances': 123
},
VpcConfig={
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
MaxUserDurationInSeconds=123,
DisconnectTimeoutInSeconds=123,
DeleteVpcConfig=True|False,
Description='string',
DisplayName='string',
EnableDefaultInternetAccess=True|False,
DomainJoinInfo={
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
IdleDisconnectTimeoutInSeconds=123,
AttributesToDelete=[
'VPC_CONFIGURATION'|'VPC_CONFIGURATION_SECURITY_GROUP_IDS'|'DOMAIN_JOIN_INFO'|'IAM_ROLE_ARN',
],
IamRoleArn='string'
)
:type ImageName: string
:param ImageName: The name of the image used to create the fleet.
:type ImageArn: string
:param ImageArn: The ARN of the public, private, or shared image to use.
:type Name: string
:param Name: A unique name for the fleet.
:type InstanceType: string
:param InstanceType: The instance type to use when launching fleet instances. The following instance types are available:\n\nstream.standard.medium\nstream.standard.large\nstream.compute.large\nstream.compute.xlarge\nstream.compute.2xlarge\nstream.compute.4xlarge\nstream.compute.8xlarge\nstream.memory.large\nstream.memory.xlarge\nstream.memory.2xlarge\nstream.memory.4xlarge\nstream.memory.8xlarge\nstream.graphics-design.large\nstream.graphics-design.xlarge\nstream.graphics-design.2xlarge\nstream.graphics-design.4xlarge\nstream.graphics-desktop.2xlarge\nstream.graphics-pro.4xlarge\nstream.graphics-pro.8xlarge\nstream.graphics-pro.16xlarge\n\n
:type ComputeCapacity: dict
:param ComputeCapacity: The desired capacity for the fleet.\n\nDesiredInstances (integer) -- [REQUIRED]The desired number of streaming instances.\n\n\n
:type VpcConfig: dict
:param VpcConfig: The VPC configuration for the fleet.\n\nSubnetIds (list) --The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.\n\n(string) --\n\n\nSecurityGroupIds (list) --The identifiers of the security groups for the fleet or image builder.\n\n(string) --\n\n\n\n
:type MaxUserDurationInSeconds: integer
:param MaxUserDurationInSeconds: The maximum amount of time that a streaming session can remain active, in seconds. If users are still connected to a streaming instance five minutes before this limit is reached, they are prompted to save any open documents before being disconnected. After this time elapses, the instance is terminated and replaced by a new instance.\nSpecify a value between 600 and 360000.\n
:type DisconnectTimeoutInSeconds: integer
:param DisconnectTimeoutInSeconds: The amount of time that a streaming session remains active after users disconnect. If users try to reconnect to the streaming session after a disconnection or network interruption within this time interval, they are connected to their previous session. Otherwise, they are connected to a new session with a new streaming instance.\nSpecify a value between 60 and 360000.\n
:type DeleteVpcConfig: boolean
:param DeleteVpcConfig: Deletes the VPC association for the specified fleet.
:type Description: string
:param Description: The description to display.
:type DisplayName: string
:param DisplayName: The fleet name to display.
:type EnableDefaultInternetAccess: boolean
:param EnableDefaultInternetAccess: Enables or disables default internet access for the fleet.
:type DomainJoinInfo: dict
:param DomainJoinInfo: The name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain.\n\nDirectoryName (string) --The fully qualified name of the directory (for example, corp.example.com).\n\nOrganizationalUnitDistinguishedName (string) --The distinguished name of the organizational unit for computer accounts.\n\n\n
:type IdleDisconnectTimeoutInSeconds: integer
:param IdleDisconnectTimeoutInSeconds: The amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the DisconnectTimeoutInSeconds time interval begins. Users are notified before they are disconnected due to inactivity. If users try to reconnect to the streaming session before the time interval specified in DisconnectTimeoutInSeconds elapses, they are connected to their previous session. Users are considered idle when they stop providing keyboard or mouse input during their streaming session. File uploads and downloads, audio in, audio out, and pixels changing do not qualify as user activity. If users continue to be idle after the time interval in IdleDisconnectTimeoutInSeconds elapses, they are disconnected.\nTo prevent users from being disconnected due to inactivity, specify a value of 0. Otherwise, specify a value between 60 and 3600. The default value is 0.\n\nNote\nIf you enable this feature, we recommend that you specify a value that corresponds exactly to a whole number of minutes (for example, 60, 120, and 180). If you don\'t do this, the value is rounded to the nearest minute. For example, if you specify a value of 70, users are disconnected after 1 minute of inactivity. If you specify a value that is at the midpoint between two different minutes, the value is rounded up. For example, if you specify a value of 90, users are disconnected after 2 minutes of inactivity.\n\n
:type AttributesToDelete: list
:param AttributesToDelete: The fleet attributes to delete.\n\n(string) --The fleet attribute.\n\n\n
:type IamRoleArn: string
:param IamRoleArn: The Amazon Resource Name (ARN) of the IAM role to apply to the fleet. To assume a role, a fleet instance calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.\nFor more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .\n
:rtype: dict
Returns
Response Syntax
{
'Fleet': {
'Arn': 'string',
'Name': 'string',
'DisplayName': 'string',
'Description': 'string',
'ImageName': 'string',
'ImageArn': 'string',
'InstanceType': 'string',
'FleetType': 'ALWAYS_ON'|'ON_DEMAND',
'ComputeCapacityStatus': {
'Desired': 123,
'Running': 123,
'InUse': 123,
'Available': 123
},
'MaxUserDurationInSeconds': 123,
'DisconnectTimeoutInSeconds': 123,
'State': 'STARTING'|'RUNNING'|'STOPPING'|'STOPPED',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'CreatedTime': datetime(2015, 1, 1),
'FleetErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'IdleDisconnectTimeoutInSeconds': 123,
'IamRoleArn': 'string'
}
}
Response Structure
(dict) --
Fleet (dict) --
Information about the fleet.
Arn (string) --
The Amazon Resource Name (ARN) for the fleet.
Name (string) --
The name of the fleet.
DisplayName (string) --
The fleet name to display.
Description (string) --
The description to display.
ImageName (string) --
The name of the image used to create the fleet.
ImageArn (string) --
The ARN for the public, private, or shared image.
InstanceType (string) --
The instance type to use when launching fleet instances. The following instance types are available:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
FleetType (string) --
The fleet type.
ALWAYS_ON
Provides users with instant-on access to their apps. You are charged for all running instances in your fleet, even if no users are streaming apps.
ON_DEMAND
Provides users with access to applications after they connect, which takes one to two minutes. You are charged for instance streaming when users are connected and a small hourly fee for instances that are not streaming apps.
ComputeCapacityStatus (dict) --
The capacity status for the fleet.
Desired (integer) --
The desired number of streaming instances.
Running (integer) --
The total number of simultaneous streaming instances that are running.
InUse (integer) --
The number of instances in use for streaming.
Available (integer) --
The number of currently available instances that can be used to stream sessions.
MaxUserDurationInSeconds (integer) --
The maximum amount of time that a streaming session can remain active, in seconds. If users are still connected to a streaming instance five minutes before this limit is reached, they are prompted to save any open documents before being disconnected. After this time elapses, the instance is terminated and replaced by a new instance.
Specify a value between 600 and 360000.
DisconnectTimeoutInSeconds (integer) --
The amount of time that a streaming session remains active after users disconnect. If they try to reconnect to the streaming session after a disconnection or network interruption within this time interval, they are connected to their previous session. Otherwise, they are connected to a new session with a new streaming instance.
Specify a value between 60 and 360000.
State (string) --
The current state for the fleet.
VpcConfig (dict) --
The VPC configuration for the fleet.
SubnetIds (list) --
The identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. Fleet instances use one or more subnets. Image builder instances use one subnet.
(string) --
SecurityGroupIds (list) --
The identifiers of the security groups for the fleet or image builder.
(string) --
CreatedTime (datetime) --
The time the fleet was created.
FleetErrors (list) --
The fleet errors.
(dict) --
Describes a fleet error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
EnableDefaultInternetAccess (boolean) --
Indicates whether default internet access is enabled for the fleet.
DomainJoinInfo (dict) --
The name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain.
DirectoryName (string) --
The fully qualified name of the directory (for example, corp.example.com).
OrganizationalUnitDistinguishedName (string) --
The distinguished name of the organizational unit for computer accounts.
IdleDisconnectTimeoutInSeconds (integer) --
The amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the DisconnectTimeoutInSeconds time interval begins. Users are notified before they are disconnected due to inactivity. If users try to reconnect to the streaming session before the time interval specified in DisconnectTimeoutInSeconds elapses, they are connected to their previous session. Users are considered idle when they stop providing keyboard or mouse input during their streaming session. File uploads and downloads, audio in, audio out, and pixels changing do not qualify as user activity. If users continue to be idle after the time interval in IdleDisconnectTimeoutInSeconds elapses, they are disconnected.
To prevent users from being disconnected due to inactivity, specify a value of 0. Otherwise, specify a value between 60 and 3600. The default value is 0.
Note
If you enable this feature, we recommend that you specify a value that corresponds exactly to a whole number of minutes (for example, 60, 120, and 180). If you don\'t do this, the value is rounded to the nearest minute. For example, if you specify a value of 70, users are disconnected after 1 minute of inactivity. If you specify a value that is at the midpoint between two different minutes, the value is rounded up. For example, if you specify a value of 90, users are disconnected after 2 minutes of inactivity.
IamRoleArn (string) --
The ARN of the IAM role that is applied to the fleet. To assume a role, the fleet instance calls the AWS Security Token Service (STS) AssumeRole API operation and passes the ARN of the role to use. The operation creates a new session with temporary credentials. AppStream 2.0 retrieves the temporary credentials and creates the AppStream_Machine_Role credential profile on the instance.
For more information, see Using an IAM Role to Grant Permissions to Applications and Scripts Running on AppStream 2.0 Streaming Instances in the Amazon AppStream 2.0 Administration Guide .
Exceptions
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.InvalidRoleException
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.InvalidParameterCombinationException
AppStream.Client.exceptions.ConcurrentModificationException
AppStream.Client.exceptions.IncompatibleImageException
AppStream.Client.exceptions.OperationNotPermittedException
:return: {
'Fleet': {
'Arn': 'string',
'Name': 'string',
'DisplayName': 'string',
'Description': 'string',
'ImageName': 'string',
'ImageArn': 'string',
'InstanceType': 'string',
'FleetType': 'ALWAYS_ON'|'ON_DEMAND',
'ComputeCapacityStatus': {
'Desired': 123,
'Running': 123,
'InUse': 123,
'Available': 123
},
'MaxUserDurationInSeconds': 123,
'DisconnectTimeoutInSeconds': 123,
'State': 'STARTING'|'RUNNING'|'STOPPING'|'STOPPED',
'VpcConfig': {
'SubnetIds': [
'string',
],
'SecurityGroupIds': [
'string',
]
},
'CreatedTime': datetime(2015, 1, 1),
'FleetErrors': [
{
'ErrorCode': 'IAM_SERVICE_ROLE_MISSING_ENI_DESCRIBE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_CREATE_ACTION'|'IAM_SERVICE_ROLE_MISSING_ENI_DELETE_ACTION'|'NETWORK_INTERFACE_LIMIT_EXCEEDED'|'INTERNAL_SERVICE_ERROR'|'IAM_SERVICE_ROLE_IS_MISSING'|'MACHINE_ROLE_IS_MISSING'|'STS_DISABLED_IN_REGION'|'SUBNET_HAS_INSUFFICIENT_IP_ADDRESSES'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SUBNET_ACTION'|'SUBNET_NOT_FOUND'|'IMAGE_NOT_FOUND'|'INVALID_SUBNET_CONFIGURATION'|'SECURITY_GROUPS_NOT_FOUND'|'IGW_NOT_ATTACHED'|'IAM_SERVICE_ROLE_MISSING_DESCRIBE_SECURITY_GROUPS_ACTION'|'DOMAIN_JOIN_ERROR_FILE_NOT_FOUND'|'DOMAIN_JOIN_ERROR_ACCESS_DENIED'|'DOMAIN_JOIN_ERROR_LOGON_FAILURE'|'DOMAIN_JOIN_ERROR_INVALID_PARAMETER'|'DOMAIN_JOIN_ERROR_MORE_DATA'|'DOMAIN_JOIN_ERROR_NO_SUCH_DOMAIN'|'DOMAIN_JOIN_ERROR_NOT_SUPPORTED'|'DOMAIN_JOIN_NERR_INVALID_WORKGROUP_NAME'|'DOMAIN_JOIN_NERR_WORKSTATION_NOT_STARTED'|'DOMAIN_JOIN_ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED'|'DOMAIN_JOIN_NERR_PASSWORD_EXPIRED'|'DOMAIN_JOIN_INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'EnableDefaultInternetAccess': True|False,
'DomainJoinInfo': {
'DirectoryName': 'string',
'OrganizationalUnitDistinguishedName': 'string'
},
'IdleDisconnectTimeoutInSeconds': 123,
'IamRoleArn': 'string'
}
}
:returns:
stream.standard.medium
stream.standard.large
stream.compute.large
stream.compute.xlarge
stream.compute.2xlarge
stream.compute.4xlarge
stream.compute.8xlarge
stream.memory.large
stream.memory.xlarge
stream.memory.2xlarge
stream.memory.4xlarge
stream.memory.8xlarge
stream.graphics-design.large
stream.graphics-design.xlarge
stream.graphics-design.2xlarge
stream.graphics-design.4xlarge
stream.graphics-desktop.2xlarge
stream.graphics-pro.4xlarge
stream.graphics-pro.8xlarge
stream.graphics-pro.16xlarge
"""
pass
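# Illustrative sketch: while a fleet is RUNNING only a few attributes may
# change (DisplayName, ComputeCapacity, ImageARN/ImageName and the disconnect
# timeouts, per the state rules above), so this helper restricts itself to
# scaling capacity. `client` and the fleet name are assumptions.
def _example_scale_running_fleet(client, fleet_name, desired_instances):
    response = client.update_fleet(
        Name=fleet_name,
        ComputeCapacity={'DesiredInstances': desired_instances},
    )
    return response['Fleet']['ComputeCapacityStatus']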
def update_image_permissions(Name=None, SharedAccountId=None, ImagePermissions=None):
"""
Adds or updates permissions for the specified private image.
See also: AWS API Documentation
Exceptions
:example: response = client.update_image_permissions(
Name='string',
SharedAccountId='string',
ImagePermissions={
'allowFleet': True|False,
'allowImageBuilder': True|False
}
)
:type Name: string
:param Name: [REQUIRED]\nThe name of the private image.\n
:type SharedAccountId: string
:param SharedAccountId: [REQUIRED]\nThe 12-digit identifier of the AWS account for which you want add or update image permissions.\n
:type ImagePermissions: dict
:param ImagePermissions: [REQUIRED]\nThe permissions for the image.\n\nallowFleet (boolean) --Indicates whether the image can be used for a fleet.\n\nallowImageBuilder (boolean) --Indicates whether the image can be used for an image builder.\n\n\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ResourceNotAvailableException
AppStream.Client.exceptions.LimitExceededException
:return: {}
:returns:
(dict) --
"""
pass
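# Illustrative sketch: share a private image with another AWS account,
# allowing it to be used for fleets but not for image builders. The 12-digit
# account id is a placeholder.
def _example_share_image(client, image_name, shared_account_id):
    client.update_image_permissions(
        Name=image_name,
        SharedAccountId=shared_account_id,
        ImagePermissions={'allowFleet': True, 'allowImageBuilder': False},
    )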
def update_stack(DisplayName=None, Description=None, Name=None, StorageConnectors=None, DeleteStorageConnectors=None, RedirectURL=None, FeedbackURL=None, AttributesToDelete=None, UserSettings=None, ApplicationSettings=None, AccessEndpoints=None, EmbedHostDomains=None):
"""
Updates the specified fields for the specified stack.
See also: AWS API Documentation
Exceptions
:example: response = client.update_stack(
DisplayName='string',
Description='string',
Name='string',
StorageConnectors=[
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
DeleteStorageConnectors=True|False,
RedirectURL='string',
FeedbackURL='string',
AttributesToDelete=[
'STORAGE_CONNECTORS'|'STORAGE_CONNECTOR_HOMEFOLDERS'|'STORAGE_CONNECTOR_GOOGLE_DRIVE'|'STORAGE_CONNECTOR_ONE_DRIVE'|'REDIRECT_URL'|'FEEDBACK_URL'|'THEME_NAME'|'USER_SETTINGS'|'EMBED_HOST_DOMAINS'|'IAM_ROLE_ARN'|'ACCESS_ENDPOINTS',
],
UserSettings=[
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
ApplicationSettings={
'Enabled': True|False,
'SettingsGroup': 'string'
},
AccessEndpoints=[
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
EmbedHostDomains=[
'string',
]
)
:type DisplayName: string
:param DisplayName: The stack name to display.
:type Description: string
:param Description: The description to display.
:type Name: string
:param Name: [REQUIRED]\nThe name of the stack.\n
:type StorageConnectors: list
:param StorageConnectors: The storage connectors to enable.\n\n(dict) --Describes a connector that enables persistent storage for users.\n\nConnectorType (string) -- [REQUIRED]The type of storage connector.\n\nResourceIdentifier (string) --The ARN of the storage connector.\n\nDomains (list) --The names of the domains for the account.\n\n(string) -- GSuite domain for GDrive integration.\n\n\n\n\n\n
:type DeleteStorageConnectors: boolean
:param DeleteStorageConnectors: Deletes the storage connectors currently enabled for the stack.
:type RedirectURL: string
:param RedirectURL: The URL that users are redirected to after their streaming session ends.
:type FeedbackURL: string
:param FeedbackURL: The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
:type AttributesToDelete: list
:param AttributesToDelete: The stack attributes to delete.\n\n(string) --\n\n
:type UserSettings: list
:param UserSettings: The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are enabled.\n\n(dict) --Describes an action and whether the action is enabled or disabled for users during their streaming sessions.\n\nAction (string) -- [REQUIRED]The action that is enabled or disabled.\n\nPermission (string) -- [REQUIRED]Indicates whether the action is enabled or disabled.\n\n\n\n\n
:type ApplicationSettings: dict
:param ApplicationSettings: The persistent application settings for users of a stack. When these settings are enabled, changes that users make to applications and Windows settings are automatically saved after each session and applied to the next session.\n\nEnabled (boolean) -- [REQUIRED]Enables or disables persistent application settings for users during their streaming sessions.\n\nSettingsGroup (string) --The path prefix for the S3 bucket where users' persistent application settings are stored. You can allow the same persistent application settings to be used across multiple stacks by specifying the same settings group for each stack.\n\n\n
:type AccessEndpoints: list
:param AccessEndpoints: The list of interface VPC endpoint (interface endpoint) objects. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.\n\n(dict) --Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.\n\nEndpointType (string) -- [REQUIRED]The type of interface endpoint.\n\nVpceId (string) --The identifier (ID) of the VPC in which the interface endpoint is used.\n\n\n\n\n
:type EmbedHostDomains: list
:param EmbedHostDomains: The domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.\n\n(string) -- Specifies a valid domain that can embed AppStream. Valid examples include: ['testorigin.tt--com', 'testingorigin.com.us', 'test.com.us'] Invalid examples include: ['test,com', '.com', 'h*llo.com', '']\n\n
:rtype: dict
Returns
Response Syntax
{
'Stack': {
'Arn': 'string',
'Name': 'string',
'Description': 'string',
'DisplayName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'StorageConnectors': [
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
'RedirectURL': 'string',
'FeedbackURL': 'string',
'StackErrors': [
{
'ErrorCode': 'STORAGE_CONNECTOR_ERROR'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'UserSettings': [
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
'ApplicationSettings': {
'Enabled': True|False,
'SettingsGroup': 'string',
'S3BucketName': 'string'
},
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
'EmbedHostDomains': [
'string',
]
}
}
Response Structure
(dict) --
Stack (dict) --
Information about the stack.
Arn (string) --
The ARN of the stack.
Name (string) --
The name of the stack.
Description (string) --
The description to display.
DisplayName (string) --
The stack name to display.
CreatedTime (datetime) --
The time the stack was created.
StorageConnectors (list) --
The storage connectors to enable.
(dict) --
Describes a connector that enables persistent storage for users.
ConnectorType (string) --
The type of storage connector.
ResourceIdentifier (string) --
The ARN of the storage connector.
Domains (list) --
The names of the domains for the account.
(string) -- GSuite domain for GDrive integration.
RedirectURL (string) --
The URL that users are redirected to after their streaming session ends.
FeedbackURL (string) --
The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
StackErrors (list) --
The errors for the stack.
(dict) --
Describes a stack error.
ErrorCode (string) --
The error code.
ErrorMessage (string) --
The error message.
UserSettings (list) --
The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are enabled.
(dict) --
Describes an action and whether the action is enabled or disabled for users during their streaming sessions.
Action (string) --
The action that is enabled or disabled.
Permission (string) --
Indicates whether the action is enabled or disabled.
ApplicationSettings (dict) --
The persistent application settings for users of the stack.
Enabled (boolean) --
Specifies whether persistent application settings are enabled for users during their streaming sessions.
SettingsGroup (string) --
The path prefix for the S3 bucket where users' persistent application settings are stored.
S3BucketName (string) --
The S3 bucket where users' persistent application settings are stored. When persistent application settings are enabled for the first time for an account in an AWS Region, an S3 bucket is created. The bucket is unique to the AWS account and the Region.
AccessEndpoints (list) --
The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.
(dict) --
Describes an interface VPC endpoint (interface endpoint) that lets you create a private connection between the virtual private cloud (VPC) that you specify and AppStream 2.0. When you specify an interface endpoint for a stack, users of the stack can connect to AppStream 2.0 only through that endpoint. When you specify an interface endpoint for an image builder, administrators can connect to the image builder only through that endpoint.
EndpointType (string) --
The type of interface endpoint.
VpceId (string) --
The identifier (ID) of the VPC in which the interface endpoint is used.
EmbedHostDomains (list) --
The domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.
(string) -- Specifies a valid domain that can embed AppStream. Valid examples include: ["testorigin.tt--com", "testingorigin.com.us", "test.com.us"] Invalid examples include: ["test,com", ".com", "h*llo.com", ""]
Exceptions
AppStream.Client.exceptions.ResourceNotFoundException
AppStream.Client.exceptions.ResourceInUseException
AppStream.Client.exceptions.InvalidRoleException
AppStream.Client.exceptions.InvalidParameterCombinationException
AppStream.Client.exceptions.LimitExceededException
AppStream.Client.exceptions.InvalidAccountStatusException
AppStream.Client.exceptions.IncompatibleImageException
AppStream.Client.exceptions.OperationNotPermittedException
AppStream.Client.exceptions.ConcurrentModificationException
:return: {
'Stack': {
'Arn': 'string',
'Name': 'string',
'Description': 'string',
'DisplayName': 'string',
'CreatedTime': datetime(2015, 1, 1),
'StorageConnectors': [
{
'ConnectorType': 'HOMEFOLDERS'|'GOOGLE_DRIVE'|'ONE_DRIVE',
'ResourceIdentifier': 'string',
'Domains': [
'string',
]
},
],
'RedirectURL': 'string',
'FeedbackURL': 'string',
'StackErrors': [
{
'ErrorCode': 'STORAGE_CONNECTOR_ERROR'|'INTERNAL_SERVICE_ERROR',
'ErrorMessage': 'string'
},
],
'UserSettings': [
{
'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE'|'CLIPBOARD_COPY_TO_LOCAL_DEVICE'|'FILE_UPLOAD'|'FILE_DOWNLOAD'|'PRINTING_TO_LOCAL_DEVICE',
'Permission': 'ENABLED'|'DISABLED'
},
],
'ApplicationSettings': {
'Enabled': True|False,
'SettingsGroup': 'string',
'S3BucketName': 'string'
},
'AccessEndpoints': [
{
'EndpointType': 'STREAMING',
'VpceId': 'string'
},
],
'EmbedHostDomains': [
'string',
]
}
}
:returns:
(string) -- GSuite domain for GDrive integration.
"""
pass
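# Illustrative sketch: disable clipboard transfer in both directions for one
# stack while leaving its other attributes untouched. Action and Permission
# values are taken from the enums shown above; the stack name is a placeholder.
def _example_lock_down_stack_clipboard(client, stack_name):
    response = client.update_stack(
        Name=stack_name,
        UserSettings=[
            {'Action': 'CLIPBOARD_COPY_FROM_LOCAL_DEVICE', 'Permission': 'DISABLED'},
            {'Action': 'CLIPBOARD_COPY_TO_LOCAL_DEVICE', 'Permission': 'DISABLED'},
        ],
    )
    return response['Stack']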
losses/__init__.py | DaseiNaN/TSE_VF | f31f8ba89383956ef72904d1a9bb68cee4b79b1a | ["MIT"]
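# Wildcard re-exports (comment added for clarity): importing this package makes
# every public name from the individual loss modules available directly under
# the `losses` namespace.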
from .dpcl_loss import *
from .danet_loss import *
from .pit_loss import *
typographeur/ligatures.py | Contexte/typographeur | f4220ef329245b375a65e486ab0b8a93afcd219a | ["MIT"] | stars: 14 (2018-06-15 to 2021-08-02) | issues: 21 | forks: 2
"""
Ligature word lists: French words written with the œ and æ ligatures.
"""
OE_CLASSIQUE = (
'Belœil',
'Lœvenbruck',
'Marcq-en-Barœul',
'Mons-en-Barœul',
'Nœux-les-Mines',
'Phœbé',
'Plœmeur',
'Stœhr',
'Vandœuvre',
'Vandœuvre-lès-Nancy',
'accroche-cœur',
'accroche-cœurs',
'accœurer',
'acœlomate',
'acœlomates',
'angio-œdème',
'angio-œdèmes',
'anti-œstrogène',
'anti-œstrogènes',
'antiœstrogène',
'antiœstrogènes',
'arrière-chœur',
'arrière-chœurs',
'arrête-bœuf',
'assa-fœtida',
'assa-fœtidas',
'avant-chœur',
'avant-chœurs',
'belle-sœur',
'belles-sœurs',
'bicœur',
'bicœurs',
'biocœnose',
'biocœnoses',
'bœuf',
'bœufs',
'cache-cœur',
"chef-d'œuvre",
"chefs-d'œuvre",
'chœur',
'chœurs',
'consœur',
'consœurs',
'contrecœur',
'contrecœurs',
'crève-cœur',
'cœlacanthe',
'cœlacanthes',
'cœlentéré',
'cœlentérés',
'cœliaque',
'cœliaques',
'cœliochirurgie',
'cœliochirurgies',
'cœlioscope',
'cœlioscopes',
'cœlioscopie',
'cœlioscopies',
'cœlioscopique',
'cœlioscopiques',
'cœlomate',
'cœlomates',
'cœlome',
'cœlomes',
'cœlomique',
'cœlomiques',
'cœlostat',
'cœlostats',
'cœnesthésie',
'cœnesthésies',
'cœnure',
'cœnures',
'cœnurose',
'cœnuroses',
'cœtera',
'cœur',
'cœur-de-pigeon',
'cœurs',
'cœurs-de-pigeon',
'cœursage',
'cœursages',
'cœurse',
'cœurses',
'demi-sœur',
'demi-sœurs',
'désœuvrement',
'désœuvrements',
'désœuvrer',
'désœuvrée',
'désœuvrées',
'entre-nœud',
'entre-nœuds',
'fœtale',
'fœtales',
'fœticide',
'fœticides',
'fœto-maternelle',
'fœto-maternelles',
'fœtologie',
'fœtologies',
'fœtopathie',
'fœtopathies',
'fœtoscopie',
'fœtoscopies',
'fœtoscopique',
'fœtoscopiques',
'fœtus',
'garde-bœufs',
'gastro-œsophagienne',
'gastro-œsophagiennes',
'haut-le-cœur',
"hors-d'œuvre",
'lymphœdème',
'lymphœdèmes',
"main-d'œuvre",
"mains-d'œuvre",
'manœuvrabilité',
'manœuvrabilités',
'manœuvrable',
'manœuvrables',
'manœuvre',
'manœuvrer',
'manœuvres',
'manœuvrière',
'manœuvrières',
'mire-œufs',
'monocœur',
'monocœurs',
'monœcie',
'monœcies',
'multicœur',
'multicœurs',
'myxœdème',
'myxœdèmes',
'myxœdémateuse',
'myxœdémateuses',
'mœurs',
'nœud',
'nœuds',
'phyto-œstrogène',
'phyto-œstrogènes',
'phytoœstrogène',
'phytoœstrogènes',
'phœniciculture',
'phœnicicultures',
'phœnix',
'pique-bœuf',
'pique-bœufs',
'pomœrium',
'pomœriums',
'préœdipienne',
'préœdipiennes',
'périœsophagienne',
'périœsophagiennes',
'pœcilandrie',
'pœcilandries',
'pœcile',
'pœciles',
'pœcilitique',
'pœcilitiques',
'pœcilogynie',
'pœcilogynies',
'pœcilotherme',
'pœcilothermes',
'pœcilothermie',
'pœcilothermies',
'quadricœur',
'quadricœurs',
'rai-de-cœur',
'rais-de-cœur',
'rancœur',
'rancœurs',
'sacré-cœur',
'sacré-cœurs',
'sans-cœur',
'sous-œuvre',
'sous-œuvres',
'stœchiométrie',
'stœchiométries',
'stœchiométrique',
'stœchiométriques',
'synœcisme',
'synœcismes',
'sœur',
'sœurette',
'sœurettes',
'sœurs',
"tape-à-l'œil",
"tire-l'œil",
"trompe-l'œil",
'vœu',
'vœux',
'écœurante',
'écœurantes',
'écœurement',
'écœurements',
'écœurer',
'œ',
'œconomicus',
'œcuménicité',
'œcuménicités',
'œcuménique',
'œcuméniques',
'œcuménisme',
'œcuménismes',
'œdicnème',
'œdicnèmes',
'œdipe',
'œdipes',
'œdipienne',
'œdipiennes',
'œdème',
'œdèmes',
'œdémateuse',
'œdémateuses',
'œil',
'œil-de-bœuf',
'œil-de-chat',
'œil-de-perdrix',
'œil-de-pie',
'œillade',
'œillades',
'œillard',
'œillards',
'œiller',
'œillet',
'œilleton',
'œilletonnage',
'œilletonnages',
'œilletonner',
'œilletons',
'œillets',
'œillette',
'œillettes',
'œillère',
'œillères',
'œils',
'œils-de-bœuf',
'œils-de-chat',
'œils-de-perdrix',
'œils-de-pie',
'œkoumène',
'œkoumènes',
'œnanthe',
'œnanthes',
'œnanthique',
'œnanthiques',
'œnilisme',
'œnilismes',
'œnolique',
'œnoliques',
'œnolisme',
'œnolismes',
'œnologie',
'œnologies',
'œnologique',
'œnologiques',
'œnologue',
'œnologues',
'œnomètre',
'œnomètres',
'œnométrie',
'œnométries',
'œnométrique',
'œnométriques',
'œnophile',
'œnophiles',
'œnotechnie',
'œnotechnies',
'œnotechnique',
'œnotechniques',
'œnothera',
'œnotheras',
'œnothèque',
'œnothèques',
'œnothère',
'œnothères',
'œnothéracée',
'œnothéracées',
'œrsted',
'œrsteds',
'œrstite',
'œrstites',
'œsophage',
'œsophagectomie',
'œsophagectomies',
'œsophages',
'œsophagienne',
'œsophagiennes',
'œsophagique',
'œsophagiques',
'œsophagisme',
'œsophagismes',
'œsophagite',
'œsophagites',
'œsophagomalacie',
'œsophagomalacies',
'œsophagoplastie',
'œsophagoplasties',
'œsophagoscope',
'œsophagoscopes',
'œsophagoscopie',
'œsophagoscopies',
'œsophagostomie',
'œsophagostomies',
'œsophagotomie',
'œsophagotomies',
'œstradiol',
'œstradiols',
'œstradiène',
'œstradiènes',
'œstrale',
'œstrales',
'œstre',
'œstres',
'œstriol',
'œstriols',
'œstrogène',
'œstrogènes',
'œstrogénique',
'œstrogéniques',
'œstrogénothérapie',
'œstrogénothérapies',
'œstromane',
'œstromanes',
'œstromanie',
'œstromanies',
'œstrone',
'œstrones',
'œstroprogestative',
'œstroprogestatives',
'œstrus',
'œuf',
'œufrier',
'œufriers',
'œufs',
'œuvre',
'œuvrer',
'œuvres',
'œuvrette',
'œuvrettes',
'œuvée',
'œuvées')
AE_CLASSIQUE = (
'Lætitia',
'althæa',
'althæas',
'chamærops',
'cæcale',
'cæcales',
'cæcotrophie',
'cæcotrophies',
'cæcum',
'cæcums',
'cænogenèse',
'cænogenèses',
'cæsine',
'cæsium',
'cæsiums',
'cætera',
'elæis',
'hypernovæ',
'iléo-cæcale',
'iléo-cæcales',
'melæna',
'melænas',
'mélæna',
'mélænas',
'novæ',
'nævi',
'nævo-carcinome',
'nævo-carcinomes',
'nævocarcinome',
'nævocarcinomes',
'nævus',
'personæ',
'præsidium',
'præsidiums',
'supernovæ',
'tænia',
'tænias',
'uræus',
'vitæ',
'æ',
'ægagropile',
'ægagropiles',
'ægipan',
'ægipans',
'ægosome',
'ægosomes',
'ægyrine',
'ægyrines',
'æpyornis',
'æquo',
'æschne',
'æschnes',
'æschnidé',
'æschnidés',
'æternam',
'æthuse',
'æthuses')
OE_MODERNE = (
'Belœil',
'Lœvenbruck',
'Marcq-en-Barœul',
'Mons-en-Barœul',
'Nœux-les-Mines',
'Phœbé',
'Plœmeur',
'Stœhr',
'Vandœuvre',
'Vandœuvre-lès-Nancy',
'accroche-cœur',
'accroche-cœurs',
'accœurer',
'acœlomate',
'acœlomates',
'angio-œdème',
'angio-œdèmes',
'antiœstrogène',
'antiœstrogènes',
'arrière-chœur',
'arrière-chœurs',
'arrête-bœuf',
'assa-fœtida',
'assa-fœtidas',
'avant-chœur',
'avant-chœurs',
'belle-sœur',
'belles-sœurs',
'bicœur',
'bicœurs',
'bœuf',
'bœufs',
'cache-cœur',
"chef-d'œuvre",
"chefs-d'œuvre",
'chœur',
'chœurs',
'consœur',
'consœurs',
'contrecœur',
'contrecœurs',
'crève-cœur',
'cœlacanthe',
'cœlacanthes',
'cœlentéré',
'cœlentérés',
'cœliaque',
'cœliaques',
'cœliochirurgie',
'cœliochirurgies',
'cœlioscope',
'cœlioscopes',
'cœlioscopie',
'cœlioscopies',
'cœlioscopique',
'cœlioscopiques',
'cœlomate',
'cœlomates',
'cœlome',
'cœlomes',
'cœlomique',
'cœlomiques',
'cœlostat',
'cœlostats',
'cœur',
'cœur-de-pigeon',
'cœurs',
'cœurs-de-pigeon',
'cœursage',
'cœursages',
'cœurse',
'cœurses',
'demi-sœur',
'demi-sœurs',
'désœuvrement',
'désœuvrements',
'désœuvrer',
'désœuvrée',
'désœuvrées',
'entre-nœud',
'entre-nœuds',
'fœtale',
'fœtales',
'fœticide',
'fœticides',
'fœto-maternelle',
'fœto-maternelles',
'fœtologie',
'fœtologies',
'fœtopathie',
'fœtopathies',
'fœtoscopie',
'fœtoscopies',
'fœtoscopique',
'fœtoscopiques',
'fœtus',
'garde-bœufs',
'gastro-œsophagienne',
'gastro-œsophagiennes',
'haut-le-cœur',
"hors-d'œuvre",
'lymphœdème',
'lymphœdèmes',
"main-d'œuvre",
"mains-d'œuvre",
'manœuvrabilité',
'manœuvrabilités',
'manœuvrable',
'manœuvrables',
'manœuvre',
'manœuvrer',
'manœuvres',
'manœuvrière',
'manœuvrières',
'mire-œufs',
'monocœur',
'monocœurs',
'monœcie',
'monœcies',
'multicœur',
'multicœurs',
'myxœdème',
'myxœdèmes',
'myxœdémateuse',
'myxœdémateuses',
'mœurs',
'nœud',
'nœuds',
'phyto-œstrogène',
'phyto-œstrogènes',
'phytoœstrogène',
'phytoœstrogènes',
'phœniciculture',
'phœnicicultures',
'phœnix',
'pique-bœuf',
'pique-bœufs',
'préœdipienne',
'préœdipiennes',
'périœsophagienne',
'périœsophagiennes',
'pœcilandrie',
'pœcilandries',
'pœcile',
'pœciles',
'pœcilitique',
'pœcilitiques',
'pœcilogynie',
'pœcilogynies',
'pœcilotherme',
'pœcilothermes',
'pœcilothermie',
'pœcilothermies',
'quadricœur',
'quadricœurs',
'rai-de-cœur',
'rais-de-cœur',
'rancœur',
'rancœurs',
'sacré-cœur',
'sacré-cœurs',
'sans-cœur',
'sous-œuvre',
'sous-œuvres',
'stœchiométrie',
'stœchiométries',
'stœchiométrique',
'stœchiométriques',
'synœcisme',
'synœcismes',
'sœur',
'sœurette',
'sœurettes',
'sœurs',
"tape-à-l'œil",
"tire-l'œil",
"trompe-l'œil",
'vœu',
'vœux',
'écœurante',
'écœurantes',
'écœurement',
'écœurements',
'écœurer',
'œ',
'œconomicus',
'œcuménicité',
'œcuménicités',
'œcuménique',
'œcuméniques',
'œcuménisme',
'œcuménismes',
'œdicnème',
'œdicnèmes',
'œdipe',
'œdipes',
'œdipienne',
'œdipiennes',
'œdème',
'œdèmes',
'œdémateuse',
'œdémateuses',
'œil',
'œil-de-bœuf',
'œil-de-chat',
'œil-de-perdrix',
'œil-de-pie',
'œillade',
'œillades',
'œillard',
'œillards',
'œiller',
'œillet',
'œilleton',
'œilletonnage',
'œilletonnages',
'œilletonner',
'œilletons',
'œillets',
'œillette',
'œillettes',
'œillère',
'œillères',
'œils',
'œils-de-bœuf',
'œils-de-chat',
'œils-de-perdrix',
'œils-de-pie',
'œnanthe',
'œnanthes',
'œnanthique',
'œnanthiques',
'œnilisme',
'œnilismes',
'œnolique',
'œnoliques',
'œnolisme',
'œnolismes',
'œnologie',
'œnologies',
'œnologique',
'œnologiques',
'œnologue',
'œnologues',
'œnomètre',
'œnomètres',
'œnométrie',
'œnométries',
'œnométrique',
'œnométriques',
'œnophile',
'œnophiles',
'œnotechnie',
'œnotechnies',
'œnotechnique',
'œnotechniques',
'œnothera',
'œnotheras',
'œnothèque',
'œnothèques',
'œnothère',
'œnothères',
'œnothéracée',
'œnothéracées',
'œrsted',
'œrsteds',
'œrstite',
'œrstites',
'œsophage',
'œsophagectomie',
'œsophagectomies',
'œsophages',
'œsophagienne',
'œsophagiennes',
'œsophagique',
'œsophagiques',
'œsophagisme',
'œsophagismes',
'œsophagite',
'œsophagites',
'œsophagomalacie',
'œsophagomalacies',
'œsophagoplastie',
'œsophagoplasties',
'œsophagoscope',
'œsophagoscopes',
'œsophagoscopie',
'œsophagoscopies',
'œsophagostomie',
'œsophagostomies',
'œsophagotomie',
'œsophagotomies',
'œstrale',
'œstrales',
'œstre',
'œstres',
'œstrogène',
'œstrogènes',
'œstrogénique',
'œstrogéniques',
'œstrogénothérapie',
'œstrogénothérapies',
'œstromane',
'œstromanes',
'œstromanie',
'œstromanies',
'œstrus',
'œuf',
'œufrier',
'œufriers',
'œufs',
'œuvre',
'œuvrer',
'œuvres',
'œuvrette',
'œuvrettes',
'œuvée',
'œuvées')
AE_MODERNE = (
'Lætitia',
'chamærops',
'cæcale',
'cæcales',
'cæcotrophie',
'cæcotrophies',
'cæcum',
'cæcums',
'cænogenèse',
'cænogenèses',
'cæsine',
'cætera',
'hypernovæ',
'iléo-cæcale',
'iléo-cæcales',
'nævocarcinome',
'nævocarcinomes',
'nævus',
'personæ',
'tænia',
'tænias',
'uræus',
'vitæ',
'æ',
'ægagropile',
'ægagropiles',
'ægipan',
'ægipans',
'ægosome',
'ægosomes',
'ægyrine',
'ægyrines',
'æpyornis',
'æquo',
'æschne',
'æschnes',
'æschnidé',
'æschnidés',
'æternam')
OE_REFORME1990 = (
'Belœil',
'Lœvenbruck',
'Marcq-en-Barœul',
'Mons-en-Barœul',
'Nœux-les-Mines',
'Phœbé',
'Plœmeur',
'Stœhr',
'Vandœuvre',
'Vandœuvre-lès-Nancy',
'accroche-cœur',
'accroche-cœurs',
'accœurer',
'acœlomate',
'acœlomates',
'angio-œdème',
'angio-œdèmes',
'angstrœm',
'angstrœms',
'arrière-chœur',
'arrière-chœurs',
'arrête-bœuf',
'arrête-bœufs',
'avant-chœur',
'avant-chœurs',
'belle-sœur',
'belles-sœurs',
'bicœur',
'bicœurs',
'bœuf',
'bœufs',
'cache-cœur',
'cache-cœurs',
"chef-d'œuvre",
"chefs-d'œuvre",
'chœur',
'chœurs',
'consœur',
'consœurs',
'contrecœur',
'contrecœurs',
'crève-cœur',
'crève-cœurs',
'cœlacanthe',
'cœlacanthes',
'cœlentéré',
'cœlentérés',
'cœlomate',
'cœlomates',
'cœlome',
'cœlomes',
'cœlomique',
'cœlomiques',
'cœlostat',
'cœlostats',
'cœur',
'cœur-de-pigeon',
'cœurs',
'cœurs-de-pigeon',
'cœursage',
'cœursages',
'cœurse',
'cœurses',
'demi-sœur',
'demi-sœurs',
'désœuvrement',
'désœuvrements',
'désœuvrer',
'désœuvrée',
'désœuvrées',
'entrenœud',
'entrenœuds',
'fœtale',
'fœtales',
'fœticide',
'fœticides',
'fœto-maternelle',
'fœto-maternelles',
'fœtologie',
'fœtologies',
'fœtopathie',
'fœtopathies',
'fœtoscopie',
'fœtoscopies',
'fœtoscopique',
'fœtoscopiques',
'fœtus',
'garde-bœuf',
'garde-bœufs',
'gastroœsophagienne',
'gastroœsophagiennes',
'haut-le-cœur',
"hors-d'œuvre",
'lymphœdème',
'lymphœdèmes',
"main-d'œuvre",
"mains-d'œuvre",
'manœuvrabilité',
'manœuvrabilités',
'manœuvrable',
'manœuvrables',
'manœuvre',
'manœuvrer',
'manœuvres',
'manœuvrière',
'manœuvrières',
'mire-œuf',
'mire-œufs',
'monocœur',
'monocœurs',
'monœcie',
'monœcies',
'multicœur',
'multicœurs',
'myxœdème',
'myxœdèmes',
'myxœdémateuse',
'myxœdémateuses',
'mœurs',
'nœud',
'nœuds',
'phyto-œstrogène',
'phyto-œstrogènes',
'phytoœstrogène',
'phytoœstrogènes',
'phœniciculture',
'phœnicicultures',
'pique-bœuf',
'pique-bœufs',
'préœdipienne',
'préœdipiennes',
'périœsophagienne',
'périœsophagiennes',
'pœcilandrie',
'pœcilandries',
'pœcilitique',
'pœcilitiques',
'pœcilogynie',
'pœcilogynies',
'pœcilotherme',
'pœcilothermes',
'pœcilothermie',
'pœcilothermies',
'quadricœur',
'quadricœurs',
'rai-de-cœur',
'rais-de-cœur',
'rancœur',
'rancœurs',
'sacré-cœur',
'sacré-cœurs',
'sans-cœur',
'sans-cœurs',
'sous-œuvre',
'sous-œuvres',
'stœchiométrie',
'stœchiométries',
'stœchiométrique',
'stœchiométriques',
'synœcisme',
'synœcismes',
'sœur',
'sœurette',
'sœurettes',
'sœurs',
"tape-à-l'œil",
"tire-l'œil",
"trompe-l'œil",
'vœu',
'vœux',
'écœurante',
'écœurantes',
'écœurement',
'écœurements',
'écœurer',
'œ',
'œconomicus',
'œcuménicité',
'œcuménicités',
'œcuménique',
'œcuméniques',
'œcuménisme',
'œcuménismes',
'œdicnème',
'œdicnèmes',
'œdipe',
'œdipes',
'œdipienne',
'œdipiennes',
'œdème',
'œdèmes',
'œdémateuse',
'œdémateuses',
'œil',
'œil-de-bœuf',
'œil-de-chat',
'œil-de-perdrix',
'œil-de-pie',
'œillade',
'œillades',
'œillard',
'œillards',
'œiller',
'œillet',
'œilleton',
'œilletonnage',
'œilletonnages',
'œilletonner',
'œilletons',
'œillets',
'œillette',
'œillettes',
'œillère',
'œillères',
'œils',
'œils-de-bœuf',
'œils-de-chat',
'œils-de-perdrix',
'œils-de-pie',
'œnanthe',
'œnanthes',
'œnanthique',
'œnanthiques',
'œnilisme',
'œnilismes',
'œnolique',
'œnoliques',
'œnolisme',
'œnolismes',
'œnologie',
'œnologies',
'œnologique',
'œnologiques',
'œnologue',
'œnologues',
'œnomètre',
'œnomètres',
'œnométrie',
'œnométries',
'œnométrique',
'œnométriques',
'œnophile',
'œnophiles',
'œnotechnie',
'œnotechnies',
'œnotechnique',
'œnotechniques',
'œnothèque',
'œnothèques',
'œnothère',
'œnothères',
'œnothéra',
'œnothéracée',
'œnothéracées',
'œnothéras',
'œrsted',
'œrsteds',
'œrstite',
'œrstites',
'œsophage',
'œsophagectomie',
'œsophagectomies',
'œsophages',
'œsophagienne',
'œsophagiennes',
'œsophagique',
'œsophagiques',
'œsophagisme',
'œsophagismes',
'œsophagite',
'œsophagites',
'œsophagomalacie',
'œsophagomalacies',
'œsophagoplastie',
'œsophagoplasties',
'œsophagoscope',
'œsophagoscopes',
'œsophagoscopie',
'œsophagoscopies',
'œsophagostomie',
'œsophagostomies',
'œsophagotomie',
'œsophagotomies',
'œstrale',
'œstrales',
'œstre',
'œstres',
'œstromane',
'œstromanes',
'œstromanie',
'œstromanies',
'œstrus',
'œuf',
'œufrier',
'œufriers',
'œufs',
'œuvre',
'œuvrer',
'œuvres',
'œuvrette',
'œuvrettes',
'œuvée',
'œuvées')
AE_REFORME1990 = (
'Lætitia',
'cæcale',
'cæcales',
'cæcotrophie',
'cæcotrophies',
'cæcum',
'cæcums',
'cænogenèse',
'cænogenèses',
'cæsine',
'exæquo',
'exæquos',
'iléocæcale',
'iléocæcales',
'personæ',
'tænia',
'tænias',
'uræus',
'vitæ',
'æ',
'ægagropile',
'ægagropiles',
'ægipan',
'ægipans',
'ægyrine',
'ægyrines',
'æquo',
'æschne',
'æschnes',
'æschnidé',
'æschnidés',
'æternam')
OE_TOUTESVARIANTES = (
'Belœil',
'Lœvenbruck',
'Marcq-en-Barœul',
'Mons-en-Barœul',
'Nœux-les-Mines',
'Phœbé',
'Plœmeur',
'Stœhr',
'Vandœuvre',
'Vandœuvre-lès-Nancy',
'accroche-cœur',
'accroche-cœurs',
'accœurer',
'acœlomate',
'acœlomates',
'angio-œdème',
'angio-œdèmes',
'angstrœm',
'angstrœms',
'anti-œstrogène',
'anti-œstrogènes',
'antiœstrogène',
'antiœstrogènes',
'arrière-chœur',
'arrière-chœurs',
'arrête-bœuf',
'arrête-bœufs',
'assa-fœtida',
'assa-fœtidas',
'avant-chœur',
'avant-chœurs',
'belle-sœur',
'belles-sœurs',
'bicœur',
'bicœurs',
'biocœnose',
'biocœnoses',
'bœuf',
'bœufs',
'cache-cœur',
'cache-cœurs',
"chef-d'œuvre",
"chefs-d'œuvre",
'chœur',
'chœurs',
'consœur',
'consœurs',
'contrecœur',
'contrecœurs',
'crève-cœur',
'crève-cœurs',
'cœlacanthe',
'cœlacanthes',
'cœlentéré',
'cœlentérés',
'cœliaque',
'cœliaques',
'cœliochirurgie',
'cœliochirurgies',
'cœlioscope',
'cœlioscopes',
'cœlioscopie',
'cœlioscopies',
'cœlioscopique',
'cœlioscopiques',
'cœlomate',
'cœlomates',
'cœlome',
'cœlomes',
'cœlomique',
'cœlomiques',
'cœlostat',
'cœlostats',
'cœnesthésie',
'cœnesthésies',
'cœnure',
'cœnures',
'cœnurose',
'cœnuroses',
'cœtera',
'cœur',
'cœur-de-pigeon',
'cœurs',
'cœurs-de-pigeon',
'cœursage',
'cœursages',
'cœurse',
'cœurses',
'demi-sœur',
'demi-sœurs',
'désœuvrement',
'désœuvrements',
'désœuvrer',
'désœuvrée',
'désœuvrées',
'entre-nœud',
'entre-nœuds',
'entrenœud',
'entrenœuds',
'fœtale',
'fœtales',
'fœticide',
'fœticides',
'fœto-maternelle',
'fœto-maternelles',
'fœtologie',
'fœtologies',
'fœtopathie',
'fœtopathies',
'fœtoscopie',
'fœtoscopies',
'fœtoscopique',
'fœtoscopiques',
'fœtus',
'garde-bœuf',
'garde-bœufs',
'gastro-œsophagienne',
'gastro-œsophagiennes',
'gastroœsophagienne',
'gastroœsophagiennes',
'haut-le-cœur',
"hors-d'œuvre",
'lymphœdème',
'lymphœdèmes',
"main-d'œuvre",
"mains-d'œuvre",
'manœuvrabilité',
'manœuvrabilités',
'manœuvrable',
'manœuvrables',
'manœuvre',
'manœuvrer',
'manœuvres',
'manœuvrière',
'manœuvrières',
'mire-œuf',
'mire-œufs',
'monocœur',
'monocœurs',
'monœcie',
'monœcies',
'multicœur',
'multicœurs',
'myxœdème',
'myxœdèmes',
'myxœdémateuse',
'myxœdémateuses',
'mœurs',
'nœud',
'nœuds',
'phyto-œstrogène',
'phyto-œstrogènes',
'phytoœstrogène',
'phytoœstrogènes',
'phœniciculture',
'phœnicicultures',
'phœnix',
'pique-bœuf',
'pique-bœufs',
'pomœrium',
'pomœriums',
'préœdipienne',
'préœdipiennes',
'périœsophagienne',
'périœsophagiennes',
'pœcilandrie',
'pœcilandries',
'pœcile',
'pœciles',
'pœcilitique',
'pœcilitiques',
'pœcilogynie',
'pœcilogynies',
'pœcilotherme',
'pœcilothermes',
'pœcilothermie',
'pœcilothermies',
'quadricœur',
'quadricœurs',
'rai-de-cœur',
'rais-de-cœur',
'rancœur',
'rancœurs',
'sacré-cœur',
'sacré-cœurs',
'sans-cœur',
'sans-cœurs',
'sous-œuvre',
'sous-œuvres',
'stœchiométrie',
'stœchiométries',
'stœchiométrique',
'stœchiométriques',
'synœcisme',
'synœcismes',
'sœur',
'sœurette',
'sœurettes',
'sœurs',
"tape-à-l'œil",
"tire-l'œil",
"trompe-l'œil",
'vœu',
'vœux',
'écœurante',
'écœurantes',
'écœurement',
'écœurements',
'écœurer',
'œ',
'œconomicus',
'œcuménicité',
'œcuménicités',
'œcuménique',
'œcuméniques',
'œcuménisme',
'œcuménismes',
'œdicnème',
'œdicnèmes',
'œdipe',
'œdipes',
'œdipienne',
'œdipiennes',
'œdème',
'œdèmes',
'œdémateuse',
'œdémateuses',
'œil',
'œil-de-bœuf',
'œil-de-chat',
'œil-de-perdrix',
'œil-de-pie',
'œillade',
'œillades',
'œillard',
'œillards',
'œiller',
'œillet',
'œilleton',
'œilletonnage',
'œilletonnages',
'œilletonner',
'œilletons',
'œillets',
'œillette',
'œillettes',
'œillère',
'œillères',
'œils',
'œils-de-bœuf',
'œils-de-chat',
'œils-de-perdrix',
'œils-de-pie',
'œkoumène',
'œkoumènes',
'œnanthe',
'œnanthes',
'œnanthique',
'œnanthiques',
'œnilisme',
'œnilismes',
'œnolique',
'œnoliques',
'œnolisme',
'œnolismes',
'œnologie',
'œnologies',
'œnologique',
'œnologiques',
'œnologue',
'œnologues',
'œnomètre',
'œnomètres',
'œnométrie',
'œnométries',
'œnométrique',
'œnométriques',
'œnophile',
'œnophiles',
'œnotechnie',
'œnotechnies',
'œnotechnique',
'œnotechniques',
'œnothera',
'œnotheras',
'œnothèque',
'œnothèques',
'œnothère',
'œnothères',
'œnothéra',
'œnothéracée',
'œnothéracées',
'œnothéras',
'œrsted',
'œrsteds',
'œrstite',
'œrstites',
'œsophage',
'œsophagectomie',
'œsophagectomies',
'œsophages',
'œsophagienne',
'œsophagiennes',
'œsophagique',
'œsophagiques',
'œsophagisme',
'œsophagismes',
'œsophagite',
'œsophagites',
'œsophagomalacie',
'œsophagomalacies',
'œsophagoplastie',
'œsophagoplasties',
'œsophagoscope',
'œsophagoscopes',
'œsophagoscopie',
'œsophagoscopies',
'œsophagostomie',
'œsophagostomies',
'œsophagotomie',
'œsophagotomies',
'œstradiol',
'œstradiols',
'œstradiène',
'œstradiènes',
'œstrale',
'œstrales',
'œstre',
'œstres',
'œstriol',
'œstriols',
'œstrogène',
'œstrogènes',
'œstrogénique',
'œstrogéniques',
'œstrogénothérapie',
'œstrogénothérapies',
'œstromane',
'œstromanes',
'œstromanie',
'œstromanies',
'œstrone',
'œstrones',
'œstroprogestative',
'œstroprogestatives',
'œstrus',
'œuf',
'œufrier',
'œufriers',
'œufs',
'œuvre',
'œuvrer',
'œuvres',
'œuvrette',
'œuvrettes',
'œuvée',
'œuvées')
AE_TOUTESVARIANTES = (
'Lætitia',
'althæa',
'althæas',
'chamærops',
'cæcale',
'cæcales',
'cæcotrophie',
'cæcotrophies',
'cæcum',
'cæcums',
'cænogenèse',
'cænogenèses',
'cæsine',
'cæsium',
'cæsiums',
'cætera',
'elæis',
'exæquo',
'exæquos',
'hypernovæ',
'iléo-cæcale',
'iléo-cæcales',
'iléocæcale',
'iléocæcales',
'melæna',
'melænas',
'mélæna',
'mélænas',
'novæ',
'nævi',
'nævo-carcinome',
'nævo-carcinomes',
'nævocarcinome',
'nævocarcinomes',
'nævus',
'personæ',
'præsidium',
'præsidiums',
'supernovæ',
'tænia',
'tænias',
'uræus',
'vitæ',
'æ',
'ægagropile',
'ægagropiles',
'ægipan',
'ægipans',
'ægosome',
'ægosomes',
'ægyrine',
'ægyrines',
'æpyornis',
'æquo',
'æschne',
'æschnes',
'æschnidé',
'æschnidés',
'æternam',
'æthuse',
'æthuses')
ligature_dictionaries = {
"classique": {"œ": OE_CLASSIQUE, "æ": AE_CLASSIQUE},
"moderne": {"œ": OE_MODERNE, "æ": AE_MODERNE},
"reforme1990": {"œ": OE_REFORME1990, "æ": AE_REFORME1990},
"toutesvariantes": {"œ": OE_TOUTESVARIANTES, "æ": AE_TOUTESVARIANTES},
}
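# A minimal consumer sketch (mine, not part of the module): rewrite plain
# digraphs back to ligatures for one spelling convention. The helper name
# and the word-boundary strategy are assumptions, not project API.
import re

def apply_ligatures(text, convention="moderne"):
    for ligature, words in ligature_dictionaries[convention].items():
        digraph = "oe" if ligature == "œ" else "ae"
        for word in words:
            plain = word.replace(ligature, digraph)
            text = re.sub(r"\b%s\b" % re.escape(plain), word, text)
    return text

# apply_ligatures("Un boeuf et sa soeur") -> 'Un bœuf et sa sœur'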
| 17.712614
| 74
| 0.537662
| 1,918
| 27,242
| 7.627737
| 0.216371
| 0.008202
| 0.005742
| 0.006288
| 0.971907
| 0.965755
| 0.963226
| 0.96015
| 0.956254
| 0.951196
| 0
| 0.00102
| 0.280009
| 27,242
| 1,537
| 75
| 17.724138
| 0.744876
| 0.000697
| 0
| 0.990814
| 0
| 0
| 0.542054
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42d96902d0671af4ff2c0a070acab9649eeb922e
| 85
|
py
|
Python
|
tfds_aihub/k_fashion_image/__init__.py
|
jeongukjae/tfds-aihub
|
8e583337a97ee93ba6924f792880ad446bb256ec
|
[
"Apache-2.0"
] | null | null | null |
tfds_aihub/k_fashion_image/__init__.py
|
jeongukjae/tfds-aihub
|
8e583337a97ee93ba6924f792880ad446bb256ec
|
[
"Apache-2.0"
] | null | null | null |
tfds_aihub/k_fashion_image/__init__.py
|
jeongukjae/tfds-aihub
|
8e583337a97ee93ba6924f792880ad446bb256ec
|
[
"Apache-2.0"
] | null | null | null |
"""k_fashion_image dataset."""
from tfds_aihub.k_fashion_image import KFashionImage
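# Hedged usage sketch (mine): tfds derives the registry name from the
# CamelCase builder class, so KFashionImage would normally be loadable as
# 'k_fashion_image' once this package has been imported and the data built.
import tensorflow_datasets as tfds
import tfds_aihub.k_fashion_image  # noqa: F401  (importing registers the builder)

ds = tfds.load('k_fashion_image')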
| 21.25
| 52
| 0.823529
| 12
| 85
| 5.416667
| 0.75
| 0.246154
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082353
| 85
| 3
| 53
| 28.333333
| 0.833333
| 0.282353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
42f605165b66820a7c33c1e36ae8fc84ce9a0ab5
| 113
|
py
|
Python
|
chapter5/5_17Writing Bytes to a Text File/5_17.py
|
atigerboy/PythonCookBook
|
e9238c7676063b5077a7645707ecc51052063d8d
|
[
"MIT"
] | null | null | null |
chapter5/5_17Writing Bytes to a Text File/5_17.py
|
atigerboy/PythonCookBook
|
e9238c7676063b5077a7645707ecc51052063d8d
|
[
"MIT"
] | null | null | null |
chapter5/5_17Writing Bytes to a Text File/5_17.py
|
atigerboy/PythonCookBook
|
e9238c7676063b5077a7645707ecc51052063d8d
|
[
"MIT"
] | null | null | null |
import sys
# sys.stdout.write(b'Hello\n')  # would raise TypeError: a text stream expects str, not bytes
sys.stdout.buffer.write(b'Hello\n')
print('Jalape\u00f1o')
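# Companion sketch (mine, in the recipe's spirit): the inverse trick is to
# wrap an already-open binary stream so it accepts str instead of bytes.
import io

with open('sample.bin', 'wb') as f:
    text_view = io.TextIOWrapper(f, encoding='utf-8')
    text_view.write('Jalape\u00f1o\n')  # str in, UTF-8 bytes out
    text_view.flush()
    text_view.detach()  # hand the raw stream back before the with-block closes it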
| 28.25
| 43
| 0.743363
| 21
| 113
| 4
| 0.666667
| 0.214286
| 0.261905
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028037
| 0.053097
| 113
| 4
| 44
| 28.25
| 0.757009
| 0.362832
| 0
| 0
| 0
| 0
| 0.28169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6e078372ce6e00ce3b828a85404f835ac068cc02
| 7,429
|
py
|
Python
|
u24_lymphocyte/third_party/treeano/sandbox/nodes/input_scaling.py
|
ALSM-PhD/quip_classification
|
7347bfaa5cf11ae2d7a528fbcc43322a12c795d3
|
[
"BSD-3-Clause"
] | 45
|
2015-04-26T04:45:51.000Z
|
2022-01-24T15:03:55.000Z
|
u24_lymphocyte/third_party/treeano/sandbox/nodes/input_scaling.py
|
ALSM-PhD/quip_classification
|
7347bfaa5cf11ae2d7a528fbcc43322a12c795d3
|
[
"BSD-3-Clause"
] | 8
|
2018-07-20T20:54:51.000Z
|
2020-06-12T05:36:04.000Z
|
u24_lymphocyte/third_party/treeano/sandbox/nodes/input_scaling.py
|
ALSM-PhD/quip_classification
|
7347bfaa5cf11ae2d7a528fbcc43322a12c795d3
|
[
"BSD-3-Clause"
] | 22
|
2018-05-21T23:57:20.000Z
|
2022-02-21T00:48:32.000Z
|
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
import treeano.sandbox.utils
@treeano.register_node("clip_scaling")
class ClipScalingNode(treeano.NodeImpl):
hyperparameter_names = ("learnable",
"mins",
"maxs")
def compute_output(self, network, in_vw):
learnable = network.find_hyperparameter(["learnable"], False)
mins = network.find_hyperparameter(["mins"])
maxs = network.find_hyperparameter(["maxs"])
assert mins.ndim == maxs.ndim == 1
assert mins.shape == maxs.shape
mins = treeano.utils.as_fX(mins)
maxs = treeano.utils.as_fX(maxs)
num_scales = mins.shape[0]
if learnable:
mins_var = network.create_vw(
"mins",
shape=mins.shape,
is_shared=True,
tags={"parameter"},
default_inits=[treeano.inits.ConstantInit(mins)],
).variable
maxs_var = network.create_vw(
"maxs",
shape=maxs.shape,
is_shared=True,
tags={"parameter"},
default_inits=[treeano.inits.ConstantInit(maxs)],
).variable
else:
if treeano.utils.is_variable(mins):
mins_var = mins
else:
mins_var = T.constant(mins)
if treeano.utils.is_variable(maxs):
maxs_var = maxs
else:
maxs_var = T.constant(maxs)
in_pattern = list(range(in_vw.ndim))
# insert after channel dim
in_pattern.insert(2, "x")
param_pattern = ["x"] * in_vw.ndim
param_pattern.insert(2, 0)
in_b = in_vw.variable.dimshuffle(*in_pattern)
mins_b = mins_var.dimshuffle(*param_pattern)
maxs_b = maxs_var.dimshuffle(*param_pattern)
range_b = maxs_b - mins_b
# TODO constrain range to be > 0?
clipped = T.clip(in_b - mins_b, 0, range_b)
scaled = clipped / range_b
# reshape newly created dim into dim 1
out_ss = list(in_vw.symbolic_shape())
out_ss[1] *= num_scales
out_var = scaled.reshape(tuple(out_ss))
out_shape = list(in_vw.shape)
if out_shape[1] is not None:
out_shape[1] *= num_scales
out_shape = tuple(out_shape)
network.create_vw(
"default",
variable=out_var,
shape=out_shape,
            tags={}  # note: the later scaling nodes tag this vw {"output"}
)
@treeano.register_node("tanh_scaling")
class TanhScalingNode(treeano.NodeImpl):
hyperparameter_names = ("learnable",
"means",
"scales")
def compute_output(self, network, in_vw):
learnable = network.find_hyperparameter(["learnable"], False)
means = network.find_hyperparameter(["means"])
scales = network.find_hyperparameter(["scales"])
assert means.ndim == scales.ndim == 1
assert means.shape == scales.shape
means = treeano.utils.as_fX(means)
scales = treeano.utils.as_fX(scales)
num_scales = means.shape[0]
if learnable:
means_var = network.create_vw(
"means",
shape=means.shape,
is_shared=True,
tags={"parameter"},
default_inits=[treeano.inits.ConstantInit(means)],
).variable
scales_var = network.create_vw(
"scales",
shape=scales.shape,
is_shared=True,
tags={"parameter"},
default_inits=[treeano.inits.ConstantInit(scales)],
).variable
else:
if treeano.utils.is_variable(means):
means_var = means
else:
means_var = T.constant(means)
if treeano.utils.is_variable(scales):
scales_var = scales
else:
scales_var = T.constant(scales)
in_pattern = list(range(in_vw.ndim))
# insert after channel dim
in_pattern.insert(2, "x")
param_pattern = ["x"] * in_vw.ndim
param_pattern.insert(2, 0)
in_b = in_vw.variable.dimshuffle(*in_pattern)
means_b = means_var.dimshuffle(*param_pattern)
scales_b = scales_var.dimshuffle(*param_pattern)
# TODO constrain scales to be > 0?
scaled = T.tanh((in_b - means_b) / scales_b)
# reshape newly created dim into dim 1
out_ss = list(in_vw.symbolic_shape())
out_ss[1] *= num_scales
out_var = scaled.reshape(tuple(out_ss))
out_shape = list(in_vw.shape)
if out_shape[1] is not None:
out_shape[1] *= num_scales
out_shape = tuple(out_shape)
network.create_vw(
"default",
variable=out_var,
shape=out_shape,
tags={"output"},
)
@treeano.register_node("rbf_scaling")
class RBFScalingNode(treeano.NodeImpl):
hyperparameter_names = ("learnable",
"means",
"scales")
def compute_output(self, network, in_vw):
learnable = network.find_hyperparameter(["learnable"], False)
means = network.find_hyperparameter(["means"])
scales = network.find_hyperparameter(["scales"])
assert means.ndim == scales.ndim == 1
assert means.shape == scales.shape
means = treeano.utils.as_fX(means)
scales = treeano.utils.as_fX(scales)
num_scales = means.shape[0]
if learnable:
means_var = network.create_vw(
"means",
shape=means.shape,
is_shared=True,
tags={"parameter"},
default_inits=[treeano.inits.ConstantInit(means)],
).variable
scales_var = network.create_vw(
"scales",
shape=scales.shape,
is_shared=True,
tags={"parameter"},
default_inits=[treeano.inits.ConstantInit(scales)],
).variable
else:
if treeano.utils.is_variable(means):
means_var = means
else:
means_var = T.constant(means)
if treeano.utils.is_variable(scales):
scales_var = scales
else:
scales_var = T.constant(scales)
in_pattern = list(range(in_vw.ndim))
# insert after channel dim
in_pattern.insert(2, "x")
param_pattern = ["x"] * in_vw.ndim
param_pattern.insert(2, 0)
in_b = in_vw.variable.dimshuffle(*in_pattern)
means_b = means_var.dimshuffle(*param_pattern)
scales_b = scales_var.dimshuffle(*param_pattern)
# TODO constrain scales to be > 0?
scaled = T.exp(-T.sqr(in_b - means_b) / scales_b)
# reshape newly created dim into dim 1
out_ss = list(in_vw.symbolic_shape())
out_ss[1] *= num_scales
out_var = scaled.reshape(tuple(out_ss))
out_shape = list(in_vw.shape)
if out_shape[1] is not None:
out_shape[1] *= num_scales
out_shape = tuple(out_shape)
network.create_vw(
"default",
variable=out_var,
shape=out_shape,
tags={"output"},
)
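# NumPy reference (mine, assumptions noted) for what ClipScalingNode
# computes on a (batch, channels, h, w) tensor: each of the num_scales
# (min, max) windows yields one clipped-and-rescaled copy of every input
# channel, and the copies are folded into the channel axis.
import numpy as np

def clip_scaling_reference(x, mins, maxs):
    mins_b = mins[None, None, :, None, None]  # broadcast over a new scales axis
    maxs_b = maxs[None, None, :, None, None]
    rng = maxs_b - mins_b
    scaled = np.clip(x[:, :, None] - mins_b, 0, rng) / rng
    b, c, s = scaled.shape[:3]
    return scaled.reshape(b, c * s, *scaled.shape[3:])

x = np.random.rand(2, 3, 4, 4).astype('float32')
out = clip_scaling_reference(x, np.array([0.0, 0.25]), np.array([0.5, 1.0]))
assert out.shape == (2, 6, 4, 4)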
| 31.747863
| 69
| 0.550276
| 825
| 7,429
| 4.732121
| 0.106667
| 0.018443
| 0.057633
| 0.02459
| 0.803535
| 0.786373
| 0.786373
| 0.777152
| 0.777152
| 0.777152
| 0
| 0.006396
| 0.347557
| 7,429
| 233
| 70
| 31.88412
| 0.799051
| 0.038094
| 0
| 0.766304
| 0
| 0
| 0.038117
| 0
| 0
| 0
| 0
| 0.004292
| 0.032609
| 1
| 0.016304
| false
| 0
| 0.027174
| 0
| 0.076087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e0aa76ea56cb42bac92cae80dd571bc6b2f9161
| 803
|
py
|
Python
|
Python/rot13.py
|
joaovdmcs/Hacktoberfest2019
|
e7f89ebda34a69ddf6d6fa928ddeb7bbb370e599
|
[
"MIT"
] | null | null | null |
Python/rot13.py
|
joaovdmcs/Hacktoberfest2019
|
e7f89ebda34a69ddf6d6fa928ddeb7bbb370e599
|
[
"MIT"
] | null | null | null |
Python/rot13.py
|
joaovdmcs/Hacktoberfest2019
|
e7f89ebda34a69ddf6d6fa928ddeb7bbb370e599
|
[
"MIT"
] | null | null | null |
def Rot13(String):
    # Rotate each ASCII letter by 13 places; everything else passes through.
    nova_string = ""
    for i in range(len(String)):
        if String[i] != " ":
            if ord("A") <= ord(String[i]) <= ord("Z"):
                novoC = ord(String[i]) + 13
                if novoC > ord("Z"):
                    novoC -= 26
                nova_string = nova_string + chr(novoC)
            elif ord("a") <= ord(String[i]) <= ord("z"):
                novoC = ord(String[i]) + 13
                if novoC > ord("z"):
                    novoC -= 26
                nova_string = nova_string + chr(novoC)
            else:
                nova_string = nova_string + String[i]
        else:
            nova_string = nova_string + String[i]
    return nova_string
while True:
    try:
        a = input()          # Python 3: raw_input() was removed
        print(Rot13(a))      # print is a function in Python 3
    except EOFError:
        break
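# Stdlib equivalent (mine, not part of the exercise): Python 3 keeps ROT13
# as a text transform behind codecs.encode.
import codecs

assert codecs.encode('Hello World', 'rot13') == 'Uryyb Jbeyq'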
| 22.942857
| 66
| 0.579078
| 117
| 803
| 3.846154
| 0.25641
| 0.311111
| 0.248889
| 0.266667
| 0.72
| 0.72
| 0.72
| 0.635556
| 0.635556
| 0.635556
| 0
| 0.020374
| 0.266501
| 803
| 34
| 67
| 23.617647
| 0.743633
| 0
| 0
| 0.482759
| 0
| 0
| 0.008717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.034483
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e3b2795911f3d19372cae78fa4db9a8f1308f34
| 4,646
|
py
|
Python
|
kthCharConcatSubStrings.py
|
atishbits/101
|
4b4a8e56d82fe2706f065ded7877deebe8f6164f
|
[
"MIT"
] | null | null | null |
kthCharConcatSubStrings.py
|
atishbits/101
|
4b4a8e56d82fe2706f065ded7877deebe8f6164f
|
[
"MIT"
] | null | null | null |
kthCharConcatSubStrings.py
|
atishbits/101
|
4b4a8e56d82fe2706f065ded7877deebe8f6164f
|
[
"MIT"
] | null | null | null |
# Enter your code here. Read input from STDIN. Print output to STDOUT
import time  # (unused sys/math imports removed)
def printKthChar(string, K):
    length = len(string)
    substringList = []
    start = time.time()
    # obtain all substrings
    for num in range(length):
        tmpstr = ''
        for jum in range(num, length):
            tmpstr += string[jum]
            substringList.append(tmpstr)
    end = time.time()
    print("time taken to get all substrings", end - start)
    start = time.time()
    # now sort the list lexicographically
    substringList.sort()
    # print(substringList)
    end = time.time()
    print("time taken to sort the substrings", end - start)
    curPos = 0
    pos = 0
    prevElem = ''
    for elem in substringList:
        if prevElem == elem:
            continue
        prevElem = elem
        pos += len(elem)
        if pos > K - 1:
            print(elem[K - 1 - curPos])
            return
        else:
            curPos = pos
string = "toefpqnvnqducsamtcmqiqyjazyhizgcnmjefzzurcpturfqtsgbkgnmfntombsbodftcgnukenfvvxrpdszlfoqvhghjylzcssgzhajoqqjnnacsvddcrwxbchkdslhkfnlmqvainmvbozvjeboyxlzbslemwnxbdiptfsdtgywuztgduubthavcnbojplmuwepdwswcquyroeuiayciaxkoaazrmolksanqsmskgsbuzyfzyhjygvogwqakelwymjogxiteauabufbsomtuijdgbdqfqmfexvlvdebgadhxjkrxxmuhwznxedfurmbqimllsotusdrdnlcmxwttxgtblayemzvuniihxbdrgvuugwiftcbskuwyuucgvxdkglrfouwvrqrzmzfhbgaocwmysogpojzuusbloxginxhbwolawloajcydqfsgqtallbyzaejpdtrbhcdgptiavhfooxueqxrdapebvoqahjcqlhwcbfcyohocglgyiabkpgnnwgndprtdzsfczkaprrraczykzawrikeeqtjflcikwqmvaokrfdthfstguqxccdisxrzivkusaipayctgfnnmhjtbbsfdynxvgwfdgbvilnkjpgpwtfjaocdruiwsxtpdrwgazeikrtugzjcuqlwepzxmjtegowlgutguxqeqlywmkbiaoghefgqqcwwqujypdejaxpriqpxcsfcjmlntsfmudkmaulpxrazorszhkyjcfolrbanvhbrnlenfreeieezvyyfiyqndgaxkdkfmlzzyfmewqiewodundzvbrifylcioopabocztofrhowfcbbphcniuxpuiteyjvzmkbmfrrxahvimyjbfmqqiohcydixmdwzpznvsgutordapmdxawnikumkxuhmyflneakzgguuxxwpjcmmyzuivhuvbcjbhupowzncghzfgvurxgdxhyhfhcckelmnhbbdcqfkzeqfcmxbtazcaghhijsowecdhfglymwxsofwaeavgzahhhqstkgrqkvzqblqphnkxugxygvfhvoreglxttpjemlwnwmdfbncyvbwcwdlrtewcpvvjmgnyrlmnzruaewybyxdwcroyvkanfvwsdlsvwekvxfxddyfdviazboxzaxmfuizytstryfoyklbolgtgrtishfthffpbpqzkkublarjmdkcorwxirhcbmwktbzvtswdcthpvyyhdlmuckrmbyocmmniqofjgbmluvcpubzxoltqvkeykubzhqjxeognpuamqcdmdclrpfjmrplekofreoqieyxvsxhkanyfrmzhrkvkagqmwvfalvlptilnisaihapzmoidzdqbmgnicslnpyfiiqxqkzasaqrmebsfeiisqvdupoiempoufmnpnnjndaakduricacuvfpntzrhogeturkjeuxkuzwatnkxnmrlsgynfrxtzbotszfytcipbepuuzrhmkugrstykqsjtiflhkmaowqpbhlxheevozbhtwgfpapiiwwqiisweklhsygbclysmbahxgnogezoavgifzgljtvdrbevocncdpkdvxtdbsudnanuztglmdodevavznxffhicedghwaklcxgesoperfjxcjsczqhexrgeuonsoaetzkxsaowrtgowphaqzjuwcdcwrqriqvbphahhogyjmohdvhwusqtuvvrpnixdfatmactqitzwhjcfqybjqudmswbfgalncgzeisutlvrvetaxrbikynyqkbxsbigfohjzcfupalkhfkeyomimzieljdgmoorevcdxhzpklzrsmvqajboijuwvxchlsbqndvnnvcxidoaplsruthcfdacbcjoumgksezfzdehgsjydbrxxbbcecehgpwdecpygodhrjqzdbzgdtfauieymkgtbewfindrbiyutrtwsvcxojxiubignqzpuxwdkcunmuhgndciyhfosdwmgetwvulrujuglquyndgagiigpouktswbxpxumkngukmhaghnknqswyznovxjowlnnihzvwfpguwhcgwotohrnifddenrayeoinpfkxusrtbtzykumrmbbtfezuwcucseqhkceguvzwrawbvkujmvetbdpzfjcxnufxylesjfqciodfzzonwsgxxxxdgbcvxhtvvzpegggoulvvmliifohdlggthipeskbnlrtrzzivmfryrzgwqnzduhxdsokmylblcuedvmzhuqflpnjfcllysicmyoawzbjexnhucgbwyihoxsvadgyyqckoqlmsovwojfilomjmuscskysogsmzxypnjcfazdokjyxxkgjhymzjmtxunlvkjlyvpfxojnbulytwfeddseogznbmyyxkjxfaccpnpsjcqcavhdbbjriifjvfhurtrwttakjrcsutxuqgathjnpuuapzkjrfapyvslgjnbdiyabgawnljffdhwfrfywhpxfhinrxowiowlwyhmktrpwymdstbqdrpkyzxrxoohemsblbnvuemreawywyrzrgjriijfwxoblgewhrtexgxditcetvfkdoclzlsxkgavoeuyvncstbweuaioxoaaecofpgdzpdxfirdfehyakwefyovxcvzjanopwtrlxoshhymlhmnsktxkiuhmphxpwmhsfyfeoalvyaifmyxzrwjbqtqgcnxybetjeansabnabygpwfqpcarsvhaxwyxyeshjvwbvzryaresnjiclkvgfegebgczlwiwrgzpgsnijtayclbqxywddejelnfykpubvvsdofmxyowdbzvzztecxnikbnileeoccjfqorppinujopioimqfazsdocqvhgxjpcbgwsvggppueaiombvocntsqjnzqkkhplnnditlzigfkqtxtpnvchouqbtiodqdqdquolfpvnvggqdzfsxkzlgsoaaedsjwybqmmxbinwotntsunpgncnhtnjxschqaigoxgpftovmdqgzdygqcvzvkjvenevpoddljuscknqpdxphvxzxsbvemqizwfokjtysnqxagnplkeuhdwfxxcbjumiqrycatavitsibfymsehzidgfcijlcvvumvxnozkyschtjhidlpdvuldwumjyhettzqiprsnkwwfidnlreooycrwyffwmjrikjsbbnrljnsrtgdmmtclvwjtbpsoajyntqqudhhpxbgsjlgwhijeesaghswkduzympuqzbfwfnqobykigtonmovvithmpgmbwhrviytpmlenjoxrilgwabtkubwjjjnhseecdxsrkwgvkdnuotsqxmbtpzfyksgcwkfbkxuwtasfdfasaskzgltvnywxqcaoohsaemxznpeswgmybxyhkrfxrbvhfxxtepuket"
printKthChar(string, 5665842)
'''
def main():
    return  # early return left in by the author; the code below never runs
    T = int(input())
    for _ in range(T):
        string = input()
        K = int(input())
        printKthChar(string, K)
# call the main function
main()
'''
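# A leaner sketch of the same task (mine, not the submission): the sorted
# distinct substrings are exactly the prefixes of each sorted suffix that
# extend past its common prefix with the previous suffix, so the K-th
# character (1-indexed) of their concatenation can be found without
# materialising and sorting all O(n^2) substrings up front.
def kth_char_sorted_distinct_substrings(s, k):
    prev = ''
    for suf in sorted(s[i:] for i in range(len(s))):
        lcp = 0
        while lcp < min(len(prev), len(suf)) and prev[lcp] == suf[lcp]:
            lcp += 1
        for length in range(lcp + 1, len(suf) + 1):  # each new substring
            if k <= length:
                return suf[k - 1]
            k -= length
        prev = suf
    return None

assert kth_char_sorted_distinct_substrings('banana', 5) == 'n'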
| 86.037037
| 3,434
| 0.888506
| 149
| 4,646
| 27.684564
| 0.402685
| 0.007758
| 0.009212
| 0.007758
| 0.013091
| 0.013091
| 0.013091
| 0
| 0
| 0
| 0
| 0.002589
| 0.085665
| 4,646
| 53
| 3,435
| 87.660377
| 0.968456
| 0.030779
| 0
| 0.121212
| 0
| 0
| 0.814002
| 0.798833
| 0
| 1
| 0
| 0.018868
| 0
| 0
| null | null | 0
| 0.090909
| null | null | 0.151515
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e5255292f78b0a4de74dd144079172029a6cc55
| 142
|
py
|
Python
|
dark_proteome_visualization/handlers/elements.py
|
Zethson/dark-proteome-visualization
|
fef978e967c8c655f13c9d7bdc4b0ea6722c1702
|
[
"MIT"
] | 2
|
2019-01-12T19:59:56.000Z
|
2019-01-19T08:48:33.000Z
|
dark_proteome_visualization/handlers/elements.py
|
Zethson/dark-proteome-visualization
|
fef978e967c8c655f13c9d7bdc4b0ea6722c1702
|
[
"MIT"
] | 23
|
2018-12-18T21:04:17.000Z
|
2019-01-23T19:16:15.000Z
|
dark_proteome_visualization/handlers/elements.py
|
Zethson/dark-proteome-visualization
|
fef978e967c8c655f13c9d7bdc4b0ea6722c1702
|
[
"MIT"
] | 1
|
2019-01-12T21:08:04.000Z
|
2019-01-12T21:08:04.000Z
|
from flask import render_template
from ..app import app
@app.route("/elements")
def elements():
return render_template("elements.html")
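# Hedged smoke test (mine): Flask's built-in test client can exercise the
# route; the import path is inferred from the repo layout, not confirmed.
from dark_proteome_visualization.app import app

with app.test_client() as client:
    assert client.get("/elements").status_code == 200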
| 17.75
| 43
| 0.746479
| 19
| 142
| 5.473684
| 0.578947
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133803
| 142
| 7
| 44
| 20.285714
| 0.845528
| 0
| 0
| 0
| 0
| 0
| 0.15493
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
285f21987b2f0ad439f91ae3add5be492825a97d
| 217
|
py
|
Python
|
utils.py
|
haesleinhuepf/label_neighbor_filters
|
a0f46030d28280d65cab515ae6272c444a080f85
|
[
"CC-BY-4.0"
] | 2
|
2021-11-19T12:33:34.000Z
|
2021-11-20T07:03:50.000Z
|
utils.py
|
haesleinhuepf/label_neighbor_filters
|
a0f46030d28280d65cab515ae6272c444a080f85
|
[
"CC-BY-4.0"
] | null | null | null |
utils.py
|
haesleinhuepf/label_neighbor_filters
|
a0f46030d28280d65cab515ae6272c444a080f85
|
[
"CC-BY-4.0"
] | null | null | null |
from pyclesperanto_prototype import imshow
def show(image, min_display_intensity=0, max_display_intensity=1):
imshow(image, min_display_intensity=min_display_intensity, max_display_intensity=max_display_intensity)
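# Hypothetical usage (mine): any 2D array scaled to [0, 1] displays with
# the fixed intensity window this wrapper pins.
import numpy as np

show(np.random.rand(64, 64))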
| 72.333333
| 107
| 0.875576
| 30
| 217
| 5.9
| 0.466667
| 0.542373
| 0.322034
| 0.271186
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009852
| 0.064516
| 217
| 3
| 107
| 72.333333
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
288b901f2c6fc30dddc84058583640c247ca466a
| 16,642
|
py
|
Python
|
tests/test_views.py
|
fabiobatalha/ratchet
|
8c0358d8821aff60a599705dd859ee8d66205d3b
|
[
"BSD-2-Clause"
] | 1
|
2019-03-16T05:13:25.000Z
|
2019-03-16T05:13:25.000Z
|
tests/test_views.py
|
fabiobatalha/ratchet
|
8c0358d8821aff60a599705dd859ee8d66205d3b
|
[
"BSD-2-Clause"
] | 1
|
2015-06-23T15:00:11.000Z
|
2015-06-23T15:00:11.000Z
|
tests/test_views.py
|
fabiobatalha/ratchet
|
8c0358d8821aff60a599705dd859ee8d66205d3b
|
[
"BSD-2-Clause"
] | 1
|
2019-03-16T05:13:21.000Z
|
2019-03-16T05:13:21.000Z
|
import unittest
import datetime
import pymongo
import json
from pyramid import testing
from pyramid import httpexceptions
class ViewTests(unittest.TestCase):
    def setUp(self):
        self.config = testing.setUp()
        # pymongo.MongoClient replaces the long-removed pymongo.Connection API
        self.collection = pymongo.MongoClient('mongodb://localhost/')['test_scielo_network']['accesses']
    def tearDown(self):
        testing.tearDown()
        self.collection.delete_many({})  # Collection.remove() was dropped in pymongo 3
def test_index(self):
from ratchet.views import index
request = testing.DummyRequest()
response = index(request)
self.assertEqual(response.text, 'Ratchet API')
def test_endpoints(self):
from ratchet.views import endpoints
request = testing.DummyRequest()
response = endpoints(request)
self.assertEqual(response['articles']['list_endpoint'], '/api/v1/articles/')
self.assertEqual(response['journals']['list_endpoint'], '/api/v1/journals/')
self.assertEqual(response['issues']['list_endpoint'], '/api/v1/issues/')
self.assertEqual(response['general']['list_endpoint'], '/api/v1/general/')
def test_articles(self):
from ratchet.views import articles, general_post
post_data = {'code': 'S0104-77602014000100002', 'page': 'html', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': 'S0100-07602014000100002', 'page': 'abstract', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(db=self.collection)
response = articles(request)
self.assertEqual(len(response['objects']), 2)
def test_articles_offset_exceeded_lt(self):
from ratchet.views import articles, general_post
post_data = {'code': 'S0104-77602014000100002', 'page': 'html', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': 'S0100-07602014000100002', 'page': 'abstract', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(params={'offset': -1}, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
response = articles(request)
def test_articles_offset_exceeded_gt(self):
from ratchet.views import articles, general_post
post_data = {'code': 'S0104-77602014000100002', 'page': 'html', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': 'S0100-07602014000100002', 'page': 'abstract', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(params={'offset': 3}, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
response = articles(request)
def test_article(self):
from ratchet.views import article, general_post
post_data = {'code': 'S0104-77602014000100002', 'page': 'abstract', 'type': 'article', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(db=self.collection)
request.matchdict.update(dict(code='S0104-77602014000100002'))
response = article(request)
self.assertEqual(response['code'], 'S0104-77602014000100002')
self.assertEqual(response['total'], 1)
def test_article_invalid_issn(self):
from ratchet.views import article
request = testing.DummyRequest(db=self.collection)
request.matchdict.update(dict(code='xxx'))
with self.assertRaises(httpexceptions.HTTPBadRequest):
article(request)
def test_issues(self):
from ratchet.views import issues, general_post
post_data = {'code': '0104-776020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': '0100-076020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(db=self.collection)
response = issues(request)
self.assertEqual(len(response['objects']), 2)
def test_issues_offset_exceeded_lt(self):
from ratchet.views import issues, general_post
post_data = {'code': '0104-776020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': '0100-076020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(params={'offset': -1}, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
response = issues(request)
def test_issues_offset_exceeded_gt(self):
from ratchet.views import issues, general_post
post_data = {'code': '0104-776020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': '0100-076020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(params={'offset': 3}, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
response = issues(request)
def test_issue(self):
from ratchet.views import issue, general_post
post_data = {'code': '0104-776020140001', 'page': 'toc', 'type': 'issue', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(db=self.collection)
request.matchdict.update(dict(code='0104-776020140001'))
response = issue(request)
self.assertEqual(response['code'], '0104-776020140001')
self.assertEqual(response['total'], 1)
def test_issue_invalid_issn(self):
from ratchet.views import issue
request = testing.DummyRequest(db=self.collection)
request.matchdict.update(dict(code='xxx'))
with self.assertRaises(httpexceptions.HTTPBadRequest):
issue(request)
def test_journals(self):
from ratchet.views import journals, general_post
post_data = {'code': '0104-7760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': '0100-0760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(db=self.collection)
response = journals(request)
self.assertEqual(len(response['objects']), 2)
def test_journals_offset_exceeded_lt(self):
from ratchet.views import journals, general_post
post_data = {'code': '0104-7760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': '0100-0760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(params={'offset': -1}, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
response = journals(request)
def test_journals_offset_exceeded_gt(self):
from ratchet.views import journals, general_post
post_data = {'code': '0104-7760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
post_data = {'code': '0100-0760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(params={'offset': 3}, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
response = journals(request)
def test_journal(self):
from ratchet.views import journal, general_post
post_data = {'code': '0104-7760', 'page': 'journal', 'type': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
request = testing.DummyRequest(db=self.collection)
request.matchdict.update(dict(code='0104-7760'))
response = journal(request)
self.assertEqual(response['code'], '0104-7760')
self.assertEqual(response['total'], 1)
def test_journal_invalid_issn(self):
from ratchet.views import journal
request = testing.DummyRequest(db=self.collection)
request.matchdict.update(dict(code='xxx'))
with self.assertRaises(httpexceptions.HTTPBadRequest):
journal(request)
def test_general_get_invalid_type_doc(self):
from ratchet.views import general_get
params = {'code': 'scl', 'type': 'xxx'}
request = testing.DummyRequest(params=params, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
general_get(request)
def test_general_get_invalid_offset_out_of_range_gt(self):
from ratchet.views import general_get
params = {'code': 'scl', 'type': 'journal', 'offset': 1000}
request = testing.DummyRequest(params=params, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
general_get(request)
def test_general_get_invalid_offset_out_of_range_lt(self):
from ratchet.views import general_get
params = {'code': 'scl', 'type': 'journal', 'offset': -1}
request = testing.DummyRequest(params=params, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
general_get(request)
def test_general_bulk(self):
from ratchet.views import general_bulk
post_data = {
'data': json.dumps({
"code": "S0034-89102009000400003",
"journal": "0034-8910",
"issue": "0034-891020090004",
"abstract.y2011.m10.d01": 100,
"abstract.y2011.m10.d02": 100,
"abstract.y2011.m10.d03": 100,
"abstract.y2012.m11.d01": 10,
"abstract.y2012.m11.a02": 10,
"abstract.y2012.m11.a03": 10,
"abstract.y2012.m10.total": 300,
"abstract.y2012.m11.total": 30,
"abstract.y2012.total": 330,
"abstract.total": 330,
"total": 330,
"type": "article"
})
}
request = testing.DummyRequest(post=post_data, db=self.collection)
with self.assertRaises(httpexceptions.HTTPCreated):
general_bulk(request)
self.assertEqual(
self.collection.find_one()['abstract']['y2011']['m10']['d01'],
100
)
def test_general_bulk_unauthorized(self):
from ratchet.views import general_bulk
post_data = {
'admintoken': 'invalid',
'data': json.dumps({
"code": "S0034-89102009000400003",
"journal": "0034-8910",
"issue": "0034-891020090004",
"abstract.y2011.m10.d01": 100,
"abstract.y2011.m10.d02": 100,
"abstract.y2011.m10.d03": 100,
"abstract.y2012.m11.d01": 10,
"abstract.y2012.m11.a02": 10,
"abstract.y2012.m11.a03": 10,
"abstract.y2012.m10.total": 300,
"abstract.y2012.m11.total": 30,
"abstract.y2012.total": 330,
"abstract.total": 330,
"total": 330,
"type": "article"
})
}
request = testing.DummyRequest(post=post_data, db=self.collection)
with self.assertRaises(httpexceptions.HTTPUnauthorized):
general_bulk(request)
def test_general_post(self):
from ratchet.views import general_post
post_data = {'code': 'scl', 'page': 'journal', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
with self.assertRaises(httpexceptions.HTTPCreated):
general_post(request)
self.assertEqual(
self.collection.find_one()['journal']['y2014']['m12']['d25'],
1
)
def test_general_post_unauthorized(self):
from ratchet.views import general_post
post_data = {'code': 'scl', 'page': 'journal', 'access_date': '2014-12-25', 'admintoken': 'invalid'}
request = testing.DummyRequest(post=post_data, db=self.collection)
with self.assertRaises(httpexceptions.HTTPUnauthorized):
general_post(request)
def test_general_post_invalid_date(self):
from ratchet.views import general_post
post_data = {'code': 'scl', 'page': 'journal', 'access_date': '2014-1x-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
general_post(request)
def test_general_post_invalid_type_doc(self):
from ratchet.views import general_post
post_data = {'code': 'scl', 'page': 'journal', 'type': 'xxxx', 'access_date': '2014-12-25'}
request = testing.DummyRequest(post=post_data, db=self.collection)
with self.assertRaises(httpexceptions.HTTPBadRequest):
general_post(request)
def test_general_post_current_datetime(self):
from ratchet.views import general_post
post_data = {'code': 'scl', 'page': 'journal'}
request = testing.DummyRequest(post=post_data, db=self.collection)
try:
general_post(request)
except:
pass
day = 'd%02d' % datetime.date.today().day
month = 'm%02d' % datetime.date.today().month
year = 'y%02d' % datetime.date.today().year
self.assertEqual(
self.collection.find_one()['journal'][year][month][day],
1
)
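# Hedged cleanup sketch (mine, not part of the suite): the try/except-pass
# pairs above exist only because general_post signals success by raising
# HTTPCreated; contextlib.suppress (stdlib) states that intent directly.
# Self-contained analogue with a stand-in exception:
from contextlib import suppress

class HTTPCreated(Exception):
    """Stand-in for pyramid.httpexceptions.HTTPCreated."""

def general_post_stub():
    raise HTTPCreated()  # hypothetical success-by-exception endpoint

with suppress(HTTPCreated):
    general_post_stub()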
| 32.127413
| 123
| 0.60834
| 1,752
| 16,642
| 5.648402
| 0.080479
| 0.045271
| 0.126112
| 0.084883
| 0.881265
| 0.863379
| 0.841148
| 0.80285
| 0.784964
| 0.758488
| 0
| 0.075848
| 0.264812
| 16,642
| 518
| 124
| 32.127413
| 0.732979
| 0
| 0
| 0.74
| 0
| 0
| 0.157003
| 0.036832
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.082857
| false
| 0.062857
| 0.094286
| 0
| 0.18
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
95415c3d0297e427cd1f459bd189d9e92315ae0f
| 3,413
|
py
|
Python
|
python/shapeFactory.py
|
limingnihao/tetris
|
76100695cfdf3547a297718b6d23bcd1d4796044
|
[
"Apache-2.0"
] | 2
|
2020-07-19T13:19:14.000Z
|
2021-09-10T04:15:17.000Z
|
python/shapeFactory.py
|
limingnihao/tetris
|
76100695cfdf3547a297718b6d23bcd1d4796044
|
[
"Apache-2.0"
] | null | null | null |
python/shapeFactory.py
|
limingnihao/tetris
|
76100695cfdf3547a297718b6d23bcd1d4796044
|
[
"Apache-2.0"
] | 1
|
2021-09-10T04:15:36.000Z
|
2021-09-10T04:15:36.000Z
|
import shape as vo
import random
class ShapeFactory(object):
shapeData = [
[[[0, 1, 1, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 0]],
[[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 0],
[0, 0, 0, 0]],
[[0, 0, 0, 1],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 1, 0, 0],
[0, 1, 1, 1],
[0, 0, 0, 0]]],
[[[0, 0, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 1],
[0, 0, 0, 0]],
[[0, 0, 0, 1],
[0, 1, 1, 1],
[0, 0, 0, 0],
[0, 0, 0, 0]],
[[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 1, 0],
[0, 0, 0, 0]],
[[0, 1, 1, 1],
[0, 1, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]]],
[[[0, 0, 0, 0],
[0, 0, 1, 0],
[0, 1, 1, 1],
[0, 0, 0, 0]],
[[0, 0, 1, 0],
[0, 0, 1, 1],
[0, 0, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 1, 1, 1],
[0, 0, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 1, 0],
[0, 1, 1, 0],
[0, 0, 1, 0],
[0, 0, 0, 0]]],
[[[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 0, 1, 1],
[0, 0, 0, 0]],
[[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 0, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 0, 1, 1],
[0, 0, 0, 0]],
[[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 0, 1, 0],
[0, 0, 0, 0]]],
[[[0, 0, 0, 0],
[0, 0, 1, 1],
[0, 1, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 1, 0],
[0, 0, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 0, 1, 1],
[0, 1, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 1, 0],
[0, 0, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 0]]],
[[[0, 0, 1, 0],
[0, 0, 1, 0],
[0, 0, 1, 0],
[0, 0, 1, 0]],
[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[0, 0, 0, 0]],
[[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0]],
[[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0]]],
[[[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 1, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 1, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 1, 1, 0],
[0, 0, 0, 0]],
[[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 1, 1, 0],
[0, 0, 0, 0]]]
]
shapeColor = [0xCC6666, 0x66CC66, 0x6666CC, 0xCCCC66, 0xCC66CC, 0x66CCCC, 0xDAAA00]
def __init__(self):
print('init')
def product(self, size, offset):
i = random.randint(0, len(self.shapeColor) - 1)
j = random.randint(0, len(self.shapeData) - 1)
color = self.shapeColor[i]
data = self.shapeData[j]
shape = vo.Shape(color, data, size, offset)
return shape
def next(self, color, data, size, offset):
shape = vo.Shape(color, data, size, offset)
shape.pointX = 0
shape.pointY = 0
return shape
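# Hypothetical usage (mine): assumes shape.Shape keeps its constructor
# arguments around as .color/.data attributes, which this file never shows.
factory = ShapeFactory()
current = factory.product(size=20, offset=10)
queued = factory.next(current.color, current.data, 20, 10)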
| 23.376712
| 87
| 0.252564
| 531
| 3,413
| 1.615819
| 0.067797
| 0.622378
| 0.70979
| 0.713287
| 0.671329
| 0.594406
| 0.594406
| 0.522145
| 0.522145
| 0.518648
| 0
| 0.286139
| 0.507471
| 3,413
| 145
| 88
| 23.537931
| 0.224271
| 0
| 0
| 0.833333
| 0
| 0
| 0.001172
| 0
| 0
| 0
| 0.016408
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.015152
| 0
| 0.075758
| 0.007576
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
95569f813e1e87cc79571ad5a8e9e187f38fad63
| 3,329
|
py
|
Python
|
1-Lesson-Plans/10-Cryptography/3/resources/encrypter.py
|
BaleBase/lala
|
33193a7dd3563c7636493e52fa9a4956ec4b9dc6
|
[
"CNRI-Python"
] | null | null | null |
1-Lesson-Plans/10-Cryptography/3/resources/encrypter.py
|
BaleBase/lala
|
33193a7dd3563c7636493e52fa9a4956ec4b9dc6
|
[
"CNRI-Python"
] | null | null | null |
1-Lesson-Plans/10-Cryptography/3/resources/encrypter.py
|
BaleBase/lala
|
33193a7dd3563c7636493e52fa9a4956ec4b9dc6
|
[
"CNRI-Python"
] | 1
|
2021-06-08T06:50:23.000Z
|
2021-06-08T06:50:23.000Z
|
def password_chck(passwd):
    SpecialSym = ['$', '@', '#', '%']
    val = True
    if len(passwd) < 6:
        print('length should be at least 6')
        val = False
    if len(passwd) > 20:
        print('length should not be greater than 20')
        val = False
    if not any(char.isdigit() for char in passwd):
        print('Password should have at least one numeral')
        val = False
    if not any(char.isupper() for char in passwd):
        print('Password should have at least one uppercase letter')
        val = False
    if not any(char.islower() for char in passwd):
        print('Password should have at least one lowercase letter')
        val = False
    if not any(char in SpecialSym for char in passwd):
        print('Password should have at least one of the symbols $@#%')
        val = False
    return val
def passwrd_check(passwd):
SpecialSym =['$', '@', '#', '%']
val = True
if len(passwd) < 6:
print('length should be at least 6')
val = False
if len(passwd) > 20:
print('length should be not be greater than 8')
val = False
if not any(char.isdigit() for char in passwd):
print('Password should have at least one numeral')
val = False
if not any(char.isupper() for char in passwd):
print('Password should have at least one uppercase letter')
val = False
if not any(char.islower() for char in passwd):
print('Password should have at least one lowercase letter')
val = False
if not any(char in SpecialSym for char in passwd):
print('Password should have at least one of the symbols $@#')
val = False
if val:
return val
def ceasar(text,s):
cipherText= ""
for ch in text:
print ("Encrypted text is ")
def caesar(text,s):
cipherText = ""
for ch in text:
if ch.isalpha():
stayInAlphabet = ord(ch) + (s - 2)
if stayInAlphabet > ord('z'):
stayInAlphabet -= 26
finalLetter = chr(stayInAlphabet)
cipherText += finalLetter
#print ("Your ciphertext is: ", cipherText)
return cipherText
#check the above function
text = input("What is your Password? ")
def pssword_check(passwd):
SpecialSym =['$', '@', '#', '%']
val = True
if len(passwd) < 6:
print('length should be at least 6')
val = False
if len(passwd) > 20:
print('length should be not be greater than 8')
val = False
if not any(char.isdigit() for char in passwd):
print('Password should have at least one numeral')
val = False
if not any(char.isupper() for char in passwd):
print('Password should have at least one uppercase letter')
val = False
if not any(char.islower() for char in passwd):
print('Password should have at least one lowercase letter')
val = False
if not any(char in SpecialSym for char in passwd):
print('Password should have at least one of the symbols $@#')
val = False
if val:
return val
print ("Your Password is: " + text)
print ("Your Encrypted Password is: " + caesar(text,21))
| 27.286885
| 70
| 0.570141
| 431
| 3,329
| 4.396752
| 0.148492
| 0.075989
| 0.094987
| 0.082322
| 0.819525
| 0.819525
| 0.819525
| 0.792084
| 0.792084
| 0.792084
| 0
| 0.008985
| 0.331331
| 3,329
| 121
| 71
| 27.512397
| 0.842318
| 0.019826
| 0
| 0.813953
| 0
| 0
| 0.268016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05814
| false
| 0.418605
| 0
| 0
| 0.104651
| 0.244186
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
955906ea0413a8b72534be60c3a9c07590276652
| 2,280
|
py
|
Python
|
epytope/Data/pssms/bimas/mat/C_0602_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 7
|
2021-02-01T18:11:28.000Z
|
2022-01-31T19:14:07.000Z
|
epytope/Data/pssms/bimas/mat/C_0602_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 22
|
2021-01-02T15:25:23.000Z
|
2022-03-14T11:32:53.000Z
|
epytope/Data/pssms/bimas/mat/C_0602_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 4
|
2021-05-28T08:50:38.000Z
|
2022-03-14T11:45:32.000Z
|
C_0602_9 = {0: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0953101798043, 'I': 0.0953101798043, 'H': 0.0, 'K': 0.0953101798043, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': -2.30258509299, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0953101798043}, 1: {'A': 0.0953101798043, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0953101798043, 'P': 0.0953101798043, 'S': 0.0, 'R': 0.0953101798043, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 2: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 3: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 4: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 1.09861228867, 'I': 1.09861228867, 'H': 0.0, 'K': 0.69314718056, 'M': 1.09861228867, 'L': 1.09861228867, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 5: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.69314718056, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.69314718056, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.69314718056, 'Y': 0.0}, 6: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0953101798043, 'M': 0.0, 'L': 0.0, 'N': 0.0953101798043, 'Q': 0.0953101798043, 'P': 0.0, 'S': 0.0, 'R': 0.0953101798043, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 7: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 8: {'A': 0.0, 'C': 0.0, 'E': -2.30258509299, 'D': -2.30258509299, 'G': -2.30258509299, 'F': 0.0, 'I': 1.60943791243, 'H': -2.30258509299, 'K': -2.30258509299, 'M': 0.69314718056, 'L': 2.30258509299, 'N': -1.60943791243, 'Q': -2.30258509299, 'P': -2.30258509299, 'S': -1.60943791243, 'R': -2.30258509299, 'T': 0.0, 'W': 0.0, 'V': 1.60943791243, 'Y': 1.60943791243}, -1: {'con': -1.60943791243}}
| 2,280
| 2,280
| 0.419737
| 556
| 2,280
| 1.717626
| 0.066547
| 0.301571
| 0.028272
| 0.037696
| 0.561257
| 0.524607
| 0.524607
| 0.509948
| 0.506806
| 0.506806
| 0
| 0.4043
| 0.163596
| 2,280
| 1
| 2,280
| 2,280
| 0.096487
| 0
| 0
| 0
| 0
| 0
| 0.080228
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95f309e7af3514880a30f57f90da6192e2c7803f
| 22,480
|
py
|
Python
|
demo_libraries/dyamic_systems_limited_memory_library/dynamic_systems_animators.py
|
jermwatt/blog
|
3dd0d464d7a17c1c7a6508f714edc938dc3c03e9
|
[
"MIT"
] | 14
|
2019-04-17T23:55:14.000Z
|
2021-08-08T02:18:49.000Z
|
demo_libraries/dyamic_systems_limited_memory_library/dynamic_systems_animators.py
|
jermwatt/blog
|
3dd0d464d7a17c1c7a6508f714edc938dc3c03e9
|
[
"MIT"
] | null | null | null |
demo_libraries/dyamic_systems_limited_memory_library/dynamic_systems_animators.py
|
jermwatt/blog
|
3dd0d464d7a17c1c7a6508f714edc938dc3c03e9
|
[
"MIT"
] | 3
|
2019-04-10T22:46:27.000Z
|
2020-11-06T09:16:30.000Z
|
# import standard plotting and animation
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
from matplotlib.ticker import FormatStrFormatter
import matplotlib.animation as animation
from mpl_toolkits.mplot3d import Axes3D
from IPython.display import clear_output
import matplotlib.ticker as ticker
# import standard libraries
import math
import time
import copy
from inspect import signature
class Visualizer:
'''
animators for time series
'''
#### animate moving average ####
def animate_system(self,x,y,T,savepath,**kwargs):
# produce figure
fig = plt.figure(figsize = (9,4))
gs = gridspec.GridSpec(1, 3, width_ratios=[1,7,1])
ax = plt.subplot(gs[0]); ax.axis('off')
ax1 = plt.subplot(gs[1]);
ax2 = plt.subplot(gs[2]); ax2.axis('off')
artist = fig
# view limits
xmin = -3
xmax = len(x) + 3
ymin = np.min(x)
ymax = np.max(x)
ygap = (ymax - ymin)*0.15
ymin -= ygap
ymax += ygap
# start animation
num_frames = len(y) - T + 1
print ('starting animation rendering...')
def animate(k):
# clear panels
ax1.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
# plot x
ax1.scatter(np.arange(1,x.size + 1),x,c = 'k',edgecolor = 'w',s = 40,linewidth = 1,zorder = 3);
ax1.plot(np.arange(1,x.size + 1),x,alpha = 0.5,c = 'k',zorder = 3);
# plot moving average - initial conditions
if k == 1:
# plot x
ax1.scatter(np.arange(1,T + 1), y[:T],c = 'darkorange',edgecolor = 'w',s = 120,linewidth = 1,zorder = 2);
ax1.plot(np.arange(1,T + 1), y[:T],alpha = 0.5,c = 'darkorange',zorder = 2);
# make vertical visual guides
ax1.axvline(x = 1, c='deepskyblue')
ax1.axvline(x = T, c='deepskyblue')
# plot moving average - everything after and including initial conditions
if k > 1:
j = k-1
# plot
ax1.scatter(np.arange(1,T + j + 1),y[:T + j],c = 'darkorange',edgecolor = 'w',s = 120,linewidth = 1,zorder = 2);
ax1.plot(np.arange(1,T + j + 1),y[:T + j],alpha = 0.5,c = 'darkorange',zorder = 2);
# make vertical visual guides
ax1.axvline(x = j, c='deepskyblue')
ax1.axvline(x = j + T - 1, c='deepskyblue')
# label axes
ax1.set_xlim([xmin,xmax])
ax1.set_ylim([ymin,ymax])
return artist,
anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)
# produce animation and save
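        # note: saving with extra_args=['-vcodec', 'libx264'] assumes an
        # ffmpeg build with libx264 support is available on the PATH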
fps = 50
if 'fps' in kwargs:
fps = kwargs['fps']
anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
clear_output()
#### animate range of moving average calculations ####
def animate_system_range(self,x,func,params,savepath,**kwargs):
playback = 1
if 'playback' in kwargs:
playback = kwargs['playback']
# produce figure
fig = plt.figure(figsize = (9,4))
gs = gridspec.GridSpec(1, 3, width_ratios=[1,7,1])
ax = plt.subplot(gs[0]); ax.axis('off')
ax1 = plt.subplot(gs[1]);
ax2 = plt.subplot(gs[2]); ax2.axis('off')
artist = fig
# view limits
xmin = -3
xmax = len(x) + 3
ymin = np.min(x)
ymax = np.max(x)
ygap = (ymax - ymin)*0.15
ymin -= ygap
ymax += ygap
# start animation
num_frames = len(params)+1
print ('starting animation rendering...')
def animate(k):
# clear panels
ax1.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
# plot x
ax1.scatter(np.arange(1,x.size + 1),x,c = 'k',edgecolor = 'w',s = 40,linewidth = 1,zorder = 3);
ax1.plot(np.arange(1,x.size + 1),x,alpha = 0.5,c = 'k',zorder = 3);
# create y
if k == 0:
T = params[0]
y = func(x,T)
ax1.set_title(r'Original data')
if k > 0:
T = params[k-1]
y = func(x,T)
ax1.scatter(np.arange(1,y.size + 1),y,c = 'darkorange',edgecolor = 'w',s = 120,linewidth = 1,zorder = 2);
ax1.plot(np.arange(1,y.size + 1),y,alpha = 0.5,c = 'darkorange',zorder = 2);
ax1.set_title(r'$D = $ ' + str(T))
# label axes
ax1.set_xlabel(r'$p$',fontsize = 13)
ax1.set_xlim([xmin,xmax])
ax1.set_ylim([ymin,ymax])
return artist,
anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)
        # produce animation and save
        fps = 1
        if 'fps' in kwargs:
            fps = kwargs['fps']
        anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
clear_output()
#### animate vector system with heatmap ####
def animate_vector_system(self,x,D,model,func,savepath,**kwargs):
x = np.array(x)
h,old_bins = func([0])
bins = []
for i in range(len(old_bins)-1):
b1 = old_bins[i]
b2 = old_bins[i+1]
n = (b1 + b2)/2
n = np.round(n,2)
bins.append(n)
y = model(x,D,func)
num_windows = len(y) - 1
# produce figure
fig = plt.figure(figsize = (11,10))
gs = gridspec.GridSpec(2, 3, width_ratios=[1,7,1],height_ratios=[0.75,1])
ax1 = plt.subplot(gs[0]); ax1.axis('off')
ax2 = plt.subplot(gs[1]);
ax3 = plt.subplot(gs[2]); ax3.axis('off')
ax4 = plt.subplot(gs[3]); ax4.axis('off')
ax5 = plt.subplot(gs[4]);
ax6 = plt.subplot(gs[5]); ax6.axis('off')
artist = fig
# view limits
xmin = -3
xmax = len(x) + 3
ymin = np.min(x)
ymax = np.max(x)
ygap = (ymax - ymin)*0.15
ymin -= ygap
ymax += ygap
# make colormap
# a,b = np.meshgrid(np.arange(num_windows+1),np.arange(len(bins)-1))
# s = ax1.pcolormesh(a, b, np.array(y).T,cmap = 'hot',vmin = 0,vmax = 1) #,edgecolor = 'k') # hot, gist_heat, cubehelix
# ax1.cla(); ax1.axis('off');
# fig.colorbar(s, ax=ax5)
# start animation
num_frames = len(x) - D + 2
print ('starting animation rendering...')
def animate(k):
# clear panels
ax2.cla()
ax5.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
# plot x
ax2.scatter(np.arange(1,x.size + 1),x,c = 'k',edgecolor = 'w',s = 80,linewidth = 1,zorder = 3);
ax2.plot(np.arange(1,x.size + 1),x,alpha = 0.5,c = 'k',zorder = 3);
# plot moving average - initial conditions
if k == 0:
# plot x
ax2.scatter(np.arange(1,D + 1), x[:D],c = 'darkorange',edgecolor = 'w',s = 200,linewidth = 1,zorder = 2);
ax2.plot(np.arange(1,D + 1), x[:D],alpha = 0.5,c = 'darkorange',zorder = 2);
# make vertical visual guides
ax2.axvline(x = 1, c='deepskyblue')
ax2.axvline(x = D, c='deepskyblue')
# plot histogram
self.plot_heatmap(ax5,y[:2],bins,num_windows)
# plot moving average - everything after and including initial conditions
if k > 0:
j = k
# plot
ax2.scatter(np.arange(j,D + j),x[j-1:D + j - 1],c = 'darkorange',edgecolor = 'w',s = 200,linewidth = 1,zorder = 2);
ax2.plot(np.arange(j,D + j),x[j-1:D + j - 1],alpha = 0.5,c = 'darkorange',zorder = 2);
# make vertical visual guides
ax2.axvline(x = j, c='deepskyblue')
ax2.axvline(x = j + D - 1, c='deepskyblue')
# plot histogram
self.plot_heatmap(ax5,y[:j+1],bins,num_windows)
# label axes
ax2.set_xlim([xmin,xmax])
ax2.set_ylim([ymin,ymax])
return artist,
anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)
# produce animation and save
fps = 50
if 'fps' in kwargs:
fps = kwargs['fps']
anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
clear_output()
def plot_heatmap(self,ax,y,bins,num_windows):
y=np.array(y).T
### plot ###
num_chars,num_samples = y.shape
num_chars += 1
a,b = np.meshgrid(np.arange(num_samples),np.arange(num_chars))
### y-axis Customize minor tick labels ###
# make custom labels
num_bins = len(bins)+1
y_ticker_range = np.arange(0.5,num_bins,10).tolist()
new_bins = [bins[v] for v in range(0,len(bins),10)]
y_char_range = [str(s) for s in new_bins]
# assign major or minor ticklabels? - chosen major by default
ax.yaxis.set_major_locator(ticker.FixedLocator(y_ticker_range))
ax.yaxis.set_major_formatter(ticker.FixedFormatter(y_char_range))
ax.xaxis.set_ticks_position('bottom') # the rest is the same
ax.set_xticks([],[])
ax.set_yticks([],[])
ax.set_ylabel('values',rotation = 90,fontsize=15)
ax.set_xlabel('window',fontsize=15)
# ax.set_title(title,fontsize = 15)
cmap = 'hot_r'
#cmap = 'RdPu'
s = ax.pcolormesh(a, b, 4*y,cmap = cmap,vmin = 0,vmax = 1) #,edgecolor = 'k') # hot, gist_heat, cubehelix
ax.set_ylim([-1,len(bins)])
ax.set_xlim([0,num_windows])
# for i in range(len(bins)):
# ax.hlines(y=i, xmin=0, xmax=num_windows, linewidth=1, color='k',alpha = 0.75)
#### animate vector system with heatmap ####
def animate_vector_histogram(self,x,D,model,func,savepath,**kwargs):
x = np.array(x)
h,old_bins = func([0])
bins = []
for i in range(len(old_bins)-1):
b1 = old_bins[i]
b2 = old_bins[i+1]
n = (b1 + b2)/2
n = np.round(n,2)
bins.append(n)
y = model(x,D,func)
num_windows = len(y) - 1
# produce figure
fig = plt.figure(figsize = (11,10))
gs = gridspec.GridSpec(3, 3, width_ratios=[1,7,1],height_ratios=[1,1,1.5])
ax1 = plt.subplot(gs[0]); ax1.axis('off')
ax2 = plt.subplot(gs[1]);
ax3 = plt.subplot(gs[2]); ax3.axis('off')
axa = plt.subplot(gs[3]); axa.axis('off')
axb = plt.subplot(gs[7]);
axc = plt.subplot(gs[5]); axc.axis('off')
ax4 = plt.subplot(gs[6]); ax4.axis('off')
ax5 = plt.subplot(gs[4]);
ax6 = plt.subplot(gs[8]); ax6.axis('off')
artist = fig
# view limits
xmin = -3
xmax = len(x) + 3
ymin = np.min(x)
ymax = np.max(x)
ygap = (ymax - ymin)*0.15
ymin -= ygap
ymax += ygap
# start animation
num_frames = len(x) - D + 2
print ('starting animation rendering...')
def animate(k):
# clear panels
ax2.cla()
ax5.cla()
axb.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
# plot x
ax2.scatter(np.arange(1,x.size + 1),x,c = 'k',edgecolor = 'w',s = 80,linewidth = 1,zorder = 3);
ax2.plot(np.arange(1,x.size + 1),x,alpha = 0.5,c = 'k',zorder = 3);
# plot moving average - initial conditions
if k == 0:
# plot x
ax2.scatter(np.arange(1,D + 1), x[:D],c = 'darkorange',edgecolor = 'w',s = 200,linewidth = 1,zorder = 2);
ax2.plot(np.arange(1,D + 1), x[:D],alpha = 0.5,c = 'darkorange',zorder = 2);
# make vertical visual guides
ax2.axvline(x = 1, c='deepskyblue')
ax2.axvline(x = D, c='deepskyblue')
# plot histogram
self.plot_histogram(ax5,y[0],bins)
self.plot_heatmap(axb,y[:2],bins,num_windows)
# plot moving average - everything after and including initial conditions
if k > 0:
j = k
# plot
ax2.scatter(np.arange(j,D + j),x[j-1:D + j - 1],c = 'darkorange',edgecolor = 'w',s = 200,linewidth = 1,zorder = 2);
ax2.plot(np.arange(j,D + j),x[j-1:D + j - 1],alpha = 0.5,c = 'darkorange',zorder = 2);
# make vertical visual guides
ax2.axvline(x = j, c='deepskyblue')
ax2.axvline(x = j + D - 1, c='deepskyblue')
# plot histogram
self.plot_histogram(ax5,y[j],bins)
# plot histogram
self.plot_heatmap(axb,y[:j+1],bins,num_windows)
# label axes
ax2.set_xlim([xmin,xmax])
ax2.set_ylim([ymin,ymax])
ax2.set_xlabel(r'$p$',fontsize=14)
ax2.set_ylabel(r'$x_p$',rotation=0,fontsize=14)
return artist,
anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)
# produce animation and save
fps = 50
if 'fps' in kwargs:
fps = kwargs['fps']
anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
clear_output()
def plot_histogram(self,ax,h,bins,**kwargs):
# plot hist
ax.bar(bins,h,align='center',width=0.1,edgecolor='k',color='magenta',linewidth=1.5)
# label axes
ax.set_xlabel(r'$values$',fontsize = 13)
ax.set_ylabel(r'count',fontsize = 13,rotation = 90,labelpad = 15)
ymin = 0
xmin = min(bins) - 0.1
xmax = max(bins) + 0.1
ymax = 0.5
ax.set_xlim([xmin,xmax])
ax.set_ylim([ymin,ymax])
#### animate spectrogram construction ####
def animate_dct_spectrogram(self,x,D,model,func,savepath,**kwargs):
# produce heatmap
y = model(x,D,func)
num_windows = y.shape[1]-1
# produce figure
fig = plt.figure(figsize = (12,8))
gs = gridspec.GridSpec(2, 3, width_ratios=[1,7,1],height_ratios=[1,1])
ax1 = plt.subplot(gs[0]); ax1.axis('off')
ax2 = plt.subplot(gs[1]);
ax3 = plt.subplot(gs[2]); ax3.axis('off')
ax4 = plt.subplot(gs[3]); ax4.axis('off')
ax5 = plt.subplot(gs[4]);
ax5.set_yticks([],[])
ax5.axis('off')
ax6 = plt.subplot(gs[5]); ax6.axis('off')
artist = fig
# view limits for top panel
xmin = -3
xmax = len(x) + 3
ymin = np.min(x)
ymax = np.max(x)
ygap = (ymax - ymin)*0.15
ymin -= ygap
ymax += ygap
vmin = np.min(np.log(1 + y).flatten())
vmax = np.max(np.log(1 + y).flatten())
# start animation
num_frames = len(x) - D + 2
print ('starting animation rendering...')
def animate(k):
# clear panels
ax2.cla()
ax5.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
# plot signal
ax2.plot(np.arange(1,x.size + 1),x,alpha = 0.5,c = 'k',zorder = 3);
# plot moving average - initial conditions
if k == 0:
# plot x
ax2.plot(np.arange(1,D + 1), x[:D],alpha = 0.5,c = 'magenta',zorder = 2,linewidth=8);
# plot spectrogram
ax5.imshow(np.log(1 + y[:,:1]),aspect='auto',cmap='jet',origin='lower',vmin = vmin, vmax = vmax)
# plot moving average - everything after and including initial conditions
if k > 0:
j = k
# plot
ax2.plot(np.arange(j,D + j),x[j-1:D + j - 1],alpha = 0.5,c = 'magenta',zorder = 2,linewidth=8);
# plot histogram
ax5.imshow(np.log(1 + y[:,:j+1]),aspect='auto',cmap='jet',origin='lower', vmin = vmin, vmax = vmax)
# label axes
ax2.set_xlim([xmin,xmax])
ax2.set_ylim([ymin,ymax])
ax2.set_xlabel(r'$p$',fontsize=14)
ax2.set_ylabel(r'$x_p$',rotation=0,fontsize=14)
ax5.set_xlim([0,num_windows])
return artist,
anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)
# produce animation and save
fps = 50
if 'fps' in kwargs:
fps = kwargs['fps']
anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
clear_output()
    #### animate mlp outputs ####
def animate_mlp_outputs(self,x,D,model,func,savepath,**kwargs):
# produce heatmap
y = model(x,D,func)
num_windows = y.shape[1]-1
# produce figure
fig = plt.figure(figsize = (12,8))
gs = gridspec.GridSpec(2, 3, width_ratios=[1,7,1],height_ratios=[1,1])
ax1 = plt.subplot(gs[0]); ax1.axis('off')
ax2 = plt.subplot(gs[1]);
ax3 = plt.subplot(gs[2]); ax3.axis('off')
ax4 = plt.subplot(gs[3]); ax4.axis('off')
ax5 = plt.subplot(gs[4]);
ax5.set_yticks([],[])
ax5.axis('off')
ax6 = plt.subplot(gs[5]); ax6.axis('off')
artist = fig
# view limits for top panel
xmin = -3
xmax = len(x) + 3
ymin = np.min(x)
ymax = np.max(x)
ygap = (ymax - ymin)*0.15
ymin -= ygap
ymax += ygap
vmin = np.min(np.log(1 + y).flatten())
vmax = np.max(np.log(1 + y).flatten())
# start animation
num_frames = len(x) - D + 2
print ('starting animation rendering...')
def animate(k):
# clear panels
ax2.cla()
ax5.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
# plot signal
ax2.plot(np.arange(1,x.size + 1),x,alpha = 0.5,c = 'k',zorder = 3);
# plot moving average - initial conditions
if k == 0:
# plot x
ax2.plot(np.arange(1,D + 1), x[:D],alpha = 0.5,c = 'magenta',zorder = 2,linewidth=8);
# plot spectrogram
ax5.imshow(np.log(1 + y[:,:1]),aspect='auto',cmap='jet',origin='lower',vmin = vmin, vmax = vmax)
# plot moving average - everything after and including initial conditions
if k > 0:
j = k
# plot
ax2.plot(np.arange(j,D + j),x[j-1:D + j - 1],alpha = 0.5,c = 'magenta',zorder = 2,linewidth=8);
# plot histogram
ax5.imshow(np.log(1 + y[:,:j+1]),aspect='auto',cmap='jet',origin='lower', vmin = vmin, vmax = vmax)
# label axes
ax2.set_xlim([xmin,xmax])
ax2.set_ylim([ymin,ymax])
ax2.set_xlabel(r'$p$',fontsize=14)
ax2.set_ylabel(r'$x_p$',rotation=0,fontsize=14)
ax5.set_xlim([0,num_windows])
return artist,
anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)
# produce animation and save
fps = 50
if 'fps' in kwargs:
fps = kwargs['fps']
anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
clear_output()
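# Usage sketch (assumptions: x and y are 1-D numpy arrays, T is the moving
# average window length, and ffmpeg is available for saving):
#   viz = Visualizer()
#   viz.animate_system(x, y, T, savepath='moving_average.mp4', fps=25)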
| 36.258065
| 131
| 0.479893
| 2,826
| 22,480
| 3.75867
| 0.09448
| 0.031068
| 0.037281
| 0.012804
| 0.825268
| 0.799661
| 0.796648
| 0.781774
| 0.773112
| 0.766899
| 0
| 0.045781
| 0.38105
| 22,480
| 620
| 132
| 36.258065
| 0.717623
| 0.120507
| 0
| 0.770449
| 0
| 0
| 0.061967
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036939
| false
| 0
| 0.031662
| 0
| 0.087071
| 0.047493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c28056595ea2c0cef5dd285ea627db78fb8c3f1c
| 225
|
py
|
Python
|
royalspells/__init__.py
|
Steffo99/royalspells
|
8d789dcb7487a2130b63e32e4a33a2a6868d0847
|
[
"MIT"
] | null | null | null |
royalspells/__init__.py
|
Steffo99/royalspells
|
8d789dcb7487a2130b63e32e4a33a2a6868d0847
|
[
"MIT"
] | null | null | null |
royalspells/__init__.py
|
Steffo99/royalspells
|
8d789dcb7487a2130b63e32e4a33a2a6868d0847
|
[
"MIT"
] | null | null | null |
from .main import SpellType, DamageComponent, HealingComponent, StatsComponent, StatusEffectComponent, Spell
__all__ = ["SpellType", "DamageComponent", "HealingComponent", "StatsComponent", "StatusEffectComponent", "Spell"]
| 56.25
| 114
| 0.804444
| 16
| 225
| 11.0625
| 0.625
| 0.271186
| 0.451977
| 0.610169
| 0.903955
| 0.903955
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 225
| 3
| 115
| 75
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0.355556
| 0.093333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
c29e830b48b148f0fe0a00504cc41856c468a44d
| 37
|
py
|
Python
|
src/lib/shlex.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/shlex.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/shlex.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
import _sk_fail; _sk_fail._("shlex")
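# Skulpt stub: importing shlex raises via _sk_fail to signal that the module
# is not implemented in Skulpt (inferred from the stub pattern; assumption).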
| 18.5
| 36
| 0.756757
| 6
| 37
| 3.833333
| 0.666667
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.676471
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c2aa9ec8ed025da8e6fb683e9ee26d9bbc92007d
| 978
|
py
|
Python
|
src/IceRayPy/core/render/pixel.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | 2
|
2020-09-04T12:27:15.000Z
|
2022-01-17T14:49:40.000Z
|
src/IceRayPy/core/render/pixel.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | null | null | null |
src/IceRayPy/core/render/pixel.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | 1
|
2020-09-04T12:27:52.000Z
|
2020-09-04T12:27:52.000Z
|
class Constant:
m_cargo = {}
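    # Note: this class-level m_cargo dict is immediately shadowed by the
    # per-instance dict created in __init__ (same pattern in UV and Basic below).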
def __init__( self, P_dll ):
self.m_cargo={}
self.m_cargo['dll']=P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Render_Pixel_Constant0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Render_Pixel_Release( self.m_cargo['this'] )
class UV:
m_cargo = {}
def __init__( self, P_dll ):
self.m_cargo={}
self.m_cargo['dll']=P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Render_Pixel_UV0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Render_Pixel_Release( self.m_cargo['this'] )
class Basic:
m_cargo = {}
def __init__( self, P_dll ):
self.m_cargo={}
self.m_cargo['dll']=P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Render_PixelBasic0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Render_Pixel_Release( self.m_cargo['this'] )
| 25.076923
| 84
| 0.59407
| 134
| 978
| 3.828358
| 0.149254
| 0.245614
| 0.350877
| 0.22807
| 0.916179
| 0.916179
| 0.916179
| 0.916179
| 0.916179
| 0.916179
| 0
| 0.004138
| 0.258691
| 978
| 38
| 85
| 25.736842
| 0.703448
| 0
| 0
| 0.75
| 0
| 0
| 0.054313
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
c2b641cab5731912552c653debe6af029257500b
| 5,568
|
py
|
Python
|
9-loops/pytest-exercises/test_loops2.py
|
BaseCampCoding/python-fundamentals
|
3804c07841d6604b1e5a1c15126b3301aa8ae306
|
[
"MIT"
] | null | null | null |
9-loops/pytest-exercises/test_loops2.py
|
BaseCampCoding/python-fundamentals
|
3804c07841d6604b1e5a1c15126b3301aa8ae306
|
[
"MIT"
] | 1
|
2018-07-18T18:01:22.000Z
|
2019-06-14T15:06:28.000Z
|
9-loops/pytest-exercises/test_loops2.py
|
BaseCampCoding/python-fundamentals
|
3804c07841d6604b1e5a1c15126b3301aa8ae306
|
[
"MIT"
] | null | null | null |
from loops2 import *
def test_normalize_names():
assert normalize_names([
' AbbY ',
' JoHhNy',
'Abe',
]) == ['abby', 'johhny', 'abe']
assert normalize_names([]) == []
assert normalize_names([' J o e l ']) == ['j o e l']
def test_remove_empty():
assert remove_empty([]) == []
assert remove_empty(['']) == []
assert remove_empty(['john', '']) == ['john']
assert remove_empty(['', 'john']) == ['john']
assert remove_empty(['', ' ', '', ' ', '', '']) == [' ', ' ']
def test_split_first_last():
assert split_first_last([]) == []
assert split_first_last(['Abe Lincoln']) == [['Abe', 'Lincoln']]
assert split_first_last([
'Abe Lincoln',
'George Washington',
'Benjamin Franklin',
]) == [
['Abe', 'Lincoln'],
['George', 'Washington'],
['Benjamin', 'Franklin'],
]
def test_normalized_first_last():
assert normalized_first_last([]) == []
assert normalized_first_last(['Abe Lincoln']) == [['abe', 'lincoln']]
assert normalized_first_last([
'Abe Lincoln',
'George Washington',
'Benjamin Franklin',
]) == [
['abe', 'lincoln'],
['george', 'washington'],
['benjamin', 'franklin'],
]
assert normalized_first_last([
' Abe Lincoln',
' George Washington ',
' Benjamin Franklin ',
]) == [
['abe', 'lincoln'],
['george', 'washington'],
['benjamin', 'franklin'],
]
def test_total_revenue():
assert total_revenue([]) == 0
assert total_revenue([['burrito', 'food', 3]]) == 3
assert total_revenue([['burrito', 'food', 3], ['taco', 'food', 2]]) == 5
assert total_revenue([
['burrito', 'food', 3],
['taco', 'food', 2],
['shirt', 'clothing', 2],
]) == 7
assert total_revenue([
['burrito', 'food', 3],
['taco', 'food', 2],
['shirt', 'clothing', 2],
['shirt', 'clothing', 5],
]) == 12
def test_total_item_revenue():
assert total_item_revenue([], 'burrito') == 0
assert total_item_revenue([['burrito', 'food', 3]], 'burrito') == 3
assert total_item_revenue([['burrito', 'food', 3], ['taco', 'food', 2]],
'taco') == 2
assert total_item_revenue(
[['burrito', 'food', 3], ['taco', 'food', 2], ['shirt', 'clothing', 2]],
'taco') == 2
assert total_item_revenue(
[['burrito', 'food', 3], ['taco', 'food', 2], ['shirt', 'clothing', 2],
['shirt', 'clothing', 5]], 'shirt') == 7
def test_total_category_revenue():
assert total_category_revenue([], 'food') == 0
assert total_category_revenue([['burrito', 'food', 3]], 'clothing') == 0
assert total_category_revenue([
['burrito', 'food', 3],
['taco', 'food', 2],
], 'food') == 5
assert total_category_revenue([
['burrito', 'food', 3],
['taco', 'food', 2],
['shirt', 'clothing', 2],
], 'clothing') == 2
assert total_category_revenue([
['burrito', 'food', 3],
['taco', 'food', 2],
['shirt', 'clothing', 2],
['shirt', 'clothing', 5],
], 'food') == 5
def test_total_minutes_used():
assert total_minutes_used([]) == 0
assert total_minutes_used([['(555)555-5555', '(222)222-2222', 3]]) == 3
assert total_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
]) == 5
assert total_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
['(333)333-3333', '(444)444-4444', 2],
]) == 7
assert total_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
['(333)333-3333', '(444)444-4444', 2],
['(333)333-3333', '(444)444-4444', 5],
]) == 12
def test_total_number_minutes_used():
assert total_number_minutes_used([], '(555)555-5555') == 0
assert total_number_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
], '(555)555-5555') == 3
assert total_number_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
], '(111)111-1111') == 2
assert total_number_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
['(333)333-3333', '(444)444-4444', 2],
], '(222)222-2222') == 5
assert total_number_minutes_used([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
['(333)333-3333', '(444)444-4444', 2],
['(333)333-3333', '(444)444-4444', 5],
], '(333)333-3333') == 7
def test_is_number_over_limit():
assert not is_number_over_limit([], '(555)555-5555', 5)
assert is_number_over_limit([
['(555)555-5555', '(222)222-2222', 3],
], '(555)555-5555', 2)
assert is_number_over_limit([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
], '(111)111-1111', 2)
assert not is_number_over_limit([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
['(333)333-3333', '(444)444-4444', 2],
], '(222)222-2222', 6)
assert is_number_over_limit([
['(555)555-5555', '(222)222-2222', 3],
['(111)111-1111', '(222)222-2222', 2],
['(333)333-3333', '(444)444-4444', 2],
['(333)333-3333', '(444)444-4444', 5],
], '(333)333-3333', 6)
| 32.752941
| 80
| 0.517062
| 663
| 5,568
| 4.167421
| 0.090498
| 0.09953
| 0.083243
| 0.082519
| 0.884546
| 0.839305
| 0.826638
| 0.753167
| 0.684401
| 0.656171
| 0
| 0.189577
| 0.248743
| 5,568
| 169
| 81
| 32.946746
| 0.470954
| 0
| 0
| 0.6
| 0
| 0
| 0.300108
| 0
| 0
| 0
| 0
| 0
| 0.3
| 1
| 0.066667
| true
| 0
| 0.006667
| 0
| 0.073333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2bc6aea56ae60458b898ee91a73cbe742310b6d
| 155
|
py
|
Python
|
Learning/2-Operators.py
|
DishantIsrani/Python-Learning
|
f810fd64adeecd34fd2d95182f6be2bdfb4f9ac6
|
[
"MIT"
] | null | null | null |
Learning/2-Operators.py
|
DishantIsrani/Python-Learning
|
f810fd64adeecd34fd2d95182f6be2bdfb4f9ac6
|
[
"MIT"
] | null | null | null |
Learning/2-Operators.py
|
DishantIsrani/Python-Learning
|
f810fd64adeecd34fd2d95182f6be2bdfb4f9ac6
|
[
"MIT"
] | null | null | null |
a=3
b=4
print("The value of a + b is ", 3+4)
print("The value of a - b is ", 3-4)
print("The value of a * b is ", 3*4)
print("The value of a / b is ", 3/4)
| 25.833333
| 36
| 0.567742
| 40
| 155
| 2.2
| 0.225
| 0.272727
| 0.409091
| 0.636364
| 0.965909
| 0.965909
| 0.965909
| 0.965909
| 0.965909
| 0.965909
| 0
| 0.084746
| 0.23871
| 155
| 6
| 37
| 25.833333
| 0.661017
| 0
| 0
| 0
| 0
| 0
| 0.564103
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
66d2c5b08cf7f68615ea919e72d20b5b1a2f33d4
| 1,660
|
py
|
Python
|
attacks.py
|
lan-qing/inverse_adversarial_training
|
0f7b02d5b59eef257aac6ff99de5acf5073ec8aa
|
[
"MIT"
] | null | null | null |
attacks.py
|
lan-qing/inverse_adversarial_training
|
0f7b02d5b59eef257aac6ff99de5acf5073ec8aa
|
[
"MIT"
] | null | null | null |
attacks.py
|
lan-qing/inverse_adversarial_training
|
0f7b02d5b59eef257aac6ff99de5acf5073ec8aa
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import numpy as np
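# Note: despite the module name, these are *reverse* PGD routines: each step
# moves against the loss gradient (images - alpha * images.grad.sign()), i.e.
# toward lower loss, with the perturbation clamped to an eps-ball around the
# original images.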
def pgd_attack_reverse(model, images, labels, eps=1.0, alpha=0.1, iters=20, half=False, double=False):
images = images.cuda()
labels = labels.cuda()
loss = nn.CrossEntropyLoss()
if half:
loss.half()
if double:
loss.double()
ori_images = images.data
for i in range(iters):
images.requires_grad = True
outputs = model(images)
model.zero_grad()
cost = loss(outputs, labels)
cost.backward()
adv_images = images - alpha * images.grad.sign()
eta = torch.clamp(adv_images - ori_images, min=-eps, max=eps)
images = torch.clamp(ori_images + eta, min=0, max=1).detach_()
return images
def pgd_attack_reverse_binary(model, images, labels, eps=1.0, alpha=0.1, iters=20, half=False, double=False,
verbose=False):
images = images.cuda()
labels = labels.cuda()
loss = nn.BCEWithLogitsLoss()
if half:
loss.half()
if double:
loss.double()
ori_images = images.data
for i in range(iters):
images.requires_grad = True
outputs = model(images)
model.zero_grad()
cost = loss(outputs, labels)
# print(outputs, labels, cost)
cost.backward()
adv_images = images - alpha * images.grad.sign()
eta = torch.clamp(adv_images - ori_images, min=-eps, max=eps)
images = torch.clamp(ori_images + eta, min=0, max=1).detach_()
if verbose:
outputs = model(images)
model.zero_grad()
cost = loss(outputs, labels)
print(cost)
return images
| 28.62069
| 108
| 0.603614
| 216
| 1,660
| 4.537037
| 0.236111
| 0.073469
| 0.055102
| 0.070408
| 0.815306
| 0.815306
| 0.815306
| 0.815306
| 0.815306
| 0.732653
| 0
| 0.013389
| 0.28012
| 1,660
| 57
| 109
| 29.122807
| 0.806695
| 0.016867
| 0
| 0.787234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.06383
| 0
| 0.148936
| 0.021277
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd0711a3512443f92d2d7090af547ecee0ed172c
| 200
|
py
|
Python
|
test/templates/reader_resources/source_file_no_hooks.py
|
Takmo/handyman
|
d2c56543900f840039785e92082ad10133c36a1a
|
[
"MIT"
] | null | null | null |
test/templates/reader_resources/source_file_no_hooks.py
|
Takmo/handyman
|
d2c56543900f840039785e92082ad10133c36a1a
|
[
"MIT"
] | null | null | null |
test/templates/reader_resources/source_file_no_hooks.py
|
Takmo/handyman
|
d2c56543900f840039785e92082ad10133c36a1a
|
[
"MIT"
] | null | null | null |
print("silly line")
print("silly line")
print("silly line")
print("silly line")
print("silly line")
print("silly line")
print("silly line")
print("silly line")
print("silly line")
print("silly line")
| 18.181818
| 19
| 0.7
| 30
| 200
| 4.666667
| 0.1
| 0.714286
| 1
| 1.221429
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.1
| 200
| 10
| 20
| 20
| 0.777778
| 0
| 0
| 1
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
dd77ebf5c2bd4158fefd6ee53b9eca0df5151066
| 136
|
py
|
Python
|
languages/python/design_struct_calcsize.py
|
PrabhuLoganathan/Language-Specific
|
f7bad8488514b9fc264f94231313de802e7c5096
|
[
"BSD-3-Clause"
] | null | null | null |
languages/python/design_struct_calcsize.py
|
PrabhuLoganathan/Language-Specific
|
f7bad8488514b9fc264f94231313de802e7c5096
|
[
"BSD-3-Clause"
] | null | null | null |
languages/python/design_struct_calcsize.py
|
PrabhuLoganathan/Language-Specific
|
f7bad8488514b9fc264f94231313de802e7c5096
|
[
"BSD-3-Clause"
] | null | null | null |
import struct
print struct.pack("BHB",1,2,3)
print struct.pack("!BHB",1,2,3)
print struct.calcsize("BHB")
print struct.calcsize("!BHB")
| 22.666667
| 31
| 0.720588
| 24
| 136
| 4.083333
| 0.375
| 0.44898
| 0.306122
| 0.367347
| 0.540816
| 0.540816
| 0.540816
| 0.540816
| 0.540816
| 0
| 0
| 0.047619
| 0.073529
| 136
| 5
| 32
| 27.2
| 0.730159
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.8
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
661e76062de5c232a77a0b7fb3db02f6e5c56481
| 2,160
|
py
|
Python
|
src/turbot/versions/versions/9b107d322c46_use_bigint_for_author_ids.py
|
theastropath/turbot
|
c623cd9af73876efdd315f3d7dd09448a06d3331
|
[
"MIT"
] | 10
|
2020-04-11T23:43:42.000Z
|
2021-06-18T17:31:09.000Z
|
src/turbot/versions/versions/9b107d322c46_use_bigint_for_author_ids.py
|
theastropath/turbot
|
c623cd9af73876efdd315f3d7dd09448a06d3331
|
[
"MIT"
] | 116
|
2020-04-15T20:37:49.000Z
|
2022-03-29T22:21:25.000Z
|
src/turbot/versions/versions/9b107d322c46_use_bigint_for_author_ids.py
|
theastropath/turbot
|
c623cd9af73876efdd315f3d7dd09448a06d3331
|
[
"MIT"
] | 3
|
2020-04-11T23:56:34.000Z
|
2020-06-18T17:44:34.000Z
|
"""Use BIGINT for author ids
Revision ID: 9b107d322c46
Revises: 1afdca2a2389
Create Date: 2020-05-17 11:34:03.356515
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "9b107d322c46"
down_revision = "1afdca2a2389"
branch_labels = None
depends_on = None
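# batch_alter_table is typically used so the column type change also works on
# backends with limited ALTER TABLE support (e.g. SQLite); assumption about
# the project's target database.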
def upgrade():
with op.batch_alter_table("bugs") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
with op.batch_alter_table("fossils") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
with op.batch_alter_table("songs") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
with op.batch_alter_table("art") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
with op.batch_alter_table("fish") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
with op.batch_alter_table("prices") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
with op.batch_alter_table("users") as b:
b.alter_column("author", existing_type=sa.Integer(), type_=sa.BigInteger())
def downgrade():
with op.batch_alter_table("bugs") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
with op.batch_alter_table("fossils") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
with op.batch_alter_table("songs") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
with op.batch_alter_table("art") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
with op.batch_alter_table("fish") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
with op.batch_alter_table("prices") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
with op.batch_alter_table("users") as b:
b.alter_column("author", existing_type=sa.BigInteger(), type_=sa.Integer())
| 34.83871
| 83
| 0.696759
| 313
| 2,160
| 4.57508
| 0.175719
| 0.117318
| 0.107542
| 0.156425
| 0.819832
| 0.819832
| 0.819832
| 0.819832
| 0.819832
| 0.819832
| 0
| 0.027322
| 0.152778
| 2,160
| 61
| 84
| 35.409836
| 0.755191
| 0.071296
| 0
| 0.777778
| 0
| 0
| 0.088088
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.055556
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0950557f1599691cd856249cb9196ff7d9eb0ef
| 14,728
|
py
|
Python
|
killrvideo/video_catalog/video_catalog_events_pb2.py
|
KillrVideo/killrvideo-python
|
55a610c97fd53c405edb2459c2722fc03857cb83
|
[
"Apache-2.0"
] | 30
|
2018-12-04T21:34:07.000Z
|
2022-02-19T09:14:25.000Z
|
killrvideo/video_catalog/video_catalog_events_pb2.py
|
KillrVideo/killrvideo-python
|
55a610c97fd53c405edb2459c2722fc03857cb83
|
[
"Apache-2.0"
] | 5
|
2019-08-26T18:46:35.000Z
|
2021-06-01T23:51:20.000Z
|
killrvideo/video_catalog/video_catalog_events_pb2.py
|
KillrVideo/killrvideo-python
|
55a610c97fd53c405edb2459c2722fc03857cb83
|
[
"Apache-2.0"
] | 7
|
2019-06-14T07:45:06.000Z
|
2021-05-20T10:06:49.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: video-catalog/video_catalog_events.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from common import common_types_pb2 as common_dot_common__types__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='video-catalog/video_catalog_events.proto',
package='killrvideo.video_catalog.events',
syntax='proto3',
serialized_options=_b('\252\002\036KillrVideo.VideoCatalog.Events'),
serialized_pb=_b('\n(video-catalog/video_catalog_events.proto\x12\x1fkillrvideo.video_catalog.events\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19\x63ommon/common_types.proto\"\x85\x01\n\x15UploadedVideoAccepted\x12)\n\x08video_id\x18\x01 \x01(\x0b\x32\x17.killrvideo.common.Uuid\x12\x12\n\nupload_url\x18\x02 \x01(\t\x12-\n\ttimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xab\x02\n\x12UploadedVideoAdded\x12)\n\x08video_id\x18\x01 \x01(\x0b\x32\x17.killrvideo.common.Uuid\x12(\n\x07user_id\x18\x02 \x01(\x0b\x32\x17.killrvideo.common.Uuid\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x10\n\x08location\x18\x05 \x01(\t\x12\x1e\n\x16preview_image_location\x18\x06 \x01(\t\x12\x0c\n\x04tags\x18\x07 \x03(\t\x12.\n\nadded_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xaa\x02\n\x11YouTubeVideoAdded\x12)\n\x08video_id\x18\x01 \x01(\x0b\x32\x17.killrvideo.common.Uuid\x12(\n\x07user_id\x18\x02 \x01(\x0b\x32\x17.killrvideo.common.Uuid\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x10\n\x08location\x18\x05 \x01(\t\x12\x1e\n\x16preview_image_location\x18\x06 \x01(\t\x12\x0c\n\x04tags\x18\x07 \x03(\t\x12.\n\nadded_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB!\xaa\x02\x1eKillrVideo.VideoCatalog.Eventsb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,common_dot_common__types__pb2.DESCRIPTOR,])
_UPLOADEDVIDEOACCEPTED = _descriptor.Descriptor(
name='UploadedVideoAccepted',
full_name='killrvideo.video_catalog.events.UploadedVideoAccepted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='video_id', full_name='killrvideo.video_catalog.events.UploadedVideoAccepted.video_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='upload_url', full_name='killrvideo.video_catalog.events.UploadedVideoAccepted.upload_url', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='killrvideo.video_catalog.events.UploadedVideoAccepted.timestamp', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=138,
serialized_end=271,
)
_UPLOADEDVIDEOADDED = _descriptor.Descriptor(
name='UploadedVideoAdded',
full_name='killrvideo.video_catalog.events.UploadedVideoAdded',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='video_id', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.video_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.user_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.description', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.location', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preview_image_location', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.preview_image_location', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tags', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.tags', index=6,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='added_date', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.added_date', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='killrvideo.video_catalog.events.UploadedVideoAdded.timestamp', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=274,
serialized_end=573,
)
_YOUTUBEVIDEOADDED = _descriptor.Descriptor(
name='YouTubeVideoAdded',
full_name='killrvideo.video_catalog.events.YouTubeVideoAdded',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='video_id', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.video_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.user_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.description', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.location', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preview_image_location', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.preview_image_location', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tags', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.tags', index=6,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='added_date', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.added_date', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='killrvideo.video_catalog.events.YouTubeVideoAdded.timestamp', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=576,
serialized_end=874,
)
_UPLOADEDVIDEOACCEPTED.fields_by_name['video_id'].message_type = common_dot_common__types__pb2._UUID
_UPLOADEDVIDEOACCEPTED.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_UPLOADEDVIDEOADDED.fields_by_name['video_id'].message_type = common_dot_common__types__pb2._UUID
_UPLOADEDVIDEOADDED.fields_by_name['user_id'].message_type = common_dot_common__types__pb2._UUID
_UPLOADEDVIDEOADDED.fields_by_name['added_date'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_UPLOADEDVIDEOADDED.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_YOUTUBEVIDEOADDED.fields_by_name['video_id'].message_type = common_dot_common__types__pb2._UUID
_YOUTUBEVIDEOADDED.fields_by_name['user_id'].message_type = common_dot_common__types__pb2._UUID
_YOUTUBEVIDEOADDED.fields_by_name['added_date'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_YOUTUBEVIDEOADDED.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
DESCRIPTOR.message_types_by_name['UploadedVideoAccepted'] = _UPLOADEDVIDEOACCEPTED
DESCRIPTOR.message_types_by_name['UploadedVideoAdded'] = _UPLOADEDVIDEOADDED
DESCRIPTOR.message_types_by_name['YouTubeVideoAdded'] = _YOUTUBEVIDEOADDED
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
UploadedVideoAccepted = _reflection.GeneratedProtocolMessageType('UploadedVideoAccepted', (_message.Message,), dict(
DESCRIPTOR = _UPLOADEDVIDEOACCEPTED,
__module__ = 'video_catalog.video_catalog_events_pb2'
# @@protoc_insertion_point(class_scope:killrvideo.video_catalog.events.UploadedVideoAccepted)
))
_sym_db.RegisterMessage(UploadedVideoAccepted)
UploadedVideoAdded = _reflection.GeneratedProtocolMessageType('UploadedVideoAdded', (_message.Message,), dict(
DESCRIPTOR = _UPLOADEDVIDEOADDED,
__module__ = 'video_catalog.video_catalog_events_pb2'
# @@protoc_insertion_point(class_scope:killrvideo.video_catalog.events.UploadedVideoAdded)
))
_sym_db.RegisterMessage(UploadedVideoAdded)
YouTubeVideoAdded = _reflection.GeneratedProtocolMessageType('YouTubeVideoAdded', (_message.Message,), dict(
DESCRIPTOR = _YOUTUBEVIDEOADDED,
__module__ = 'video_catalog.video_catalog_events_pb2'
# @@protoc_insertion_point(class_scope:killrvideo.video_catalog.events.YouTubeVideoAdded)
))
_sym_db.RegisterMessage(YouTubeVideoAdded)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 51.138889
| 1,472
| 0.770913
| 1,895
| 14,728
| 5.677573
| 0.090765
| 0.049075
| 0.058556
| 0.072869
| 0.826843
| 0.811414
| 0.789572
| 0.744772
| 0.731759
| 0.731759
| 0
| 0.0365
| 0.110809
| 14,728
| 287
| 1,473
| 51.317073
| 0.785049
| 0.03035
| 0
| 0.688462
| 1
| 0.003846
| 0.253205
| 0.222728
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026923
| 0
| 0.026923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b09f0bd1a7c6cdbb89ff33d4ac81fe99c3895d74
| 68,610
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_locality/oldstuff/cmp_bwavesgcccactusADMgromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_locality/oldstuff/cmp_bwavesgcccactusADMgromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_locality/oldstuff/cmp_bwavesgcccactusADMgromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 5.66814e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202693,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 2.02403e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.37014,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.640949,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.367602,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.37869,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.365866,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.59504,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 3.82383e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0134179,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0970309,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0992334,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0970348,
'Execution Unit/Register Files/Runtime Dynamic': 0.112651,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.234468,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.602318,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.70173,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00418506,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00418506,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00366468,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00142932,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0014255,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0134603,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0394294,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0953956,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.06798,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.360868,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.324006,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.58549,
'Instruction Fetch Unit/Runtime Dynamic': 0.83316,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0735557,
'L2/Runtime Dynamic': 0.0164575,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.12899,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.41381,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0935588,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0935588,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.57259,
'Load Store Unit/Runtime Dynamic': 1.96876,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.2307,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.4614,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0818762,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0826797,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.377285,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0600512,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.677331,
'Memory Management Unit/Runtime Dynamic': 0.142731,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.0657,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.38294e-05,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0189271,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.191629,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.21057,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.87341,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0495641,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.241619,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.265484,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.115467,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.186244,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0940096,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.39572,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0913577,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.47056,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0501557,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0048432,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0536668,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0358184,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.103822,
'Execution Unit/Register Files/Runtime Dynamic': 0.0406616,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.125455,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.311761,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.39514,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000350763,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000350763,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000321899,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000133574,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000514534,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00153796,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00277766,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0344332,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.19024,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0797223,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.116951,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.51506,
'Instruction Fetch Unit/Runtime Dynamic': 0.235422,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0461905,
'L2/Runtime Dynamic': 0.00399715,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.58949,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.654417,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0437523,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0437523,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.7961,
'Load Store Unit/Runtime Dynamic': 0.91394,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.107886,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.215771,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.038289,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0389808,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.136181,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0130749,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.358064,
'Memory Management Unit/Runtime Dynamic': 0.0520558,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.7754,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.131936,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00681518,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0566939,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.195445,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.796,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0980588,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.279708,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.621169,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.189292,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.30532,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.154116,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.648728,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.121259,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.1021,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.117352,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00793974,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0904003,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0587193,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.207753,
'Execution Unit/Register Files/Runtime Dynamic': 0.066659,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.214968,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.561858,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.96233,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 2.12583e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 2.12583e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 1.85394e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 7.18975e-06,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000843507,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000904563,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000202983,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0564483,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.5906,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.138913,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.191724,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.98337,
'Instruction Fetch Unit/Runtime Dynamic': 0.388193,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0399103,
'L2/Runtime Dynamic': 0.0110213,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.61875,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.15711,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0770515,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0770515,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.98261,
'Load Store Unit/Runtime Dynamic': 1.61416,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.189996,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.379992,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0674302,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0680134,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.22325,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0228204,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.495192,
'Memory Management Unit/Runtime Dynamic': 0.0908338,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.1927,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.3087,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0122971,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.090661,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.411658,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.47819,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.144124,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.31589,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.828759,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.300802,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.485183,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.244904,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.03089,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.216969,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.6789,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.15657,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.012617,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.143143,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0933104,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.299713,
'Execution Unit/Register Files/Runtime Dynamic': 0.105927,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.337601,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.760197,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.61828,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000561624,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000561624,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000492783,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000192738,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00134041,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00295644,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00525586,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0897017,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.7058,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.185361,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.304667,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.20123,
'Instruction Fetch Unit/Runtime Dynamic': 0.587942,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0331568,
'L2/Runtime Dynamic': 0.00690861,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.74092,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.20526,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.081004,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.081004,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.12344,
'Load Store Unit/Runtime Dynamic': 1.68575,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.199742,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.399484,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0708891,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0713841,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.354766,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0303957,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.63265,
'Memory Management Unit/Runtime Dynamic': 0.10178,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 22.2588,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.411865,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0185837,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.14569,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.576138,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.5768,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 3.853906871776287,
'Runtime Dynamic': 3.853906871776287,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.210099,
'Runtime Dynamic': 0.065232,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 81.5027,
'Peak Power': 114.615,
'Runtime Dynamic': 18.7896,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 81.2926,
'Total Cores/Runtime Dynamic': 18.7244,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.210099,
'Total L3s/Runtime Dynamic': 0.065232,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
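The nested report above keys hierarchical component paths to power and area figures. A minimal sketch of reading such a structure, with `report` as a hypothetical binding for the dict (the values are copied from the output above):
# Minimal sketch; `report` is a hypothetical name for the nested dict above.
report = {
    'Processor': {'Peak Dynamic': 81.5027, 'Runtime Dynamic': 18.7896},
    'L3': [{'Runtime Dynamic': 0.065232}],  # per-cache list, as in the dump
}

def runtime_dynamic(section):
    # Sections are plain dicts, or single-element lists of dicts (e.g. 'L3').
    if isinstance(section, list):
        section = section[0]
    return section['Runtime Dynamic']

print(runtime_dynamic(report['Processor']))  # 18.7896
print(runtime_dynamic(report['L3']))         # 0.065232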
| 75.065646
| 124
| 0.681971
| 8,090
| 68,610
| 5.77775
| 0.067491
| 0.123572
| 0.112961
| 0.093449
| 0.940375
| 0.931881
| 0.919451
| 0.887018
| 0.861944
| 0.842497
| 0
| 0.131664
| 0.224326
| 68,610
| 914
| 125
| 75.065646
| 0.746632
| 0
| 0
| 0.642232
| 0
| 0
| 0.657402
| 0.048097
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0d8549f40efa10eb21367bfdf2f23a35bdda06a
| 3,278
|
py
|
Python
|
api/models.py
|
Keaiii3/WeCloud
|
83e4d68f2b6ba61058a41ef680cf8a305961c20d
|
[
"Apache-2.0"
] | null | null | null |
api/models.py
|
Keaiii3/WeCloud
|
83e4d68f2b6ba61058a41ef680cf8a305961c20d
|
[
"Apache-2.0"
] | null | null | null |
api/models.py
|
Keaiii3/WeCloud
|
83e4d68f2b6ba61058a41ef680cf8a305961c20d
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models


# Data models for the WeCloud API. Every table hangs off User via a foreign
# key; Coffer/Radio/Trash/Doc mirror Img's file-table layout for their own
# storage categories.
class User(models.Model):
    user_id = models.IntegerField(primary_key=True)
    username = models.CharField(max_length=50, null=True)
    password = models.CharField(max_length=50, null=True)  # stored as plain text in this schema
    email = models.CharField(max_length=50, null=True)
    size = models.BigIntegerField(null=True)  # max_length is not a valid option on integer fields

    class Meta:
        db_table = "user"


class Img(models.Model):
    file_id = models.IntegerField(primary_key=True)
    filename = models.CharField(max_length=255, null=True)
    type = models.CharField(max_length=20, null=True)
    size = models.BigIntegerField(null=True)
    date = models.DateField(null=True)
    path = models.CharField(max_length=255, null=True)
    # Named `user` with db_column so the database column stays `user_id`
    # (a field literally named user_id would get the column `user_id_id`).
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_column="user_id")

    class Meta:
        db_table = "img"


class Coffer(models.Model):  # was missing the models.Model base, so it defined no table
    file_id = models.IntegerField(primary_key=True)
    filename = models.CharField(max_length=255, null=True)
    type = models.CharField(max_length=20, null=True)
    size = models.BigIntegerField(null=True)
    date = models.DateField(null=True)
    path = models.CharField(max_length=255, null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_column="user_id")

    class Meta:
        db_table = "coffer"


class Note(models.Model):
    file_id = models.IntegerField(primary_key=True)
    title = models.CharField(max_length=255, null=True)
    content = models.CharField(max_length=255, null=True)
    date = models.DateField(null=True)
    display = models.IntegerField(null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_column="user_id")

    class Meta:
        db_table = "note"


class Radio(models.Model):
    file_id = models.IntegerField(primary_key=True)
    filename = models.CharField(max_length=255, null=True)
    type = models.CharField(max_length=20, null=True)
    size = models.BigIntegerField(null=True)
    date = models.DateField(null=True)
    path = models.CharField(max_length=255, null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_column="user_id")

    class Meta:
        db_table = "radio"


class Trash(models.Model):
    file_id = models.IntegerField(primary_key=True)
    filename = models.CharField(max_length=255, null=True)
    type = models.CharField(max_length=20, null=True)
    size = models.BigIntegerField(null=True)
    date = models.DateField(null=True)
    path = models.CharField(max_length=255, null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_column="user_id")

    class Meta:
        db_table = "trash"


class Doc(models.Model):
    file_id = models.IntegerField(primary_key=True)
    filename = models.CharField(max_length=255, null=True)
    type = models.CharField(max_length=20, null=True)
    size = models.BigIntegerField(null=True)
    date = models.DateField(null=True)
    path = models.CharField(max_length=255, null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_column="user_id")

    class Meta:
        db_table = "doc"
| 37.678161
| 85
| 0.73734
| 473
| 3,278
| 4.957717
| 0.107822
| 0.14968
| 0.153518
| 0.204691
| 0.898081
| 0.89339
| 0.89339
| 0.826866
| 0.808529
| 0.788486
| 0
| 0.032063
| 0.143685
| 3,278
| 87
| 86
| 37.678161
| 0.803349
| 0.007322
| 0
| 0.676471
| 0
| 0
| 0.009225
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.014706
| 0.014706
| 0
| 0.897059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9fd8b83ae21bae4de30c5064a783931697c01794
| 3,427
|
py
|
Python
|
tests/AlignmentFileFetch_bench.py
|
ajcr/pysam
|
527bb239ddaa799fce5ca005939d83805ae86e1d
|
[
"MIT"
] | null | null | null |
tests/AlignmentFileFetch_bench.py
|
ajcr/pysam
|
527bb239ddaa799fce5ca005939d83805ae86e1d
|
[
"MIT"
] | null | null | null |
tests/AlignmentFileFetch_bench.py
|
ajcr/pysam
|
527bb239ddaa799fce5ca005939d83805ae86e1d
|
[
"MIT"
] | null | null | null |
"""Benchmarking module for AlignmentFile functionality"""
import pytest
from TestUtils import BAM_DATADIR
from AlignmentFileFetchTestUtils import *
def test_build_fetch_from_bam_with_samtoolsshell(benchmark):
result = benchmark(build_fetch_with_samtoolsshell,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert result == 3270
def test_build_fetch_from_bam_with_samtoolspipe(benchmark):
result = benchmark(build_fetch_with_samtoolspipe,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert result == 3270
def test_build_fetch_from_bam_with_pysam(benchmark):
result = benchmark(build_fetch_with_pysam,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert result == 3270
def test_build_query_sequences_from_bam_with_samtoolsshell(benchmark):
result = benchmark(build_query_sequences_with_samtoolsshell,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3270
def test_build_query_sequences_from_bam_with_samtoolspipe(benchmark):
result = benchmark(build_query_sequences_with_samtoolspipe,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3270
def test_build_query_sequences_from_bam_with_pysam(benchmark):
result = benchmark(build_query_sequences_with_pysam,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3270
def test_build_query_qualities_from_bam_with_pysam(benchmark):
result = benchmark(build_query_qualities_with_pysam,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3270
def test_build_query_sequences_from_bam_flagfilter_with_samtoolsshell(benchmark):
result = benchmark(build_query_sequences_flagfilter_with_samtoolsshell,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3124
def test_build_query_sequences_from_bam_flagfilter_with_samtoolspipe(benchmark):
result = benchmark(build_query_sequences_flagfilter_with_samtoolspipe,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3124
def test_build_query_sequences_from_bam_flagfilter_with_pysam(benchmark):
result = benchmark(build_query_sequences_flagfilter_with_pysam,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3124
def test_build_query_sequences_from_bam_directflagfilter_with_pysam(benchmark):
result = benchmark(build_query_sequences_flagfilter_with_pysam,
os.path.join(BAM_DATADIR, "ex2.bam"))
assert len(result) == 3124
@pytest.mark.aligned_pairs
def test_build_aligned_pairs_default_with_pysam(benchmark):
result = benchmark(build_aligned_pairs_with_pysam,
os.path.join(BAM_DATADIR, "with_md.bam"))
assert len(result) == 3235
@pytest.mark.aligned_pairs
def test_build_aligned_pairs_matchesonly_with_pysam(benchmark):
result = benchmark(build_aligned_pairs_with_pysam,
os.path.join(BAM_DATADIR, "with_md.bam"),
matches_only=True)
assert len(result) == 3235
@pytest.mark.aligned_pairs
def test_build_aligned_pairs_withseq_with_pysam(benchmark):
result = benchmark(build_aligned_pairs_with_pysam,
os.path.join(BAM_DATADIR, "with_md.bam"),
with_seq=True)
assert len(result) == 3235
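The helpers come in via the star import from AlignmentFileFetchTestUtils. A hedged sketch of what a pysam-based fetch counter like build_fetch_with_pysam plausibly looks like (the real implementation lives in that module, so this is an assumption, not pysam's test code):
import pysam

def count_fetched_reads(fname):
    # Open an indexed BAM and count every read that fetch() yields.
    with pysam.AlignmentFile(fname, "rb") as inf:
        return sum(1 for _ in inf.fetch())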
| 34.969388
| 81
| 0.715786
| 424
| 3,427
| 5.370283
| 0.110849
| 0.063241
| 0.073781
| 0.178305
| 0.911287
| 0.899429
| 0.892841
| 0.885375
| 0.718928
| 0.653491
| 0
| 0.024515
| 0.202509
| 3,427
| 97
| 82
| 35.329897
| 0.808635
| 0.014882
| 0
| 0.546875
| 0
| 0
| 0.032651
| 0
| 0
| 0
| 0
| 0
| 0.21875
| 1
| 0.21875
| false
| 0
| 0.046875
| 0
| 0.265625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9ff94d2f387cfe788652cfcac7b433546489240f
| 43,059
|
py
|
Python
|
evaluation/sample_mrr_eval.py.py
|
playing-code/ANCE_test
|
80ae493af4e771274153ba5ce0d5b1793b1d7e11
|
[
"MIT"
] | null | null | null |
evaluation/sample_mrr_eval.py.py
|
playing-code/ANCE_test
|
80ae493af4e771274153ba5ce0d5b1793b1d7e11
|
[
"MIT"
] | null | null | null |
evaluation/sample_mrr_eval.py.py
|
playing-code/ANCE_test
|
80ae493af4e771274153ba5ce0d5b1793b1d7e11
|
[
"MIT"
] | null | null | null |
import sys
sys.path += ['../utils']
import csv
from tqdm import tqdm
import collections
import gzip
import pickle
import numpy as np
import faiss
import os
import pytrec_eval
import json
from msmarco_eval import quality_checks_qids, compute_metrics, load_reference
# location of the dumped query and passage/document embeddings (i.e. the job's output_dir)
checkpoint = 0
data_type = 0
test_set = 1
checkpoint_path ='/home/dihe/cudnn_file/recommender_shuqi/MIND_data/raw_data/exp_21_05_28_01/check3/ann_data10000/'
raw_data_dir = '/home/dihe/Projects/data/raw_data/'
processed_data_dir = '/home/dihe/Projects/data/raw_data/ann_data_roberta-base-fast-docdev_512'
processed_data_dir2 = '/home/dihe/Projects/data/raw_data/ann_data_roberta-base-fast-trainqueryeval2_512'
checkpoint_path2 ='/home/dihe/Projects/data/raw_data/exp_21_04_14_01_check3_30000/ann_data_sample20q/'
query_emb_num=4
# The dev-mrr / sample20 / sample200 code paths below additionally rely on
#   processed_data_dir_query_origin, ann_path,
#   checkpoint_path_origin, checkpoint_path_query_origin
# pointing at the matching embedding dumps for the checkpoint being evaluated.
if data_type == 0:
topN = 100
else:
topN = 1000
qidmap_path = processed_data_dir2+"/qid2offset.pickle"
pidmap_path = processed_data_dir+"/pid2offset.pickle"
# if data_type == 0:
# if test_set == 1:
# query_path = raw_data_dir+"/docleaderboard-queries.tsv"
# passage_path = raw_data_dir+"/docleaderboard-top100.tsv"
# else:
# query_path = raw_data_dir+"/msmarco-docdev-queries.tsv"
# passage_path = raw_data_dir+"/msmarco-docdev-top100"
# else:
# if test_set == 1:
# query_path = raw_data_dir+"/msmarco-test2019-queries.tsv"
# passage_path = raw_data_dir+"/msmarco-passagetest2019-top1000.tsv"
# else:
# query_path = raw_data_dir+"/queries.dev.small.tsv"
# passage_path = raw_data_dir+"/top1000.dev.tsv"
with open(qidmap_path, 'rb') as handle:
qidmap = pickle.load(handle)
with open(pidmap_path, 'rb') as handle:
pidmap = pickle.load(handle)
def get_reverse_dict(mydict):
    # Invert an id-mapping dict, asserting the mapping is one-to-one.
    mydict_re = {}
    for key, value in mydict.items():
        assert value not in mydict_re
        mydict_re[value] = key
    return mydict_re

pidmap_re = get_reverse_dict(pidmap)  # offset -> numeric doc id ('D' prefix stripped)
qidmap_re = get_reverse_dict(qidmap)  # offset -> query id
count_none=0
dev_query_positive_id = {}
query_positive_id_path = os.path.join(raw_data_dir, "msmarco-doctrain-qrels.tsv")
#query_positive_id_path = os.path.join(raw_data_dir, "msmarco-docdev-qrels.tsv")
with open(query_positive_id_path, 'r', encoding='utf8') as f:
tsvreader = csv.reader(f, delimiter=" ")
for [topicid,_, docid, rel] in tsvreader:
topicid = int(topicid)
docid = int(docid[1:])
if topicid not in dev_query_positive_id:
dev_query_positive_id[topicid] = {}
dev_query_positive_id[topicid][docid] = int(rel)
assert len(dev_query_positive_id[topicid])==1
if docid not in pidmap:
count_none+=1
print('count_none: ',count_none)
# qset = set()
# with gzip.open(query_path, 'rt', encoding='utf-8') if query_path[-2:] == "gz" else open(query_path, 'rt', encoding='utf-8') as f:
# tsvreader = csv.reader(f, delimiter="\t")
# for [qid, query] in tsvreader:
# qset.add(qid)
# bm25 = collections.defaultdict(set)
# with gzip.open(passage_path, 'rt', encoding='utf-8') if passage_path[-2:] == "gz" else open(passage_path, 'rt', encoding='utf-8') as f:
# for line in tqdm(f):
# if data_type == 0:
# [qid, Q0, pid, rank, score, runstring] = line.split(' ')
# pid = pid[1:]
# else:
# [qid, pid, query, passage] = line.split("\t")
# #print('???',qid)
# if qid in qset and int(qid) in qidmap:
# bm25[qidmap[int(qid)]].add(pidmap[int(pid)])
# # else:
# # print('???',qid,qid in qset)
# #assert 1==0
# print("number of queries with " +str(topN) + " BM25 passages:", len(bm25))
def get_sample20():
#train_queries={}
with open(processed_data_dir_query_origin+"/qid2offset_train.pickle", 'rb') as handle:
qidmap_origin = pickle.load(handle)
with open(processed_data_dir_query_origin+"/pid2offset.pickle", 'rb') as handle:
pidmap_origin = pickle.load(handle)
qidmap_origin_re={}
for item in qidmap_origin:
assert qidmap_origin[item] not in qidmap_origin_re
qidmap_origin_re[qidmap_origin[item]]=item
pidmap_origin_re={}
for item in pidmap_origin:
assert pidmap_origin[item] not in pidmap_origin_re
pidmap_origin_re[pidmap_origin[item]]=item
train_q_sample20={}
with open(ann_path, 'r') as f:
ann_training_data = f.readlines()
for line in ann_training_data:
line_arr=line.strip().split('\t')
# qid = qidmap_origin_re[dev_query_embedding2id[int(line_arr[0])]]
# if qid in dev_query_positive_id:
# pos_pid = pidmap_re[passage_embedding2id[int(line_arr[1])]]
# neg_pids = line_arr[2].split(',')
# neg_pids = [pidmap_re[passage_embedding2id[int(neg_pid)]] for neg_pid in neg_pids]
# train_q_sample20[qid]=neg_pids+[pos_pid]
qid = qidmap_origin_re[int(line_arr[0])]
if qid in dev_query_positive_id:
pos_pid = pidmap_origin_re[int(line_arr[1])]
neg_pids = line_arr[2].split(',')
neg_pids = [pidmap_origin_re[int(neg_pid)] for neg_pid in neg_pids]
train_q_sample20[qid]=neg_pids[:20]+[pos_pid]
        else:
            # Every sampled training query is expected to appear in the qrels.
            assert False, "qid missing from dev_query_positive_id"
return train_q_sample20
def get_sample200():
#dev_query_positive_id[topicid]
with open(processed_data_dir_query_origin+"/qid2offset.pickle", 'rb') as handle:
qidmap_origin = pickle.load(handle)
qidmap_origin_re={}
for item in qidmap_origin:
assert qidmap_origin[item] not in qidmap_origin_re
qidmap_origin_re[qidmap_origin[item]]=item
dev_query_embedding=[]
dev_query_embedding2id=[]
train_q_sample200={}
for i in range(4):
#try:
print('???',checkpoint_path_query_origin + "dev_query_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb")
with open(checkpoint_path_query_origin + "dev_query_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb", 'rb') as handle:
dev_query_embedding.append(pickle.load(handle))
print('ok1???')
with open(checkpoint_path_query_origin + "dev_query_"+str(checkpoint)+"__embid_p__data_obj_"+str(i)+".pb", 'rb') as handle:
dev_query_embedding2id.append(pickle.load(handle))
print('ok???',2)
passage_embedding=[]
passage_embedding2id=[]
for i in range(8):
with open(checkpoint_path_origin + "passage_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb", 'rb') as handle:
passage_embedding.append(pickle.load(handle))
print('ok???',3,i)
with open(checkpoint_path_origin + "passage_"+str(checkpoint)+"__embid_p__data_obj_"+str(i)+".pb", 'rb') as handle:
passage_embedding2id.append(pickle.load(handle))
print('ok???',4,i)
if (not dev_query_embedding) or (not dev_query_embedding2id) or (not passage_embedding) or not (passage_embedding2id):
print("No data found for checkpoint: ",checkpoint)
dev_query_embedding = np.concatenate(dev_query_embedding, axis=0)
dev_query_embedding2id = np.concatenate(dev_query_embedding2id, axis=0)
passage_embedding = np.concatenate(passage_embedding, axis=0)
passage_embedding2id = np.concatenate(passage_embedding2id, axis=0)
dim = passage_embedding.shape[1]
faiss.omp_set_num_threads(16)
cpu_index = faiss.IndexFlatIP(dim)
cpu_index.add(passage_embedding)
_, dev_I = cpu_index.search(dev_query_embedding, 200)
#dev_query_embedding2id_r=get_reverse_dict(dev_query_embedding2id)
#for item in train_queries:
query_list=[]
# for i,query_idx in enumerate(dev_query_embedding2id):
for i,query_idx in enumerate(range(len(dev_I))):
query_id = qidmap_origin_re[dev_query_embedding2id[query_idx]]
selected_ann_idx=dev_I[query_idx]
if query_id in dev_query_positive_id:
train_q_sample200[query_id]=[]
pos_id=list(dev_query_positive_id[query_id].keys())[0]
for idx in selected_ann_idx:
pred_pid = pidmap_re[passage_embedding2id[idx]]
train_q_sample200[query_id].append(pred_pid)
train_q_sample200[query_id]=train_q_sample200[query_id][:20]
if pos_id not in train_q_sample200[query_id]:
train_q_sample200[query_id]+=[pos_id]
# if i<5:
# print(query_id,dev_query_embedding2id[query_idx],query_idx)
# query_list.append(query_id)
#print([train_q_sample200[x] for x in query_list])
return train_q_sample200
def get_all(passage_embedding,passage_embedding2id):
for i in range(8):
#try:
# print('???',checkpoint_path2 + "dev_query_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb")
# with open(checkpoint_path2 + "dev_query_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb", 'rb') as handle:
# dev_query_embedding.append(pickle.load(handle))
# print('ok1???')
# with open(checkpoint_path2 + "dev_query_"+str(checkpoint)+"__embid_p__data_obj_"+str(i)+".pb", 'rb') as handle:
# dev_query_embedding2id.append(pickle.load(handle))
# print('ok???',2)
with open(checkpoint_path + "passage_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb", 'rb') as handle:
passage_embedding.append(pickle.load(handle))
print('ok???',3,i)
with open(checkpoint_path + "passage_"+str(checkpoint)+"__embid_p__data_obj_"+str(i)+".pb", 'rb') as handle:
passage_embedding2id.append(pickle.load(handle))
print('ok???',4,i)
# except:
# break
if (not passage_embedding) or not (passage_embedding2id):
print("No data found for checkpoint: ",checkpoint)
passage_embedding = np.concatenate(passage_embedding, axis=0)
passage_embedding2id = np.concatenate(passage_embedding2id, axis=0)
return passage_embedding,passage_embedding2id
def convert_to_string_id(result_dict):
string_id_dict = {}
# format [string, dict[string, val]]
for k, v in result_dict.items():
_temp_v = {}
for inner_k, inner_v in v.items():
_temp_v[str(inner_k)] = inner_v
string_id_dict[str(k)] = _temp_v
return string_id_dict
def EvalDevQuery(query_embedding2id, passage_embedding2id, qidmap_re,pidmap_re, dev_query_positive_id,I_nearest_neighbor,topN,bm25=None):
    prediction = {}  # [qid][docid] = score; -rank is used as the score, so rank 1 (score -1) outranks rank 2 (score -2)
#w=open('result_eval.txt','w')
total = 0
labeled = 0
Atotal = 0
Alabeled = 0
qids_to_ranked_candidate_passages = {}
mrr=0.0
mycount=0
for query_idx in range(len(I_nearest_neighbor)):
seen_pid = set()
query_id = qidmap_re[query_embedding2id[query_idx]]
if bm25 and query_id not in bm25:
#assert 1==0
continue
prediction[query_id] = {}
top_ann_pid = I_nearest_neighbor[query_idx].copy()
selected_ann_idx = top_ann_pid[:topN]
#print('???',topN)
#if train_q_sample20 !=None:
rank = 0
flag=0
if query_id in qids_to_ranked_candidate_passages:
assert 1==0,"query not in"
pass
else:
# By default, all PIDs in the list of 1000 are 0. Only override those that are given
tmp = [0] * 1000
qids_to_ranked_candidate_passages[query_id] = tmp
mycount+=1
for idx in selected_ann_idx:
pred_pid = pidmap_re[passage_embedding2id[idx]]
if not pred_pid in seen_pid:
# this check handles multiple vector per document
qids_to_ranked_candidate_passages[query_id][rank]=pred_pid
#w.write(str(query_id)+'\t'+str(pred_pid)+'\t'+str(rank+1)+'\n')
# assert len(dev_query_positive_id[query_id]) ==1
# for item in dev_query_positive_id[query_id]:
# assert item in pidmap
assert pred_pid in pidmap
if pred_pid in dev_query_positive_id[query_id]:
mrr += 1/(rank + 1)
flag=1
#print('rank: ',rank)
Atotal += 1
if pred_pid not in dev_query_positive_id[query_id]:
Alabeled += 1
if rank < 10:
total += 1
if pred_pid not in dev_query_positive_id[query_id]:
labeled += 1
rank += 1
prediction[query_id][pred_pid] = -rank
seen_pid.add(pred_pid)
#assert rank!=0, "pos not in"
# w.close()
evaluator = pytrec_eval.RelevanceEvaluator(
convert_to_string_id(dev_query_positive_id), {'map_cut', 'ndcg_cut', 'recip_rank','recall'})
eval_query_cnt = 0
result = evaluator.evaluate(convert_to_string_id(prediction))
print('???',mrr/mycount,mycount,mrr,len(I_nearest_neighbor))
qids_to_relevant_passageids = {}
for qid in dev_query_positive_id:
qid = int(qid)
if qid in qids_to_relevant_passageids:
pass
else:
qids_to_relevant_passageids[qid] = []
for pid in dev_query_positive_id[qid]:
#assert pid>0
if pid>0:
qids_to_relevant_passageids[qid].append(pid)
if data_type == 0:
MaxMRRRank=100
else:
MaxMRRRank=10
ms_mrr = compute_metrics(qids_to_relevant_passageids, qids_to_ranked_candidate_passages,MaxMRRRank=MaxMRRRank)
# ms_mrr = compute_metrics(dev_query_positive_id, qids_to_ranked_candidate_passages,MaxMRRRank=MaxMRRRank)
# print('???',ms_mrr)
ndcg = 0
Map = 0
mrr = 0
recall = 0
recall_1000 = 0
for k in result.keys():
eval_query_cnt += 1
ndcg += result[k]["ndcg_cut_10"]
Map += result[k]["map_cut_10"]
mrr += result[k]["recip_rank"]
recall += result[k]["recall_"+str(topN)]
final_ndcg = ndcg / eval_query_cnt
final_Map = Map / eval_query_cnt
final_mrr = mrr / eval_query_cnt
final_recall = recall / eval_query_cnt
hole_rate = labeled/total
Ahole_rate = Alabeled/Atotal
return final_ndcg, eval_query_cnt, final_Map, final_mrr, final_recall, hole_rate, ms_mrr, Ahole_rate, result, prediction
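# Worked example of the reciprocal-rank bookkeeping in EvalDevQuery (illustrative
# only, not part of the evaluation run): a query whose relevant document first
# appears at 0-based rank r contributes 1/(r + 1) to the MRR sum.
_example_ranks = [0, 2, None]  # hits at rank 1 and rank 3; one query never hits
_example_mrr = sum(1.0 / (r + 1) for r in _example_ranks if r is not None) / len(_example_ranks)
assert abs(_example_mrr - (1.0 + 1.0 / 3.0) / 3.0) < 1e-9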
dev_query_embedding = []
dev_query_embedding2id = []
passage_embedding = []
passage_embedding2id = []
for i in range(query_emb_num):
#try:
print('???',checkpoint_path2 + "dev_query_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb")
with open(checkpoint_path2 + "dev_query_"+str(checkpoint)+"__emb_p__data_obj_"+str(i)+".pb", 'rb') as handle:
dev_query_embedding.append(pickle.load(handle))
print('ok1???')
with open(checkpoint_path2 + "dev_query_"+str(checkpoint)+"__embid_p__data_obj_"+str(i)+".pb", 'rb') as handle:
dev_query_embedding2id.append(pickle.load(handle))
print('ok???',2)
passage_embedding,passage_embedding2id=get_all(passage_embedding,passage_embedding2id)
# sample20 =get_sample200(passage_embedding)
# passage_embedding,passage_embedding2id =get_sample200(passage_embedding)
if (not dev_query_embedding) or (not dev_query_embedding2id):
print("No data found for checkpoint: ",checkpoint)
dev_query_embedding = np.concatenate(dev_query_embedding, axis=0)
dev_query_embedding2id = np.concatenate(dev_query_embedding2id, axis=0)
##reranking
#sample20 =get_sample20()
# sample20 =get_sample200()
# pidmap_t = collections.defaultdict(list)
# for i in range(len(passage_embedding2id)):
# pidmap_t[pidmap_re[passage_embedding2id[i]]].append(i) # abs pos(key) to rele pos(val)
# all_dev_I = []
# for i,qid in enumerate(range(len(dev_query_embedding2id))):
# qid_r=qidmap_re[dev_query_embedding2id[qid]]
# p_set = []
# p_set_map = {}
# if qid_r not in sample20:
# print('no')
# else:
# #print('yes')
# count = 0
# for k,pid in enumerate(sample20[qid_r]):
# if pid in pidmap_t:
# for val in pidmap_t[pid]:
# p_set.append(passage_embedding[val])
# p_set_map[count] = val # new rele pos(key) to old rele pos(val)
# count += 1
# else:
# print(pid,"not in passages")
# #print('???len(p_set)',len(p_set))
# if len(p_set)==0:
# all_dev_I.append([-1]*10)
# else:
# dim = passage_embedding.shape[1]
# faiss.omp_set_num_threads(16)
# cpu_index = faiss.IndexFlatIP(dim)
# p_set = np.asarray(p_set)
# cpu_index.add(p_set)
# _, dev_I = cpu_index.search(dev_query_embedding[i:i+1], len(p_set))
# # if i<5:
# # print(sample20[qid_r],qid_r,dev_query_embedding2id[qid],qid)
# # if i<5:
# # print(dev_I,dev_query_positive_id[qid_r])
# for j in range(len(dev_I[0])):
# dev_I[0][j] = p_set_map[dev_I[0][j]]
# # if i<5:
# # print(dev_I,dev_query_positive_id[qid_r])
# # print([pidmap_re[passage_embedding2id[x]] for x in dev_I[0]])
# # print('-----------------------')
# all_dev_I.append(dev_I[0])
# print(len(sample20),len(all_dev_I))
# result = EvalDevQuery(dev_query_embedding2id, passage_embedding2id, qidmap_re,pidmap_re, dev_query_positive_id, all_dev_I, 10,bm25=sample20)
# final_ndcg, eval_query_cnt, final_Map, final_mrr, final_recall, hole_rate, ms_mrr, Ahole_rate, metrics, prediction = result
# print("Reranking Results for checkpoint "+str(checkpoint))
# print("Reranking NDCG@10:" + str(final_ndcg))
# print("Reranking map@10:" + str(final_Map))
# print("Reranking pytrec_mrr:" + str(final_mrr))
# print("Reranking recall@"+str(topN)+":" + str(final_recall))
# print("Reranking hole rate@10:" + str(hole_rate))
# print("Reranking hole rate:" + str(Ahole_rate))
# print("Reranking ms_mrr:" + str(ms_mrr))
#full ranking
dim = passage_embedding.shape[1]
faiss.omp_set_num_threads(16)
cpu_index = faiss.IndexFlatIP(dim)
cpu_index.add(passage_embedding)
_, dev_I = cpu_index.search(dev_query_embedding, topN)
#print('???',dev_I[:10])
result = EvalDevQuery(dev_query_embedding2id, passage_embedding2id, qidmap_re,pidmap_re , dev_query_positive_id,dev_I, 10)
final_ndcg, eval_query_cnt, final_Map, final_mrr, final_recall, hole_rate, ms_mrr, Ahole_rate, metrics, prediction = result
print("Results for checkpoint "+str(checkpoint))
print("NDCG@10:" + str(final_ndcg))
print("map@10:" + str(final_Map))
print("pytrec_mrr:" + str(final_mrr))
print("recall@"+str(topN)+":" + str(final_recall))
print("hole rate@10:" + str(hole_rate))
print("hole rate:" + str(Ahole_rate))
print("ms_mrr:" + str(ms_mrr))
| 43.982635
| 143
| 0.719222
| 6,373
| 43,059
| 4.462576
| 0.051153
| 0.063256
| 0.08161
| 0.123066
| 0.824508
| 0.800246
| 0.7718
| 0.747925
| 0.73685
| 0.729044
| 0
| 0.054311
| 0.155043
| 43,059
| 978
| 144
| 44.027607
| 0.727373
| 0.652384
| 0
| 0.253623
| 0
| 0
| 0.0859
| 0.030584
| 0
| 0
| 0
| 0
| 0.036232
| 1
| 0.021739
| false
| 0.144928
| 0.043478
| 0
| 0.086957
| 0.086957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c65af3fce97d4f7200f6428dceed97507adeb716
| 7,832
|
py
|
Python
|
src/abaqus/Interaction/FilmCondition.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/Interaction/FilmCondition.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/Interaction/FilmCondition.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
from .Interaction import Interaction
from ..Region.Region import Region
class FilmCondition(Interaction):
"""The FilmCondition object defines film coefficients and associated sink temperatures for
coupled temperature-displacement analyses.
The FilmCondition object is derived from the Interaction object.
Notes
-----
This object can be accessed by:
.. code-block:: python
import interaction
mdb.models[name].interactions[name]
"""
def __init__(self, name: str, createStepName: str, surface: Region, definition: SymbolicConstant,
interactionProperty: str = '', sinkTemperature: float = 0, sinkAmplitude: str = '',
filmCoeff: float = 0, filmCoeffAmplitude: str = '', field: str = '',
sinkFieldName: str = '', sinkDistributionType: SymbolicConstant = UNIFORM):
"""This method creates a FilmCondition object.
Notes
-----
This function can be accessed by:
.. code-block:: python
mdb.models[name].FilmCondition
Parameters
----------
name
A String specifying the repository key.
createStepName
A String specifying the name of the step in which the FilmCondition object is created.
surface
A Region object specifying the name of the surface to which the film condition
interaction is applied.
definition
A SymbolicConstant specifying how the film condition is defined. Possible values are
EMBEDDED_COEFF, PROPERTY_REF, USER_SUB, and FIELD.
interactionProperty
A String specifying the name of the FilmConditionProp object associated with this
interaction. The *interactionProperty* argument applies only when
*definition*=PROPERTY_REF. The default value is an empty string.
sinkTemperature
A Float specifying the reference sink temperature, θ0. The default value is 0.0.
sinkAmplitude
A String specifying the name of the Amplitude object that gives the variation of the
sink temperature, θ0, with time. The default value is an empty string. Note: Use empty
string in an Abaqus/Standard analysis to specify that the reference sink temperature is
applied immediately at the beginning of the step or linearly over the step. Use empty
string in an Abaqus/Explicit analysis to specify that the reference sink temperature is
applied throughout the step.
filmCoeff
A Float specifying the reference film coefficient value, h. The *filmCoeff* argument
applies when *definition*=EMBEDDED_COEFF, *definition*=USER_SUB, or *definition*=FIELD.
The default value is 0.0.
filmCoeffAmplitude
A String specifying the name of the Amplitude object that gives the variation of the
film coefficient, h, with time. The default value is an empty string. Note: Use empty
string in an Abaqus/Standard analysis to specify that the reference film coefficient is
applied immediately at the beginning of the step or linearly over the step. Use empty
string in an Abaqus/Explicit analysis to specify that the reference film coefficient is
applied throughout the step.
field
A String specifying the name of the AnalyticalField object associated with this
interaction. The *field* argument applies only when *definition*=FIELD. The default
value is an empty string.
sinkFieldName
A String specifying the name of the AnalyticalField or DiscreteField object associated
with the sink temperature. The *sinkFieldName* argument applies only when
*sinkDistributionType*=ANALYTICAL_FIELD or *sinkDistributionType*=DISCRETE_FIELD. The
default value is an empty string.
sinkDistributionType
A SymbolicConstant specifying how the sink temperature is distributed. Possible values
are UNIFORM, ANALYTICAL_FIELD, and DISCRETE_FIELD. The default value is UNIFORM.
Returns
-------
A FilmCondition object.
"""
super().__init__()
pass
def setValues(self, interactionProperty: str = '', sinkTemperature: float = 0, sinkAmplitude: str = '',
filmCoeff: float = 0, filmCoeffAmplitude: str = '', field: str = '',
sinkFieldName: str = '', sinkDistributionType: SymbolicConstant = UNIFORM):
"""This method modifies the data for an existing FilmCondition object in the step where it
is created.
Parameters
----------
interactionProperty
A String specifying the name of the FilmConditionProp object associated with this
interaction. The *interactionProperty* argument applies only when
*definition*=PROPERTY_REF. The default value is an empty string.
sinkTemperature
A Float specifying the reference sink temperature, θ0. The default value is 0.0.
sinkAmplitude
A String specifying the name of the Amplitude object that gives the variation of the
sink temperature, θ0, with time. The default value is an empty string. Note: Use empty
string in an Abaqus/Standard analysis to specify that the reference sink temperature is
applied immediately at the beginning of the step or linearly over the step. Use empty
string in an Abaqus/Explicit analysis to specify that the reference sink temperature is
applied throughout the step.
filmCoeff
A Float specifying the reference film coefficient value, h. The *filmCoeff* argument
applies when *definition*=EMBEDDED_COEFF, *definition*=USER_SUB, or *definition*=FIELD.
The default value is 0.0.
filmCoeffAmplitude
A String specifying the name of the Amplitude object that gives the variation of the
film coefficient, h, with time. The default value is an empty string. Note: Use empty
string in an Abaqus/Standard analysis to specify that the reference film coefficient is
applied immediately at the beginning of the step or linearly over the step. Use empty
string in an Abaqus/Explicit analysis to specify that the reference film coefficient is
applied throughout the step.
field
A String specifying the name of the AnalyticalField object associated with this
interaction. The *field* argument applies only when *definition*=FIELD. The default
value is an empty string.
sinkFieldName
A String specifying the name of the AnalyticalField or DiscreteField object associated
with the sink temperature. The *sinkFieldName* argument applies only when
*sinkDistributionType*=ANALYTICAL_FIELD or *sinkDistributionType*=DISCRETE_FIELD. The
default value is an empty string.
sinkDistributionType
A SymbolicConstant specifying how the sink temperature is distributed. Possible values
are UNIFORM, ANALYTICAL_FIELD, and DISCRETE_FIELD. The default value is UNIFORM.
"""
pass
def setValuesInStep(self, stepName: str):
"""This method modifies the propagating data of an existing FilmCondition object in the
specified step.
Parameters
----------
stepName
A String specifying the name of the step in which the interaction is modified.
"""
pass
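A minimal usage sketch for the methods documented above, assuming an Abaqus script in which mdb.models['Model-1'] already holds a FilmCondition interaction named 'Film-1' (the model, interaction, and step names are all hypothetical):
# Hedged sketch: adjust an existing FilmCondition from an Abaqus Python script.
# UNIFORM would come from abaqusConstants in a real script.
film = mdb.models['Model-1'].interactions['Film-1']
film.setValues(sinkTemperature=300.0, filmCoeff=10.0,
               sinkDistributionType=UNIFORM)
# Modify the propagating data of the interaction in a later step.
film.setValuesInStep(stepName='Step-2')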
| 52.563758
| 107
| 0.665858
| 896
| 7,832
| 5.792411
| 0.15067
| 0.020231
| 0.046243
| 0.052408
| 0.812909
| 0.802312
| 0.78921
| 0.777649
| 0.777649
| 0.777649
| 0
| 0.003592
| 0.28907
| 7,832
| 148
| 108
| 52.918919
| 0.92852
| 0.761619
| 0
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1875
| false
| 0.1875
| 0.1875
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c6c433181a25dbecc4248e899c68c781ed50accc
| 139
|
py
|
Python
|
pysem/model_generation/__init__.py
|
planplus/pysem
|
6effa2e1e468c889e89109ac4a7a486b0813f02d
|
[
"MIT"
] | 2
|
2021-12-10T04:20:58.000Z
|
2022-01-07T06:57:17.000Z
|
pysem/model_generation/__init__.py
|
planplus/pysem
|
6effa2e1e468c889e89109ac4a7a486b0813f02d
|
[
"MIT"
] | null | null | null |
pysem/model_generation/__init__.py
|
planplus/pysem
|
6effa2e1e468c889e89109ac4a7a486b0813f02d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .description import generate_desc
from .parameters import generate_parameters
from .data import generate_data
| 27.8
| 43
| 0.791367
| 18
| 139
| 5.944444
| 0.555556
| 0.392523
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008197
| 0.122302
| 139
| 4
| 44
| 34.75
| 0.868852
| 0.151079
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
059a8f21ef58d72370460be34132486ab8e8698c
| 205
|
py
|
Python
|
supplemental_content/utils.py
|
PhilR8/cmcs-eregulations
|
82d63239e592a73c1d7d6967aa2b6ff9ccbdb26d
|
[
"CC0-1.0"
] | 6
|
2020-10-05T20:19:25.000Z
|
2022-03-17T18:34:59.000Z
|
supplemental_content/utils.py
|
PhilR8/cmcs-eregulations
|
82d63239e592a73c1d7d6967aa2b6ff9ccbdb26d
|
[
"CC0-1.0"
] | 95
|
2020-10-22T15:00:46.000Z
|
2022-03-31T19:10:20.000Z
|
supplemental_content/utils.py
|
PhilR8/cmcs-eregulations
|
82d63239e592a73c1d7d6967aa2b6ff9ccbdb26d
|
[
"CC0-1.0"
] | 7
|
2020-10-08T14:10:49.000Z
|
2022-01-24T18:36:13.000Z
|
class reverse_sort:
def __init__(self, obj):
self.obj = obj
def __eq__(self, other):
return other.obj == self.obj
def __lt__(self, other):
return other.obj < self.obj
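The inverted __eq__/__lt__ above let the wrapper flip the ordering of a single sort-key field inside an otherwise ascending sort; a small sketch with made-up data:
items = [("a", 1), ("b", 1), ("a", 2)]
# Ascending by name, descending by the wrapped numeric field.
items.sort(key=lambda t: (t[0], reverse_sort(t[1])))
assert items == [("a", 2), ("a", 1), ("b", 1)]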
| 20.5
| 36
| 0.6
| 28
| 205
| 3.928571
| 0.392857
| 0.254545
| 0.272727
| 0.363636
| 0.545455
| 0.545455
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0.292683
| 205
| 9
| 37
| 22.777778
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
059fbdc0c5215af8c95f598a0b648225d26167d2
| 52
|
py
|
Python
|
twitfs/utils.py
|
mmalecki/twitterfs
|
21a680dd64fb6d0ec597e631716fe4b0f6ab5b2c
|
[
"MIT"
] | 3
|
2016-08-24T12:01:49.000Z
|
2019-09-07T07:16:17.000Z
|
twitfs/utils.py
|
mmalecki/twitterfs
|
21a680dd64fb6d0ec597e631716fe4b0f6ab5b2c
|
[
"MIT"
] | null | null | null |
twitfs/utils.py
|
mmalecki/twitterfs
|
21a680dd64fb6d0ec597e631716fe4b0f6ab5b2c
|
[
"MIT"
] | 1
|
2020-09-22T19:33:33.000Z
|
2020-09-22T19:33:33.000Z
|
def repr_(*args):
return tuple(map(repr, args))
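For illustration, the helper simply maps repr over its arguments:
assert repr_(1, 'a') == ('1', "'a'")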
| 17.333333
| 33
| 0.653846
| 8
| 52
| 4.125
| 0.75
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173077
| 52
| 2
| 34
| 26
| 0.767442
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
af453b6c25697f32e26be352a450d564ceea2e52
| 7,945
|
py
|
Python
|
tests/benchmark/test_validation.py
|
ytsarev/rally
|
a680e0ec5771c3104630a0eccc887256cb434e81
|
[
"Apache-2.0"
] | 1
|
2015-06-19T12:13:51.000Z
|
2015-06-19T12:13:51.000Z
|
tests/benchmark/test_validation.py
|
ytsarev/rally
|
a680e0ec5771c3104630a0eccc887256cb434e81
|
[
"Apache-2.0"
] | null | null | null |
tests/benchmark/test_validation.py
|
ytsarev/rally
|
a680e0ec5771c3104630a0eccc887256cb434e81
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glanceclient import exc as glance_exc
import mock
from novaclient import exceptions as nova_exc
from rally.benchmark import validation
from tests import fakes
from tests import test
class ValidationUtilsTestCase(test.TestCase):
def test_add_validator(self):
def test_validator():
pass
@validation.add_validator(test_validator)
def test_function():
pass
validators = getattr(test_function, "validators")
self.assertEqual(len(validators), 1)
self.assertEqual(validators[0], test_validator)
@mock.patch("rally.osclients.Clients")
def test_image_exists(self, mock_osclients):
fakegclient = fakes.FakeGlanceClient()
fakegclient.images.get = mock.MagicMock()
mock_osclients.glance.return_value = fakegclient
validator = validation.image_exists("image_id")
test_img_id = "test_image_id"
result = validator(clients=mock_osclients,
image_id=test_img_id)
fakegclient.images.get.assert_called_once_with(image=test_img_id)
self.assertTrue(result.is_valid)
self.assertIsNone(result.msg)
@mock.patch("rally.osclients.Clients")
def test_image_exists_fail(self, mock_osclients):
fakegclient = fakes.FakeGlanceClient()
fakegclient.images.get = mock.MagicMock()
fakegclient.images.get.side_effect = glance_exc.HTTPNotFound
mock_osclients.glance.return_value = fakegclient
validator = validation.image_exists("image_id")
test_img_id = "test_image_id"
result = validator(clients=mock_osclients,
image_id=test_img_id)
fakegclient.images.get.assert_called_once_with(image=test_img_id)
self.assertFalse(result.is_valid)
self.assertIsNotNone(result.msg)
@mock.patch("rally.osclients.Clients")
def test_flavor_exists(self, mock_osclients):
fakenclient = fakes.FakeNovaClient()
fakenclient.flavors = mock.MagicMock()
mock_osclients.nova.return_value = fakenclient
validator = validation.flavor_exists("flavor_id")
test_flavor_id = 1
result = validator(clients=mock_osclients,
flavor_id=test_flavor_id)
fakenclient.flavors.get.assert_called_once_with(flavor=test_flavor_id)
self.assertTrue(result.is_valid)
self.assertIsNone(result.msg)
@mock.patch("rally.osclients.Clients")
def test_flavor_exists_fail(self, mock_osclients):
fakenclient = fakes.FakeNovaClient()
fakenclient.flavors = mock.MagicMock()
fakenclient.flavors.get.side_effect = nova_exc.NotFound(code=404)
mock_osclients.nova.return_value = fakenclient
validator = validation.flavor_exists("flavor_id")
test_flavor_id = 101
result = validator(clients=mock_osclients,
flavor_id=test_flavor_id)
fakenclient.flavors.get.assert_called_once_with(flavor=test_flavor_id)
self.assertFalse(result.is_valid)
self.assertIsNotNone(result.msg)
@mock.patch("rally.osclients.Clients")
def test_image_valid_on_flavor(self, mock_osclients):
fakegclient = fakes.FakeGlanceClient()
image = fakes.FakeImage()
image.min_ram = 0
image.size = 0
image.min_disk = 0
fakegclient.images.get = mock.MagicMock(return_value=image)
mock_osclients.glance.return_value = fakegclient
fakenclient = fakes.FakeNovaClient()
flavor = fakes.FakeFlavor()
flavor.ram = 1
flavor.disk = 1
fakenclient.flavors.get = mock.MagicMock(return_value=flavor)
mock_osclients.nova.return_value = fakenclient
validator = validation.image_valid_on_flavor("flavor_id", "image_id")
result = validator(clients=mock_osclients,
flavor_id=flavor.id,
image_id=image.id)
fakenclient.flavors.get.assert_called_once_with(flavor=flavor.id)
fakegclient.images.get.assert_called_once_with(image=image.id)
self.assertTrue(result.is_valid)
self.assertIsNone(result.msg)
@mock.patch("rally.osclients.Clients")
def test_image_valid_on_flavor_fail(self, mock_osclients):
fakegclient = fakes.FakeGlanceClient()
image = fakes.FakeImage()
image.min_ram = 1
image.size = 1
image.min_disk = 1
fakegclient.images.get = mock.MagicMock(return_value=image)
mock_osclients.glance.return_value = fakegclient
fakenclient = fakes.FakeNovaClient()
flavor = fakes.FakeFlavor()
flavor.ram = 0
flavor.disk = 0
fakenclient.flavors.get = mock.MagicMock(return_value=flavor)
mock_osclients.nova.return_value = fakenclient
validator = validation.image_valid_on_flavor("flavor_id", "image_id")
result = validator(clients=mock_osclients,
flavor_id=flavor.id,
image_id=image.id)
fakenclient.flavors.get.assert_called_once_with(flavor=flavor.id)
fakegclient.images.get.assert_called_once_with(image=image.id)
self.assertFalse(result.is_valid)
self.assertIsNotNone(result.msg)
@mock.patch("rally.osclients.Clients")
def test_image_valid_on_flavor_image_not_exist(self, mock_osclients):
fakegclient = fakes.FakeGlanceClient()
fakegclient.images.get = mock.MagicMock()
fakegclient.images.get.side_effect = glance_exc.HTTPNotFound
mock_osclients.glance.return_value = fakegclient
fakenclient = fakes.FakeNovaClient()
flavor = fakes.FakeFlavor()
fakenclient.flavors.get = mock.MagicMock(return_value=flavor)
mock_osclients.nova.return_value = fakenclient
validator = validation.image_valid_on_flavor("flavor_id", "image_id")
test_img_id = "test_image_id"
result = validator(clients=mock_osclients,
flavor_id=flavor.id,
image_id=test_img_id)
fakenclient.flavors.get.assert_called_once_with(flavor=flavor.id)
fakegclient.images.get.assert_called_once_with(image=test_img_id)
self.assertFalse(result.is_valid)
self.assertEqual(result.msg, "Image with id 'test_image_id' not found")
@mock.patch("rally.osclients.Clients")
def test_image_valid_on_flavor_flavor_not_exist(self, mock_osclients):
fakegclient = fakes.FakeGlanceClient()
mock_osclients.glance.return_value = fakegclient
fakenclient = fakes.FakeNovaClient()
fakenclient.flavors = mock.MagicMock()
fakenclient.flavors.get.side_effect = nova_exc.NotFound(code=404)
mock_osclients.nova.return_value = fakenclient
validator = validation.image_valid_on_flavor("flavor_id", "image_id")
test_img_id = "test_image_id"
test_flavor_id = 101
result = validator(clients=mock_osclients,
flavor_id=test_flavor_id,
image_id=test_img_id)
fakenclient.flavors.get.assert_called_once_with(flavor=test_flavor_id)
self.assertFalse(result.is_valid)
self.assertEqual(result.msg, "Flavor with id '101' not found")
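test_add_validator above pins down the decorator contract: add_validator attaches the given callable to a "validators" attribute on the decorated function. A hedged sketch of the usage shape this implies (the scenario function is hypothetical):
# Sketch only: 'boot_server_scenario' is a made-up benchmark scenario.
validator = validation.image_exists("image_id")

@validation.add_validator(validator)
def boot_server_scenario(**kwargs):
    pass

assert boot_server_scenario.validators == [validator]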
| 39.924623
| 79
| 0.685588
| 930
| 7,945
| 5.608602
| 0.149462
| 0.069785
| 0.046012
| 0.040069
| 0.810199
| 0.808282
| 0.808282
| 0.806748
| 0.794287
| 0.776074
| 0
| 0.005883
| 0.22983
| 7,945
| 198
| 80
| 40.126263
| 0.846544
| 0.075519
| 0
| 0.756757
| 0
| 0
| 0.05689
| 0.025102
| 0
| 0
| 0
| 0
| 0.195946
| 1
| 0.074324
| false
| 0.013514
| 0.040541
| 0
| 0.121622
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af5b33ae8624359cc9e4f459059542a5f0f720b0
| 4,289
|
py
|
Python
|
cifar10/attack_utils.py
|
yaodongyu/BiasVariance-AdversarialTraining
|
d3d1a4339e45e297dd52a1489b3f3512a3b7f191
|
[
"MIT"
] | 8
|
2021-03-19T09:16:23.000Z
|
2021-10-31T07:39:42.000Z
|
cifar100/attack_utils.py
|
yaodongyu/BiasVariance-AdversarialTraining
|
d3d1a4339e45e297dd52a1489b3f3512a3b7f191
|
[
"MIT"
] | null | null | null |
cifar100/attack_utils.py
|
yaodongyu/BiasVariance-AdversarialTraining
|
d3d1a4339e45e297dd52a1489b3f3512a3b7f191
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import torchvision.transforms as transforms
import torchvision
upper_limit, lower_limit = 1, 0
def clamp(X, lower_limit, upper_limit):
return torch.max(torch.min(X, upper_limit), lower_limit)
def attack_pgd(model, X, y, epsilon, alpha, attack_iters, norm):
model.eval()
delta = torch.zeros_like(X).cuda()
if norm == "l_inf":
delta.uniform_(-epsilon, epsilon)
elif norm == "l_2":
delta.normal_()
d_flat = delta.view(delta.size(0), -1)
n = d_flat.norm(p=2, dim=1).view(delta.size(0), 1, 1, 1)
r = torch.zeros_like(n).uniform_(0, 1)
delta *= r / n * epsilon
else:
raise ValueError
delta = clamp(delta, lower_limit - X, upper_limit - X)
delta.requires_grad = True
for _ in range(attack_iters):
output = model(X + delta)
        index = slice(None, None, None)
        # index is always a full slice here, so the guard below can never
        # fire; it appears to be leftover from a per-example variant.
        if not isinstance(index, slice) and len(index) == 0:
            break
loss = F.cross_entropy(output, y)
loss.backward()
grad = delta.grad.detach()
d = delta[index, :, :, :]
g = grad[index, :, :, :]
x = X[index, :, :, :]
if norm == "l_inf":
d = torch.clamp(d + alpha * torch.sign(g), min=-epsilon, max=epsilon)
elif norm == "l_2":
g_norm = torch.norm(g.view(g.shape[0], -1), dim=1).view(-1, 1, 1, 1)
scaled_g = g / (g_norm + 1e-10)
d = (d + scaled_g * alpha).view(d.size(0), -1).renorm(p=2, dim=0, maxnorm=epsilon).view_as(d)
d = clamp(d, lower_limit - x, upper_limit - x)
delta.data[index, :, :, :] = d
delta.grad.zero_()
model.train()
return delta.detach()
def attack_pgd_eval(model, X, y, epsilon, alpha, attack_iters, norm):
delta = torch.zeros_like(X).cuda()
if norm == "l_inf":
delta.uniform_(-epsilon, epsilon)
elif norm == "l_2":
delta.normal_()
d_flat = delta.view(delta.size(0), -1)
n = d_flat.norm(p=2, dim=1).view(delta.size(0), 1, 1, 1)
r = torch.zeros_like(n).uniform_(0, 1)
delta *= r / n * epsilon
else:
raise ValueError
delta = clamp(delta, lower_limit - X, upper_limit - X)
delta.requires_grad = True
for _ in range(attack_iters):
output = model(X + delta)
index = slice(None, None, None)
if not isinstance(index, slice) and len(index) == 0:
break
loss = F.cross_entropy(output, y)
loss.backward()
grad = delta.grad.detach()
d = delta[index, :, :, :]
g = grad[index, :, :, :]
x = X[index, :, :, :]
if norm == "l_inf":
d = torch.clamp(d + alpha * torch.sign(g), min=-epsilon, max=epsilon)
elif norm == "l_2":
g_norm = torch.norm(g.view(g.shape[0], -1), dim=1).view(-1, 1, 1, 1)
scaled_g = g / (g_norm + 1e-10)
d = (d + scaled_g * alpha).view(d.size(0), -1).renorm(p=2, dim=0, maxnorm=epsilon).view_as(d)
d = clamp(d, lower_limit - x, upper_limit - x)
delta.data[index, :, :, :] = d
delta.grad.zero_()
return delta.detach()
def attack_pgd_bv_eval(model, X, y, epsilon, alpha, attack_iters, norm):
delta = torch.zeros_like(X).cuda()
delta.requires_grad = True
for _ in range(attack_iters):
output = model(X + delta)
index = slice(None, None, None)
if not isinstance(index, slice) and len(index) == 0:
break
loss = F.cross_entropy(output, y)
loss.backward()
grad = delta.grad.detach()
d = delta[index, :, :, :]
g = grad[index, :, :, :]
x = X[index, :, :, :]
if norm == "l_inf":
d = torch.clamp(d + alpha * torch.sign(g), min=-epsilon, max=epsilon)
elif norm == "l_2":
g_norm = torch.norm(g.view(g.shape[0], -1), dim=1).view(-1, 1, 1, 1)
scaled_g = g / (g_norm + 1e-10)
d = (d + scaled_g * alpha).view(d.size(0), -1).renorm(p=2, dim=0, maxnorm=epsilon).view_as(d)
d = clamp(d, lower_limit - x, upper_limit - x)
delta.data[index, :, :, :] = d
delta.grad.zero_()
return delta.detach()
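A hedged one-batch driver for the attacks defined above; the model, the (X, y) batch, and the l_inf budget are assumptions (a common CIFAR setting of 8/255 with 2/255 steps):
# Hypothetical driver; attack_pgd itself assumes CUDA (zeros_like(X).cuda()).
model = model.cuda()
X, y = X.cuda(), y.cuda()          # one batch from some data loader
epsilon, alpha = 8 / 255, 2 / 255  # l_inf budget and per-step size
delta = attack_pgd(model, X, y, epsilon, alpha, attack_iters=10, norm="l_inf")
adv_logits = model(X + delta)      # evaluate on the perturbed inputs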
| 35.155738
| 105
| 0.553043
| 628
| 4,289
| 3.649682
| 0.138535
| 0.011344
| 0.010471
| 0.021815
| 0.876527
| 0.876527
| 0.858639
| 0.858639
| 0.843805
| 0.843805
| 0
| 0.02356
| 0.28748
| 4,289
| 121
| 106
| 35.446281
| 0.72644
| 0
| 0
| 0.858491
| 0
| 0
| 0.009326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.066038
| 0.009434
| 0.141509
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afa80d119be61fee53e75381b90df70131b72f86
| 1,097
|
py
|
Python
|
general/list_diff.py
|
aakbar5/handy-python
|
14f98c624bbdab1fe0c78b9ee9feed0c8437485b
|
[
"MIT"
] | null | null | null |
general/list_diff.py
|
aakbar5/handy-python
|
14f98c624bbdab1fe0c78b9ee9feed0c8437485b
|
[
"MIT"
] | null | null | null |
general/list_diff.py
|
aakbar5/handy-python
|
14f98c624bbdab1fe0c78b9ee9feed0c8437485b
|
[
"MIT"
] | null | null | null |
""" List difference """
def get_lists_diff(py_list1, py_list2):
"""
Get elements of py_list1 which are not in py_list2.
"""
return list(set(py_list1) - set(py_list2))
list1 = [1, 2, 3, 4]
list2 = [1]
ret = get_lists_diff(list1, list2)
print("List1:", list1)
print("List2:", list2)
print("Diff: ", ret)
print("")
list1 = [1]
list2 = [1, 2, 3, 4]
ret = get_lists_diff(list1, list2)
print("List1:", list1)
print("List2:", list2)
print("Diff: ", ret)
print("")
list1 = [1, 2, 3, 4]
list2 = [1, 2, 3, 4]
ret = get_lists_diff(list1, list2)
print("List1:", list1)
print("List2:", list2)
print("Diff: ", ret)
print("")
list1 = [1, 2, 3, 4]
list2 = [4, 2, 3, 1]
ret = get_lists_diff(list1, list2)
print("List1:", list1)
print("List2:", list2)
print("Diff: ", ret)
print("")
list1 = [1, 2, 3, 4]
list2 = [10, 11, 12, 14, 15, 16]
ret = get_lists_diff(list1, list2)
print("List1:", list1)
print("List2:", list2)
print("Diff: ", ret)
print("")
list1 = [1, 2, 3, 4]
list2 = [6, 7, 8, 9]
ret = get_lists_diff(list1, list2)
print("List1:", list1)
print("List2:", list2)
print("Diff: ", ret)
print("")
| 19.589286
| 53
| 0.615314
| 181
| 1,097
| 3.618785
| 0.18232
| 0.183206
| 0.128244
| 0.042748
| 0.798473
| 0.798473
| 0.798473
| 0.777099
| 0.777099
| 0.777099
| 0
| 0.112676
| 0.158614
| 1,097
| 55
| 54
| 19.945455
| 0.596966
| 0.061076
| 0
| 0.840909
| 0
| 0
| 0.106719
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0
| 0
| 0.045455
| 0.545455
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
bb6b5a2d764e6c19779038b42cdbf01b1522f1e5
| 380
|
py
|
Python
|
bitmovin_api_sdk/encoding/encodings/muxings/progressive_ts/id3/frame_id/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/encodings/muxings/progressive_ts/id3/frame_id/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/encodings/muxings/progressive_ts/id3/frame_id/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.encodings.muxings.progressive_ts.id3.frame_id.frame_id_api import FrameIdApi
from bitmovin_api_sdk.encoding.encodings.muxings.progressive_ts.id3.frame_id.customdata.customdata_api import CustomdataApi
from bitmovin_api_sdk.encoding.encodings.muxings.progressive_ts.id3.frame_id.frame_id_id3_tag_list_query_params import FrameIdId3TagListQueryParams
| 95
| 147
| 0.910526
| 55
| 380
| 5.909091
| 0.381818
| 0.107692
| 0.138462
| 0.166154
| 0.643077
| 0.643077
| 0.643077
| 0.643077
| 0.643077
| 0.643077
| 0
| 0.013587
| 0.031579
| 380
| 3
| 148
| 126.666667
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bb8c97982da27e14cf20c25401356224b62c4645
| 141
|
py
|
Python
|
backend/backend/api/tests.py
|
abrahamy/DE-Forest-Watch
|
ebf1aa314fa72905229cae7feb22726c64f5d035
|
[
"MIT"
] | null | null | null |
backend/backend/api/tests.py
|
abrahamy/DE-Forest-Watch
|
ebf1aa314fa72905229cae7feb22726c64f5d035
|
[
"MIT"
] | null | null | null |
backend/backend/api/tests.py
|
abrahamy/DE-Forest-Watch
|
ebf1aa314fa72905229cae7feb22726c64f5d035
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import reverse
from django.urls import resolve
from django.contrib.auth.models import User
| 28.2
| 43
| 0.843972
| 22
| 141
| 5.409091
| 0.545455
| 0.336134
| 0.235294
| 0.336134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113475
| 141
| 4
| 44
| 35.25
| 0.952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bbafb48d108b14bd2f3f1703be81568c943c7258
| 169
|
py
|
Python
|
Codewars/6kyu/vasya-clerk/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/6kyu/vasya-clerk/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/6kyu/vasya-clerk/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
test.assert_equals(tickets([25, 25, 50]), 'YES')
test.assert_equals(tickets([25, 100]), 'NO')
test.assert_equals(tickets([25, 25, 50, 50, 100]), 'NO')
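The three assertions above fix the kata's contract: a queue buys 25-cost tickets paying with 25/50/100 bills and the clerk starts with an empty till. A sketch implementation consistent with those cases (not the kata's reference solution):
# Hedged sketch of a tickets() implementation; verified only against the
# three assertions above.
def tickets(bills):
    c25 = c50 = 0  # till contents; 100s are never usable as change
    for b in bills:
        if b == 25:
            c25 += 1
        elif b == 50:
            if c25 == 0:
                return 'NO'
            c25 -= 1
            c50 += 1
        else:  # b == 100 needs 75 in change: prefer 50+25 over three 25s
            if c50 > 0 and c25 > 0:
                c50 -= 1
                c25 -= 1
            elif c25 >= 3:
                c25 -= 3
            else:
                return 'NO'
    return 'YES'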
| 28.166667
| 56
| 0.650888
| 29
| 169
| 3.689655
| 0.448276
| 0.280374
| 0.448598
| 0.64486
| 0.775701
| 0.542056
| 0.542056
| 0
| 0
| 0
| 0
| 0.165563
| 0.106509
| 169
| 5
| 57
| 33.8
| 0.543046
| 0.08284
| 0
| 0
| 0
| 0
| 0.045752
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bbf22dead144b073eda1bca527113564e1b8721b
| 2,181
|
py
|
Python
|
test/test_sv_filters.py
|
gantzgraf/vape
|
f939cb527d72d852cb0919a57332110c15c5fd4a
|
[
"MIT"
] | 4
|
2020-03-25T06:09:39.000Z
|
2021-03-23T11:22:00.000Z
|
test/test_sv_filters.py
|
gantzgraf/vape
|
f939cb527d72d852cb0919a57332110c15c5fd4a
|
[
"MIT"
] | 1
|
2020-10-02T14:50:30.000Z
|
2020-10-12T15:24:24.000Z
|
test/test_sv_filters.py
|
gantzgraf/vape
|
f939cb527d72d852cb0919a57332110c15c5fd4a
|
[
"MIT"
] | 1
|
2021-02-20T11:32:34.000Z
|
2021-02-20T11:32:34.000Z
|
from .utils import *
input = os.path.join(dir_path, 'test_data', 'ex4.bcf')
def test_sv_biallelic_csq():
output = get_tmp_out()
test_args = dict(
input=input,
max_alt_alleles=1,
output=output,
ped=os.path.join(dir_path, "test_data", "test.ped"),
biallelic=True,
csq=[],
sv_gq=99,
sv_het_ab=0.3,
duphold_del_dhffc=0.7,
duphold_dup_dhbfc=1.3,
)
results, expected = run_args(test_args, output,
sys._getframe().f_code.co_name)
assert_equal(results, expected)
os.remove(output)
def test_sv_biallelic_lof():
output = get_tmp_out()
test_args = dict(
input=input,
max_alt_alleles=1,
output=output,
ped=os.path.join(dir_path, "test_data", "test.ped"),
biallelic=True,
impact=['HIGH'],
sv_gq=99,
sv_het_ab=0.3,
duphold_del_dhffc=0.7,
duphold_dup_dhbfc=1.3,
)
results, expected = run_args(test_args, output,
sys._getframe().f_code.co_name)
assert_equal(results, expected)
os.remove(output)
def test_sv_de_novo():
output = get_tmp_out()
test_args = dict(
input=input,
max_alt_alleles=1,
output=output,
ped=os.path.join(dir_path, "test_data", "test.ped"),
de_novo=True
)
results, expected = run_args(test_args, output,
sys._getframe().f_code.co_name)
assert_equal(results, expected)
os.remove(output)
def test_sv_de_novo_filters():
output = get_tmp_out()
test_args = dict(
input=input,
max_alt_alleles=1,
output=output,
ped=os.path.join(dir_path, "test_data", "test.ped"),
csq=[],
sv_gq=99,
sv_het_ab=0.3,
duphold_del_dhffc=0.7,
duphold_dup_dhbfc=1.3,
de_novo=True
)
results, expected = run_args(test_args, output,
sys._getframe().f_code.co_name)
assert_equal(results, expected)
os.remove(output)
if __name__ == '__main__':
import nose
nose.run(defaultTest=__name__)
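Every test above follows one template (build an args dict, run, diff against an expected output); a hedged sketch of adding another case in the same style, with a hypothetical impact value:
def test_sv_biallelic_impact_moderate():  # hypothetical extra case
    output = get_tmp_out()
    test_args = dict(
        input=input,
        max_alt_alleles=1,
        output=output,
        ped=os.path.join(dir_path, "test_data", "test.ped"),
        biallelic=True,
        impact=['MODERATE'],
    )
    results, expected = run_args(test_args, output,
                                 sys._getframe().f_code.co_name)
    assert_equal(results, expected)
    os.remove(output)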
| 25.964286
| 64
| 0.580926
| 291
| 2,181
| 4.003436
| 0.213058
| 0.054936
| 0.042918
| 0.055794
| 0.898712
| 0.898712
| 0.898712
| 0.877253
| 0.877253
| 0.877253
| 0
| 0.019041
| 0.301696
| 2,181
| 83
| 65
| 26.277108
| 0.745896
| 0
| 0
| 0.805556
| 0
| 0
| 0.044017
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.055556
| false
| 0
| 0.027778
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a51fadf3c987598f8906e2933a6b7843c7c96a90
| 3,094
|
py
|
Python
|
tests/test_stage.py
|
rreben/zettelkasten_tools
|
1be7a6f2a259cf235defb545819c6e13e158884a
|
[
"MIT"
] | 1
|
2022-03-21T20:41:33.000Z
|
2022-03-21T20:41:33.000Z
|
tests/test_stage.py
|
rreben/zettelkasten_tools
|
1be7a6f2a259cf235defb545819c6e13e158884a
|
[
"MIT"
] | 3
|
2021-11-22T20:09:30.000Z
|
2022-01-04T22:18:06.000Z
|
tests/test_stage.py
|
rreben/tools4zettelkasten
|
eea0c3c3e869345cd6fd6c9773c90434c60bddd8
|
[
"MIT"
] | null | null | null |
# test_stage.py
# Copyright (c) 2021 Dr. Rupert Rebentisch
# Licensed under the MIT license
from .context import tools4zettelkasten as zt
def test_process_txt_file(tmp_path):
test_sub_dir = tmp_path / "subdir"
test_sub_dir.mkdir()
persistency_manager = zt.PersistencyManager(tmp_path / "subdir")
testfile = test_sub_dir / "test.md"
content = """# Eine längere Überschrift
and some content"""
testfile.write_text(content)
zt.process_txt_file(persistency_manager, "test.md")
comparefile = test_sub_dir / "Eine_laengere_Ueberschrift.md"
assert comparefile.exists()
def test_process_files_from_input(tmp_path):
test_sub_dir = tmp_path / "subdir"
test_sub_dir.mkdir()
persistency_manager = zt.PersistencyManager(tmp_path / "subdir")
first_testfile = test_sub_dir / "test.md"
content = """# Eine längere Überschrift
and some content"""
first_testfile.write_text(content)
second_testfile = test_sub_dir / "other.txt"
content = """# A very different topic
and also some different content"""
second_testfile.write_text(content)
zt.process_files_from_input(persistency_manager)
first_comparefile = test_sub_dir / "Eine_laengere_Ueberschrift.md"
second_comparefile = test_sub_dir / "A_very_different_topic.md"
assert first_comparefile.exists()
assert second_comparefile.exists()
def test_process_files_from_input_with_error(tmp_path):
test_sub_dir = tmp_path / "subdir"
test_sub_dir.mkdir()
persistency_manager = zt.PersistencyManager(tmp_path / "subdir")
first_testfile = test_sub_dir / "test.md"
# Has no valid header and should lead to error
content = """- Eine längere Überschrift
and some content"""
first_testfile.write_text(content)
second_testfile = test_sub_dir / "other.txt"
content = """# A very different topic
and also some different content"""
second_testfile.write_text(content)
zt.process_files_from_input(persistencyManager=persistency_manager)
# First file should not be changed
first_comparefile = test_sub_dir / "test.md"
second_comparefile = test_sub_dir / "A_very_different_topic.md"
assert first_comparefile.exists()
assert second_comparefile.exists()
def test_process_files_from_input_with_existing_id(tmp_path):
test_sub_dir = tmp_path / "subdir"
test_sub_dir.mkdir()
persistency_manager = zt.PersistencyManager(tmp_path / "subdir")
first_testfile = test_sub_dir / "test.md"
content = """# Eine längere Überschrift
and some content"""
first_testfile.write_text(content)
second_testfile = test_sub_dir / "04_10_Some_Old_Topic_123456789.md"
content = """# A very different topic
and also some different content"""
second_testfile.write_text(content)
zt.process_files_from_input(persistency_manager)
first_comparefile = test_sub_dir / "Eine_laengere_Ueberschrift.md"
second_comparefile = (
test_sub_dir / "04_10_A_very_different_topic_123456789.md")
assert first_comparefile.exists()
assert second_comparefile.exists()
| 35.976744
| 72
| 0.745314
| 407
| 3,094
| 5.297297
| 0.186732
| 0.071429
| 0.102041
| 0.058442
| 0.846475
| 0.82885
| 0.813544
| 0.813544
| 0.770872
| 0.743506
| 0
| 0.012058
| 0.169037
| 3,094
| 85
| 73
| 36.4
| 0.826527
| 0.052683
| 0
| 0.75
| 0
| 0
| 0.237265
| 0.072137
| 0
| 0
| 0
| 0
| 0.109375
| 1
| 0.0625
| false
| 0
| 0.015625
| 0
| 0.078125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a548a8f392d4a913bb2a229497d5178adda47a30
| 46
|
py
|
Python
|
tests/test_import.py
|
MyMusicTaste/nginx-error-log
|
cc85897e3cbea1dcbced0de025f676c997512b3f
|
[
"MIT"
] | 1
|
2021-05-16T11:11:25.000Z
|
2021-05-16T11:11:25.000Z
|
tests/test_import.py
|
MyMusicTaste/nginx-error-log
|
cc85897e3cbea1dcbced0de025f676c997512b3f
|
[
"MIT"
] | null | null | null |
tests/test_import.py
|
MyMusicTaste/nginx-error-log
|
cc85897e3cbea1dcbced0de025f676c997512b3f
|
[
"MIT"
] | null | null | null |
def test_import():
import nginx_error_log
| 15.333333
| 26
| 0.76087
| 7
| 46
| 4.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 27
| 23
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a5bb60b5e9e113405b448b89f5bad15b31474b5e
| 2,711
|
py
|
Python
|
AABBlib/fake_data.py
|
kopecmartin/grains-recognition
|
72eade0f60800a6d3c9361bb74ff35e3445a9baf
|
[
"MIT"
] | null | null | null |
AABBlib/fake_data.py
|
kopecmartin/grains-recognition
|
72eade0f60800a6d3c9361bb74ff35e3445a9baf
|
[
"MIT"
] | null | null | null |
AABBlib/fake_data.py
|
kopecmartin/grains-recognition
|
72eade0f60800a6d3c9361bb74ff35e3445a9baf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import numpy as np
borders_arr_easy = [[0, 4], [0, 5], [0, 6], [1, 1], [1, 2], [1, 3], [1, 7],
[2, 1], [2, 8], [3, 1], [3, 8], [4, 0], [4, 8], [5, 0],
[5, 9], [6, 1], [6, 8], [7, 2], [7, 6], [7, 7], [8, 3],
[8, 5], [9, 3], [9, 5], [9, 4]]
def bounded_boxes():
arr_easy = np.array(
[[0, 0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0, 0]])
arr_hard = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,0],
[1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,0],
[1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0,0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0,1],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1,1],
[0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,1],
[0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1,1],
[0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,1],
[1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0],
[0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,0]])
return arr_easy, arr_hard
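A small sketch of consuming the fixtures above: deriving the axis-aligned bounding box of the foreground with plain numpy (the real AABBlib consumer is not part of this file):
arr_easy, arr_hard = bounded_boxes()
rows, cols = np.nonzero(arr_easy)
print(rows.min(), cols.min(), rows.max(), cols.max())  # 0 0 9 9: the easy grain touches every edge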
| 55.326531
| 85
| 0.300627
| 727
| 2,711
| 1.111417
| 0.037139
| 0.883663
| 1.117574
| 1.242574
| 0.821782
| 0.821782
| 0.804455
| 0.804455
| 0.804455
| 0.793317
| 0
| 0.424848
| 0.391369
| 2,711
| 48
| 86
| 56.479167
| 0.064848
| 0.007746
| 0
| 0.121951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.02439
| 0
| 0.073171
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
a5bc5273287d00ab62e33ea8bf2173cac2d92b88
| 4,388
|
py
|
Python
|
examples/drawing/sample22_arcs1.py
|
chromia/wandplus
|
815127aeee85dbac3bc8fca35971d2153b1898a9
|
[
"ImageMagick",
"MIT"
] | null | null | null |
examples/drawing/sample22_arcs1.py
|
chromia/wandplus
|
815127aeee85dbac3bc8fca35971d2153b1898a9
|
[
"ImageMagick",
"MIT"
] | null | null | null |
examples/drawing/sample22_arcs1.py
|
chromia/wandplus
|
815127aeee85dbac3bc8fca35971d2153b1898a9
|
[
"ImageMagick",
"MIT"
] | null | null | null |
#!/usr/bin/env python
from wand.image import Image
from wand.drawing import Drawing
from wand.color import Color
# http://www.imagemagick.org/Usage/draw/#arcs
w = 100
h = 60
bgcolor = Color('skyblue')
# original imagemagick command:
# Elliptical Arcs : A radius_x,y angle large,sweep x,y
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,15 0 0,0 70,20'" path_arc.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 15), to=(70, 20))
draw.path_finish()
draw(img)
img.save(filename='sample22a.png')
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,15 0 0,1 70,20'" path_arc2.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 15), to=(70, 20), clockwise=True)
draw.path_finish()
draw(img)
img.save(filename='sample22b.png')
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,15 0 1,0 70,20'" path_arc3.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 15), to=(70, 20), large_arc=True)
draw.path_finish()
draw(img)
img.save(filename='sample22c.png')
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,15 0 1,1 70,20'" path_arc4.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 15), to=(70, 20),
large_arc=True, clockwise=True)
draw.path_finish()
draw(img)
img.save(filename='sample22d.png')
# Closed and angled elliptical arcs (defined by two edge points)
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,20 20 0,0 70,20 Z '" path_arc5.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 20), to=(70, 20), rotation=20)
draw.path_close()
draw.path_finish()
draw(img)
img.save(filename='sample22e.png')
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,20 20 1,1 70,20 Z '" path_arc6.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 20), to=(70, 20), rotation=20,
large_arc=True, clockwise=True)
draw.path_close()
draw.path_finish()
draw(img)
img.save(filename='sample22f.png')
# convert -size 100x60 xc:skyblue -fill white -stroke black \
# -draw "path 'M 30,40 A 30,20 20 0,0 70,20 \
# A 30,20 20 1,0 30,40 Z '" path_arc7.gif
with Image(width=w, height=h, background=bgcolor) as img:
with Drawing() as draw:
draw.fill_color = Color('white')
draw.stroke_color = Color('black')
draw.path_start()
draw.path_move((30, 40))
draw.path_elliptic_arc(radius=(30, 20), to=(70, 20), rotation=20)
draw.path_elliptic_arc(radius=(30, 20), to=(30, 40), rotation=20,
large_arc=True)
draw.path_close()
draw.path_finish()
draw(img)
img.save(filename='sample22g.png')
| 36.264463
| 76
| 0.609389
| 652
| 4,388
| 4
| 0.141104
| 0.119632
| 0.069785
| 0.058282
| 0.835506
| 0.828988
| 0.825153
| 0.825153
| 0.794479
| 0.780675
| 0
| 0.085679
| 0.255242
| 4,388
| 120
| 77
| 36.566667
| 0.712362
| 0.269599
| 0
| 0.759036
| 0
| 0
| 0.052764
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.036145
| 0
| 0.036145
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c2a678323f66b96d42dee2ca01f0d7c970a9eee
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_brand/na_brand_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_brand/na_brand_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_brand/na_brand_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Brand_Jng_Aatrox(Ratings):
pass
class NA_Brand_Jng_Ahri(Ratings):
pass
class NA_Brand_Jng_Akali(Ratings):
pass
class NA_Brand_Jng_Alistar(Ratings):
pass
class NA_Brand_Jng_Amumu(Ratings):
pass
class NA_Brand_Jng_Anivia(Ratings):
pass
class NA_Brand_Jng_Annie(Ratings):
pass
class NA_Brand_Jng_Ashe(Ratings):
pass
class NA_Brand_Jng_AurelionSol(Ratings):
pass
class NA_Brand_Jng_Azir(Ratings):
pass
class NA_Brand_Jng_Bard(Ratings):
pass
class NA_Brand_Jng_Blitzcrank(Ratings):
pass
class NA_Brand_Jng_Brand(Ratings):
pass
class NA_Brand_Jng_Braum(Ratings):
pass
class NA_Brand_Jng_Caitlyn(Ratings):
pass
class NA_Brand_Jng_Camille(Ratings):
pass
class NA_Brand_Jng_Cassiopeia(Ratings):
pass
class NA_Brand_Jng_Chogath(Ratings):
pass
class NA_Brand_Jng_Corki(Ratings):
pass
class NA_Brand_Jng_Darius(Ratings):
pass
class NA_Brand_Jng_Diana(Ratings):
pass
class NA_Brand_Jng_Draven(Ratings):
pass
class NA_Brand_Jng_DrMundo(Ratings):
pass
class NA_Brand_Jng_Ekko(Ratings):
pass
class NA_Brand_Jng_Elise(Ratings):
pass
class NA_Brand_Jng_Evelynn(Ratings):
pass
class NA_Brand_Jng_Ezreal(Ratings):
pass
class NA_Brand_Jng_Fiddlesticks(Ratings):
pass
class NA_Brand_Jng_Fiora(Ratings):
pass
class NA_Brand_Jng_Fizz(Ratings):
pass
class NA_Brand_Jng_Galio(Ratings):
pass
class NA_Brand_Jng_Gangplank(Ratings):
pass
class NA_Brand_Jng_Garen(Ratings):
pass
class NA_Brand_Jng_Gnar(Ratings):
pass
class NA_Brand_Jng_Gragas(Ratings):
pass
class NA_Brand_Jng_Graves(Ratings):
pass
class NA_Brand_Jng_Hecarim(Ratings):
pass
class NA_Brand_Jng_Heimerdinger(Ratings):
pass
class NA_Brand_Jng_Illaoi(Ratings):
pass
class NA_Brand_Jng_Irelia(Ratings):
pass
class NA_Brand_Jng_Ivern(Ratings):
pass
class NA_Brand_Jng_Janna(Ratings):
pass
class NA_Brand_Jng_JarvanIV(Ratings):
pass
class NA_Brand_Jng_Jax(Ratings):
pass
class NA_Brand_Jng_Jayce(Ratings):
pass
class NA_Brand_Jng_Jhin(Ratings):
pass
class NA_Brand_Jng_Jinx(Ratings):
pass
class NA_Brand_Jng_Kalista(Ratings):
pass
class NA_Brand_Jng_Karma(Ratings):
pass
class NA_Brand_Jng_Karthus(Ratings):
pass
class NA_Brand_Jng_Kassadin(Ratings):
pass
class NA_Brand_Jng_Katarina(Ratings):
pass
class NA_Brand_Jng_Kayle(Ratings):
pass
class NA_Brand_Jng_Kayn(Ratings):
pass
class NA_Brand_Jng_Kennen(Ratings):
pass
class NA_Brand_Jng_Khazix(Ratings):
pass
class NA_Brand_Jng_Kindred(Ratings):
pass
class NA_Brand_Jng_Kled(Ratings):
pass
class NA_Brand_Jng_KogMaw(Ratings):
pass
class NA_Brand_Jng_Leblanc(Ratings):
pass
class NA_Brand_Jng_LeeSin(Ratings):
pass
class NA_Brand_Jng_Leona(Ratings):
pass
class NA_Brand_Jng_Lissandra(Ratings):
pass
class NA_Brand_Jng_Lucian(Ratings):
pass
class NA_Brand_Jng_Lulu(Ratings):
pass
class NA_Brand_Jng_Lux(Ratings):
pass
class NA_Brand_Jng_Malphite(Ratings):
pass
class NA_Brand_Jng_Malzahar(Ratings):
pass
class NA_Brand_Jng_Maokai(Ratings):
pass
class NA_Brand_Jng_MasterYi(Ratings):
pass
class NA_Brand_Jng_MissFortune(Ratings):
pass
class NA_Brand_Jng_MonkeyKing(Ratings):
pass
class NA_Brand_Jng_Mordekaiser(Ratings):
pass
class NA_Brand_Jng_Morgana(Ratings):
pass
class NA_Brand_Jng_Nami(Ratings):
pass
class NA_Brand_Jng_Nasus(Ratings):
pass
class NA_Brand_Jng_Nautilus(Ratings):
pass
class NA_Brand_Jng_Nidalee(Ratings):
pass
class NA_Brand_Jng_Nocturne(Ratings):
pass
class NA_Brand_Jng_Nunu(Ratings):
pass
class NA_Brand_Jng_Olaf(Ratings):
pass
class NA_Brand_Jng_Orianna(Ratings):
pass
class NA_Brand_Jng_Ornn(Ratings):
pass
class NA_Brand_Jng_Pantheon(Ratings):
pass
class NA_Brand_Jng_Poppy(Ratings):
pass
class NA_Brand_Jng_Quinn(Ratings):
pass
class NA_Brand_Jng_Rakan(Ratings):
pass
class NA_Brand_Jng_Rammus(Ratings):
pass
class NA_Brand_Jng_RekSai(Ratings):
pass
class NA_Brand_Jng_Renekton(Ratings):
pass
class NA_Brand_Jng_Rengar(Ratings):
pass
class NA_Brand_Jng_Riven(Ratings):
pass
class NA_Brand_Jng_Rumble(Ratings):
pass
class NA_Brand_Jng_Ryze(Ratings):
pass
class NA_Brand_Jng_Sejuani(Ratings):
pass
class NA_Brand_Jng_Shaco(Ratings):
pass
class NA_Brand_Jng_Shen(Ratings):
pass
class NA_Brand_Jng_Shyvana(Ratings):
pass
class NA_Brand_Jng_Singed(Ratings):
pass
class NA_Brand_Jng_Sion(Ratings):
pass
class NA_Brand_Jng_Sivir(Ratings):
pass
class NA_Brand_Jng_Skarner(Ratings):
pass
class NA_Brand_Jng_Sona(Ratings):
pass
class NA_Brand_Jng_Soraka(Ratings):
pass
class NA_Brand_Jng_Swain(Ratings):
pass
class NA_Brand_Jng_Syndra(Ratings):
pass
class NA_Brand_Jng_TahmKench(Ratings):
pass
class NA_Brand_Jng_Taliyah(Ratings):
pass
class NA_Brand_Jng_Talon(Ratings):
pass
class NA_Brand_Jng_Taric(Ratings):
pass
class NA_Brand_Jng_Teemo(Ratings):
pass
class NA_Brand_Jng_Thresh(Ratings):
pass
class NA_Brand_Jng_Tristana(Ratings):
pass
class NA_Brand_Jng_Trundle(Ratings):
pass
class NA_Brand_Jng_Tryndamere(Ratings):
pass
class NA_Brand_Jng_TwistedFate(Ratings):
pass
class NA_Brand_Jng_Twitch(Ratings):
pass
class NA_Brand_Jng_Udyr(Ratings):
pass
class NA_Brand_Jng_Urgot(Ratings):
pass
class NA_Brand_Jng_Varus(Ratings):
pass
class NA_Brand_Jng_Vayne(Ratings):
pass
class NA_Brand_Jng_Veigar(Ratings):
pass
class NA_Brand_Jng_Velkoz(Ratings):
pass
class NA_Brand_Jng_Vi(Ratings):
pass
class NA_Brand_Jng_Viktor(Ratings):
pass
class NA_Brand_Jng_Vladimir(Ratings):
pass
class NA_Brand_Jng_Volibear(Ratings):
pass
class NA_Brand_Jng_Warwick(Ratings):
pass
class NA_Brand_Jng_Xayah(Ratings):
pass
class NA_Brand_Jng_Xerath(Ratings):
pass
class NA_Brand_Jng_XinZhao(Ratings):
pass
class NA_Brand_Jng_Yasuo(Ratings):
pass
class NA_Brand_Jng_Yorick(Ratings):
pass
class NA_Brand_Jng_Zac(Ratings):
pass
class NA_Brand_Jng_Zed(Ratings):
pass
class NA_Brand_Jng_Ziggs(Ratings):
pass
class NA_Brand_Jng_Zilean(Ratings):
pass
class NA_Brand_Jng_Zyra(Ratings):
pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
3c7f71346c2b8103ef543079cd71e4cc61d13bbf
| 32,442
|
py
|
Python
|
src/niswitch/metadata/enums.py
|
dan-davello/nimi-python
|
8d89e4cfb205f789630cf7725f0c3d57bfe7d755
|
[
"MIT"
] | null | null | null |
src/niswitch/metadata/enums.py
|
dan-davello/nimi-python
|
8d89e4cfb205f789630cf7725f0c3d57bfe7d755
|
[
"MIT"
] | null | null | null |
src/niswitch/metadata/enums.py
|
dan-davello/nimi-python
|
8d89e4cfb205f789630cf7725f0c3d57bfe7d755
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# This file is code generated, do not make changes here.
# If the generated information is not correct for python
# changes can be made in enums_addon.py and they will be
# applied at build time.
enums = {
'CabledModuleScanAdvancedBus': {
'values': [
{
'name': 'NONE',
'value': 0,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG0',
'value': 111,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig0
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG1',
'value': 112,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig1
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG2',
'value': 113,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig2
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG3',
'value': 114,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig3
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG4',
'value': 115,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig4
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG5',
'value': 116,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig5
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG6',
'value': 117,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig6
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG7',
'value': 118,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig7
line before processing the next entry in the scan list.
''',
},
},
],
},
'CabledModuleTriggerBus': {
'values': [
{
'name': 'NONE',
'value': 0,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG0',
'value': 111,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG1',
'value': 112,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG2',
'value': 113,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG3',
'value': 114,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG4',
'value': 115,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG5',
'value': 116,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG6',
'value': 117,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG7',
'value': 118,
'documentation': {
'description': '',
},
},
],
},
'HandshakingInitiation': {
'values': [
{
'name': 'MEASUREMENT_DEVICE_INITIATED',
'value': 0,
'documentation': {
'description': '''
The `niSwitch Initiate
Scan <switchviref.chm::/niSwitch_Initiate_Scan.html>`__ VI does not
return until the switch hardware is waiting for a trigger input. This
ensures that if you initiate the measurement device after calling the
`niSwitch Initiate
Scan <switchviref.chm::/niSwitch_Initiate_Scan.html>`__ VI , the switch
is sure to receive the first measurement complete (MC) signal sent by
the measurement device. The measurement device should be configured to
first take a measurement, send MC, then wait for scanner advanced output
signal. Thus, the first MC of the measurement device initiates
handshaking.
''',
},
},
{
'name': 'SWITCH_INITIATED',
'value': 1,
'documentation': {
'description': '''
The `niSwitch Initiate
Scan <switchviref.chm::/niSwitch_Initiate_Scan.html>`__ VI returns
immediately after beginning scan list execution. It is assumed that the
measurement device has already been configured and is waiting for the
scanner advanced signal. The measurement should be configured to first
wait for a trigger, then take a measurement. Thus, the first scanner
advanced output signal of the switch module initiates handshaking.
''',
},
},
],
},
'MasterSlaveScanAdvancedBus': {
'values': [
{
'name': 'NONE',
'value': 0,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG0',
'value': 111,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig0
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG1',
'value': 112,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig1
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG2',
'value': 113,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig2
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG3',
'value': 114,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig3
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG4',
'value': 115,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig4
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG5',
'value': 116,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig5
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG6',
'value': 117,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig6
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG7',
'value': 118,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig7
line before processing the next entry in the scan list.
''',
},
},
],
},
'MasterSlaveTriggerBus': {
'values': [
{
'name': 'NONE',
'value': 0,
'documentation': {
'description': '',
},
},
{
'name': 'PXI_TRIG0',
'value': 111,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig0
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG1',
'value': 112,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig1
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG2',
'value': 113,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig2
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG3',
'value': 114,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig3
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG4',
'value': 115,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig4
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG5',
'value': 116,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig5
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG6',
'value': 117,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig6
line before processing the next entry in the scan list.
''',
},
},
{
'name': 'PXI_TRIG7',
'value': 118,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the PXI\_Trig7
line before processing the next entry in the scan list.
''',
},
},
],
},
'PathCapability': {
'values': [
{
'name': 'PATH_AVAILABLE',
'value': 1,
'documentation': {
'description': 'Path Available',
},
},
{
'name': 'PATH_EXISTS',
'value': 2,
'documentation': {
'description': 'Path Exists',
},
},
{
'name': 'PATH_UNSUPPORTED',
'value': 3,
'documentation': {
'description': 'Path Unsupported',
},
},
{
'name': 'RESOURCE_IN_USE',
'value': 4,
'documentation': {
'description': 'Resource in use',
},
},
{
'name': 'SOURCE_CONFLICT',
'value': 5,
'documentation': {
'description': 'Source conflict',
},
},
{
'name': 'CHANNEL_NOT_AVAILABLE',
'value': 6,
'documentation': {
'description': 'Channel not available',
},
},
],
},
'RelayAction': {
'values': [
{
'name': 'OPEN_RELAY',
'value': 20,
'documentation': {
'description': 'Open Relay',
},
},
{
'name': 'CLOSE_RELAY',
'value': 21,
'documentation': {
'description': 'Close Relay',
},
},
],
},
'RelayPosition': {
'values': [
{
'name': 'OPEN',
'value': 10,
'documentation': {
'description': 'Open',
},
},
{
'name': 'CLOSED',
'value': 11,
'documentation': {
'description': 'Closed',
},
},
],
},
'ScanAdvancedOutput': {
'values': [
{
'name': 'NISWITCH_VAL_NONE',
'value': 0,
'documentation': {
'description': 'The switch device does not produce a Scan Advanced Output trigger.',
},
},
{
'name': 'NISWITCH_VAL_EXTERNAL',
'value': 2,
'documentation': {
'description': 'External Trigger. The switch device produces the Scan Advanced Output trigger on the external trigger output.',
},
},
{
'name': 'NISWITCH_VAL_TTL0',
'value': 111,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG0 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL1',
'value': 112,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG1 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL2',
'value': 113,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG2 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL3',
'value': 114,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG3 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL4',
'value': 115,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG4 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL5',
'value': 116,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG5 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL6',
'value': 117,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG6 line.',
},
},
{
'name': 'NISWITCH_VAL_TTL7',
'value': 118,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output on the PXI TRIG7 line.',
},
},
{
'name': 'PXI_STAR',
'value': 125,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the PXI
Star trigger bus before processing the next entry in the scan list.
''',
},
},
{
'name': 'NISWITCH_VAL_REARCONNECTOR',
'value': 1000,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output trigger on the rear connector.',
},
},
{
'name': 'NISWITCH_VAL_FRONTCONNECTOR',
'value': 1001,
'documentation': {
'description': 'The switch device produces the Scan Advanced Output trigger on the front connector.',
},
},
{
'name': 'REARCONNECTOR_MODULE1',
'value': 1021,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 1.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE2',
'value': 1022,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 2.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE3',
'value': 1023,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 3.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE4',
'value': 1024,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 4.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE5',
'value': 1025,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 5.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE6',
'value': 1026,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 6.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE7',
'value': 1027,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 7.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE8',
'value': 1028,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 8.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE9',
'value': 1029,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 9.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE10',
'value': 1030,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 10.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE11',
'value': 1031,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 11.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE12',
'value': 1032,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the rear
connector module 12.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE1',
'value': 1041,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 1.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE2',
'value': 1042,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 2.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE3',
'value': 1043,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 3.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE4',
'value': 1044,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 4.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE5',
'value': 1045,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 5.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE6',
'value': 1046,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 6.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE7',
'value': 1047,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 7.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE8',
'value': 1048,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 8.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE9',
'value': 1049,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 9.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE10',
'value': 1050,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 10.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE11',
'value': 1051,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 11.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE12',
'value': 1052,
'documentation': {
'description': '''
The switch module produces the Scan Advanced Output Trigger on the front
connector module 12.
''',
},
},
],
},
'ScanAdvancedPolarity': {
'values': [
{
'name': 'NISWITCH_VAL_RISING_EDGE',
'value': 0,
'documentation': {
'description': 'The trigger occurs on the rising edge of the signal.',
},
},
{
'name': 'NISWITCH_VAL_FALLING_EDGE',
'value': 1,
'documentation': {
'description': 'The trigger occurs on the falling edge of the signal.',
},
},
],
},
'ScanMode': {
'values': [
{
'name': 'NISWITCH_VAL_NONE',
'value': 0,
'documentation': {
'description': 'No implicit action on connections when scanning.',
},
},
{
'name': 'NISWITCH_VAL_BREAK_BEFORE_MAKE',
'value': 1,
'documentation': {
'description': 'When scanning, the switch device breaks existing connections before making new connections.',
},
},
{
'name': 'NISWITCH_VAL_BREAK_AFTER_MAKE',
'value': 2,
'documentation': {
'description': 'When scanning, the switch device breaks existing connections after making new connections.',
},
},
],
},
'TriggerInput': {
'values': [
{
'name': 'NISWITCH_VAL_IMMEDIATE',
'value': 1,
'documentation': {
'description': 'Immediate Trigger. The switch device does not wait for a trigger before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_EXTERNAL',
'value': 2,
'documentation': {
'description': 'External Trigger. The switch device waits until it receives a trigger from an external source through the external trigger input before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_SOFTWARE_TRIG',
'value': 3,
'documentation': {
'description': 'The switch device waits until you call the niSwitch_SendSoftwareTrigger function before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL0',
'value': 111,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG0 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL1',
'value': 112,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG1 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL2',
'value': 113,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG2 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL3',
'value': 114,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG3 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL4',
'value': 115,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG4 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL5',
'value': 116,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG5 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL6',
'value': 117,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG6 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_TTL7',
'value': 118,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI TRIG7 line before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_PXI_STAR',
'value': 125,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the PXI STAR trigger bus before processing the next entry in the scan list.',
},
},
{
'name': 'NISWITCH_VAL_REARCONNECTOR',
'value': 1000,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the rear connector.',
},
},
{
'name': 'NISWITCH_VAL_FRONTCONNECTOR',
'value': 1001,
'documentation': {
'description': 'The switch device waits until it receives a trigger on the front connector.',
},
},
{
'name': 'REARCONNECTOR_MODULE1',
'value': 1021,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 1.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE2',
'value': 1022,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 2.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE3',
'value': 1023,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 3.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE4',
'value': 1024,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 4.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE5',
'value': 1025,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 5.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE6',
'value': 1026,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 6.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE7',
'value': 1027,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 7.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE8',
'value': 1028,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 8.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE9',
'value': 1029,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 9.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE10',
'value': 1030,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 10.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE11',
'value': 1031,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 11.
''',
},
},
{
'name': 'REARCONNECTOR_MODULE12',
'value': 1032,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the rear
connector module 12.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE1',
'value': 1041,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 1.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE2',
'value': 1042,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 2.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE3',
'value': 1043,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 3.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE4',
'value': 1044,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 4.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE5',
'value': 1045,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 5.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE6',
'value': 1046,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 6.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE7',
'value': 1047,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 7.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE8',
'value': 1048,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 8.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE9',
'value': 1049,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 9.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE10',
'value': 1050,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 10.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE11',
'value': 1051,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 11.
''',
},
},
{
'name': 'FRONTCONNECTOR_MODULE12',
'value': 1052,
'documentation': {
'description': '''
The switch module waits until it receives a trigger on the front
connector module 12.
''',
},
},
],
},
'TriggerInputPolarity': {
'values': [
{
'name': 'NISWITCH_VAL_RISING_EDGE',
'value': 0,
'documentation': {
'description': 'The trigger occurs on the rising edge of the signal.',
},
},
{
'name': 'NISWITCH_VAL_FALLING_EDGE',
'value': 1,
'documentation': {
'description': 'The trigger occurs on the falling edge of the signal.',
},
},
],
},
'TriggerMode': {
'values': [
{
'name': 'SINGLE',
'value': 0,
'documentation': {
'description': '',
},
},
{
'name': 'MASTER',
'value': 1,
'documentation': {
'description': '',
},
},
{
'name': 'SLAVE',
'value': 2,
'documentation': {
'description': '',
},
},
],
},
}
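# --- Illustrative addition (not part of the original metadata file) ---
# The dict above maps each NI-Switch enum to a list of named integer values
# with documentation. A minimal sketch of turning one entry into a Python
# IntEnum; the binding name `enums` for the full dict is an assumption and
# does not appear in the source.
from enum import IntEnum

def make_enum(name, spec):
    """Build an IntEnum from a {'values': [{'name', 'value', ...}]} entry."""
    return IntEnum(name, {v['name']: v['value'] for v in spec['values']})

# Usage, assuming `enums` holds the metadata dict above:
# RelayAction = make_enum('RelayAction', enums['RelayAction'])
# RelayAction.CLOSE_RELAY.value  # -> 21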
| 26.247573
| 199
| 0.525831
| 3,040
| 32,442
| 5.543092
| 0.081579
| 0.189425
| 0.163432
| 0.188001
| 0.870215
| 0.857635
| 0.857635
| 0.836212
| 0.834431
| 0.824877
| 0
| 0.028599
| 0.35115
| 32,442
| 1,235
| 200
| 26.268826
| 0.771924
| 0.006535
| 0
| 0.580619
| 1
| 0.008958
| 0.579958
| 0.054681
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.000814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c893f3a54ff9073ffc97668cb717a00bdfefa59
| 119
|
py
|
Python
|
tps/snake-2/solutions/snapshot.py
|
boisgera/python-fr
|
583f7eae7baa949461464e9b53a415be16c1dd3e
|
[
"CC-BY-4.0"
] | null | null | null |
tps/snake-2/solutions/snapshot.py
|
boisgera/python-fr
|
583f7eae7baa949461464e9b53a415be16c1dd3e
|
[
"CC-BY-4.0"
] | null | null | null |
tps/snake-2/solutions/snapshot.py
|
boisgera/python-fr
|
583f7eae7baa949461464e9b53a415be16c1dd3e
|
[
"CC-BY-4.0"
] | null | null | null |
{'snake': [[6, 4], [6, 5], [5, 5], [4, 5], [4, 6], [4, 7], [4, 8]], 'direction': [0, 1], 'fruit': [28, 15], 'score': 4}
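# --- Illustrative addition (not from the original file) ---
# The line above is the entire snapshot file: a single dict literal holding
# the game state. A hedged sketch of loading it without executing it; the
# path is a placeholder.
import ast

def load_snapshot(path='snapshot.py'):
    """Parse the snapshot file's dict literal safely."""
    with open(path) as f:
        return ast.literal_eval(f.read())

# state = load_snapshot()
# state['score']  # -> 4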
| 119
| 119
| 0.378151
| 23
| 119
| 1.956522
| 0.565217
| 0.088889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216495
| 0.184874
| 119
| 1
| 119
| 119
| 0.247423
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c8a72da21eee1cd6134b30dabc303928db26000
| 347
|
py
|
Python
|
tests/core/node_height.py
|
santiagoferreira33/mainchia
|
16917701fd93cebab25bf054cf7c17967052ef2e
|
[
"Apache-2.0"
] | 103
|
2021-05-30T02:09:28.000Z
|
2022-03-17T20:45:49.000Z
|
tests/core/node_height.py
|
santiagoferreira33/mainchia
|
16917701fd93cebab25bf054cf7c17967052ef2e
|
[
"Apache-2.0"
] | 107
|
2021-05-23T02:20:26.000Z
|
2022-03-29T17:07:43.000Z
|
tests/core/node_height.py
|
santiagoferreira33/mainchia
|
16917701fd93cebab25bf054cf7c17967052ef2e
|
[
"Apache-2.0"
] | 50
|
2021-05-23T02:19:06.000Z
|
2022-01-24T07:32:50.000Z
|
def node_height_at_least(node, h):
    """Return True if the node's peak block height is at least h."""
    if node.full_node.blockchain.get_peak() is not None:
        return node.full_node.blockchain.get_peak().height >= h
    return False

def node_height_exactly(node, h):
    """Return True if the node's peak block height is exactly h."""
    if node.full_node.blockchain.get_peak() is not None:
        return node.full_node.blockchain.get_peak().height == h
    return False
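# --- Illustrative addition (not part of the original test helpers) ---
# A hedged sketch of how a test might poll node_height_at_least until the
# chain reaches a target height; the timeout and interval are assumptions.
import asyncio

async def wait_for_height(node, h, timeout=60.0, interval=1.0):
    """Return True once node_height_at_least(node, h) holds, False on timeout."""
    elapsed = 0.0
    while elapsed < timeout:
        if node_height_at_least(node, h):
            return True
        await asyncio.sleep(interval)
        elapsed += interval
    return False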
| 31.545455
| 63
| 0.717579
| 55
| 347
| 4.290909
| 0.309091
| 0.135593
| 0.20339
| 0.372881
| 0.830508
| 0.830508
| 0.830508
| 0.830508
| 0.830508
| 0.830508
| 0
| 0
| 0.178674
| 347
| 10
| 64
| 34.7
| 0.82807
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
b1bac973c0fd36bd3cf53dd16c86d781532cf13a
| 21,524
|
py
|
Python
|
venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spaceone/api/identity/v1/domain_owner.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='spaceone/api/identity/v1/domain_owner.proto',
package='spaceone.api.identity.v1',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n+spaceone/api/identity/v1/domain_owner.proto\x12\x18spaceone.api.identity.v1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/api/annotations.proto\"\x8b\x01\n\x11\x43reateDomainOwner\x12\x10\n\x08owner_id\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05\x65mail\x18\x04 \x01(\t\x12\x10\n\x08language\x18\x07 \x01(\t\x12\x10\n\x08timezone\x18\x08 \x01(\t\x12\x11\n\tdomain_id\x18\n \x01(\t\"\x8b\x01\n\x11UpdateDomainOwner\x12\x10\n\x08owner_id\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05\x65mail\x18\x04 \x01(\t\x12\x10\n\x08language\x18\x07 \x01(\t\x12\x10\n\x08timezone\x18\x08 \x01(\t\x12\x11\n\tdomain_id\x18\n \x01(\t\"9\n\x12\x44omainOwnerRequest\x12\x11\n\tdomain_id\x18\x01 \x01(\t\x12\x10\n\x08owner_id\x18\x02 \x01(\t\"J\n\x15GetDomainOwnerRequest\x12\x11\n\tdomain_id\x18\x01 \x01(\t\x12\x10\n\x08owner_id\x18\x02 \x01(\t\x12\x0c\n\x04only\x18\x03 \x03(\t\"\xa5\x01\n\x0f\x44omainOwnerInfo\x12\x10\n\x08owner_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05\x65mail\x18\x03 \x01(\t\x12\x10\n\x08language\x18\x07 \x01(\t\x12\x10\n\x08timezone\x18\x08 \x01(\t\x12\x18\n\x10last_accessed_at\x18\x0b \x01(\t\x12\x12\n\ncreated_at\x18\x0c \x01(\t\x12\x11\n\tdomain_id\x18\r \x01(\t2\xc4\x04\n\x0b\x44omainOwner\x12\x8f\x01\n\x06\x63reate\x12+.spaceone.api.identity.v1.CreateDomainOwner\x1a).spaceone.api.identity.v1.DomainOwnerInfo\"-\x82\xd3\xe4\x93\x02\'\"%/identity/v1/domain/{domain_id}/owner\x12\x90\x01\n\x06update\x12+.spaceone.api.identity.v1.UpdateDomainOwner\x1a).spaceone.api.identity.v1.DomainOwnerInfo\".\x82\xd3\xe4\x93\x02(\x1a& /identity/v1/domain/{domain_id}/owner\x12}\n\x06\x64\x65lete\x12,.spaceone.api.identity.v1.DomainOwnerRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/identity/v1/domain/{domain_id}/owner\x12\x90\x01\n\x03get\x12/.spaceone.api.identity.v1.GetDomainOwnerRequest\x1a).spaceone.api.identity.v1.DomainOwnerInfo\"-\x82\xd3\xe4\x93\x02\'\x12%/identity/v1/domain/{domain_id}/ownerb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_CREATEDOMAINOWNER = _descriptor.Descriptor(
name='CreateDomainOwner',
full_name='spaceone.api.identity.v1.CreateDomainOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='owner_id', full_name='spaceone.api.identity.v1.CreateDomainOwner.owner_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='password', full_name='spaceone.api.identity.v1.CreateDomainOwner.password', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.identity.v1.CreateDomainOwner.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='email', full_name='spaceone.api.identity.v1.CreateDomainOwner.email', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language', full_name='spaceone.api.identity.v1.CreateDomainOwner.language', index=4,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='spaceone.api.identity.v1.CreateDomainOwner.timezone', index=5,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.identity.v1.CreateDomainOwner.domain_id', index=6,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=133,
serialized_end=272,
)
_UPDATEDOMAINOWNER = _descriptor.Descriptor(
name='UpdateDomainOwner',
full_name='spaceone.api.identity.v1.UpdateDomainOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='owner_id', full_name='spaceone.api.identity.v1.UpdateDomainOwner.owner_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='password', full_name='spaceone.api.identity.v1.UpdateDomainOwner.password', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.identity.v1.UpdateDomainOwner.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='email', full_name='spaceone.api.identity.v1.UpdateDomainOwner.email', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language', full_name='spaceone.api.identity.v1.UpdateDomainOwner.language', index=4,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='spaceone.api.identity.v1.UpdateDomainOwner.timezone', index=5,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.identity.v1.UpdateDomainOwner.domain_id', index=6,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=275,
serialized_end=414,
)
_DOMAINOWNERREQUEST = _descriptor.Descriptor(
name='DomainOwnerRequest',
full_name='spaceone.api.identity.v1.DomainOwnerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.identity.v1.DomainOwnerRequest.domain_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='owner_id', full_name='spaceone.api.identity.v1.DomainOwnerRequest.owner_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=416,
serialized_end=473,
)
_GETDOMAINOWNERREQUEST = _descriptor.Descriptor(
name='GetDomainOwnerRequest',
full_name='spaceone.api.identity.v1.GetDomainOwnerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.identity.v1.GetDomainOwnerRequest.domain_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='owner_id', full_name='spaceone.api.identity.v1.GetDomainOwnerRequest.owner_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='only', full_name='spaceone.api.identity.v1.GetDomainOwnerRequest.only', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=475,
serialized_end=549,
)
_DOMAINOWNERINFO = _descriptor.Descriptor(
name='DomainOwnerInfo',
full_name='spaceone.api.identity.v1.DomainOwnerInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='owner_id', full_name='spaceone.api.identity.v1.DomainOwnerInfo.owner_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.identity.v1.DomainOwnerInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='email', full_name='spaceone.api.identity.v1.DomainOwnerInfo.email', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language', full_name='spaceone.api.identity.v1.DomainOwnerInfo.language', index=3,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='spaceone.api.identity.v1.DomainOwnerInfo.timezone', index=4,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_accessed_at', full_name='spaceone.api.identity.v1.DomainOwnerInfo.last_accessed_at', index=5,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='spaceone.api.identity.v1.DomainOwnerInfo.created_at', index=6,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.identity.v1.DomainOwnerInfo.domain_id', index=7,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=552,
serialized_end=717,
)
DESCRIPTOR.message_types_by_name['CreateDomainOwner'] = _CREATEDOMAINOWNER
DESCRIPTOR.message_types_by_name['UpdateDomainOwner'] = _UPDATEDOMAINOWNER
DESCRIPTOR.message_types_by_name['DomainOwnerRequest'] = _DOMAINOWNERREQUEST
DESCRIPTOR.message_types_by_name['GetDomainOwnerRequest'] = _GETDOMAINOWNERREQUEST
DESCRIPTOR.message_types_by_name['DomainOwnerInfo'] = _DOMAINOWNERINFO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CreateDomainOwner = _reflection.GeneratedProtocolMessageType('CreateDomainOwner', (_message.Message,), {
'DESCRIPTOR' : _CREATEDOMAINOWNER,
'__module__' : 'spaceone.api.identity.v1.domain_owner_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.identity.v1.CreateDomainOwner)
})
_sym_db.RegisterMessage(CreateDomainOwner)
UpdateDomainOwner = _reflection.GeneratedProtocolMessageType('UpdateDomainOwner', (_message.Message,), {
'DESCRIPTOR' : _UPDATEDOMAINOWNER,
'__module__' : 'spaceone.api.identity.v1.domain_owner_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.identity.v1.UpdateDomainOwner)
})
_sym_db.RegisterMessage(UpdateDomainOwner)
DomainOwnerRequest = _reflection.GeneratedProtocolMessageType('DomainOwnerRequest', (_message.Message,), {
'DESCRIPTOR' : _DOMAINOWNERREQUEST,
'__module__' : 'spaceone.api.identity.v1.domain_owner_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.identity.v1.DomainOwnerRequest)
})
_sym_db.RegisterMessage(DomainOwnerRequest)
GetDomainOwnerRequest = _reflection.GeneratedProtocolMessageType('GetDomainOwnerRequest', (_message.Message,), {
'DESCRIPTOR' : _GETDOMAINOWNERREQUEST,
'__module__' : 'spaceone.api.identity.v1.domain_owner_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.identity.v1.GetDomainOwnerRequest)
})
_sym_db.RegisterMessage(GetDomainOwnerRequest)
DomainOwnerInfo = _reflection.GeneratedProtocolMessageType('DomainOwnerInfo', (_message.Message,), {
'DESCRIPTOR' : _DOMAINOWNERINFO,
'__module__' : 'spaceone.api.identity.v1.domain_owner_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.identity.v1.DomainOwnerInfo)
})
_sym_db.RegisterMessage(DomainOwnerInfo)
_DOMAINOWNER = _descriptor.ServiceDescriptor(
name='DomainOwner',
full_name='spaceone.api.identity.v1.DomainOwner',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=720,
serialized_end=1300,
methods=[
_descriptor.MethodDescriptor(
name='create',
full_name='spaceone.api.identity.v1.DomainOwner.create',
index=0,
containing_service=None,
input_type=_CREATEDOMAINOWNER,
output_type=_DOMAINOWNERINFO,
serialized_options=b'\202\323\344\223\002\'\"%/identity/v1/domain/{domain_id}/owner',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='update',
full_name='spaceone.api.identity.v1.DomainOwner.update',
index=1,
containing_service=None,
input_type=_UPDATEDOMAINOWNER,
output_type=_DOMAINOWNERINFO,
serialized_options=b'\202\323\344\223\002(\032& /identity/v1/domain/{domain_id}/owner',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='delete',
full_name='spaceone.api.identity.v1.DomainOwner.delete',
index=2,
containing_service=None,
input_type=_DOMAINOWNERREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002\'*%/identity/v1/domain/{domain_id}/owner',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='get',
full_name='spaceone.api.identity.v1.DomainOwner.get',
index=3,
containing_service=None,
input_type=_GETDOMAINOWNERREQUEST,
output_type=_DOMAINOWNERINFO,
serialized_options=b'\202\323\344\223\002\'\022%/identity/v1/domain/{domain_id}/owner',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_DOMAINOWNER)
DESCRIPTOR.services_by_name['DomainOwner'] = _DOMAINOWNER
# @@protoc_insertion_point(module_scope)
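# --- Illustrative addition (not part of the generated module) ---
# Minimal usage sketch for the generated messages above; the field values
# are placeholders, not real data.
_msg = CreateDomainOwner(owner_id='owner-1', name='Alice', domain_id='dom-1')
_wire = _msg.SerializeToString()  # proto3 wire-format bytes
assert CreateDomainOwner.FromString(_wire).owner_id == 'owner-1'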
| 48.477477
| 2,095
| 0.75748
| 2,801
| 21,524
| 5.533381
| 0.072831
| 0.04439
| 0.077231
| 0.078586
| 0.817988
| 0.782438
| 0.774953
| 0.713336
| 0.693141
| 0.693141
| 0
| 0.041108
| 0.112804
| 21,524
| 443
| 2,096
| 48.586907
| 0.770528
| 0.02992
| 0
| 0.690594
| 1
| 0.007426
| 0.231967
| 0.19631
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.007426
| 0.014851
| 0
| 0.014851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
593e34613beeae06da0ca3f263d08d7c39ed2749
| 12,265
|
py
|
Python
|
lang/python/github/com/metaprov/modelaapi/services/notebookrun/v1/notebookrun_pb2_grpc.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | 5
|
2022-02-18T03:40:10.000Z
|
2022-03-01T16:11:24.000Z
|
lang/python/github/com/metaprov/modelaapi/services/notebookrun/v1/notebookrun_pb2_grpc.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | 1
|
2022-01-07T19:59:25.000Z
|
2022-02-04T01:21:14.000Z
|
lang/python/github/com/metaprov/modelaapi/services/notebookrun/v1/notebookrun_pb2_grpc.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | 1
|
2022-03-25T10:21:43.000Z
|
2022-03-25T10:21:43.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from github.com.metaprov.modelaapi.services.notebookrun.v1 import notebookrun_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2
class NotebookRunServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListNotebookRuns = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/ListNotebookRuns',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsResponse.FromString,
)
self.CreateNotebookRun = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/CreateNotebookRun',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.CreateNotebookRunRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.CreateNotebookRunResponse.FromString,
)
self.GetNotebookRun = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/GetNotebookRun',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.GetNotebookRunRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.GetNotebookRunResponse.FromString,
)
self.UpdateNotebookRun = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/UpdateNotebookRun',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.UpdateNotebookRunRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.UpdateNotebookRunResponse.FromString,
)
self.DeleteNotebookRun = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/DeleteNotebookRun',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.DeleteNotebookRunRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.DeleteNotebookRunResponse.FromString,
)
class NotebookRunServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def ListNotebookRuns(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateNotebookRun(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetNotebookRun(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateNotebookRun(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteNotebookRun(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_NotebookRunServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'ListNotebookRuns': grpc.unary_unary_rpc_method_handler(
servicer.ListNotebookRuns,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsResponse.SerializeToString,
),
'CreateNotebookRun': grpc.unary_unary_rpc_method_handler(
servicer.CreateNotebookRun,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.CreateNotebookRunRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.CreateNotebookRunResponse.SerializeToString,
),
'GetNotebookRun': grpc.unary_unary_rpc_method_handler(
servicer.GetNotebookRun,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.GetNotebookRunRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.GetNotebookRunResponse.SerializeToString,
),
'UpdateNotebookRun': grpc.unary_unary_rpc_method_handler(
servicer.UpdateNotebookRun,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.UpdateNotebookRunRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.UpdateNotebookRunResponse.SerializeToString,
),
'DeleteNotebookRun': grpc.unary_unary_rpc_method_handler(
servicer.DeleteNotebookRun,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.DeleteNotebookRunRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.DeleteNotebookRunResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class NotebookRunService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def ListNotebookRuns(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/ListNotebookRuns',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateNotebookRun(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/CreateNotebookRun',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.CreateNotebookRunRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.CreateNotebookRunResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetNotebookRun(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/GetNotebookRun',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.GetNotebookRunRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.GetNotebookRunResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateNotebookRun(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/UpdateNotebookRun',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.UpdateNotebookRunRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.UpdateNotebookRunResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteNotebookRun(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.notebookrun.v1.NotebookRunService/DeleteNotebookRun',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.DeleteNotebookRunRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.DeleteNotebookRunResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
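# --- Illustrative addition (not part of the generated module) ---
# A hedged client sketch using the stub above; the target address is a
# placeholder and the empty request relies on proto3 field defaults.
def _example_list_notebook_runs(target='localhost:8080'):
    """Call ListNotebookRuns over an insecure channel (illustration only)."""
    with grpc.insecure_channel(target) as channel:
        stub = NotebookRunServiceStub(channel)
        request = github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_notebookrun_dot_v1_dot_notebookrun__pb2.ListNotebookRunsRequest()
        return stub.ListNotebookRuns(request)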
| 61.633166
| 183
| 0.754015
| 1,233
| 12,265
| 7.000811
| 0.089213
| 0.100556
| 0.043095
| 0.053869
| 0.882762
| 0.882762
| 0.882762
| 0.855306
| 0.84152
| 0.809546
| 0
| 0.007525
| 0.187362
| 12,265
| 198
| 184
| 61.944444
| 0.858533
| 0.055932
| 0
| 0.493827
| 1
| 0
| 0.11172
| 0.084681
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.012346
| 0.030864
| 0.135802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
594a5d266064af3d4b1a16cc1541ae2f0f7ee9ac
| 12,188
|
py
|
Python
|
data process copy.py
|
KelvinLim96/Machine-Learning-and-Data-Analytics-based-Operation-for-Smart-Grids
|
2410840e06f6e590a45d463457a4ee9b0497c09a
|
[
"MIT"
] | null | null | null |
data process copy.py
|
KelvinLim96/Machine-Learning-and-Data-Analytics-based-Operation-for-Smart-Grids
|
2410840e06f6e590a45d463457a4ee9b0497c09a
|
[
"MIT"
] | null | null | null |
data process copy.py
|
KelvinLim96/Machine-Learning-and-Data-Analytics-based-Operation-for-Smart-Grids
|
2410840e06f6e590a45d463457a4ee9b0497c09a
|
[
"MIT"
] | 1
|
2021-06-12T22:14:47.000Z
|
2021-06-12T22:14:47.000Z
|
# Aggregate the meter readings in original.xlsx into hourly per-site totals
# (NEC, CANTEEN_2, SPMS, RTP; real power P and reactive power Q), writing
# the result to sheet2.xls.
import pandas as pd
import xlwt

book = xlwt.Workbook()
sheet2 = book.add_sheet('sheet2', cell_overwrite_ok=True)
data = pd.read_excel('C:\\Users\\65837\\Desktop\\original.xlsx')
print(float(data.iloc[2,[2]]))
#print(float(data.iloc[2,[7]]))
i = 0     # running sum for the current hour
time = 0  # hour-of-day index, 0..23
n = 0     # offset used to skip out-of-range samples
for row in range(2,8786,720):
sheet2.write(1,2,'NEC_P')
while time < 24:
for column in range(7,73,13):
if column == 7:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>10):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 20:
while(float(data.iloc[row+time-1+n,[column]])<10 or float(data.iloc[row+time-1+n,[column]])>40):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 46:
while(float(data.iloc[row+time-1+n,[column]])<100 or float(data.iloc[row+time-1+n,[column]])>800):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 59:
while(float(data.iloc[row+time-1+n,[column]])<35 or float(data.iloc[row+time-1+n,[column]])>200):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 72:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>120):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,2,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,3,'NEC_Q')
while time < 24:
for column in range(6,72,13):
if column == 6:
while(float(data.iloc[row+time-1+n,[column]])<-5 or float(data.iloc[row+time-1+n,[column]])>10):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 19:
while(float(data.iloc[row+time-1+n,[column]])<-10 or float(data.iloc[row+time-1+n,[column]])>20):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 32:
while(float(data.iloc[row+time-1+n,[column]])<-10 or float(data.iloc[row+time-1+n,[column]])>20):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 45:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>300):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 58:
while(float(data.iloc[row+time-1+n,[column]])<1 or float(data.iloc[row+time-1+n,[column]])>120):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 71:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>120):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,3,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,4,'CANTEEN_2_P')
while time < 24:
for column in range(85,99,13):
if column == 85:
while(float(data.iloc[row+time-1+n,[column]])<-5 or float(data.iloc[row+time-1+n,[column]])>500):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 98:
while(float(data.iloc[row+time-1+n,[column]])<-10 or float(data.iloc[row+time-1+n,[column]])>300):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,4,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,5,'CANTEEN_2_Q')
while time < 24:
for column in range(84,98,13):
if column == 84:
while(float(data.iloc[row+time-1+n,[column]])<-5 or float(data.iloc[row+time-1+n,[column]])>100):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 97:
while(float(data.iloc[row+time-1+n,[column]])<-10 or float(data.iloc[row+time-1+n,[column]])>100):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,5,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,6,'SPMS_P')
while time < 24:
for column in range(111,177,13):
if column == 111:
while(float(data.iloc[row+time-1+n,[column]])<300 or float(data.iloc[row+time-1+n,[column]])>600):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 124:
while(float(data.iloc[row+time-1+n,[column]])<200 or float(data.iloc[row+time-1+n,[column]])>600):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 137:
while(float(data.iloc[row+time-1+n,[column]])<400 or float(data.iloc[row+time-1+n,[column]])>800):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 150:
while(float(data.iloc[row+time-1+n,[column]])<300 or float(data.iloc[row+time-1+n,[column]])>800):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 163:
while(float(data.iloc[row+time-1+n,[column]])<150 or float(data.iloc[row+time-1+n,[column]])>400):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 176:
while(float(data.iloc[row+time-1+n,[column]])<300 or float(data.iloc[row+time-1+n,[column]])>700):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,6,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,7,'SPMS_Q')
while time < 24:
for column in range(110,176,13):
if column == 110:
while(float(data.iloc[row+time-1+n,[column]])<200 or float(data.iloc[row+time-1+n,[column]])>400):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 123:
while(float(data.iloc[row+time-1+n,[column]])<100 or float(data.iloc[row+time-1+n,[column]])>250):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 136:
while(float(data.iloc[row+time-1+n,[column]])<100 or float(data.iloc[row+time-1+n,[column]])>200):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 149:
while(float(data.iloc[row+time-1+n,[column]])<150 or float(data.iloc[row+time-1+n,[column]])>300):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 162:
while(float(data.iloc[row+time-1+n,[column]])<50 or float(data.iloc[row+time-1+n,[column]])>150):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 175:
while(float(data.iloc[row+time-1+n,[column]])<60 or float(data.iloc[row+time-1+n,[column]])>200):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,7,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,8,'RTP_P')
while time < 24:
for column in range(189,229,13):
if column == 189:
while(float(data.iloc[row+time-1+n,[column]])<10 or float(data.iloc[row+time-1+n,[column]])>50):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 202:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>30):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 215:
while(float(data.iloc[row+time-1+n,[column]])<60 or float(data.iloc[row+time-1+n,[column]])>150):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 228:
while(float(data.iloc[row+time-1+n,[column]])<200 or float(data.iloc[row+time-1+n,[column]])>700):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,8,i)
time = time+1
i = 0
time = 0
for row in range(2,8786,720):
sheet2.write(1,9,'RTP_Q')
while time < 24:
for column in range(188,228,13):
if column == 188:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>10):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 201:
while(float(data.iloc[row+time-1+n,[column]])<0 or float(data.iloc[row+time-1+n,[column]])>10):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 214:
while(float(data.iloc[row+time-1+n,[column]])<-20 or float(data.iloc[row+time-1+n,[column]])>20):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
if column == 227:
while(float(data.iloc[row+time-1+n,[column]])<100 or float(data.iloc[row+time-1+n,[column]])>350):
n=n+1
i = i+float(data.iloc[row+time-1+n,[column]])
n=0
sheet2.write((int(row/720))*24+time+2,9,i)
time = time+1
i = 0
time = 0
book.save('sheet2.xls')
'''sheet2.write(0,1,str('NEC EMS1_IN1'))
sheet2.write(0,2,str('NEC EMS1_IN2'))
sheet2.write(0,3,str('NEC EMS1_IN3'))
sheet2.write(0,4,str('NEC MSB1_IN1'))
sheet2.write(0,5,str('NEC MSB1-IN2'))
sheet2.write(0,6,str('NEC MSB1-IN3'))
sheet2.write(0,7,str('CANT2 MSB1_IN1'))
sheet2.write(0,8,str('CANT2 MSB1_IN2'))
sheet2.write(0,9,str('SPMS EMSB1_IN1'))
sheet2.write(0,10,str('SPMS MSB1_IN1'))
sheet2.write(0,11,str('SPMS MSB1_IN2'))
sheet2.write(0,12,str('SPMS MSB1_IN3'))
sheet2.write(0,11,str('SPMS MSB2_IN1'))
sheet2.write(0,12,str('SPMS MSB2_IN2'))
sheet2.write(0,13,str('SPMS MSB1_IN2'))
sheet2.write(0,14,str('SPMS MSB1_IN3'))
sheet2.write(0,15,str('SPMS MSB1_IN2'))
sheet2.write(0,16,str('SPMS MSB1_IN3'))'''
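# Hypothetical vectorized sketch (not part of the original script): each block
# above advances n past samples outside a per-column plausibility band, then
# adds the first in-range sample. With pandas masking, the same selection can
# be written once. The band dict copies the RTP_Q thresholds used above; the
# helper name `first_in_range_sum` is an assumption.
def first_in_range_sum(data, bands, start_row):
    total = 0.0
    for column, (low, high) in bands.items():
        col = data.iloc[start_row:, column].astype(float)
        in_range = col[(col >= low) & (col <= high)]
        if not in_range.empty:
            total += float(in_range.iloc[0])
    return total
RTP_Q_BANDS = {188: (0, 10), 201: (0, 10), 214: (-20, 20), 227: (100, 350)}
# e.g. the RTP_Q inner loops become: i = first_in_range_sum(data, RTP_Q_BANDS, row + time - 1)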
| 40.899329
| 115
| 0.469396
| 1,865
| 12,188
| 3.050938
| 0.074531
| 0.099297
| 0.244464
| 0.295255
| 0.87065
| 0.846046
| 0.837961
| 0.815114
| 0.772759
| 0.742179
| 0
| 0.101197
| 0.355432
| 12,188
| 297
| 116
| 41.037037
| 0.623091
| 0.002461
| 0
| 0.605469
| 0
| 0
| 0.009933
| 0.003612
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.03125
| 0.003906
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3ccdf7a2fc28591521deaa084cb3a6c6d4d809f8
| 125
|
py
|
Python
|
torch_text_similarity/data/__init__.py
|
flydragon2018/torchtextsimilarity
|
3707ba6162ed93de422fe6fe4157f60be6159d72
|
[
"MIT"
] | null | null | null |
torch_text_similarity/data/__init__.py
|
flydragon2018/torchtextsimilarity
|
3707ba6162ed93de422fe6fe4157f60be6159d72
|
[
"MIT"
] | null | null | null |
torch_text_similarity/data/__init__.py
|
flydragon2018/torchtextsimilarity
|
3707ba6162ed93de422fe6fe4157f60be6159d72
|
[
"MIT"
] | null | null | null |
from .dataset import train_sts_b_dataset, dev_sts_b_dataset, test_sts_b_dataset, train_eval_sts_a_dataset, test_sts_a_dataset
| 125
| 125
| 0.904
| 24
| 125
| 4.041667
| 0.416667
| 0.123711
| 0.340206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056
| 125
| 1
| 125
| 125
| 0.822034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3ceb7a1e1220b644c14878f372e9ab28f89870fe
| 231
|
py
|
Python
|
notepadqq_api/__init__.py
|
notepadqq/NotepadqqApi_Python
|
b494ba948dc1cb3a962a1b52b9e989740fabc500
|
[
"MIT"
] | 1
|
2019-04-07T00:07:23.000Z
|
2019-04-07T00:07:23.000Z
|
notepadqq_api/__init__.py
|
notepadqq/NotepadqqApi_Python
|
b494ba948dc1cb3a962a1b52b9e989740fabc500
|
[
"MIT"
] | null | null | null |
notepadqq_api/__init__.py
|
notepadqq/NotepadqqApi_Python
|
b494ba948dc1cb3a962a1b52b9e989740fabc500
|
[
"MIT"
] | null | null | null |
"""
Library for writing Notepadqq extensions.
"""
from notepadqq_api.notepadqq_message_error import NotepadqqMessageError
from notepadqq_api.notepadqq_api import NotepadqqApi
__all__ = ["notepadqq_api", "notepadqq_message_error"]
| 28.875
| 71
| 0.839827
| 26
| 231
| 7
| 0.5
| 0.263736
| 0.346154
| 0.274725
| 0.362637
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08658
| 231
| 8
| 72
| 28.875
| 0.862559
| 0.177489
| 0
| 0
| 0
| 0
| 0.196721
| 0.125683
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a72218879d2b4b7e27fc71cce59434ec30f4d340
| 143
|
py
|
Python
|
test1.py
|
tim-aarons/pynetA
|
8dca88d73ffd86c84f45ade9e00b3c2b9e048f87
|
[
"Apache-2.0"
] | null | null | null |
test1.py
|
tim-aarons/pynetA
|
8dca88d73ffd86c84f45ade9e00b3c2b9e048f87
|
[
"Apache-2.0"
] | null | null | null |
test1.py
|
tim-aarons/pynetA
|
8dca88d73ffd86c84f45ade9e00b3c2b9e048f87
|
[
"Apache-2.0"
] | null | null | null |
print("totally new file pynetA - File test 1")
print("totally new file pynetA - File test 1")
print("totally new file pynetA - File test 1")
| 23.833333
| 46
| 0.713287
| 24
| 143
| 4.25
| 0.291667
| 0.352941
| 0.441176
| 0.558824
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.025641
| 0.181818
| 143
| 5
| 47
| 28.6
| 0.846154
| 0
| 0
| 1
| 0
| 0
| 0.78169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 15
|
59aaee0d4feb2683bf7e3d4f957e8b95f85f8d00
| 790
|
py
|
Python
|
cli_stats/database/mongo_db.py
|
timoudas/premier_league_api
|
2b850466ed1c910ee901c68e660706d55f53df61
|
[
"MIT"
] | 2
|
2020-02-13T12:30:47.000Z
|
2020-03-21T16:32:47.000Z
|
cli_stats/database/mongo_db.py
|
timoudas/premier_league_api
|
2b850466ed1c910ee901c68e660706d55f53df61
|
[
"MIT"
] | 2
|
2021-04-06T18:27:57.000Z
|
2021-06-02T03:51:47.000Z
|
cli_stats/database/mongo_db.py
|
timoudas/premier_league_api
|
2b850466ed1c910ee901c68e660706d55f53df61
|
[
"MIT"
] | null | null | null |
from .mongo_db_league import DBLeague
from .mongo_db_league import executePushFixtureLeague
from .mongo_db_league import executePushFixturePlayerStatsLeague
from .mongo_db_league import executePushLeagueStandingsLeague
from .mongo_db_league import executePushPlayerLeague
from .mongo_db_league import executePushSchedule
from .mongo_db_league import executePushTeamLeague
from .mongo_db_league import executePushTeamSquadsLeague
from .mongo_db_year import DB
from .mongo_db_year import executePushFixture
from .mongo_db_year import executePushFixturePlayerStats
from .mongo_db_year import executePushLeagueStandings
from .mongo_db_year import executePushPlayer
from .mongo_db_year import executePushTeam
from .mongo_db_year import executePushTeamSquads
if __name__ == '__main__':
pass
| 41.578947
| 64
| 0.886076
| 94
| 790
| 7.042553
| 0.255319
| 0.203927
| 0.249245
| 0.205438
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 790
| 19
| 65
| 41.578947
| 0.919444
| 0
| 0
| 0
| 0
| 0
| 0.010114
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.058824
| 0.882353
| 0
| 0.882353
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ab818a845756515501437b5f89cdc71b92751081
| 3,494
|
py
|
Python
|
tests/test_fredkin_self_replicating_ca.py
|
lantunes/netomaton
|
fef60a787d031c9c7b1eb4ff990f7c12145579ef
|
[
"Apache-2.0"
] | 35
|
2018-12-07T14:11:29.000Z
|
2022-03-17T23:47:21.000Z
|
tests/test_fredkin_self_replicating_ca.py
|
lantunes/netomaton
|
fef60a787d031c9c7b1eb4ff990f7c12145579ef
|
[
"Apache-2.0"
] | 2
|
2020-03-15T06:45:39.000Z
|
2020-04-15T23:50:13.000Z
|
tests/test_fredkin_self_replicating_ca.py
|
lantunes/netomaton
|
fef60a787d031c9c7b1eb4ff990f7c12145579ef
|
[
"Apache-2.0"
] | 6
|
2019-10-18T08:47:32.000Z
|
2022-03-02T10:17:12.000Z
|
import numpy as np  # used directly in the activity rules below; imported explicitly rather than via rule_test's star import
import netomaton as ntm
from .rule_test import *
class TestFredkinSelfReplicatingCA(RuleTest):
def test_von_neumann(self):
network = ntm.topology.cellular_automaton2d(rows=60, cols=60, r=1, neighbourhood='von Neumann')
initial_conditions = ntm.init_simple2d(60, 60)
# the letter "E"
initial_conditions[1709] = 1
initial_conditions[1710] = 1
initial_conditions[1711] = 1
initial_conditions[1769] = 1
initial_conditions[1829] = 1
initial_conditions[1830] = 1
initial_conditions[1831] = 1
initial_conditions[1889] = 1
initial_conditions[1949] = 1
initial_conditions[1950] = 1
initial_conditions[1951] = 1
def activity_rule(ctx):
return (np.sum(ctx.neighbourhood_activities) - ctx.current_activity) % 2
trajectory = ntm.evolve(initial_conditions=initial_conditions, network=network, timesteps=20,
activity_rule=activity_rule)
activities_list = ntm.get_activities_over_time_as_list(trajectory)
expected = self._convert_to_list_of_lists("fredkin_self_replicating_ca_vonneumann.ca")
np.testing.assert_equal(expected, activities_list)
def test_moore(self):
network = ntm.topology.cellular_automaton2d(rows=60, cols=60, r=1, neighbourhood='Moore')
initial_conditions = ntm.init_simple2d(60, 60)
# the letter "E"
initial_conditions[1709] = 1
initial_conditions[1710] = 1
initial_conditions[1711] = 1
initial_conditions[1769] = 1
initial_conditions[1829] = 1
initial_conditions[1830] = 1
initial_conditions[1831] = 1
initial_conditions[1889] = 1
initial_conditions[1949] = 1
initial_conditions[1950] = 1
initial_conditions[1951] = 1
def activity_rule(ctx):
return (np.sum(ctx.neighbourhood_activities) - ctx.current_activity) % 2
trajectory = ntm.evolve(initial_conditions=initial_conditions, network=network, timesteps=20,
activity_rule=activity_rule)
activities_list = ntm.get_activities_over_time_as_list(trajectory)
expected = self._convert_to_list_of_lists("fredkin_self_replicating_ca_moore.ca")
np.testing.assert_equal(expected, activities_list)
def test_multicolor(self):
network = ntm.topology.cellular_automaton2d(rows=60, cols=60, r=1, neighbourhood='von Neumann')
initial_conditions = ntm.init_simple2d(60, 60)
# the letter "E"
initial_conditions[1709] = 0
initial_conditions[1710] = 1
initial_conditions[1711] = 2
initial_conditions[1769] = 3
initial_conditions[1829] = 4
initial_conditions[1830] = 5
initial_conditions[1831] = 6
initial_conditions[1889] = 7
initial_conditions[1949] = 8
initial_conditions[1950] = 9
initial_conditions[1951] = 10
def activity_rule(ctx):
return (np.sum(ctx.neighbourhood_activities) - ctx.current_activity) % 11
trajectory = ntm.evolve(initial_conditions=initial_conditions, network=network, timesteps=23,
activity_rule=activity_rule)
activities_list = ntm.get_activities_over_time_as_list(trajectory)
expected = self._convert_to_list_of_lists("fredkin_self_replicating_ca_multicolor.ca")
np.testing.assert_equal(expected, activities_list)
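# Hypothetical refactor sketch (not part of the original tests): all three
# tests seed the same letter-"E" glyph into the flattened 60x60 grid, so the
# cell indices could live in one helper. The name `seed_letter_e` is an
# assumption; the indices are copied from the tests above.
def seed_letter_e(initial_conditions, values=None):
    cells = [1709, 1710, 1711, 1769, 1829, 1830, 1831, 1889, 1949, 1950, 1951]
    if values is None:
        values = [1] * len(cells)  # binary case used by the first two tests
    for cell, value in zip(cells, values):
        initial_conditions[cell] = value
    return initial_conditions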
| 39.258427
| 103
| 0.673726
| 404
| 3,494
| 5.532178
| 0.205446
| 0.319463
| 0.169128
| 0.02953
| 0.855034
| 0.855034
| 0.855034
| 0.835794
| 0.816107
| 0.816107
| 0
| 0.078868
| 0.241557
| 3,494
| 89
| 104
| 39.258427
| 0.764528
| 0.012593
| 0
| 0.666667
| 0
| 0
| 0.042066
| 0.034233
| 0
| 0
| 0
| 0
| 0.045455
| 1
| 0.090909
| false
| 0
| 0.030303
| 0.045455
| 0.181818
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9f954124efb224a92f24a620e60e2f60efae5d1
| 146
|
py
|
Python
|
lcarmq/daglca/__init__.py
|
GianCarloMilanese/lcarmq
|
93b12b1edb6e0cddcb8d30a56d9601eabe9f7320
|
[
"MIT"
] | null | null | null |
lcarmq/daglca/__init__.py
|
GianCarloMilanese/lcarmq
|
93b12b1edb6e0cddcb8d30a56d9601eabe9f7320
|
[
"MIT"
] | null | null | null |
lcarmq/daglca/__init__.py
|
GianCarloMilanese/lcarmq
|
93b12b1edb6e0cddcb8d30a56d9601eabe9f7320
|
[
"MIT"
] | null | null | null |
from lcarmq.daglca.abs_daglca import AbstractDagLca
from lcarmq.daglca.akbln1989 import Akbln1989
from lcarmq.daglca.bfcpss2005 import Bfcpss2005
| 36.5
| 51
| 0.876712
| 19
| 146
| 6.684211
| 0.421053
| 0.23622
| 0.377953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 0.082192
| 146
| 3
| 52
| 48.666667
| 0.828358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0588c6d50a1a785c1dd74c27a124c8d0b4002532
| 8,902
|
py
|
Python
|
Codes/Functions_GJI_2017.py
|
lviens/2017_GJI
|
635790b875d116b31fe646394669118c455ef10b
|
[
"MIT"
] | 2
|
2019-05-25T15:06:04.000Z
|
2021-01-05T22:09:18.000Z
|
Codes/Functions_GJI_2017.py
|
lviens/2017_GJI
|
635790b875d116b31fe646394669118c455ef10b
|
[
"MIT"
] | null | null | null |
Codes/Functions_GJI_2017.py
|
lviens/2017_GJI
|
635790b875d116b31fe646394669118c455ef10b
|
[
"MIT"
] | null | null | null |
import numpy as np
import obspy.signal as obssig
def one_bit(dat_s, dat_r, delta, tpm, std_control, count):
"""Compute cross-correlation of 1-bit data.
Parameters
----------
Inputs
dat_s: numpy.ndarray
Record at the virtual source (1-h long in Viens et al. (2017, GJI))
dat_r: numpy.ndarray
        Record at the virtual receiver (1-h long in Viens et al. (2017, GJI))
delta: float
Sampling rate in Hz (4 Hz in Viens et al. (2017, GJI))
tpm: int
        To return the cross-correlation between -"tpm" seconds and +"tpm" seconds (tpm = 1500 s in Viens et al. (2017, GJI)).
count: int
When looping over many files, count the number of cross-correlations (should be equal to 1 for the first cross-correlation).
    std_control: int
If the virtual source or receiver records have spikes larger than std_control times the standard deviation of the signal, the cross-correlation is not computed (std_control = 10 in Viens et al. (2017, GJI)).
-------
Outputs
corr: numpy.ndarray
Cross-correlated waveform of 1-bit data in time domain between -"tpm" and +"tpm" seconds.
nodata: int
If the virtual source or receiver records have spikes larger than "std_control" times the standard deviation of the signal, "nodata" is equal to 1
count: int
Incremented "count" variable.
"""
n = len(dat_s)
std_s = np.std(dat_s)
std_r = np.std(dat_r)
mx_s = max(np.absolute(dat_s))
mx_r = max(np.absolute(dat_r))
    if mx_s < std_s * std_control and mx_r < std_r * std_control:
dat_r[dat_r[:] >0 ]=1
dat_r[dat_r[:] <0 ]=-1
dat_s[dat_s[:] >0 ]=1
dat_s[dat_s[:] <0 ]=-1
fft_s = np.fft.fft(dat_s, n*5)
fft_r = np.fft.fft(dat_r, n*5)
cc_t1 = np.real(np.fft.ifft( (fft_r * np.conj(fft_s))))
corr2 = np.concatenate((cc_t1[int(len(cc_t1)/2):], cc_t1[:int(len(cc_t1)/2)]))
corr = corr2[int(len(corr2)/2)-tpm*delta:int(len(corr2)/2)+tpm*delta]
nodata = 0
        count += 1
else:
corr = 0
nodata = 1
return corr, nodata, count
def cross_corr(dat_s, dat_r, delta, tpm, std_control, count):
"""Compute cross-correlation of raw data
Parameters
----------
Inputs
dat_s: numpy.ndarray
Record at the virtual source (1-h long in Viens et al. (2017, GJI))
dat_r: numpy.ndarray
        Record at the virtual receiver (1-h long in Viens et al. (2017, GJI))
delta: float
Sampling rate in Hz (4 Hz in Viens et al. (2017, GJI))
tpm: int
        To return the cross-correlation between -"tpm" seconds and +"tpm" seconds (tpm = 1500 s in Viens et al. (2017, GJI)).
count: int
When looping over many files, count the number of cross-correlations (should be equal to 1 for the first cross-correlation).
std_control: int
If the virtual source or receiver records have spikes larger than std_control times the standard deviation of the signal, the cross-correlation is not computed (std_control = 10 in Viens et al. (2017, GJI)).
-------
Outputs
corr: numpy.ndarray
Cross-correlated waveform of raw data in time domain between -tpm and +tpm seconds.
nodata: int
If the virtual source or receiver records have spikes larger than "std_control" times the standard deviation of the signal, "nodata" is equal to 1
count: int
Incremented "count" variable.
"""
n = len(dat_s)
std_s = np.std(dat_s)
std_r = np.std(dat_r)
mx_s = max(np.absolute(dat_s))
mx_r = max(np.absolute(dat_r))
    if mx_s < std_s * std_control and mx_r < std_r * std_control:
fft_s = np.fft.fft(dat_s, n*5)
fft_r = np.fft.fft(dat_r, n*5)
cc_t1 = np.real(np.fft.ifft( (fft_r * np.conj(fft_s))))
corr2 = np.concatenate((cc_t1[int(len(cc_t1)/2):], cc_t1[:int(len(cc_t1)/2)]))
corr = corr2[int(len(corr2)/2)-tpm*delta:int(len(corr2)/2)+tpm*delta]
nodata = 0
        count += 1
else:
corr = 0
nodata = 1
return corr, nodata, count
def deconvolution_stab(dat_s, dat_r, delta, tpm, std_control, count, stab):
"""Compute Deconvolution of raw data.
Parameters
----------
Inputs
dat_s: numpy.ndarray
Record at the virtual source (1-h long in Viens et al. (2017, GJI))
dat_r: numpy.ndarray
        Record at the virtual receiver (1-h long in Viens et al. (2017, GJI))
delta: float
Sampling rate in Hz (4 Hz in Viens et al. (2017, GJI))
tpm: int
        To return the cross-correlation between -"tpm" seconds and +"tpm" seconds (tpm = 1500 s in Viens et al. (2017, GJI)).
std_control: int
If the virtual source or receiver records have spikes larger than "std_control times" the standard deviation of the signal, the cross-correlation is not computed (std_control = 10 in Viens et al. (2017, GJI)).
count: int
When looping over many files, count the number of cross-correlations (should be equal to 1 for the first cross-correlation).
stab: int
To smooth the denominator term over "stab" points (stab = 10 in Viens et al. (2017, GJI)).
-------
Outputs
corr: numpy.ndarray
Cross-correlated waveform in time domain between -"tpm" and +"tpm" seconds.
nodata: int
If the virtual source or receiver records have spikes larger than "std_control" times the standard deviation of the signal, "nodata" is equal to 1
count: int
Incremented "count" variable.
"""
n = len(dat_s)
std_s = np.std(dat_s)
std_r = np.std(dat_r)
mx_s = max(np.absolute(dat_s))
mx_r = max(np.absolute(dat_r))
    if mx_s < std_s * std_control and mx_r < std_r * std_control:
fft_s = np.fft.fft(dat_s, n*5)
fft_r = np.fft.fft(dat_r, n*5)
sj = obssig.util.smooth(np.absolute(fft_s), stab)
dec_t1 = np.real(np.fft.ifft( (fft_r * np.conj(fft_s))/ (sj**2) ))
dec_t2 = np.concatenate((dec_t1[int(len(dec_t1)/2):], dec_t1[:int(len(dec_t1)/2)]))
dec_t = dec_t2[int(len(dec_t2)/2)-tpm*delta:int(len(dec_t2)/2)+tpm*delta]
nodata = 0
        count += 1
else:
dec_t = 0
nodata = 1
return dec_t, nodata, count
def coherency_stab(dat_s, dat_r, delta, tpm, std_control, count, stab):
"""Compute coherency of raw data.
Parameters
----------
Inputs
dat_s: numpy.ndarray
Record at the virtual source (1-h long in Viens et al. (2017, GJI))
dat_r: numpy.ndarray
        Record at the virtual receiver (1-h long in Viens et al. (2017, GJI))
delta: float
Sampling rate in Hz (4 Hz in Viens et al. (2017, GJI))
tpm: int
        To return the cross-correlation between -"tpm" seconds and +"tpm" seconds (tpm = 1500 s in Viens et al. (2017, GJI)).
std_control: int
If the virtual source or receiver records have spikes larger than std_control times the standard deviation of the signal, the cross-correlation is not computed (std_control = 10 in Viens et al. (2017, GJI)).
count: int
When looping over many files, count the number of cross-correlations (should be equal to 1 for the first cross-correlation).
stab: int
To smooth the denominator term over "stab" points (stab = 10 in Viens et al. (2017, GJI)).
-------
Outputs
corr: numpy.ndarray
Coherency waveform in time domain between -"tpm" and +"tpm" seconds.
nodata: int
If the virtual source or receiver records have spikes larger than "std_control" times the standard deviation of the signal, "nodata" is equal to 1
count: int
Incremented "count" variable.
"""
n = len(dat_s)
std_s = np.std(dat_s)
std_r = np.std(dat_r)
mx_s = max(np.absolute(dat_s))
mx_r = max(np.absolute(dat_r))
    if mx_s < std_s * std_control and mx_r < std_r * std_control:
fft_s = np.fft.fft(dat_s, n*5)
fft_r = np.fft.fft(dat_r, n*5)
sj = obssig.util.smooth(np.absolute(fft_s), stab)
si = obssig.util.smooth(np.absolute(fft_r), stab)
coh_t1 = np.real(np.fft.ifft( (fft_r * np.conj(fft_s))/ (si*sj) ))
coh_t2 = np.concatenate((coh_t1[int(len(coh_t1)/2):], coh_t1[:int(len(coh_t1)/2)]))
coh_t = coh_t2[int(len(coh_t2)/2)-tpm*delta:int(len(coh_t2)/2)+tpm*delta]
nodata = 0
        count += 1
else:
coh_t = 0
nodata = 1
return coh_t, nodata, count
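# Illustrative usage sketch (not part of the original module): cross-correlate
# two synthetic 1-h records sampled at 4 Hz. The parameter values follow the
# defaults quoted in the docstrings above; the Gaussian test data is an
# assumption for demonstration only.
if __name__ == "__main__":
    delta = 4  # Hz, as in Viens et al. (2017, GJI)
    rng = np.random.default_rng(0)
    dat_s = rng.standard_normal(3600 * delta)  # virtual-source record
    dat_r = rng.standard_normal(3600 * delta)  # virtual-receiver record
    corr, nodata, count = one_bit(dat_s, dat_r, delta, tpm=1500,
                                  std_control=10, count=0)
    print("nodata:", nodata, "count:", count, "corr samples:", len(corr))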
| 42.593301
| 221
| 0.602337
| 1,396
| 8,902
| 3.719914
| 0.086676
| 0.021567
| 0.038128
| 0.046601
| 0.955902
| 0.950125
| 0.944541
| 0.929906
| 0.911419
| 0.900635
| 0
| 0.035302
| 0.290384
| 8,902
| 208
| 222
| 42.798077
| 0.786766
| 0.542238
| 0
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049383
| false
| 0
| 0.024691
| 0
| 0.123457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5539df6854ef67fde5cf9e90f7281f4cbd5cc522
| 150
|
py
|
Python
|
app/explorer/views.py
|
pebblecode/cirrus-marketplace-api
|
64d9e3be8705a2fe64c964b16947e9877885de7b
|
[
"MIT"
] | null | null | null |
app/explorer/views.py
|
pebblecode/cirrus-marketplace-api
|
64d9e3be8705a2fe64c964b16947e9877885de7b
|
[
"MIT"
] | null | null | null |
app/explorer/views.py
|
pebblecode/cirrus-marketplace-api
|
64d9e3be8705a2fe64c964b16947e9877885de7b
|
[
"MIT"
] | null | null | null |
from flask import render_template
from . import explorer as explorer_bp
# Alias the blueprint so the view below can keep the endpoint name `explorer`
# without shadowing the blueprint object it is registered on.
@explorer_bp.route('/_explorer')
def explorer():
    return render_template('explorer.html')
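# Hypothetical wiring sketch (the actual app factory lives elsewhere in this
# repo): the blueprint must be registered on the Flask app for '/_explorer'
# to resolve.
#     from flask import Flask
#     from app.explorer import explorer
#     app = Flask(__name__)
#     app.register_blueprint(explorer)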
| 16.666667
| 43
| 0.76
| 18
| 150
| 6.166667
| 0.555556
| 0.252252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 150
| 8
| 44
| 18.75
| 0.853846
| 0
| 0
| 0
| 0
| 0
| 0.153333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
55447930c17844e6e547cb5066d2d27bbb51fda6
| 4,322
|
py
|
Python
|
tests/ConsolePrinter_test.py
|
hanniballar/mazikeen
|
68693a96c69376f18c21576a610470a543a89316
|
[
"MIT"
] | null | null | null |
tests/ConsolePrinter_test.py
|
hanniballar/mazikeen
|
68693a96c69376f18c21576a610470a543a89316
|
[
"MIT"
] | 3
|
2021-04-05T17:14:21.000Z
|
2021-04-06T21:49:41.000Z
|
tests/ConsolePrinter_test.py
|
hanniballar/mazikeen
|
68693a96c69376f18c21576a610470a543a89316
|
[
"MIT"
] | null | null | null |
import unittest
import os
import io
import sys
from mazikeen.ConsolePrinter import Printer, BufferedPrinter
class ConsolePrinterTest(unittest.TestCase):
def test_basic(self):
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = Printer()
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("Hello World\n", capturedOutput.getvalue())
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = Printer(verbose=True)
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("Hello World\nHow do you do?\n", capturedOutput.getvalue())
sys.stdout = sys.__stdout__
def test_basicBufferedPrinter(self):
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = BufferedPrinter()
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("", capturedOutput.getvalue())
printer.flush()
self.assertEqual("Hello World\n", capturedOutput.getvalue())
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = BufferedPrinter(verbose=True)
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("", capturedOutput.getvalue())
printer.flush()
self.assertEqual("Hello World\nHow do you do?\n", capturedOutput.getvalue())
sys.stdout = sys.__stdout__
def test_getBufferedPrinter(self):
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
        printer = Printer(verbose=True)
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("Hello World\nHow do you do?\n", capturedOutput.getvalue())
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = printer.getBufferedPrinter()
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("", capturedOutput.getvalue())
printer.flush()
self.assertEqual("Hello World\nHow do you do?\n", capturedOutput.getvalue())
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = printer.getBufferedPrinter()
printer.print("Hello World")
printer.verbose("How do you do?")
self.assertEqual("", capturedOutput.getvalue())
printer.flush()
self.assertEqual("Hello World\nHow do you do?\n", capturedOutput.getvalue())
sys.stdout = sys.__stdout__
def test_printSeparator(self):
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
        printer = Printer(verbose=True)
        printer.print("Hello", "World", sep=",")
        printer.verbose("How", "do", "you", "do?", sep=",")
self.assertEqual("Hello,World\nHow,do,you,do?\n", capturedOutput.getvalue())
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = printer.getBufferedPrinter()
printer.print("Hello", "World", sep = ",")
printer.verbose("How", "do", "you", "do?", sep = ",")
printer.flush()
self.assertEqual("Hello,World\nHow,do,you,do?\n", capturedOutput.getvalue())
sys.stdout = sys.__stdout__
def test_basicError(self):
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = Printer()
printer.error("Test error")
self.assertEqual("Error: Test error\n", capturedOutput.getvalue())
capturedOutput = io.StringIO()
sys.stdout = capturedOutput
printer = BufferedPrinter()
printer.error("Test error")
self.assertEqual("", capturedOutput.getvalue())
printer.flush()
self.assertEqual("Error: Test error\n", capturedOutput.getvalue())
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
| 31.779412
| 84
| 0.585377
| 403
| 4,322
| 6.19603
| 0.104218
| 0.075691
| 0.044854
| 0.118943
| 0.90869
| 0.90869
| 0.897477
| 0.890268
| 0.836604
| 0.806167
| 0
| 0
| 0.298936
| 4,322
| 135
| 85
| 32.014815
| 0.824092
| 0
| 0
| 0.849462
| 0
| 0
| 0.119417
| 0.013423
| 0
| 0
| 0
| 0
| 0.172043
| 1
| 0.053763
| false
| 0
| 0.053763
| 0
| 0.11828
| 0.408602
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
5545ec2afa878bbc7b92f09d5c2d28bda73ac88b
| 1,894
|
py
|
Python
|
brambling/migrations/0059_auto_20190517_1605.py
|
Shivanjain023/django-brambling
|
17539b82df37f22bd2b4293e73142b887c916344
|
[
"BSD-3-Clause"
] | 8
|
2015-05-06T18:26:15.000Z
|
2018-02-07T22:18:32.000Z
|
brambling/migrations/0059_auto_20190517_1605.py
|
Shivanjain023/django-brambling
|
17539b82df37f22bd2b4293e73142b887c916344
|
[
"BSD-3-Clause"
] | 578
|
2015-01-05T21:37:17.000Z
|
2018-02-14T16:43:50.000Z
|
brambling/migrations/0059_auto_20190517_1605.py
|
Shivanjain023/django-brambling
|
17539b82df37f22bd2b4293e73142b887c916344
|
[
"BSD-3-Clause"
] | 1
|
2015-08-20T16:59:32.000Z
|
2015-08-20T16:59:32.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2019-05-17 16:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('brambling', '0058_auto_20190114_0455'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='stripe_access_token',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_publishable_key',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_refresh_token',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_test_access_token',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_test_publishable_key',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_test_refresh_token',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_test_user_id',
field=models.CharField(blank=True, default='', max_length=255),
),
migrations.AlterField(
model_name='organization',
name='stripe_user_id',
field=models.CharField(blank=True, default='', max_length=255),
),
]
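# Hypothetical compact equivalent (not part of the generated file): Django
# accepts programmatically built operation lists, so the eight identical
# AlterField entries above could be produced in a loop. Field names are
# copied from the migration itself; `STRIPE_CHAR_FIELDS` is an assumption.
#     STRIPE_CHAR_FIELDS = [
#         'stripe_access_token', 'stripe_publishable_key',
#         'stripe_refresh_token', 'stripe_test_access_token',
#         'stripe_test_publishable_key', 'stripe_test_refresh_token',
#         'stripe_test_user_id', 'stripe_user_id',
#     ]
#     operations = [
#         migrations.AlterField(
#             model_name='organization',
#             name=name,
#             field=models.CharField(blank=True, default='', max_length=255),
#         )
#         for name in STRIPE_CHAR_FIELDS
#     ]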
| 33.821429
| 75
| 0.597677
| 185
| 1,894
| 5.881081
| 0.281081
| 0.147059
| 0.183824
| 0.213235
| 0.821691
| 0.821691
| 0.821691
| 0.774816
| 0.774816
| 0.774816
| 0
| 0.041943
| 0.282471
| 1,894
| 55
| 76
| 34.436364
| 0.758646
| 0.035903
| 0
| 0.666667
| 1
| 0
| 0.163467
| 0.066374
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
557de2c03eb977f2baf1d03c5a17452ba6ba8271
| 10,705
|
py
|
Python
|
tests/test_vc_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | 7
|
2021-08-18T00:42:05.000Z
|
2022-03-14T09:49:15.000Z
|
tests/test_vc_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vc_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | 1
|
2022-03-14T09:49:20.000Z
|
2022-03-14T09:49:20.000Z
|
# Code generated by lark_sdk_gen. DO NOT EDIT.
import unittest
import pylark
import pytest
from tests.test_conf import app_all_permission, app_no_permission
from tests.test_helper import mock_get_tenant_access_token_failed
def mock(*args, **kwargs):
raise pylark.PyLarkError(scope="scope", func="func", code=1, msg="mock-failed")
def mock_raw_request(*args, **kwargs):
raise pylark.PyLarkError(
scope="scope", func="func", code=1, msg="mock-raw-request-failed"
)
# mock get token
class TestVCSampleMockGetTokenFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestVCSampleMockGetTokenFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.cli.auth.get_tenant_access_token = mock_get_tenant_access_token_failed
self.cli.auth.get_app_access_token = mock_get_tenant_access_token_failed
self.module_cli = self.cli.vc
def test_mock_get_token_get_vc_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_meeting(pylark.GetVCMeetingReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_kickout_vc_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.kickout_vc_meeting(pylark.KickoutVCMeetingReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_set_vc_host_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_host_meeting(pylark.SetVCHostMeetingReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_vc_daily_report(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_daily_report(pylark.GetVCDailyReportReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_vc_top_user_report(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_top_user_report(pylark.GetVCTopUserReportReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_vc_room_config(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_room_config(pylark.GetVCRoomConfigReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_set_vc_room_config(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_room_config(pylark.SetVCRoomConfigReq())
assert "msg=failed" in f"{e}"
# mock mock self func
class TestVCSampleMockSelfFuncFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestVCSampleMockSelfFuncFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.vc
def test_mock_self_func_get_vc_meeting(self):
origin_func = self.module_cli.get_vc_meeting
self.module_cli.get_vc_meeting = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_meeting(pylark.GetVCMeetingReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_vc_meeting = origin_func
def test_mock_self_func_kickout_vc_meeting(self):
origin_func = self.module_cli.kickout_vc_meeting
self.module_cli.kickout_vc_meeting = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.kickout_vc_meeting(pylark.KickoutVCMeetingReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.kickout_vc_meeting = origin_func
def test_mock_self_func_set_vc_host_meeting(self):
origin_func = self.module_cli.set_vc_host_meeting
self.module_cli.set_vc_host_meeting = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_host_meeting(pylark.SetVCHostMeetingReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.set_vc_host_meeting = origin_func
def test_mock_self_func_get_vc_daily_report(self):
origin_func = self.module_cli.get_vc_daily_report
self.module_cli.get_vc_daily_report = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_daily_report(pylark.GetVCDailyReportReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_vc_daily_report = origin_func
def test_mock_self_func_get_vc_top_user_report(self):
origin_func = self.module_cli.get_vc_top_user_report
self.module_cli.get_vc_top_user_report = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_top_user_report(pylark.GetVCTopUserReportReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_vc_top_user_report = origin_func
def test_mock_self_func_get_vc_room_config(self):
origin_func = self.module_cli.get_vc_room_config
self.module_cli.get_vc_room_config = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_room_config(pylark.GetVCRoomConfigReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_vc_room_config = origin_func
def test_mock_self_func_set_vc_room_config(self):
origin_func = self.module_cli.set_vc_room_config
self.module_cli.set_vc_room_config = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_room_config(pylark.SetVCRoomConfigReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.set_vc_room_config = origin_func
# mock raw request
class TestVCSampleMockRawRequestFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestVCSampleMockRawRequestFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.vc
self.cli.raw_request = mock_raw_request
def test_mock_raw_request_get_vc_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_meeting(
pylark.GetVCMeetingReq(
meeting_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_kickout_vc_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.kickout_vc_meeting(
pylark.KickoutVCMeetingReq(
meeting_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_vc_host_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_host_meeting(
pylark.SetVCHostMeetingReq(
meeting_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_vc_daily_report(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_daily_report(pylark.GetVCDailyReportReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_vc_top_user_report(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_top_user_report(pylark.GetVCTopUserReportReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_vc_room_config(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_room_config(pylark.GetVCRoomConfigReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_vc_room_config(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_room_config(pylark.SetVCRoomConfigReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
# real request
class TestVCSampleRealRequestFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestVCSampleRealRequestFailed, self).__init__(*args, **kwargs)
self.cli = app_no_permission.ins()
self.module_cli = self.cli.vc
def test_real_request_get_vc_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_meeting(
pylark.GetVCMeetingReq(
meeting_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_kickout_vc_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.kickout_vc_meeting(
pylark.KickoutVCMeetingReq(
meeting_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_set_vc_host_meeting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_host_meeting(
pylark.SetVCHostMeetingReq(
meeting_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_vc_daily_report(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_daily_report(pylark.GetVCDailyReportReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_vc_top_user_report(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_top_user_report(pylark.GetVCTopUserReportReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_vc_room_config(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_vc_room_config(pylark.GetVCRoomConfigReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_set_vc_room_config(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_vc_room_config(pylark.SetVCRoomConfigReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
| 36.411565
| 83
| 0.684353
| 1,444
| 10,705
| 4.746537
| 0.058864
| 0.077327
| 0.100525
| 0.071491
| 0.915232
| 0.91246
| 0.896411
| 0.868106
| 0.815436
| 0.753137
| 0
| 0.001939
| 0.229145
| 10,705
| 293
| 84
| 36.535836
| 0.828648
| 0.010182
| 0
| 0.603774
| 1
| 0
| 0.041171
| 0.017375
| 0
| 0
| 0
| 0
| 0.231132
| 1
| 0.160377
| false
| 0
| 0.023585
| 0
| 0.20283
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e950a7fb439f89bd28ca1e8c22e0971bdc72d86f
| 11,461
|
py
|
Python
|
backend/stakeholder/migrations/0003_auto_20210310_2002.py
|
DaasDaham/Team_9_Club_Management_Portal
|
aef7a361f383efbbdb73517d7fa30c806cc1ea96
|
[
"MIT"
] | 1
|
2022-01-07T16:07:09.000Z
|
2022-01-07T16:07:09.000Z
|
backend/stakeholder/migrations/0003_auto_20210310_2002.py
|
DaasDaham/Team_9_Club_Management_Portal
|
aef7a361f383efbbdb73517d7fa30c806cc1ea96
|
[
"MIT"
] | null | null | null |
backend/stakeholder/migrations/0003_auto_20210310_2002.py
|
DaasDaham/Team_9_Club_Management_Portal
|
aef7a361f383efbbdb73517d7fa30c806cc1ea96
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2021-03-10 14:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stakeholder', '0002_auto_20210310_1415'),
]
operations = [
migrations.CreateModel(
name='ASTRONUTS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='astronuts/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='astronuts/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='AUDIOBYTES',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='audiobytes/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='audiobytes/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='BYLD',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='byld/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='byld/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='ELECTROHOLICS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='electroholics/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='electroholics/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='FOOBAR',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='foobar/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='foobar/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='KUBIC',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='kubic/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='kubic/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='MACHAN',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='machan/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='machan/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='MADTOES',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='madtoes/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='madtoes/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='PHILOSOC',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='philosoc/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='philosoc/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='ROBOTICS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='robotics/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='robotics/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='TASVEER',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='tasveer/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='tasveer/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='THE65SQUARE',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='the65square/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='the65square/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='TRIVIALIS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Time', models.DateTimeField()),
('Location', models.URLField(max_length=500)),
('Description', models.TextField()),
('Payment_receipt_student', models.ImageField(null=True, upload_to='trivialis/student/payment_receipt')),
('Payment_receipt_reimburse', models.ImageField(null=True, upload_to='trivialis/office/payment_receipt')),
('Approved', models.BooleanField(default=False)),
('Attendance', models.IntegerField(default=0)),
],
),
migrations.RemoveField(
model_name='biobytes',
name='Payment_receipt',
),
migrations.AddField(
model_name='biobytes',
name='Payment_receipt_reimburse',
field=models.ImageField(null=True, upload_to='biobytes/office/payment_receipt'),
),
migrations.AddField(
model_name='biobytes',
name='Payment_receipt_student',
field=models.ImageField(null=True, upload_to='biobytes/student/payment_receipt'),
),
]
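# Hypothetical factory sketch (not part of the generated migration): every
# club model above shares one field template, so the CreateModel entries
# could be produced from a helper. `club_event_model` is an assumed name;
# the field set is copied verbatim from the models above.
#     def club_event_model(name, slug):
#         return migrations.CreateModel(
#             name=name,
#             fields=[
#                 ('id', models.AutoField(auto_created=True, primary_key=True,
#                                         serialize=False, verbose_name='ID')),
#                 ('Name', models.CharField(max_length=100)),
#                 ('Time', models.DateTimeField()),
#                 ('Location', models.URLField(max_length=500)),
#                 ('Description', models.TextField()),
#                 ('Payment_receipt_student', models.ImageField(
#                     null=True, upload_to=slug + '/student/payment_receipt')),
#                 ('Payment_receipt_reimburse', models.ImageField(
#                     null=True, upload_to=slug + '/office/payment_receipt')),
#                 ('Approved', models.BooleanField(default=False)),
#                 ('Attendance', models.IntegerField(default=0)),
#             ],
#         )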
| 54.57619
| 126
| 0.583631
| 1,026
| 11,461
| 6.340156
| 0.078947
| 0.122675
| 0.086088
| 0.103305
| 0.948347
| 0.948347
| 0.945119
| 0.913451
| 0.899616
| 0.899616
| 0
| 0.015357
| 0.272751
| 11,461
| 209
| 127
| 54.837321
| 0.765087
| 0.003926
| 0
| 0.743842
| 1
| 0
| 0.207202
| 0.137988
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004926
| 0
| 0.019704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
757087d31526037d8be97075e04950fa5472bbf1
| 4,910
|
py
|
Python
|
17.py
|
mjenrungrot/AdventOfCode2020
|
ad2607fe6c4418327a97b863146f7a5af3361afe
|
[
"MIT"
] | null | null | null |
17.py
|
mjenrungrot/AdventOfCode2020
|
ad2607fe6c4418327a97b863146f7a5af3361afe
|
[
"MIT"
] | null | null | null |
17.py
|
mjenrungrot/AdventOfCode2020
|
ad2607fe6c4418327a97b863146f7a5af3361afe
|
[
"MIT"
] | null | null | null |
import sys
import copy
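# Advent of Code 2020, day 17 ("Conway Cubes"): a cellular automaton on an
# unbounded grid. main() solves the 3-D version (part one); extra() adds a
# fourth dimension w (part two).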
def extra():
    with open("17.input") as fp:  # close the input file deterministically
        lines = [line.strip() for line in fp.readlines()]
board = {}
R = len(lines)
C = len(lines[0])
for i in range(R):
for j in range(C):
if lines[i][j] == '#':
board[(i, j, 0, 0)] = 1
else:
board[(i, j, 0, 0)] = 0
N_CYCLE = 6
min_x = 0
max_x = R - 1
min_y = 0
max_y = C - 1
min_z = 0
max_z = 0
min_w = 0
max_w = 0
for n_cycle in range(N_CYCLE):
new_board = {}
min_x -= 1
max_x += 1
min_y -= 1
max_y += 1
min_z -= 1
max_z += 1
min_w -= 1
max_w += 1
for x in range(min_x - 1, max_x + 2):
for y in range(min_y - 1, max_y + 2):
for z in range(min_z - 1, max_z + 2):
for w in range(min_w - 1, max_w + 2):
is_active = ((x, y, z, w)
in board) and (board[(x, y, z, w)] == 1)
count_active = 0
for dx in range(-1, 2):
for dy in range(-1, 2):
for dz in range(-1, 2):
for dw in range(-1, 2):
if abs(dx) + abs(dy) + abs(dz) + abs(
dw) == 0:
continue
if (x + dx, y + dy, z + dz,
w + dw) not in board:
continue
if board[(x + dx, y + dy, z + dz,
w + dw)] == 1:
count_active += 1
if is_active and (count_active == 2 or
count_active == 3):
new_board[(x, y, z, w)] = 1
elif is_active:
new_board[(x, y, z, w)] = 0
elif (not is_active) and (count_active == 3):
new_board[(x, y, z, w)] = 1
else:
new_board[(x, y, z, w)] = 0
board = copy.deepcopy(new_board)
ans = 0
for x in range(min_x - 1, max_x + 2):
for y in range(min_y - 1, max_y + 2):
for z in range(min_z - 1, max_z + 2):
for w in range(min_w - 1, max_w + 2):
ans += (x, y, z, w) in board and board[(x, y, z, w)] == 1
print(ans)
def main():
    with open("17.input") as fp:  # close the input file deterministically
        lines = [line.strip() for line in fp.readlines()]
board = {}
R = len(lines)
C = len(lines[0])
for i in range(R):
for j in range(C):
if lines[i][j] == '#':
board[(i, j, 0)] = 1
else:
board[(i, j, 0)] = 0
N_CYCLE = 6
min_x = 0
max_x = R - 1
min_y = 0
max_y = C - 1
min_z = 0
max_z = 0
for n_cycle in range(N_CYCLE):
new_board = {}
min_x -= 1
max_x += 1
min_y -= 1
max_y += 1
min_z -= 1
max_z += 1
for x in range(min_x - 1, max_x + 2):
for y in range(min_y - 1, max_y + 2):
for z in range(min_z - 1, max_z + 2):
is_active = ((x, y, z) in board) and (board[(x, y, z)] == 1)
count_active = 0
for dx in range(-1, 2):
for dy in range(-1, 2):
for dz in range(-1, 2):
if abs(dx) + abs(dy) + abs(dz) == 0:
continue
if (x + dx, y + dy, z + dz) not in board:
continue
if board[(x + dx, y + dy, z + dz)] == 1:
count_active += 1
if is_active and (count_active == 2 or count_active == 3):
new_board[(x, y, z)] = 1
elif is_active:
new_board[(x, y, z)] = 0
elif (not is_active) and (count_active == 3):
new_board[(x, y, z)] = 1
else:
new_board[(x, y, z)] = 0
board = copy.deepcopy(new_board)
ans = 0
for x in range(min_x - 1, max_x + 2):
for y in range(min_y - 1, max_y + 2):
for z in range(min_z - 1, max_z + 2):
ans += (x, y, z) in board and board[(x, y, z)] == 1
print(ans)
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[1] == 'extra':
extra()
else:
main()
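# Illustrative check (not part of the original solution): with the puzzle's
# published sample grid, main() prints 112 and extra() prints 848. The demo
# below would overwrite a local "17.input", which is an assumption about the
# working directory.
#     with open("17.input", "w") as fp:
#         fp.write(".#.\n..#\n###\n")
#     main()   # 112
#     extra()  # 848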
| 31.883117
| 80
| 0.349287
| 658
| 4,910
| 2.454407
| 0.089666
| 0.117028
| 0.029721
| 0.059443
| 0.926935
| 0.915789
| 0.905882
| 0.886068
| 0.866254
| 0.811765
| 0
| 0.051439
| 0.53279
| 4,910
| 153
| 81
| 32.091503
| 0.652572
| 0
| 0
| 0.740458
| 0
| 0
| 0.006314
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015267
| false
| 0
| 0.015267
| 0
| 0.030534
| 0.015267
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
75c6d4f6d2814b978b561b7a8a20a429b26fe160
| 29,261
|
py
|
Python
|
validation/report/scalability_analysis_v2.py
|
nerds-ufes/G-PolKA
|
9c6bd42167bc333f6421a751c93a88c00841def9
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
validation/report/scalability_analysis_v2.py
|
nerds-ufes/G-PolKA
|
9c6bd42167bc333f6421a751c93a88c00841def9
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
validation/report/scalability_analysis_v2.py
|
nerds-ufes/G-PolKA
|
9c6bd42167bc333f6421a751c93a88c00841def9
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
from polka import *
from keyflow import *
from keyflow_multicast import *
import math  # used below; imported explicitly rather than via the star imports
import numpy as np
import pandas as pd
import networkx as nx
def max_bitlength(nports, nnodes, lpath):
mindegree = math.log(nports, 2)
print ("Mindegree: ", mindegree)
mindegree = np.ceil(mindegree)
print ("Mindegree: ", mindegree)
nodeids = generate_nodeids(mindegree, nnodes)
# print "nodeids[", len(nodeids), "]: ", nodeids
# Worst Case
    path = nodeids[-lpath:]
# print "path[", len(path), "]: ", path
bitlength = 0
for elem in path:
bitlength = bitlength + gf_degree(elem)
# print "Bitlength: ", bitlength
return bitlength
def max_bitlength_table(nports, nnodes, lpath, directory, is_multicast):
if is_multicast:
# Multicast
print ("Multicast")
mindegree = nports
else:
# Unicast
print ("Unicast")
mindegree = math.log(nports, 2)
mindegree = int(np.ceil(mindegree))
print ("Mindegree: ", mindegree)
nodeids = generate_nodeids_table(mindegree, nnodes, directory)
# print "nodeids[", len(nodeids), "]: ", nodeids
# Worst Case
    path = nodeids[-lpath:]
# print "path[", len(path), "]: ", path
bitlength = 0
for elem in path:
bitlength = bitlength + gf_degree(elem)
# print "Bitlength: ", bitlength
return bitlength
def max_bitlength_list(nports, lpath):
bitsport = math.log(nports, 2)
bitsport = np.ceil(bitsport)
nbits = bitsport * lpath
return int(nbits)
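# Worked example (illustrative): with 24-port switches a port id needs
# ceil(log2(24)) = 5 bits, so a 5-hop route in the list encoding costs
# max_bitlength_list(24, 5) == 25 bits.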
def max_bitlength_elmo(nports, nspine, nleaf, nserver, mult):
# Calculating upstream
bitsport = nleaf + nspine + 2
    # Calculating downstream
bitsport += nports + (nleaf * mult) + (nspine * mult)
nbits = bitsport
return int(nbits)
def scalability_analysis_keyflow_paper():
LOGGER.debug("Running")
lst = []
# Header
lst.append("Topology,Path,Bits")
# Graph from KeyFlow paper
nports = 24
switch_nodes = [15, 30, 45, 60]
for nnodes in switch_nodes:
for lpath in range(1, 16):
topo_name = "N=" + str(nnodes)
print ("######", topo_name)
nbits = max_bitlength(nports, nnodes, lpath)
lst.append("PolKA " + str(topo_name) + "," + str(lpath) + "," + str(nbits))
nbits_keyflow = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
lst.append(
"KeyFlow "
+ str(topo_name)
+ ","
+ str(lpath)
+ ","
+ str(nbits_keyflow)
)
nbits_list = max_bitlength_list(nports, lpath)
lst.append(
"List " + str(topo_name) + "," + str(lpath) + "," + str(nbits_list)
)
# Export to csv file
arr = np.array(lst)
np.savetxt("keyflow_paper.csv", arr, delimiter=",", fmt="%s")
def scalability_analysis_polka_paper(directory):
LOGGER.debug("Running")
table_directory = directory + "/irrpolys"
lst = []
# Header
lst.append(
"Topology,Max.Ports,Diameter,Nr.Nodes,Nr.Servers,"
+ "Nr.Bits-Unicast,Nr.Bits-Multicast"
)
# 2-tier Topologies
switch_ports = [24]
spine_nodes = [6, 12, 16]
lpath = 3
for nspine in spine_nodes:
nleaf = nspine
nnodes = nspine + nleaf
for nports in switch_ports:
if nports > nspine:
nservers = (nports - nspine) * nleaf
# topo_name = "2-tier - spine: ", nspine, " - leaf: ", nleaf,
# "- switches: ", nnodes," - ports: ", nports,
# " - servers: ", nservers, " - lpath: ", lpath
topo_name = "2-tier spine " + str(nspine) + " leaf " + str(nleaf)
print ("######", topo_name)
nbits_unicast = max_bitlength_table(
nports, nnodes, lpath, table_directory, 0
)
nbits_multicast = max_bitlength_table(
nports, nnodes, lpath, table_directory, 1
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
)
# FatTree
pods = [4, 8, 16, 24]
# pods = [4,8,16]
lpath = 5
for k in pods:
nservers = pow(k, 3) / 4
nswitch_access = k / 2
nswitch_agreg = k / 2
nswitch_core = pow(k / 2, 2)
nnodes = k * nswitch_access + k * nswitch_agreg + nswitch_core
nports = k
topo_name = "Fat Tree pod " + str(k)
print ("######", topo_name)
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
)
# Arpanet Backbone Topology (ARPANET)
# nnodes = 20
# nports = 4
# lpath = 7
topo_name = "ARPANET"
print ("######", topo_name)
filename = "./graphs/21-arpanet.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
)
# Geant Backbone Topology (GEANT2)
# nnodes = 30
# nports = 8
# lpath = 7
topo_name = "GEANT2"
print ("######", topo_name)
filename = "./graphs/32-geant2-30N-48L.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
)
# Internet2 Network (INTERNET2)
# nnodes = 56
# nports = 3
# lpath = 21
topo_name = "INTERNET2"
print ("######", topo_name)
filename = "./graphs/38-internet2.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
)
# Export to csv file
arr = np.array(lst)
np.savetxt("polka_paper_table.csv", arr, delimiter=",", fmt="%s")
def scalability_analysis_netsoft(directory):
LOGGER.debug("Running")
table_directory = directory + "/irrpolys"
lst = []
# Header
    # 2-tier and Fat Tree rows below also carry an Elmo column; the backbone
    # topologies report PolKA and List only.
    lst.append(
        "Topology,Max.Ports,Diameter,Nr.Nodes,Nr.Servers,"
        + "Nr.Bits-PolKA,Nr.Bits-List,Nr.Bits-Elmo"
    )
# 2-tier Topologies
switch_ports = [24]
spine_nodes = [6, 12, 16]
# spine_nodes = [6]
lpath = 3
for nspine in spine_nodes:
nleaf = nspine
nnodes = nspine + nleaf
for nports in switch_ports:
if nports > nspine:
nservers = (nports - nspine) * nleaf
# topo_name = "2-tier - spine: ", nspine, " - leaf: ", nleaf,
# "- switches: ", nnodes," - ports: ", nports,
# " - servers: ", nservers, " - lpath: ", lpath
topo_name = "2-tier spine " + str(nspine) + " leaf " + str(nleaf)
print ("######", topo_name)
nbits_unicast = max_bitlength_table(
nports, nnodes, lpath, table_directory, 0
)
nbits_list = max_bitlength_list(nports, lpath)
nbits_elmo = max_bitlength_elmo(nports, nspine, nleaf, nservers, lpath)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_list)
+ ","
+ str(nbits_elmo)
)
# FatTree
pods = [4, 8, 16, 24]
# pods = [16]
lpath = 5
for k in pods:
        nservers = pow(k, 3) // 4
        nswitch_access = k // 2
        nswitch_agreg = k // 2
        nswitch_core = pow(k // 2, 2)
nnodes = k * nswitch_access + k * nswitch_agreg + nswitch_core
nports = k
topo_name = "Fat Tree pod " + str(k)
print ("######", topo_name)
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_list = max_bitlength_list(nports, lpath)
nbits_elmo = max_bitlength_elmo(
nports, nswitch_agreg, nswitch_access, nservers, 3
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_list)
+ ","
+ str(nbits_elmo)
)
# Arpanet Backbone Topology (ARPANET)
# nnodes = 20
# nports = 4
# lpath = 7
topo_name = "ARPANET"
print ("######", topo_name)
filename = "./graphs/21-arpanet.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_list = max_bitlength_list(nports, lpath)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_list)
)
# Geant Backbone Topology (GEANT2)
# nnodes = 30
# nports = 8
# lpath = 7
topo_name = "GEANT2"
print ("######", topo_name)
filename = "./graphs/32-geant2-30N-48L.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_list = max_bitlength_list(nports, lpath)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_list)
)
# Internet2 Network (INTERNET2)
# nnodes = 56
# nports = 3
# lpath = 21
topo_name = "INTERNET2"
print ("######", topo_name)
filename = "./graphs/38-internet2.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_list = max_bitlength_list(nports, lpath)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_list)
)
# Export to csv file
arr = np.array(lst)
np.savetxt("polka_paper_netsoft.csv", arr, delimiter=",", fmt="%s")
def scalability_analysis_thesis(directory):
LOGGER.debug("Running")
table_directory = directory + "/irrpolys"
lst = []
# Header
lst.append(
"Topology,Max.Ports,Diameter,Nr.Nodes,Nr.Servers,Nr.Bits-Bin-Unicast,"
+ "Nr.Bits-Bin-Multicast,Nr.Bits-Int-Unicast,Nr.Bits-Int-Multicast"
)
# lst.append("Topology,Max.Ports,Diameter,Nr.Nodes,Nr.Servers,Nr.Bits-Unicast,Nr.Bits-Multicast")
# 2-tier Topologies
switch_ports = [24]
spine_nodes = [6, 12, 16]
lpath = 3
for nspine in spine_nodes:
nleaf = nspine
nnodes = nspine + nleaf
for nports in switch_ports:
if nports > nspine:
nservers = (nports - nspine) * nleaf
# topo_name = "2-tier - spine: ", nspine, " - leaf: ", nleaf,
# "- switches: ", nnodes," - ports: ", nports,
# " - servers: ", nservers, " - lpath: ", lpath
topo_name = "2-tier spine " + str(nspine) + " leaf " + str(nleaf)
print ("######", topo_name)
nbits_unicast = max_bitlength_table(
nports, nnodes, lpath, table_directory, 0
)
nbits_multicast = max_bitlength_table(
nports, nnodes, lpath, table_directory, 1
)
nbits_keyflow_unicast = max_bitlength_keyflow(
nports, nnodes, lpath, topo_name
)
nbits_keyflow_multicast = max_bitlength_keyflow_multicast(
nports, nnodes, lpath, topo_name, 1
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
+ ","
+ str(nbits_keyflow_unicast)
+ ","
+ str(nbits_keyflow_multicast)
)
# FatTree
pods = [4, 8, 16, 24]
# pods = [4,8,16]
lpath = 5
for k in pods:
        nservers = pow(k, 3) // 4
        nswitch_access = k // 2
        nswitch_agreg = k // 2
        nswitch_core = pow(k // 2, 2)
nnodes = k * nswitch_access + k * nswitch_agreg + nswitch_core
nports = k
topo_name = "Fat Tree pod " + str(k)
print ("######", topo_name)
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
nbits_keyflow_unicast = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_keyflow_multicast = max_bitlength_keyflow_multicast(
nports, nnodes, lpath, topo_name, 1
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
+ ","
+ str(nbits_keyflow_unicast)
+ ","
+ str(nbits_keyflow_multicast)
)
# Arpanet Backbone Topology (ARPANET)
# nnodes = 20
# nports = 4
# lpath = 7
topo_name = "ARPANET"
print ("######", topo_name)
filename = "./graphs/21-arpanet.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
nbits_keyflow_unicast = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_keyflow_multicast = max_bitlength_keyflow_multicast(
nports, nnodes, lpath, topo_name, 1
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
+ ","
+ str(nbits_keyflow_unicast)
+ ","
+ str(nbits_keyflow_multicast)
)
# Geant Backbone Topology (GEANT2)
# nnodes = 30
# nports = 8
# lpath = 7
topo_name = "GEANT2"
print ("######", topo_name)
filename = "./graphs/32-geant2-30N-48L.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
nbits_keyflow_unicast = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_keyflow_multicast = max_bitlength_keyflow_multicast(
nports, nnodes, lpath, topo_name, 1
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
+ ","
+ str(nbits_keyflow_unicast)
+ ","
+ str(nbits_keyflow_multicast)
)
# Internet2 Network (INTERNET2)
# nnodes = 56
# nports = 3
# lpath = 21
topo_name = "INTERNET2"
print ("######", topo_name)
filename = "./graphs/38-internet2.txt"
g = create_graph_edgelist(filename)
nports = get_graph_maxdegree(g)
print ("Maximum node degree: ", nports)
nnodes = g.order()
print ("Number of nodes: ", nnodes)
lpath = nx.diameter(g)
print ("Diameter: ", lpath)
nservers = 0
nbits_unicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 0)
nbits_multicast = max_bitlength_table(nports, nnodes, lpath, table_directory, 1)
nbits_keyflow_unicast = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_keyflow_multicast = max_bitlength_keyflow_multicast(
nports, nnodes, lpath, topo_name, 1
)
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits_unicast)
+ ","
+ str(nbits_multicast)
+ ","
+ str(nbits_keyflow_unicast)
+ ","
+ str(nbits_keyflow_multicast)
)
# Export to csv file
arr = np.array(lst)
np.savetxt("polka_paper_table_2.csv", arr, delimiter=",", fmt="%s")
def create_graph_edgelist(filename):
# Read edge list
edgelist = pd.read_csv(filename, delim_whitespace=True)
g = nx.Graph()
# Add edges and edge attributes
for i, elrow in edgelist.iterrows():
g.add_edge(elrow[0], elrow[1])
return g
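# Note: pd.read_csv with delim_whitespace=True consumes the first line of the
# edge-list file as a header row; if the graphs/*.txt files start directly
# with an edge, pass header=None so that edge is not silently dropped.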
def get_graph_maxdegree(g):
    # sorting is unnecessary; the maximum degree is enough
    return max(d for _, d in g.degree())
def scalability_analysis_unicast():
LOGGER.debug("Running")
lst = []
# Header
    lst.append(
        "Topology,Max.Ports,Diameter,Nr.Nodes,Nr.Servers,Nr.Bits-Bin,"
        + "Nr.Bits-Int,Nr.Bits-List,Worse than KeyFlow,Worse than List"
    )
# 2-tier Topologies
switch_ports = [24, 48, 96]
spine_nodes = [6, 12, 16, 24, 36, 48]
lpath = 3
for nspine in spine_nodes:
nleaf = nspine
nnodes = nspine + nleaf
for nports in switch_ports:
if nports > nspine:
nservers = (nports - nspine) * nleaf
# topo_name = "2-tier - spine: ", nspine, " - leaf: ",
# nleaf, "- switches: ", nnodes," - ports: ",
# nports," - servers: ", nservers, " - lpath: ",
# lpath
topo_name = "2-tier spine " + str(nspine) + " leaf " + str(nleaf)
print ("######", topo_name)
nbits = max_bitlength(nports, nnodes, lpath)
nbits_keyflow = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_list = max_bitlength_list(nports, lpath)
print ("Bitlength:", nbits)
result = str(nbits - nbits_keyflow) if nbits > nbits_keyflow else ""
result2 = str(nbits - nbits_list) if nbits > nbits_list else ""
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits)
+ ","
+ str(nbits_keyflow)
+ ","
+ str(nbits_list)
+ ","
+ result
+ ","
+ result2
)
# FatTree
pods = [4, 8, 16, 24, 32]
lpath = 5
for k in pods:
        nservers = pow(k, 3) // 4
        nswitch_access = k // 2
        nswitch_agreg = k // 2
        nswitch_core = pow(k // 2, 2)
nnodes = k * nswitch_access + k * nswitch_agreg + nswitch_core
nports = k
topo_name = "Fat Tree pod " + str(k)
print ("######", topo_name)
nbits = max_bitlength(nports, nnodes, lpath)
nbits_keyflow = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_list = max_bitlength_list(nports, lpath)
print ("Bitlength:", nbits)
result = str(nbits - nbits_keyflow) if nbits > nbits_keyflow else ""
result2 = str(nbits - nbits_list) if nbits > nbits_list else ""
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits)
+ ","
+ str(nbits_keyflow)
+ ","
+ str(nbits_list)
+ ","
+ result
+ ","
+ result2
)
# Hypercube Topologies
degree = [3, 4, 5, 6, 7, 8, 9, 10]
for ndegree in degree:
nservers = pow(2, ndegree)
nnodes = nservers
nports = ndegree
lpath = ndegree
# topo_name = "Hypercube - degree: ",
# ndegree, "- switches: ",
# nnodes," - ports: ", nports,
# " - servers: ", nservers, " - lpath: ", lpath
topo_name = "Hypercube degree " + str(ndegree)
print ("######", topo_name)
nbits = max_bitlength(nports, nnodes, lpath)
nbits_keyflow = max_bitlength_keyflow(nports, nnodes, lpath, topo_name)
nbits_list = max_bitlength_list(nports, lpath)
print ("Bitlength:", nbits)
result = str(nbits - nbits_keyflow) if nbits > nbits_keyflow else ""
result2 = str(nbits - nbits_list) if nbits > nbits_list else ""
lst.append(
topo_name
+ ","
+ str(nports)
+ ","
+ str(lpath)
+ ","
+ str(nnodes)
+ ","
+ str(nservers)
+ ","
+ str(nbits)
+ ","
+ str(nbits_keyflow)
+ ","
+ str(nbits_list)
+ ","
+ result
+ ","
+ result2
)
# DCell
# k = [1,2]
# n = [4,6,8,10,12]
# tmp = (n+1)*n
# nservers = (tmp+1)*tmp
# nswitcheslevel = nservers/n
# nportsservers = k+1
# nportsswitches = n
# nnodes =
# nports =
# Export to csv file
arr = np.array(lst)
np.savetxt("scalability_analysis_unicast.csv", arr, delimiter=",", fmt="%s")
def calculate_routeid_worst_case(nodeids, lpath, mindegree):
LOGGER.debug("Running")
    path = nodeids[-lpath:]
    print("Path[", len(path), "]: ", path)
    bitlength = 0
    for elem in path:
        bitlength = bitlength + gf_degree(elem)
    print("Bitlength: ", bitlength)
    print("############ All ones output ports ############")
    o1 = []
    for elem in path:
        # o1.append([1] * gf_degree(elem))
        o1.append([1] * mindegree)
    # print("o[", len(o1), "]: ", o1)
    r = calculate_routeid(path, o1)
    print("RouteID[", len(r), "] = ", r)
    print("############ 1 + all zeros output ports ############")
    o2 = []
    for elem in path:
        # o2.append([1] + [0] * (gf_degree(elem) - 1))
        o2.append([1] + [0] * (mindegree - 1))
    # print("o[", len(o2), "]: ", o2)
    r = calculate_routeid(path, o2)
    print("RouteID[", len(r), "] = ", r)
    for i in range(0, 10):
        print("############ 1 + Random bits output ports ############")
        o3 = []
        for elem in path:
            # o3.append([1] + [int(uniform(0, 2)) for i in range(0, gf_degree(elem) - 1)])
            o3.append([1] + [int(uniform(0, 2)) for i in range(0, mindegree - 1)])
        # print("o[", len(o3), "]: ", o3)
        r = calculate_routeid(path, o3)
        print("RouteID[", len(r), "] = ", r)
def test_routeid_worst_case():
LOGGER.debug("Running")
# 2-tier 4 leaf - 4 spine - 8 ports - 16 servers
toponame = "2-tier 16 servers"
nports = 8
nnodes = 8
lpath = 3
mindegree = int(math.log(nports, 2))
nodeids = generate_nodeids(mindegree, nnodes)
print "nodeids[", len(nodeids), "]: ", nodeids
calculate_routeid_worst_case(nodeids, lpath, mindegree)
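# Sanity check on the numbers above: with nports = 8, mindegree =
# int(log2(8)) = 3, so every output-port word is 3 bits wide and the
# worst-case route ID spans a 3-hop path.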
# Main
def main():
# Generate table with irreducible polynomials mod2
# mindegree = 1
# maxdegree = 7
# table = generate_coprimes_table(1, 7)
# print table
    # Execute scalability analysis for unicast
    # (comparison between PolKA, KeyFlow, and port-list source routing)
    # scalability_analysis_unicast()
# Comparison with KeyFlow as done in KeyFlow paper
# scalability_analysis_keyflow_paper()
# Generate human readable poly table
mindeg = 1
maxdeg = 24
# table = generate_coprimes_table_print(mindeg, maxdeg)
# Generate pickle poly table (better performance)
# mindeg = 1
# maxdeg = 24
# directory = "./control/irrpolys"
# generate_coprimes_table_pickle(mindeg, maxdeg, directory)
# table = get_coprimes_table_pickle(mindeg, maxdeg, directory)
# print table
# Comparison PolKA
# scalability_analysis_polka_paper(directory)
# Comparison PolKA with Sourcey
# scalability_analysis_netsoft(
# "./control",
# # "/home/cristina/Dropbox/Cristina/UFES/doutorado/P4_v2/git/control"
# )
# Analysis Thesis
scalability_analysis_thesis("./control")
# Test routeid in the worst case for
# 2-tier 4 leaf - 4 spine - 8 ports - 16 servers
# test_routeid_worst_case()
if __name__ == "__main__":
main()
| 29.76704
| 101
| 0.512696
| 2,981
| 29,261
| 4.856089
| 0.076149
| 0.043106
| 0.052846
| 0.04131
| 0.823017
| 0.80706
| 0.790066
| 0.774731
| 0.761951
| 0.748826
| 0
| 0.020095
| 0.360548
| 29,261
| 982
| 102
| 29.797352
| 0.753567
| 0.127132
| 0
| 0.814913
| 0
| 0.003995
| 0.082963
| 0.027917
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.006658
| null | null | 0.083888
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
75d1d1c28151bd93da1622d16c2be762b4daaf68
| 5,420
|
py
|
Python
|
analyzer.py
|
mjdileep/datePicker
|
1e4801d04ee3fa90ed6a9cd7e43c7367f34ee849
|
[
"MIT"
] | null | null | null |
analyzer.py
|
mjdileep/datePicker
|
1e4801d04ee3fa90ed6a9cd7e43c7367f34ee849
|
[
"MIT"
] | null | null | null |
analyzer.py
|
mjdileep/datePicker
|
1e4801d04ee3fa90ed6a9cd7e43c7367f34ee849
|
[
"MIT"
] | null | null | null |
__author__ = 'ASUS-PC'
import re
import datetime
from dateutil.parser import *
def analyzer(text):
    # Date patterns: "4th of March 2019", "12/05/2019", "2019-05-12", and
    # "March 12, 2019" style forms. Note: the original month alternative
    # "1[12]" could never match October (10); "1[0-2]" fixes that.
    regex = re.compile(
        r'( (0?[1-9]|1[0-9]|2[0-9]|3[01])[tTNSns][DTHdth]( of | OF | ?, ?).{3,10}(, ?)?( in | IN )?(\d\d)?(\d\d))'
        r'|( (0?[1-9]|1[0-9]|2[0-9]|3[01])[.\/-](0?[1-9]|1[0-2])[.\/-](\d\d)?(\d\d))'
        r'|( (\d\d)?(\d\d)[.\/-](0?[1-9]|1[0-2])[.\/-](0?[1-9]|1[0-9]|2[0-9]|3[01]).)'
        r'|( \w{3,10} (0?[1-9]|1[0-9]|2[0-9]|3[01]) ?, ?(\d\d)?(\d\d))'
    )
    time_entities = []
    for each in regex.finditer(text):
        time_entities.append(time_parser(each.group()))
        # time_entities.append(parse(each.group(), fuzzy=True))
    return time_entities
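# Example (hypothetical input): analyzer("paid on 12/05/2019 in cash")
# matches " 12/05/2019" and returns [datetime.datetime(2019, 5, 12, 0, 0)];
# the patterns require a leading space and numeric dates are parsed day-first.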
def time_parser(text):
    # Try each known strptime format in turn; the leading space mirrors the
    # leading space captured by the analyzer() patterns. Note that the
    # literal "th" suffix only parses days like "4th"; ordinals "st", "nd"
    # and "rd" matched by the regex will fall through and return None.
    formats = [
        ' %d/%m/%Y', ' %d/%m/%y',
        ' %d-%m-%Y', ' %d-%m-%y',
        ' %d.%m.%Y', ' %d.%m.%y',
        ' %Y-%m-%d', ' %y-%m-%d',
        ' %Y/%m/%d', ' %y/%m/%d',
        ' %Y.%m.%d', ' %y.%m.%d',
        # verbose "4th of March ..." forms
        ' %dth of %B in %Y', ' %dth of %b in %Y', ' %dth of %B in %y',
        ' %dth of %B, %Y', ' %dth of %B,%Y', ' %dth of %B, %y', ' %dth of %B,%y',
        ' %dth of %b, %Y', ' %dth of %b,%Y', ' %dth of %b, %y', ' %dth of %b,%y',
        # "4th,March, 2019" forms
        ' %dth,%B, %Y', ' %dth,%B,%Y', ' %dth,%B, %y', ' %dth,%B,%y',
        ' %dth,%b, %Y', ' %dth,%b,%Y', ' %dth,%b, %y', ' %dth,%b,%y',
        # "March 4, 2019" forms
        ' %B %d,%Y', ' %b %d,%Y', ' %B %d,%y', ' %b %d,%y',
        ' %B %d, %Y', ' %b %d, %Y', ' %B %d, %y', ' %b %d, %y',
    ]
    for fmt in formats:
        try:
            return datetime.datetime.strptime(text, fmt)
        except ValueError:
            pass
    return None
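# Example: time_parser(" 4th of March, 2019") returns
# datetime.datetime(2019, 3, 4, 0, 0); inputs matching none of the formats
# return None.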
| 23.876652
| 334
| 0.498708
| 655
| 5,420
| 4.11145
| 0.076336
| 0.148533
| 0.21723
| 0.333086
| 0.891199
| 0.888229
| 0.888229
| 0.886743
| 0.882287
| 0.882287
| 0
| 0.017737
| 0.313469
| 5,420
| 227
| 335
| 23.876652
| 0.705993
| 0.049631
| 0
| 0.866029
| 0
| 0.004785
| 0.14763
| 0.042735
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009569
| false
| 0.186603
| 0.014354
| 0
| 0.220096
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
75d9c4ee236d95533714ed94c6e082fd5452230a
| 26,265
|
py
|
Python
|
userbot/modules/ttroll.py
|
theshashankk/javes-3.0
|
9b16914be1350f7f6ac034bd30e33992035301b9
|
[
"MIT"
] | null | null | null |
userbot/modules/ttroll.py
|
theshashankk/javes-3.0
|
9b16914be1350f7f6ac034bd30e33992035301b9
|
[
"MIT"
] | null | null | null |
userbot/modules/ttroll.py
|
theshashankk/javes-3.0
|
9b16914be1350f7f6ac034bd30e33992035301b9
|
[
"MIT"
] | null | null | null |
import html
import json
import os
import random
import re
import sys
import time
import urllib.request
import zipfile
from asyncio import sleep
from collections import deque
from random import choice, getrandbits, randint

# MADE BY SHIVAM
import PIL.ImageOps
import pybase64
import requests
from PIL import Image, ImageDraw, ImageEnhance, ImageFont, ImageOps
from telegraph import exceptions, upload_file
from telethon import events
from telethon.tl.functions.messages import ImportChatInviteRequest as Get
from telethon.tl.types import Channel, PollAnswer
from validators.url import url

from userbot.utils import admin_cmd
from userbot.helpers import *
from userbot import CMD_HELP, bot
from userbot.events import register
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
async def purge():
    # Clean up both temp files (the original removed "temp.webp" twice;
    # "temp.png" was presumably intended for one of them).
    for tmp in ("temp.png", "temp.webp"):
        try:
            os.remove(tmp)
        except OSError:
            pass
async def clyde(text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=clyde&text={text}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def ship(link1,link2):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=ship&user1={link1}&user2={link2}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
'''async def captcha(url,username):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=captcha&url={url}&username={username}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"'''
async def whowouldwin(link1,link2):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=whowouldwin&user1={link1}&user2={link2}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
#######
async def ddlc(character,background,body,face,text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=ddlc&character={character}&background={background}&body={body}&face={face}&text={text}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
##22
async def jpeg(link):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=jpeg&url={link}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
'''async def kms(link):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=kms&url={link}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def kidnap(image):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=kidnap&image={image}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"'''
async def deepfry(image):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=deepfry&image={image}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def blurpify(image):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=blurpify&image={image}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def magik(image,intensity):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=magik&image={image}&intensity={intensity}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
'''async def clickforhentai(image,fontsize):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=clickforhentai&image={image}&fontsize={fontsize}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"'''
'''async def stickbug(image):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=stickbug&url={image}"
).json()
season4= r.get("message")
miraculous = url(season4)
if not miraculous:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(season4).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"'''
@register(outgoing=True, pattern=r"^!clyde(?: |$)(.*)")####################
async def cld(event):
text = event.pattern_match.group(1)
text = re.sub("&", "", text)
reply_to_id = event.message
if event.reply_to_msg_id:
reply_to_id = await event.get_reply_message()
if not text:
if event.is_reply and not reply_to_id.media:
text = reply_to_id.message
else:
            await event.edit("`Give text to write on`")
return
    await event.edit("`Your chat is under creation, wait a sec...`")
img = await clyde(text)
await event.client.send_file(event.chat_id, img, reply_to=reply_to_id)
await event.delete()
await purge()
@bot.on(admin_cmd(pattern="ship(?: |$)(.*)"))#######################
async def shp(event):
input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await ship(mlc,input_str)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
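# The handlers below all repeat the download / validate / telegraph-upload
# steps seen in shp(). A hedged sketch of a shared coroutine (hypothetical
# helper; the handlers keep their inline copies and do not call it). It
# assumes convert_toimage() from userbot.helpers, as used above, and returns
# the telegra.ph link or None on failure.
async def to_telegraph_link(event, replied):
    status = await event.edit("passing to telegraph...")
    path = await event.client.download_media(replied, "./temp/")
    if path.endswith(".webp"):
        path = convert_toimage(path)
    if not path.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
        await status.edit("the replied file is not supported")
        os.remove(path)
        return None
    if os.stat(path).st_size > 5242880:  # 5 MB
        await status.edit("the replied file size is not supported; it must be below 5 MB")
        os.remove(path)
        return None
    try:
        response = upload_file(path)
    except exceptions.TelegraphException as exc:
        await status.edit("ERROR: " + str(exc))
        return None
    finally:
        os.remove(path)
    return f"https://telegra.ph{response[0]}"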
'''@bot.on(admin_cmd(pattern="captcha(?: |$)(.*)"))
async def captch(event):
input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await captcha(mlc,input_str)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)'''
@bot.on(admin_cmd(pattern="win(?: |$)(.*)"))##############################
async def whowould(event):
input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await whowouldwin(mlc,input_str)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
@bot.on(admin_cmd(pattern="jpeg"))##############################
async def jpg(event):
#input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await jpeg(mlc)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
'''@bot.on(admin_cmd(pattern="kms"))
async def kms_kms(event):
#input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await kms(mlc)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
@bot.on(admin_cmd(pattern="kidnap"))
async def kidnaps(event):
#input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await kidnap(mlc)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)'''
@bot.on(admin_cmd(pattern="deep"))
async def fry(event):
#input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await deepfry(mlc)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
@bot.on(admin_cmd(pattern="brpify"))
async def blurpifry(event):
#input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await blurpify(mlc)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
@bot.on(admin_cmd(pattern="magik(?: |$)(.*)"))####################
async def magic(event):
input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await magik(mlc,int(input_str))
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)
'''@bot.on(admin_cmd(pattern="clickht(?: |$)(.*)"))
async def clickfor(event):
input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await clickforhentai(mlc,int(input_str))
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)'''
'''@bot.on(admin_cmd(pattern="bug(?: |$)(.*)"))
async def stick(event):
#input_str = event.pattern_match.group(1)
replied = await event.get_reply_message()
if not os.path.isdir("./temp/"):
os.makedirs("./temp/")
if not replied:
await event.edit("reply to a supported media file")
return
if replied.media:
mlcs4 = await event.edit("passing to telegraph...")
else:
await event.edit("reply to a supported media file")
return
download_lomlcion = await event.client.download_media(replied, "./temp/")
if download_lomlcion.endswith((".webp")):
download_lomlcion = convert_toimage(download_lomlcion)
size = os.stat(download_lomlcion).st_size
if download_lomlcion.endswith((".jpg", ".jpeg", ".png", ".bmp", ".ico")):
if size > 5242880:
await mlcs4.edit(
"the replied file size is not supported it must me below 5 mb"
)
os.remove(download_lomlcion)
return
await mlcs4.edit("generating image..")
else:
await mlcs4.edit("the replied file is not supported")
os.remove(download_lomlcion)
return
try:
response = upload_file(download_lomlcion)
os.remove(download_lomlcion)
except exceptions.TelegraphException as exc:
await mlcs4.edit("ERROR: " + str(exc))
os.remove(download_lomlcion)
return
mlc = f"https://telegra.ph{response[0]}"
mlc = await stickbug(mlc)
await mlcs4.delete()
await event.client.send_file(event.chat_id, mlc, reply_to=replied)'''
| 37.41453
| 136
| 0.615115
| 3,275
| 26,265
| 4.847023
| 0.061985
| 0.12196
| 0.038806
| 0.066524
| 0.886796
| 0.882323
| 0.882323
| 0.880433
| 0.878229
| 0.876087
| 0
| 0.011808
| 0.25197
| 26,265
| 701
| 137
| 37.467903
| 0.796152
| 0.00731
| 0
| 0.79803
| 0
| 0.009852
| 0.202698
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.017241
| 0.081281
| 0
| 0.197044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f94fb0c1aae9ad7ed299430b0c4008996c909c8b
| 13,459
|
py
|
Python
|
tests/test_subscription_manager.py
|
eurocontrol-swim/subscription-manager-client
|
a6fdc57fa78c956e69f57d434bfd08370ba16063
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_subscription_manager.py
|
eurocontrol-swim/subscription-manager-client
|
a6fdc57fa78c956e69f57d434bfd08370ba16063
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_subscription_manager.py
|
eurocontrol-swim/subscription-manager-client
|
a6fdc57fa78c956e69f57d434bfd08370ba16063
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright 2019 EUROCONTROL
==========================================
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
==========================================
Editorial note: this license is an instance of the BSD license template as provided by the Open Source Initiative:
http://opensource.org/licenses/BSD-3-Clause
Details on EUROCONTROL: http://www.eurocontrol.int
"""
from unittest.mock import Mock
import pytest
from rest_client.errors import APIError
from subscription_manager_client.subscription_manager import SubscriptionManagerClient
from tests.utils import make_topic_list, make_topic, make_subscription_list, make_subscription
__author__ = "EUROCONTROL (SWIM)"
BASE_URL = 'subscription-manager/api/1.0/'
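# The tests below share one pattern: stub the HTTP layer with a Mock whose
# .get/.post/.put/.delete returns a canned response, then assert both the
# deserialized return value and the exact URL the client called.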
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_get_topics__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.get_topics()
def test_get_topics__list_of_topics_is_returned():
topic_dict_list, expected_topic_list = make_topic_list()
response = Mock()
response.status_code = 200
response.content = topic_dict_list
response.json = Mock(return_value=topic_dict_list)
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
topic_list = client.get_topics()
assert expected_topic_list == topic_list
called_url = request_handler.get.call_args[0][0]
assert BASE_URL + 'topics/' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_get_topics_own__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.get_topics_own()
def test_get_topics_own__list_of_topics_is_returned():
topic_dict_list, expected_topic_list = make_topic_list()
response = Mock()
response.status_code = 200
response.content = topic_dict_list
response.json = Mock(return_value=topic_dict_list)
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
topic_list = client.get_topics_own()
assert expected_topic_list == topic_list
called_url = request_handler.get.call_args[0][0]
assert BASE_URL + 'topics/own' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_get_topic_by_id__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.get_topic_by_id(1)
def test_get_topic_by_id__topic_object_is_returned():
topic_dict, expected_topic = make_topic()
response = Mock()
response.status_code = 200
response.content = topic_dict
response.json = Mock(return_value=topic_dict)
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
topic = client.get_topic_by_id(1)
assert expected_topic == topic
called_url = request_handler.get.call_args[0][0]
assert BASE_URL + 'topics/1' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_post_topic__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.post = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.post_topic(Mock())
def test_post_topic__topic_object_is_returned():
topic_dict, expected_topic = make_topic()
response = Mock()
response.status_code = 201
response.content = topic_dict
response.json = Mock(return_value=topic_dict)
request_handler = Mock()
request_handler.post = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
topic = client.post_topic(Mock())
assert expected_topic == topic
called_url = request_handler.post.call_args[0][0]
assert BASE_URL + 'topics/' == called_url
# @pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
# def test_put_topic__http_error_code__raises_api_error(error_code):
# response = Mock()
# response.status_code = error_code
#
# request_handler = Mock()
# request_handler.put = Mock(return_value=response)
#
# client = SubscriptionManagerClient(request_handler=request_handler)
#
# with pytest.raises(APIError):
# client.put_topic(1, Mock())
#
#
# def test_put_topic__topic_object_is_returned():
# topic_dict, expected_topic = make_topic()
#
# response = Mock()
# response.status_code = 200
# response.content = topic_dict
# response.json = Mock(return_value=topic_dict)
#
# request_handler = Mock()
# request_handler.put = Mock(return_value=response)
#
# client = SubscriptionManagerClient(request_handler=request_handler)
#
# topic = client.put_topic(1, Mock())
#
# assert expected_topic == topic
#
# called_url = request_handler.put.call_args[0][0]
# assert BASE_URL + 'topics/1' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_delete_topic_by_id__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.delete = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.delete_topic_by_id(1)
def test_delete_topic_by_id():
response = Mock()
response.status_code = 204
response.content = {}
response.json = Mock(return_value={})
request_handler = Mock()
request_handler.delete = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
    client.delete_topic_by_id(1)
called_url = request_handler.delete.call_args[0][0]
assert BASE_URL + 'topics/1' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_get_subscriptions__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.get_subscriptions()
def test_get_subscriptions__list_of_subscriptions_is_returned():
subscription_dict_list, expected_subscription_list = make_subscription_list()
response = Mock()
response.status_code = 200
response.content = subscription_dict_list
response.json = Mock(return_value=subscription_dict_list)
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
subscription_list = client.get_subscriptions()
assert expected_subscription_list == subscription_list
called_url = request_handler.get.call_args[0][0]
assert BASE_URL + 'subscriptions/' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_get_subscription_by_id__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.get_subscription_by_id(1)
def test_get_subscription_by_id__subscription_object_is_returned():
subscription_dict, expected_subscription = make_subscription()
response = Mock()
response.status_code = 200
response.content = subscription_dict
response.json = Mock(return_value=subscription_dict)
request_handler = Mock()
request_handler.get = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
subscription = client.get_subscription_by_id(1)
assert expected_subscription == subscription
called_url = request_handler.get.call_args[0][0]
assert BASE_URL + 'subscriptions/1' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_post_subscription__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.post = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.post_subscription(Mock())
def test_post_subscription__subscription_object_is_returned():
subscription_dict, expected_subscription = make_subscription()
response = Mock()
response.status_code = 201
response.content = subscription_dict
response.json = Mock(return_value=subscription_dict)
request_handler = Mock()
request_handler.post = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
subscription = client.post_subscription(Mock())
assert expected_subscription == subscription
called_url = request_handler.post.call_args[0][0]
assert BASE_URL + 'subscriptions/' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_put_subscription__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.put = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.put_subscription(1, Mock())
def test_put_subscription__subscription_object_is_returned():
subscription_dict, expected_subscription = make_subscription()
response = Mock()
response.status_code = 200
response.content = subscription_dict
response.json = Mock(return_value=subscription_dict)
request_handler = Mock()
request_handler.put = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
subscription = client.put_subscription(1, {'active': False})
assert expected_subscription == subscription
called_url = request_handler.put.call_args[0][0]
assert BASE_URL + 'subscriptions/1' == called_url
@pytest.mark.parametrize('error_code', [400, 401, 403, 404, 500])
def test_delete_subscription_by_id__http_error_code__raises_api_error(error_code):
response = Mock()
response.status_code = error_code
request_handler = Mock()
request_handler.delete = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
with pytest.raises(APIError):
client.delete_subscription_by_id(1)
def test_delete_subscription_by_id__request_url_is_correct():
response = Mock()
response.status_code = 204
response.content = {}
response.json = Mock(return_value={})
request_handler = Mock()
request_handler.delete = Mock(return_value=response)
client = SubscriptionManagerClient(request_handler=request_handler)
    client.delete_subscription_by_id(1)
called_url = request_handler.delete.call_args[0][0]
assert BASE_URL + 'subscriptions/1' == called_url
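# --- Hedged sketch (not the project's actual code) ---
# The client under test is not shown in this file. This is a minimal sketch
# of one method consistent with the behaviour the tests above pin down: the
# injected request_handler issues the HTTP call against BASE_URL, non-success
# status codes raise APIError, and successful responses are deserialized from
# the JSON body. `Subscription.from_dict` is an assumed helper name.
class SubscriptionManagerClientSketch:
    def __init__(self, request_handler):
        self._request_handler = request_handler

    def get_subscription_by_id(self, subscription_id):
        response = self._request_handler.get(
            BASE_URL + 'subscriptions/{}'.format(subscription_id))
        if response.status_code != 200:
            raise APIError(response.status_code)
        return Subscription.from_dict(response.json())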
| 32.121718
| 120
| 0.755925
| 1,697
| 13,459
| 5.662935
| 0.117266
| 0.144225
| 0.051509
| 0.059521
| 0.816649
| 0.804787
| 0.783559
| 0.771176
| 0.756191
| 0.749948
| 0
| 0.021772
| 0.153652
| 13,459
| 418
| 121
| 32.198565
| 0.821877
| 0.203656
| 0
| 0.762332
| 0
| 0
| 0.024911
| 0.002716
| 0
| 0
| 0
| 0
| 0.080717
| 1
| 0.089686
| false
| 0
| 0.022422
| 0
| 0.112108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9578428c196769f62eabbfc891936929fe86ca6
| 88
|
py
|
Python
|
models/__init__.py
|
nt-hn/adversarial-attack
|
133008840f952c864d90e200d7173a3320681b61
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
nt-hn/adversarial-attack
|
133008840f952c864d90e200d7173a3320681b61
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
nt-hn/adversarial-attack
|
133008840f952c864d90e200d7173a3320681b61
|
[
"MIT"
] | null | null | null |
from models.resnet import *
from models.metrics import *
from models.focal_loss import *
| 29.333333
| 31
| 0.806818
| 13
| 88
| 5.384615
| 0.538462
| 0.428571
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 88
| 3
| 31
| 29.333333
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f9a71a5ab3d48e247f6dab63c0ea9ba060b326da
| 17,649
|
py
|
Python
|
tests/test_utils.py
|
jmeyers314/jtrace
|
9149a5af766fb9a9cd7ebfe6f3f18de0eb8b2e89
|
[
"BSD-2-Clause"
] | 13
|
2018-12-24T03:55:04.000Z
|
2021-11-09T11:40:40.000Z
|
tests/test_utils.py
|
bregeon/batoid
|
7b03d9b59ff43db6746eadab7dd58a463a0415c3
|
[
"BSD-2-Clause"
] | 65
|
2017-08-15T07:19:05.000Z
|
2021-09-08T17:44:57.000Z
|
tests/test_utils.py
|
bregeon/batoid
|
7b03d9b59ff43db6746eadab7dd58a463a0415c3
|
[
"BSD-2-Clause"
] | 10
|
2019-02-19T07:02:31.000Z
|
2021-12-10T22:19:40.000Z
|
import batoid
from test_helpers import timer
import numpy as np
@timer
def test_normalized():
rng = np.random.default_rng(5)
for _ in range(1000):
x = rng.uniform()
y = rng.uniform()
z = rng.uniform()
w = rng.uniform()
np.testing.assert_allclose(
np.linalg.norm(batoid.utils.normalized([x])),
1.0,
rtol=0, atol=1e-10
)
np.testing.assert_allclose(
np.linalg.norm(batoid.utils.normalized([x, y])),
1.0,
rtol=0, atol=1e-10
)
np.testing.assert_allclose(
np.linalg.norm(batoid.utils.normalized([x, y, z])),
1.0,
rtol=0, atol=1e-10
)
np.testing.assert_allclose(
np.linalg.norm(batoid.utils.normalized([x, y, z, w])),
1.0,
rtol=0, atol=1e-10
)
@timer
def test_gnomonicDirCos():
rng = np.random.default_rng(57)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
# Insert a (0,0) explicitly
u[5000] = v[5000] = 0
# Test round trip
u1, v1 = batoid.utils.dirCosToGnomonic(*batoid.utils.gnomonicToDirCos(u, v))
np.testing.assert_allclose(u, u1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(v, v1, rtol=1e-10, atol=1e-12)
u2, v2 = batoid.utils.dirCosToField(
*batoid.utils.fieldToDirCos(
u, v, projection='gnomonic'
),
projection='gnomonic'
)
np.testing.assert_array_equal(u1, u2)
np.testing.assert_array_equal(v1, v2)
# Test round trip in the other direction
alpha = rng.uniform(-0.1, 0.1, size=10000)
beta = rng.uniform(-0.1, 0.1, size=10000)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
# Insert a (0,0) explicitly
alpha[5000] = 0
beta[5000] = 0
gamma[5000] = -1
alpha1, beta1, gamma1 = batoid.utils.gnomonicToDirCos(
*batoid.utils.dirCosToGnomonic(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# For really tiny angles, u/v should be basically the same as alpha/beta
u = rng.uniform(-1e-6, 1e-6, size=10000)
v = rng.uniform(-1e-6, 1e-6, size=10000)
alpha, beta, gamma = batoid.utils.gnomonicToDirCos(u, v)
np.testing.assert_allclose(alpha, u, rtol=0, atol=1e-8)
np.testing.assert_allclose(beta, v, rtol=0, atol=1e-8)
# Check normalization of direction cosines
np.testing.assert_allclose(
np.sqrt(alpha*alpha + beta*beta + gamma*gamma),
1,
rtol=0, atol=1e-15
)
# Check scalar
alpha = rng.uniform(-0.1, 0.1)
beta = rng.uniform(-0.1, 0.1)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
alpha1, beta1, gamma1 = batoid.utils.gnomonicToDirCos(
*batoid.utils.dirCosToGnomonic(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# Check scalar (0,0)
u, v = batoid.utils.dirCosToGnomonic(0, 0, -1)
np.testing.assert_allclose([u, v], 0, rtol=0, atol=1e-12)
a, b, c = batoid.utils.gnomonicToDirCos(0, 0)
np.testing.assert_allclose([a, b, c], [0, 0, -1], rtol=1e-10, atol=1e-12)
@timer
def test_postelDirCos():
rng = np.random.default_rng(577)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
# Insert a (0,0) explicitly
u[5000] = v[5000] = 0
# Test round trip
u1, v1 = batoid.utils.dirCosToPostel(*batoid.utils.postelToDirCos(u, v))
np.testing.assert_allclose(u, u1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(v, v1, rtol=1e-10, atol=1e-12)
u2, v2 = batoid.utils.dirCosToField(
*batoid.utils.fieldToDirCos(
u, v, projection='postel'
),
projection='postel'
)
np.testing.assert_array_equal(u1, u2)
np.testing.assert_array_equal(v1, v2)
# Test round trip in the other direction
alpha = rng.uniform(-0.1, 0.1, size=10000)
beta = rng.uniform(-0.1, 0.1, size=10000)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
# Insert a (0,0) explicitly
alpha[5000] = 0
beta[5000] = 0
gamma[5000] = -1
alpha1, beta1, gamma1 = batoid.utils.postelToDirCos(
*batoid.utils.dirCosToPostel(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# For really tiny angles, u/v should be basically the same as alpha/beta
u = rng.uniform(-1e-6, 1e-6, size=10000)
v = rng.uniform(-1e-6, 1e-6, size=10000)
alpha, beta, gamma = batoid.utils.postelToDirCos(u, v)
np.testing.assert_allclose(alpha, u, rtol=0, atol=1e-8)
np.testing.assert_allclose(beta, v, rtol=0, atol=1e-8)
# Check normalization of direction cosines
np.testing.assert_allclose(
np.sqrt(alpha*alpha + beta*beta + gamma*gamma),
1,
rtol=0, atol=1e-15
)
# Check scalar
alpha = rng.uniform(-0.1, 0.1)
beta = rng.uniform(-0.1, 0.1)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
alpha1, beta1, gamma1 = batoid.utils.postelToDirCos(
*batoid.utils.dirCosToPostel(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# Check scalar (0,0)
u, v = batoid.utils.dirCosToPostel(0, 0, -1)
np.testing.assert_allclose([u, v], 0, rtol=0, atol=1e-12)
a, b, c = batoid.utils.postelToDirCos(0, 0)
np.testing.assert_allclose([a, b, c], [0, 0, -1], rtol=1e-10, atol=1e-12)
@timer
def test_zemaxDirCos():
rng = np.random.default_rng(5772)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
# Insert a (0,0) explicitly
u[5000] = v[5000] = 0
# Test round trip
u1, v1 = batoid.utils.dirCosToZemax(*batoid.utils.zemaxToDirCos(u, v))
np.testing.assert_allclose(u, u1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(v, v1, rtol=1e-10, atol=1e-12)
u2, v2 = batoid.utils.dirCosToField(
*batoid.utils.fieldToDirCos(
u, v, projection='zemax'
),
projection='zemax'
)
np.testing.assert_array_equal(u1, u2)
np.testing.assert_array_equal(v1, v2)
# Test round trip in the other direction
alpha = rng.uniform(-0.1, 0.1, size=10000)
beta = rng.uniform(-0.1, 0.1, size=10000)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
# Insert a (0,0) explicitly
alpha[5000] = 0
beta[5000] = 0
gamma[5000] = -1
alpha1, beta1, gamma1 = batoid.utils.zemaxToDirCos(
*batoid.utils.dirCosToZemax(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# For really tiny angles, u/v should be basically the same as alpha/beta
u = rng.uniform(-1e-6, 1e-6, size=10000)
v = rng.uniform(-1e-6, 1e-6, size=10000)
alpha, beta, gamma = batoid.utils.zemaxToDirCos(u, v)
np.testing.assert_allclose(alpha, u, rtol=0, atol=1e-8)
np.testing.assert_allclose(beta, v, rtol=0, atol=1e-8)
# Check normalization of direction cosines
np.testing.assert_allclose(
np.sqrt(alpha*alpha + beta*beta + gamma*gamma),
1,
rtol=0, atol=1e-15
)
# Check scalar
alpha = rng.uniform(-0.1, 0.1)
beta = rng.uniform(-0.1, 0.1)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
alpha1, beta1, gamma1 = batoid.utils.zemaxToDirCos(
*batoid.utils.dirCosToZemax(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# Check scalar (0,0)
u, v = batoid.utils.dirCosToZemax(0, 0, -1)
np.testing.assert_allclose([u, v], 0, rtol=0, atol=1e-12)
a, b, c = batoid.utils.zemaxToDirCos(0, 0)
np.testing.assert_allclose([a, b, c], [0, 0, -1], rtol=1e-10, atol=1e-12)
@timer
def test_stereographicDirCos():
rng = np.random.default_rng(57721)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
# Insert a (0,0) explicitly
u[5000] = v[5000] = 0
# Test round trip
u1, v1 = batoid.utils.dirCosToStereographic(
*batoid.utils.stereographicToDirCos(u, v)
)
np.testing.assert_allclose(u, u1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(v, v1, rtol=1e-10, atol=1e-12)
u2, v2 = batoid.utils.dirCosToField(
*batoid.utils.fieldToDirCos(
u, v, projection='stereographic'
),
projection='stereographic'
)
np.testing.assert_array_equal(u1, u2)
np.testing.assert_array_equal(v1, v2)
# Test round trip in the other direction
alpha = rng.uniform(-0.1, 0.1, size=10000)
beta = rng.uniform(-0.1, 0.1, size=10000)
    gamma = -np.sqrt(1 - alpha**2 - beta**2)
# Insert a (0,0) explicitly
alpha[5000] = 0
beta[5000] = 0
gamma[5000] = -1
alpha1, beta1, gamma1 = batoid.utils.stereographicToDirCos(
*batoid.utils.dirCosToStereographic(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# For really tiny angles, u/v should be basically the same as alpha/beta
u = rng.uniform(-1e-6, 1e-6, size=10000)
v = rng.uniform(-1e-6, 1e-6, size=10000)
alpha, beta, gamma = batoid.utils.stereographicToDirCos(u, v)
np.testing.assert_allclose(alpha, u, rtol=0, atol=1e-8)
np.testing.assert_allclose(beta, v, rtol=0, atol=1e-8)
# Check normalization of direction cosines
np.testing.assert_allclose(
np.sqrt(alpha*alpha + beta*beta + gamma*gamma),
1,
rtol=0, atol=1e-15
)
# Check scalar
alpha = rng.uniform(-0.1, 0.1)
beta = rng.uniform(-0.1, 0.1)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
alpha1, beta1, gamma1 = batoid.utils.stereographicToDirCos(
*batoid.utils.dirCosToStereographic(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# Check scalar (0,0)
u, v = batoid.utils.dirCosToStereographic(0, 0, -1)
np.testing.assert_allclose([u, v], 0, rtol=0, atol=1e-12)
a, b, c = batoid.utils.stereographicToDirCos(0, 0)
np.testing.assert_allclose([a, b, c], [0, 0, -1], rtol=1e-10, atol=1e-12)
@timer
def test_orthographicDirCos():
rng = np.random.default_rng(577215)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
# Insert a (0,0) explicitly
u[5000] = v[5000] = 0
# Test round trip
u1, v1 = batoid.utils.dirCosToOrthographic(
*batoid.utils.orthographicToDirCos(u, v)
)
np.testing.assert_allclose(u, u1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(v, v1, rtol=1e-10, atol=1e-12)
u2, v2 = batoid.utils.dirCosToField(
*batoid.utils.fieldToDirCos(
u, v, projection='orthographic'
),
projection='orthographic'
)
np.testing.assert_array_equal(u1, u2)
np.testing.assert_array_equal(v1, v2)
# Test round trip in the other direction
alpha = rng.uniform(-0.1, 0.1, size=10000)
beta = rng.uniform(-0.1, 0.1, size=10000)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
# Insert a (0,0) explicitly
alpha[5000] = 0
beta[5000] = 0
gamma[5000] = -1
alpha1, beta1, gamma1 = batoid.utils.orthographicToDirCos(
*batoid.utils.dirCosToOrthographic(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# For really tiny angles, u/v should be basically the same as alpha/beta
u = rng.uniform(-1e-6, 1e-6, size=10000)
v = rng.uniform(-1e-6, 1e-6, size=10000)
alpha, beta, gamma = batoid.utils.orthographicToDirCos(u, v)
np.testing.assert_allclose(alpha, u, rtol=0, atol=1e-8)
np.testing.assert_allclose(beta, v, rtol=0, atol=1e-8)
# Check normalization of direction cosines
np.testing.assert_allclose(
np.sqrt(alpha*alpha + beta*beta + gamma*gamma),
1,
rtol=0, atol=1e-15
)
# Check scalar
alpha = rng.uniform(-0.1, 0.1)
beta = rng.uniform(-0.1, 0.1)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
alpha1, beta1, gamma1 = batoid.utils.orthographicToDirCos(
*batoid.utils.dirCosToOrthographic(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# Check scalar (0,0)
u, v = batoid.utils.dirCosToOrthographic(0, 0, -1)
np.testing.assert_allclose([u, v], 0, rtol=0, atol=1e-12)
a, b, c = batoid.utils.orthographicToDirCos(0, 0)
np.testing.assert_allclose([a, b, c], [0, 0, -1], rtol=1e-10, atol=1e-12)
@timer
def test_lambertDirCos():
rng = np.random.default_rng(5772156)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
# Insert a (0,0) explicitly
u[5000] = v[5000] = 0
# Test round trip
u1, v1 = batoid.utils.dirCosToLambert(*batoid.utils.lambertToDirCos(u, v))
np.testing.assert_allclose(u, u1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(v, v1, rtol=1e-10, atol=1e-12)
u2, v2 = batoid.utils.dirCosToField(
*batoid.utils.fieldToDirCos(
u, v, projection='lambert'
),
projection='lambert'
)
np.testing.assert_array_equal(u1, u2)
np.testing.assert_array_equal(v1, v2)
# Test round trip in the other direction
alpha = rng.uniform(-0.5, 0.5, size=10000)
beta = rng.uniform(-0.5, 0.5, size=10000)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
# Insert a (0,0) explicitly
alpha[5000] = 0
beta[5000] = 0
gamma[5000] = -1
alpha1, beta1, gamma1 = batoid.utils.lambertToDirCos(
*batoid.utils.dirCosToLambert(alpha, beta, gamma)
)
# Not sure why Lambert isn't as good as other projections in this test.
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# For really tiny angles, u/v should be basically the same as alpha/beta
u = rng.uniform(-1e-6, 1e-6, size=10000)
v = rng.uniform(-1e-6, 1e-6, size=10000)
alpha, beta, gamma = batoid.utils.lambertToDirCos(u, v)
np.testing.assert_allclose(alpha, u, rtol=0, atol=1e-8)
np.testing.assert_allclose(beta, v, rtol=0, atol=1e-8)
# Check normalization of direction cosines
np.testing.assert_allclose(
np.sqrt(alpha*alpha + beta*beta + gamma*gamma),
1,
rtol=0, atol=1e-15
)
# Check scalar
alpha = rng.uniform(-0.1, 0.1)
beta = rng.uniform(-0.1, 0.1)
gamma = -np.sqrt(1 - alpha**2 - beta**2)
alpha1, beta1, gamma1 = batoid.utils.lambertToDirCos(
*batoid.utils.dirCosToLambert(alpha, beta, gamma)
)
np.testing.assert_allclose(alpha, alpha1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(beta, beta1, rtol=1e-10, atol=1e-12)
np.testing.assert_allclose(gamma, gamma1, rtol=1e-10, atol=1e-12)
# Check scalar (0,0)
u, v = batoid.utils.dirCosToLambert(0, 0, -1)
np.testing.assert_allclose([u, v], 0, rtol=0, atol=1e-12)
a, b, c = batoid.utils.lambertToDirCos(0, 0)
np.testing.assert_allclose([a, b, c], [0, 0, -1], rtol=1e-10, atol=1e-12)
@timer
def test_coord():
rng = np.random.default_rng(57721566)
import coord
pole = coord.CelestialCoord(0.*coord.degrees, 90.*coord.degrees)
u = rng.uniform(-0.5, 0.5, size=10000)
v = rng.uniform(-0.5, 0.5, size=10000)
for projection in ['gnomonic', 'stereographic', 'postel', 'lambert']:
ra, dec = pole.deproject_rad(u, v, projection=projection)
xcos, ycos, zcos = batoid.utils.fieldToDirCos(
u, v, projection=projection
)
np.testing.assert_allclose(-np.sin(dec), zcos, rtol=0, atol=1e-13)
np.testing.assert_allclose(
np.abs((np.pi/2-ra)-np.arctan2(ycos, xcos)),
np.pi,
rtol=0, atol=1e-13
)
# Check invalid input
with np.testing.assert_raises(ValueError):
batoid.utils.fieldToDirCos(
u, v, projection="banana"
)
with np.testing.assert_raises(ValueError):
batoid.utils.dirCosToField(
u, v, v, projection="banana"
)
if __name__ == '__main__':
test_normalized()
test_gnomonicDirCos()
test_postelDirCos()
test_zemaxDirCos()
test_stereographicDirCos()
test_orthographicDirCos()
test_lambertDirCos()
test_coord()
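# --- Hedged sketch of the math under test (illustration only) ---
# The gnomonic (tangent-plane) round trips above are consistent with mapping
# a field point (u, v) to the unit vector along (u, v, -1). This sketch is
# not necessarily batoid's implementation, just the geometry the tests check:
# alpha ~ u for tiny angles, alpha**2 + beta**2 + gamma**2 == 1, and
# (0, 0) maps to (0, 0, -1).
def gnomonic_to_dircos_sketch(u, v):
    norm = np.sqrt(1.0 + u*u + v*v)
    return u/norm, v/norm, -1.0/norm

def dircos_to_gnomonic_sketch(alpha, beta, gamma):
    # Inverse: project back onto the tangent plane at gamma = -1.
    return -alpha/gamma, -beta/gamma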
| 35.94501
| 80
| 0.634597
| 2,695
| 17,649
| 4.102041
| 0.055288
| 0.079783
| 0.132972
| 0.174763
| 0.89118
| 0.869109
| 0.863229
| 0.861872
| 0.851199
| 0.827318
| 0
| 0.092549
| 0.214516
| 17,649
| 490
| 81
| 36.018367
| 0.704898
| 0.090373
| 0
| 0.670157
| 0
| 0
| 0.009748
| 0
| 0
| 0
| 0
| 0
| 0.256545
| 1
| 0.020942
| false
| 0
| 0.010471
| 0
| 0.031414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9dfc1372965c11bb87ab121cda93670db1cfc54
| 116
|
py
|
Python
|
simuvex/simuvex/engines/vex/expressions/const.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 86
|
2015-08-06T23:25:07.000Z
|
2022-02-17T14:58:22.000Z
|
simuvex/simuvex/engines/vex/expressions/const.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 132
|
2015-09-10T19:06:59.000Z
|
2018-10-04T20:36:45.000Z
|
simuvex/simuvex/engines/vex/expressions/const.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 80
|
2015-08-07T10:30:20.000Z
|
2020-03-21T14:45:28.000Z
|
print('... Importing simuvex/engines/vex/expressions/const.py ...')
from angr.engines.vex.expressions.const import *
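# Hedged sketch: a Python 3 take on the same compatibility shim, using the
# warnings module instead of an unconditional print so downstream code can
# filter or escalate the notice. The message text is illustrative.
import warnings
warnings.warn(
    "simuvex.engines.vex.expressions.const has moved to "
    "angr.engines.vex.expressions.const",
    DeprecationWarning,
    stacklevel=2,
)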
| 38.666667
| 66
| 0.767241
| 15
| 116
| 5.933333
| 0.733333
| 0.224719
| 0.47191
| 0.58427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077586
| 116
| 2
| 67
| 58
| 0.831776
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
dda78f75bab81ff1696f48b0235f1c0e24642b69
| 15,532
|
py
|
Python
|
feature_extractor.py
|
pykao/BraTS2018-survival
|
8e6e1850cc462fc8bc63ebe12010257f618168f9
|
[
"MIT"
] | 7
|
2020-06-27T14:31:11.000Z
|
2021-10-02T16:30:21.000Z
|
feature_extractor.py
|
pykao/BraTS2018-survival
|
8e6e1850cc462fc8bc63ebe12010257f618168f9
|
[
"MIT"
] | null | null | null |
feature_extractor.py
|
pykao/BraTS2018-survival
|
8e6e1850cc462fc8bc63ebe12010257f618168f9
|
[
"MIT"
] | 2
|
2018-09-10T02:03:45.000Z
|
2021-03-06T01:54:15.000Z
|
import numpy as np
import os
import SimpleITK as sitk
from scipy.io import loadmat
from skimage.measure import regionprops
import paths
import utils
def ReadImage(path):
    ''' This code returns the numpy nd array for an MR image at path'''
return sitk.GetArrayFromImage(sitk.ReadImage(path)).astype(np.float32)
def binarize_connectivity_matrix(connectivity_matrix, threshold=0.01):
''' binarize the matrix '''
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; use float
    binary_connectivity_matrix = np.zeros(connectivity_matrix.shape, dtype=float)
#print threshold*np.amax(connectivity_matrix)
binary_connectivity_matrix[connectivity_matrix >= threshold*np.amax(connectivity_matrix)] = 1
return binary_connectivity_matrix
def normalize_connectivity_matrix(connectivity_matrix):
    ''' normalize the connectivity matrix'''
normalized_connectivity_matrix = np.copy(connectivity_matrix)
return normalized_connectivity_matrix/np.amax(connectivity_matrix)
def threshold_connectivity_matrix(connectivity_matrix, threshold=0.01):
    ''' threshold the connectivity matrix in order to remove the noise'''
    thresholded_connectivity_matrix = np.copy(connectivity_matrix)
thresholded_connectivity_matrix[connectivity_matrix <= threshold*np.amax(connectivity_matrix)] = 0
return thresholded_connectivity_matrix
def weight_conversion(W):
''' convert to the normalized version and binary version'''
W_bin = np.copy(W)
W_bin[W!=0]=1
W_nrm = np.copy(W)
W_nrm = W_nrm/np.amax(np.absolute(W))
return W_nrm, W_bin
def get_pat_name(pat_dir):
''' get the patient's name'''
temp = os.path.split(pat_dir)[1]
return temp[:temp.find('_whole_tumor')]
def get_lesion_weights(whole_tumor_mni_path):
''' get the weight vector'''
#print(whole_tumor_mni_path)
aal_path = os.path.join(paths.dsi_studio_path, 'atlas', 'aal.nii.gz')
aal_nda = utils.ReadImage(aal_path)
aal_182_218_182 = utils.reshape_by_padding_upper_coords(aal_nda, (182,218,182), 0)
whole_tumor_mni_nda = utils.ReadImage(whole_tumor_mni_path)
weights = np.zeros(int(np.amax(aal_182_218_182)), dtype=float)
for bp_number in range(int(np.amax(aal_182_218_182))):
mask = np.zeros(aal_182_218_182.shape, aal_182_218_182.dtype)
mask[aal_182_218_182==(bp_number+1)]=1
bp_size = float(np.count_nonzero(mask))
whole_tumor_in_bp = np.multiply(mask, whole_tumor_mni_nda)
whole_tumor_in_bp_size = float(np.count_nonzero(whole_tumor_in_bp))
weights[bp_number] = whole_tumor_in_bp_size/bp_size
return weights
def get_weighted_connectivity_feature_vectors_test(dsi_studio_path=paths.dsi_studio_path, region='seed'):
connectivity_testing_dir = os.path.join(dsi_studio_path, 'connectivity', region, 'testing')
whole_tumor_mni_testing_dir = os.path.join(dsi_studio_path, 'predicted_whole_tumor', 'testing')
connectivity_pass_files = [os.path.join(root, name) for root, dirs, files in os.walk(connectivity_testing_dir) for name in files if 'count' in name and 'ncount' not in name and 'connectivity' in name and 'pass' in name and name.endswith('.mat')]
connectivity_pass_files.sort()
connectivity_end_files = [os.path.join(root, name) for root, dirs, files in os.walk(connectivity_testing_dir) for name in files if 'count' in name and 'ncount' not in name and 'connectivity' in name and 'end' in name and name.endswith('.mat')]
connectivity_end_files.sort()
whole_tumor_mni_paths = [os.path.join(root, name) for root, dirs, files in os.walk(whole_tumor_mni_testing_dir) for name in files if 'whole_tumor' in name and 'MNI152_1mm' in name and name.endswith('nii.gz')]
whole_tumor_mni_paths.sort()
assert(len(connectivity_pass_files) == len(connectivity_end_files) == len(whole_tumor_mni_paths)==77)
W_dsi_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_nrm_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_bin_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_dsi_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_nrm_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_bin_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
pat_names=[]
for idx, (connectivity_pass_file, connectivity_end_file, whole_tumor_mni_path) in enumerate(zip(connectivity_pass_files, connectivity_end_files, whole_tumor_mni_paths)):
assert(get_pat_name(connectivity_pass_file)==get_pat_name(connectivity_end_file))
assert(get_pat_name(connectivity_pass_file) in whole_tumor_mni_path)
pat_name = get_pat_name(connectivity_pass_file)
pat_names.append(pat_name)
#lesion weights
lesion_weights = get_lesion_weights(whole_tumor_mni_path)
connectivity_matrix_pass_obj = loadmat(connectivity_pass_file)
weighted_connectivity_matrix_pass_temp = connectivity_matrix_pass_obj['connectivity']
weighted_connectivity_matrix_pass = threshold_connectivity_matrix(weighted_connectivity_matrix_pass_temp, 0)
W_nrm_pass, W_bin_pass = weight_conversion(weighted_connectivity_matrix_pass)
connectivity_matrix_end_obj = loadmat(connectivity_end_file)
weighted_connectivity_matrix_end_temp = connectivity_matrix_end_obj['connectivity']
weighted_connectivity_matrix_end = threshold_connectivity_matrix(weighted_connectivity_matrix_end_temp, 0)
W_nrm_end, W_bin_end = weight_conversion(weighted_connectivity_matrix_end)
# weighted connectivity histogram
W_dsi_pass_histogram_features[idx, :] = np.multiply(np.sum(weighted_connectivity_matrix_pass, axis=0), lesion_weights)
W_nrm_pass_histogram_features[idx, :] = np.multiply(np.sum(W_nrm_pass, axis=0), lesion_weights)
W_bin_pass_histogram_features[idx, :] = np.multiply(np.sum(W_bin_pass, axis=0), lesion_weights)
W_dsi_end_histogram_features[idx, :] = np.multiply(np.sum(weighted_connectivity_matrix_end, axis=0), lesion_weights)
W_nrm_end_histogram_features[idx, :] = np.multiply(np.sum(W_nrm_end, axis=0), lesion_weights)
W_bin_end_histogram_features[idx, :] = np.multiply(np.sum(W_bin_end, axis=0), lesion_weights)
    return pat_names, W_dsi_pass_histogram_features, W_nrm_pass_histogram_features, W_bin_pass_histogram_features, W_dsi_end_histogram_features, W_nrm_end_histogram_features, W_bin_end_histogram_features
def get_weighted_connectivity_feature_vectors_valid(dsi_studio_path=paths.dsi_studio_path, region='seed'):
connectivity_valid_dir = os.path.join(dsi_studio_path, 'connectivity', region, 'validation')
whole_tumor_mni_valid_dir = os.path.join(dsi_studio_path, 'predicted_whole_tumor', 'validation')
connectivity_pass_files = [os.path.join(root, name) for root, dirs, files in os.walk(connectivity_valid_dir) for name in files if 'count' in name and 'ncount' not in name and 'connectivity' in name and 'pass' in name and name.endswith('.mat')]
connectivity_pass_files.sort()
connectivity_end_files = [os.path.join(root, name) for root, dirs, files in os.walk(connectivity_valid_dir) for name in files if 'count' in name and 'ncount' not in name and 'connectivity' in name and 'end' in name and name.endswith('.mat')]
connectivity_end_files.sort()
whole_tumor_mni_paths = [os.path.join(root, name) for root, dirs, files in os.walk(whole_tumor_mni_valid_dir) for name in files if 'whole_tumor' in name and 'MNI152_1mm' in name and name.endswith('nii.gz')]
whole_tumor_mni_paths.sort()
assert(len(connectivity_pass_files) == len(connectivity_end_files) == len(whole_tumor_mni_paths)==28)
W_dsi_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_nrm_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_bin_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_dsi_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_nrm_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_bin_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
pat_names=[]
for idx, (connectivity_pass_file, connectivity_end_file, whole_tumor_mni_path) in enumerate(zip(connectivity_pass_files, connectivity_end_files, whole_tumor_mni_paths)):
assert(get_pat_name(connectivity_pass_file)==get_pat_name(connectivity_end_file))
assert(get_pat_name(connectivity_pass_file) in whole_tumor_mni_path)
pat_name = get_pat_name(connectivity_pass_file)
pat_names.append(pat_name)
#lesion weights
lesion_weights = get_lesion_weights(whole_tumor_mni_path)
connectivity_matrix_pass_obj = loadmat(connectivity_pass_file)
weighted_connectivity_matrix_pass_temp = connectivity_matrix_pass_obj['connectivity']
weighted_connectivity_matrix_pass = threshold_connectivity_matrix(weighted_connectivity_matrix_pass_temp, 0)
W_nrm_pass, W_bin_pass = weight_conversion(weighted_connectivity_matrix_pass)
connectivity_matrix_end_obj = loadmat(connectivity_end_file)
weighted_connectivity_matrix_end_temp = connectivity_matrix_end_obj['connectivity']
weighted_connectivity_matrix_end = threshold_connectivity_matrix(weighted_connectivity_matrix_end_temp, 0)
W_nrm_end, W_bin_end = weight_conversion(weighted_connectivity_matrix_end)
# weighted connectivity histogram
W_dsi_pass_histogram_features[idx, :] = np.multiply(np.sum(weighted_connectivity_matrix_pass, axis=0), lesion_weights)
W_nrm_pass_histogram_features[idx, :] = np.multiply(np.sum(W_nrm_pass, axis=0), lesion_weights)
W_bin_pass_histogram_features[idx, :] = np.multiply(np.sum(W_bin_pass, axis=0), lesion_weights)
W_dsi_end_histogram_features[idx, :] = np.multiply(np.sum(weighted_connectivity_matrix_end, axis=0), lesion_weights)
W_nrm_end_histogram_features[idx, :] = np.multiply(np.sum(W_nrm_end, axis=0), lesion_weights)
W_bin_end_histogram_features[idx, :] = np.multiply(np.sum(W_bin_end, axis=0), lesion_weights)
    return pat_names, W_dsi_pass_histogram_features, W_nrm_pass_histogram_features, W_bin_pass_histogram_features, W_dsi_end_histogram_features, W_nrm_end_histogram_features, W_bin_end_histogram_features
def get_weighted_connectivity_feature_vectors_train(dsi_studio_path=paths.dsi_studio_path, mode='gt', region='seed'):
''' Loading the survival dataset '''
survival_dataset = utils.load_survival_training_dataset()
if mode == 'gt':
connectivity_train_dir = os.path.join(dsi_studio_path, 'connectivity', region, 'gt')
whole_tumor_mni_train_dir = os.path.join(dsi_studio_path, 'gt_whole_tumor')
    elif mode == 'predicted':
connectivity_train_dir = os.path.join(dsi_studio_path, 'connectivity', region, 'training')
whole_tumor_mni_train_dir = os.path.join(dsi_studio_path, 'predicted_whole_tumor', 'training')
connectivity_pass_files = [os.path.join(root, name) for root, dirs, files in os.walk(connectivity_train_dir) for name in files if 'count' in name and 'ncount' not in name and 'connectivity' in name and 'pass' in name and name.endswith('.mat')]
connectivity_pass_files.sort()
connectivity_end_files = [os.path.join(root, name) for root, dirs, files in os.walk(connectivity_train_dir) for name in files if 'count' in name and 'ncount' not in name and 'connectivity' in name and 'end' in name and name.endswith('.mat')]
connectivity_end_files.sort()
whole_tumor_mni_paths = [os.path.join(root, name) for root, dirs, files in os.walk(whole_tumor_mni_train_dir) for name in files if 'whole_tumor' in name and 'MNI152_1mm' in name and name.endswith('nii.gz')]
whole_tumor_mni_paths.sort()
assert(len(connectivity_pass_files) == len(connectivity_end_files) == len(whole_tumor_mni_paths)==59)
pat_names = []
gt = np.zeros((len(connectivity_pass_files),2), dtype = np.float32)
W_dsi_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_nrm_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_bin_pass_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_dsi_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_nrm_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
W_bin_end_histogram_features = np.zeros((len(connectivity_pass_files), 116), dtype=np.float32)
for idx, (connectivity_pass_file, connectivity_end_file, whole_tumor_mni_path) in enumerate(zip(connectivity_pass_files, connectivity_end_files, whole_tumor_mni_paths)):
assert(get_pat_name(connectivity_pass_file)==get_pat_name(connectivity_end_file))
assert(get_pat_name(connectivity_pass_file) in whole_tumor_mni_path)
pat_name = get_pat_name(connectivity_pass_file)
pat_names.append(pat_name)
# short
if int(survival_dataset[pat_name]['survival']) < 305:
gt[idx, 0] = 0
gt[idx, 1] = int(survival_dataset[pat_name]['survival'])
#short_period += 1
# long should be 454 or 456.25
elif int(survival_dataset[pat_name]['survival']) > 456:
gt[idx, 0] = 2
gt[idx, 1] = int(survival_dataset[pat_name]['survival'])
#long_period += 1
# mid
else:
gt[idx, 0] = 1
gt[idx, 1] = int(survival_dataset[pat_name]['survival'])
lesion_weights = get_lesion_weights(whole_tumor_mni_path)
connectivity_matrix_pass_obj = loadmat(connectivity_pass_file)
weighted_connectivity_matrix_pass_temp = connectivity_matrix_pass_obj['connectivity']
weighted_connectivity_matrix_pass = threshold_connectivity_matrix(weighted_connectivity_matrix_pass_temp, 0)
W_nrm_pass, W_bin_pass = weight_conversion(weighted_connectivity_matrix_pass)
connectivity_matrix_end_obj = loadmat(connectivity_end_file)
weighted_connectivity_matrix_end_temp = connectivity_matrix_end_obj['connectivity']
weighted_connectivity_matrix_end = threshold_connectivity_matrix(weighted_connectivity_matrix_end_temp, 0)
W_nrm_end, W_bin_end = weight_conversion(weighted_connectivity_matrix_end)
# weighted connectivity histogram
W_dsi_pass_histogram_features[idx, :] = np.multiply(np.sum(weighted_connectivity_matrix_pass, axis=0), lesion_weights)
W_nrm_pass_histogram_features[idx, :] = np.multiply(np.sum(W_nrm_pass, axis=0), lesion_weights)
W_bin_pass_histogram_features[idx, :] = np.multiply(np.sum(W_bin_pass, axis=0), lesion_weights)
W_dsi_end_histogram_features[idx, :] = np.multiply(np.sum(weighted_connectivity_matrix_end, axis=0), lesion_weights)
W_nrm_end_histogram_features[idx, :] = np.multiply(np.sum(W_nrm_end, axis=0), lesion_weights)
W_bin_end_histogram_features[idx, :] = np.multiply(np.sum(W_bin_end, axis=0), lesion_weights)
return pat_names, gt, W_dsi_pass_histogram_features, W_nrm_pass_histogram_features, W_bin_pass_histogram_features, W_dsi_end_histogram_features, W_nrm_end_histogram_features, W_bin_end_histogram_features
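# --- Hedged refactoring sketch (helper name is illustrative) ---
# The three loaders above repeat the same per-patient block. One way to
# factor it out, using only the helpers already defined in this module:
def _histogram_features_for_patient(connectivity_pass_file,
                                    connectivity_end_file,
                                    whole_tumor_mni_path):
    lesion_weights = get_lesion_weights(whole_tumor_mni_path)
    features = []
    for mat_path in (connectivity_pass_file, connectivity_end_file):
        w = threshold_connectivity_matrix(
            loadmat(mat_path)['connectivity'], 0)
        w_nrm, w_bin = weight_conversion(w)
        for matrix in (w, w_nrm, w_bin):
            features.append(
                np.multiply(np.sum(matrix, axis=0), lesion_weights))
    # Order matches the existing feature columns:
    # dsi_pass, nrm_pass, bin_pass, dsi_end, nrm_end, bin_end
    return features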
| 63.395918
| 250
| 0.767641
| 2,268
| 15,532
| 4.852293
| 0.074956
| 0.116129
| 0.038982
| 0.047978
| 0.856247
| 0.844889
| 0.820264
| 0.800363
| 0.798183
| 0.767015
| 0
| 0.017833
| 0.137136
| 15,532
| 245
| 251
| 63.395918
| 0.803313
| 0.03702
| 0
| 0.552326
| 0
| 0
| 0.040609
| 0.004229
| 0
| 0
| 0
| 0
| 0.052326
| 1
| 0.05814
| false
| 0.372093
| 0.040698
| 0
| 0.156977
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
34a267652c8a9561eda5bcc763f0887dbeff45dc
| 46
|
py
|
Python
|
Silo/Console/Commands/__init__.py
|
gohako/framework
|
bc7635cf556e03679b9b68537bb637edd5038a22
|
[
"MIT"
] | 1
|
2019-05-26T23:13:24.000Z
|
2019-05-26T23:13:24.000Z
|
Silo/Console/Commands/__init__.py
|
gohako/framework
|
bc7635cf556e03679b9b68537bb637edd5038a22
|
[
"MIT"
] | null | null | null |
Silo/Console/Commands/__init__.py
|
gohako/framework
|
bc7635cf556e03679b9b68537bb637edd5038a22
|
[
"MIT"
] | 1
|
2019-05-26T23:13:39.000Z
|
2019-05-26T23:13:39.000Z
|
from .RouteListCommand import RouteListCommand
| 46
| 46
| 0.913043
| 4
| 46
| 10.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 46
| 1
| 46
| 46
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9b625ca0ed87b6a1e07514e6c1e3afb9637c0ad4
| 19,692
|
py
|
Python
|
tests/test_cached_query.py
|
Watch-Later/olo
|
c8bbdfffb64a9766df51a462a119a879c9030a4a
|
[
"Apache-2.0"
] | 81
|
2018-03-08T11:07:33.000Z
|
2022-01-18T10:19:46.000Z
|
tests/test_cached_query.py
|
Watch-Later/olo
|
c8bbdfffb64a9766df51a462a119a879c9030a4a
|
[
"Apache-2.0"
] | 40
|
2018-07-04T09:22:34.000Z
|
2021-09-03T09:30:02.000Z
|
tests/test_cached_query.py
|
Watch-Later/olo
|
c8bbdfffb64a9766df51a462a119a879c9030a4a
|
[
"Apache-2.0"
] | 13
|
2018-03-19T10:34:24.000Z
|
2022-01-18T10:19:49.000Z
|
# coding: utf-8
from olo import funcs
from olo.funcs import COUNT, SUM, AVG, MAX, DISTINCT
from .base import TestCase, Foo, Bar, Dummy
from .fixture import is_pg
from .utils import (
patched_execute, no_pk
)
attrs = dict(
name='foo',
tags=['a', 'b', 'c'],
password='password',
payload={
'abc': ['1', 2, 3],
'def': [4, '5', 6]
}
)
class TestCachedQuery(TestCase):
def test_fallback(self):
bar = Bar.create(name='a', xixi='a', age=1)
with patched_execute as execute:
bar = Bar.cq.filter(age=MAX(Bar.cq('age'))).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq.filter(age=MAX(Bar.cq('age'))).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq.filter(Bar.age > 0).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq.filter(Bar.age > 0).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq('age').filter(Bar.age > 0).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq('age').filter(Bar.age > 0).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
def test_first(self):
with patched_execute as execute:
bar = Bar.cq.filter(xixi='a', age=1).first()
self.assertIsNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq.filter(xixi='a', age=1).first()
self.assertIsNone(bar)
self.assertFalse(execute.called)
bar = Bar.create(name='a', xixi='a', age=1)
with patched_execute as execute:
bar = Bar.cq.filter(xixi='a', age=1).first()
self.assertIsNotNone(bar)
self.assertTrue(execute.called)
with patched_execute as execute:
bar = Bar.cq.filter(xixi='a', age=1).first()
self.assertIsNotNone(bar)
self.assertFalse(execute.called)
def test_all(self):
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).all()
self.assertEqual(bars, [])
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).all()
self.assertEqual(bars, [])
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(10).all()
self.assertEqual(bars, [])
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(bars, [])
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.limit(10).all()
self.assertEqual(bars, [])
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.limit(11).all()
self.assertEqual(bars, [])
self.assertFalse(execute.called)
bar = Bar.create(name='a', xixi='a', age=1)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
self.assertEqual(execute.call_count, 2)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.limit(10).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
bar.update(name='a+')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
self.assertEqual(execute.call_count, 2)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
bar.update(name='a')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
self.assertEqual(execute.call_count, 2)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
bar.update(word='1')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
self.assertEqual(execute.call_count, 1)
self.assertEqual(bars[0].word, bar.word)
bar.update(word='2')
Bar.cache.get(bar.name)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
self.assertEqual(bars[0].word, bar.word)
bar.update(xixi='b')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 0)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='a', age=1).limit(11).all()
self.assertEqual(len(bars), 0)
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).limit(11).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
bar.update(word='a')
bar = Bar.create(name='b', xixi='b', age=1, word='b')
bar = Bar.create(name='c', xixi='b', age=1, word='c')
bar = Bar.create(name='d', xixi='b', age=1, word='d')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).limit(11).all()
self.assertEqual(len(bars), 4)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).limit(11).all()
self.assertEqual(len(bars), 4)
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).limit(2).all()
self.assertEqual(len(bars), 2)
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cache.gets_by(xixi='b', age=1, start=3,
limit=2)
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'-name'
).limit(3).all()
self.assertEqual(len(bars), 3)
self.assertEqual(['d', 'c', 'b'], list(map(lambda x: x.name, bars)))
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'-name'
).limit(3).all()
self.assertEqual(len(bars), 3)
self.assertEqual(['d', 'c', 'b'], list(map(lambda x: x.name, bars)))
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'name'
).limit(3).all()
self.assertEqual(len(bars), 3)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'name'
).limit(3).all()
self.assertEqual(len(bars), 3)
self.assertEqual(['a', 'b', 'c'], list(map(lambda x: x.name, bars)))
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'-age', 'word'
).offset(3).limit(2).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'-age', 'word'
).offset(3).limit(2).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
_bar = bars[0]
_bar.update(xixi='c')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'-age', 'word'
).offset(2).limit(2).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
_bar.update(xixi='b')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'word', 'age'
).offset(3).limit(2).all()
self.assertEqual(len(bars), 1)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'word', 'age'
).offset(3).limit(2).all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
Bar.create(name='e', xixi='b', age=1, word='e')
Bar.create(name='f', xixi='b', age=1, word='f')
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'word', 'age'
).offset(3).limit(2).all()
self.assertEqual(len(bars), 2)
self.assertTrue(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(xixi='b', age=1).order_by(
'word', 'age'
).offset(3).limit(2).all()
self.assertEqual(len(bars), 2)
self.assertFalse(execute.called)
with patched_execute as execute:
bars = Bar.cq.filter(name='e').all()
self.assertEqual(len(bars), 1)
self.assertFalse(execute.called)
Foo.create(name='1', age=1)
Foo.create(name='2', age=1)
Foo.create(name='3', age=2)
with no_pk(Foo):
Foo.cq.filter(age=1).limit(3).all()
foos = Foo.cq.filter(age=3).limit(3).all()
self.assertEqual(foos, [])
def test_count_by(self):
with patched_execute as execute:
c = Bar.cq.filter(xixi='a', age=1).count()
self.assertEqual(c, 0)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='a', age=1).count()
self.assertEqual(c, 0)
self.assertFalse(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 0)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 0)
self.assertFalse(execute.called)
with patched_execute as execute:
c = Bar.cq.filter().count()
self.assertEqual(c, 0)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter().count()
self.assertEqual(c, 0)
self.assertFalse(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(name='a').count()
self.assertEqual(c, 0)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(name='a').count()
self.assertEqual(c, 0)
self.assertFalse(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(word='a').count()
self.assertEqual(c, 0)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(word='a').count()
self.assertEqual(c, 0)
self.assertTrue(execute.called)
Bar.create(name='a', xixi='b', age=1)
with patched_execute as execute:
c = Bar.cq.filter(xixi='a', age=1).count()
self.assertEqual(c, 0)
self.assertFalse(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 1)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter().count()
self.assertEqual(c, 1)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(name='a').count()
self.assertEqual(c, 1)
self.assertTrue(execute.called)
Bar.create(name='b', xixi='a', age=1)
with patched_execute as execute:
c = Bar.cq.filter(xixi='a', age=1).count()
self.assertEqual(c, 1)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 1)
self.assertFalse(execute.called)
bar = Bar.create(name='c', xixi='b', age=1)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 2)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 2)
self.assertFalse(execute.called)
bar.update(xixi='c')
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 1)
self.assertTrue(execute.called)
with patched_execute as execute:
c = Bar.cq.filter(xixi='b', age=1).count()
self.assertEqual(c, 1)
self.assertFalse(execute.called)
def test_order_by(self):
Dummy.create(name='foo0', age=3)
Dummy.create(name='foo2', age=6)
Dummy.create(name='foo2', age=7)
Dummy.create(name='foo3', age=4)
Dummy.create(name='foo4', age=2)
rv = Dummy.cq('age').order_by('age').all()
self.assertEqual(rv, [2, 3, 4, 6, 7])
rv = Dummy.cq('age').order_by(Dummy.age).all()
self.assertEqual(rv, [2, 3, 4, 6, 7])
rv = Dummy.cq('age').order_by(Dummy.age.desc()).all()
self.assertEqual(rv, [7, 6, 4, 3, 2])
age = Dummy.age.alias('a')
rv = Dummy.cq(age).order_by(age).all()
self.assertEqual(rv, [2, 3, 4, 6, 7])
rv = Dummy.cq(age).order_by(age.desc()).all()
self.assertEqual(rv, [7, 6, 4, 3, 2])
rv = Dummy.cq(age).order_by(Dummy.id.asc(), Dummy.age.desc()).all()
self.assertEqual(rv, [3, 6, 7, 4, 2])
rv = Dummy.cq(age).order_by(Dummy.age.in_([2, 4]).desc(), Dummy.id.desc()).all() # noqa
self.assertEqual(rv, [2, 4, 7, 6, 3])
rv = Dummy.cq(age).order_by(Dummy.age.in_([2, 4]).desc()).order_by(Dummy.id.desc()).all() # noqa
self.assertEqual(rv, [2, 4, 7, 6, 3])
def test_group_by(self):
Dummy.create(name='foo0', age=1)
Dummy.create(name='foo2', age=2)
Dummy.create(name='foo2', age=2)
Dummy.create(name='foo3', age=3)
Dummy.create(name='foo4', age=3)
rv = Dummy.cq('age', funcs.COUNT(1)).group_by('age').order_by('age').all()
self.assertEqual(rv, [(1, 1), (2, 2), (3, 2)])
rv = Dummy.cq('name', 'age').group_by('name', 'age').order_by('age').all()
self.assertEqual(rv, [('foo0', 1), ('foo2', 2),
('foo3', 3), ('foo4', 3)])
rv = Dummy.cq('name', 'age').group_by('name').group_by('age').order_by('age').all()
self.assertEqual(rv, [('foo0', 1), ('foo2', 2),
('foo3', 3), ('foo4', 3)])
def test_having(self):
# FIXME(PG)
if is_pg:
return
Dummy.create(name='foo0', age=1)
Dummy.create(name='foo2', age=2)
Dummy.create(name='foo2', age=2)
Dummy.create(name='foo3', age=3)
Dummy.create(name='foo4', age=3)
Dummy.create(name='foo5', age=3)
c = COUNT(1).alias('c')
rv = Dummy.cq('age', c).group_by(
'age'
).having(c > 2).all()
self.assertEqual(rv, [(3, 3)])
def test_join(self):
Dummy.create(name='dummy0', age=3)
Dummy.create(name='dummy1', age=6)
Dummy.create(name='dummy2', age=9)
Foo.create(name='foo0', age=1)
Foo.create(name='foo1', age=2)
Foo.create(name='foo2', age=3)
Foo.create(name='foo3', age=3)
Foo.create(name='foo4', age=6)
Foo.create(name='foo5', age=6)
Foo.create(name='foo6', age=6)
q = Foo.cq.join(Dummy).on(Foo.age == Dummy.age)
res = q.all()
self.assertEqual(len(res), 5)
self.assertEqual({x.name for x in res}, {
'foo2', 'foo3', 'foo4', 'foo5', 'foo6'
})
q = Dummy.cq.join(Foo).on(Foo.age == Dummy.age)
res = q.all()
self.assertEqual(len(res), 5)
self.assertEqual({x.name for x in res}, {
'dummy0', 'dummy0', 'dummy1', 'dummy1', 'dummy1'
})
q = Dummy.cq.join(Foo).on(Foo.age == Dummy.age,
Dummy.age == 6)
res = q.all()
self.assertEqual(len(res), 3)
self.assertEqual({x.name for x in res}, {
'dummy1', 'dummy1', 'dummy1'
})
q = Dummy.cq(DISTINCT(Dummy.id)).join(Foo).on(
Foo.age == Dummy.age
).order_by(
Foo.id.desc(), Dummy.age.desc()
)
res = q.all()
self.assertEqual(res, [2, 1])
q = Dummy.cq(DISTINCT(Dummy.id)).left_join(Foo).on(
Foo.age == Dummy.age
).order_by(
Foo.id.desc(), Dummy.age.desc()
)
res = q.all()
if is_pg:
self.assertEqual(res, [3, 2, 1])
else:
self.assertEqual(res, [2, 1, 3])
q = Dummy.cq(DISTINCT(Dummy.id)).right_join(Foo).on(
Foo.age == Dummy.age
).order_by(
Foo.id.desc(), Dummy.age.desc()
)
res = q.all()
self.assertEqual(res, [2, 1, None])
def test_sum(self):
Dummy.create(name='foo0', age=1)
Dummy.create(name='foo2', age=2)
Dummy.create(name='foo3', age=3)
rv = Dummy.cq(SUM(Dummy.age)).first()
self.assertEqual(rv, 6)
def test_avg(self):
Dummy.create(name='foo0', age=1)
Dummy.create(name='foo2', age=2)
Dummy.create(name='foo3', age=3)
rv = Dummy.cq(AVG(Dummy.age)).first()
self.assertEqual(rv, 2)
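# --- Hedged sketch of the `patched_execute` fixture (defined in tests/utils,
# not shown here). The pattern: wrap the database execute entry point in a
# Mock whose side_effect is the real method, so queries still run but tests
# can assert `execute.called` to distinguish cache hits from misses. The
# patch target is an assumption, not olo's real module layout.
from contextlib import contextmanager
from unittest import mock

@contextmanager
def patched_execute_sketch(db_class):
    real_execute = db_class.execute
    # autospec=True makes the mock pass `self` through to side_effect,
    # so the wrapped call behaves exactly like the unpatched method.
    with mock.patch.object(db_class, 'execute', autospec=True,
                           side_effect=real_execute) as execute:
        yield execute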
| 40.105906
| 105
| 0.54286
| 2,561
| 19,692
| 4.124561
| 0.046466
| 0.126385
| 0.110764
| 0.123071
| 0.918678
| 0.876834
| 0.854208
| 0.842942
| 0.832718
| 0.816151
| 0
| 0.026155
| 0.301036
| 19,692
| 490
| 106
| 40.187755
| 0.741282
| 0.001676
| 0
| 0.736383
| 0
| 0
| 0.025593
| 0
| 0
| 0
| 0
| 0.002041
| 0.357298
| 1
| 0.021786
| false
| 0.002179
| 0.010893
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32fb221a795253ad969f0c1a9f321b8671a01cdf
| 87
|
py
|
Python
|
models/common/norm2d.py
|
lehduong/Knowledge-Distilation-CNN
|
dfb7b881de9740260a59e83a7a4f5dbba8787c23
|
[
"MIT"
] | 9
|
2020-01-21T04:27:18.000Z
|
2020-04-12T03:35:54.000Z
|
models/common/norm2d.py
|
lehduong/Knowledge-Distilation-CNN
|
dfb7b881de9740260a59e83a7a4f5dbba8787c23
|
[
"MIT"
] | 2
|
2020-03-05T10:42:10.000Z
|
2020-03-06T12:41:27.000Z
|
models/common/norm2d.py
|
lehduong/Knowledge-Distilation-CNN
|
dfb7b881de9740260a59e83a7a4f5dbba8787c23
|
[
"MIT"
] | 2
|
2020-05-20T07:42:03.000Z
|
2021-10-08T02:48:08.000Z
|
from torch import nn
def Norm2d(in_channels):
return nn.BatchNorm2d(in_channels)
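# Usage sketch: Norm2d is an indirection point so the normalization layer
# can be swapped project-wide (e.g. for nn.GroupNorm) by editing this one
# factory. For example:
#     block = nn.Sequential(nn.Conv2d(3, 64, 3, padding=1), Norm2d(64))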
| 14.5
| 38
| 0.770115
| 13
| 87
| 5
| 0.769231
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.16092
| 87
| 5
| 39
| 17.4
| 0.863014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
fd16a296939fad0a6998d07715351a45b6846f92
| 5,651
|
py
|
Python
|
SEPESIAL-50SUBS-main/main.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
SEPESIAL-50SUBS-main/main.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
SEPESIAL-50SUBS-main/main.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
#Encypt BY MR.1557
#Makasih 50 subscribe
#mau code scnya chat wa
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b16decode("789CD55949731BC7157E83950001EE2248AD4D6A21684BDC1793964593144552D6C21A4A9105518500981109121B318392E00055AE920FF1D9A7B872D241C75C744A552AC75CF207744C7C7755FE402A79EFF5CC0884408934255BC290DDAF97EFF5EBF7BA5F6F29B07E41FCFF12FF8DA80B40C33F05320031875620A6D8B40B622E9B7643CC6DD31E88796CDA0B312FD32EC8F820EB83980F140DAB620D2CF081E607AD09B4006841D09A410BC16610368210F343AC0962018821DD0CB110C4C2106B81582BC4DA400BC37728563B682D4C7480D6CA4427686D4C74817E0CB476D87141F19C4BEBE0CC6E387AD3BA07B623A075C25305143D00CB2B564E97CC0981760C227A0B44B46E8C431847AC740FC64D18F7621CC4F83844965756741FD4824ED4814E5AA0531041FEB11ED07B403BCDFDFA97B2A2773BE000563A63818505EE6390A27B61BB17B47E29210A0AA0EC9E73D5A29B1A347DB6115A76F61C27726CE4F5E8791C31E9FFE1EF565441D2F463B0A99B858461185EA44F3C9819C95AD4F8A8438D39D4B84DCD4C3879930E35E550D33635815C3C4C8D6453D4260E5770E3FF220D5E0A9E5C860A8B17B9FA7004AA0A9800DB0A6CBBA0A2B0F056DACDDA30B15B5E1AA554B2DB0CF770CCAE4789E12D83D80737FFD0F597E57F7FF3DD5CD4473DA412A36C9894304C2D5F324D12EC71316DEA4C3DCA948C2D934434D35999656474BD40F30A64BEBE6398C43B1DA599C78141A5DA2697CF67B47407E5355189D2AEB4296125654F5497DDD76E6205380C4DEE9C263BC7E2BB587C9ACE663E9DD14D71E991789CD674532F66C5A56B22992F6A7A510C08E7B7944B15CB0553AC95CDAD7CCEC91E604DB0B00593B8E6AD9EA306F4EC2BF15FEB4326BF99EFA53C0FF7C1FB4A7EC7567137CB8FCCC95696612C4310E576288F43791DCA07284F046772044D12C1E9CC1C564CD6314EED0817E364C3640427B9AC16721A7A53ADF0C198B558F9AD0762DAB69769BB95DD5157ADD3827761ECC5F89895EEDED3888D8ED4A17B0E85EEAD431F3F14FA441DFAE4A1D0A7EAD0A70F853E5387168742F7D5A1FB0F853E5B873E7728F4F93AF485B7A06BA00375D0E8FE505C5DD8110CB22398C55088B8FCF1BC8EDB094AC61B9639B18457A8406CE027C4307E5C5CC16445548639B1A74C6C3037AC62C32BF1F820E1E358835382786E54A888DBAD2D1B14767E4DEB582A5BE014150F89DF3BF0DA32AC5907B7EA30C12D30830D27515356535318330C8FDB994454A45E88DE88573664C229ABADC90EF192309A315A288B9BEAD0E8E4E4B46077FAC02047F8D3B7CF38F590BDF9FAD2DAD2FAEAFC0D313922D6EF2EAC2FAAAB0B4BC23885454BB716C5ACE59D2FCA68CC8AC785412E59DCD47325D9E4AC303A29675D37F305B19349EF88C53BEA0D1113DCA8100FD8978F8C1AE4B7859ED3A4F37F846B05AE13E69648260C7D6A42ECF9B1A051B90AE4383128D98CBD91CDE8541D1BC68CBF11333ED60833B12F269B281A5B894C03CCE4BE986F32E9E485BA7E4ACCD4413017EC362566FAED98D1A93ACC676FC78C8FD561662CCC93B429DEFE63CCE888C4DC37C562514F98F9E21B31BCCD5103F6B2BFC361560DDA19250EB7F659FB69BB934CE4F4220D5A638417FF1625687DC71545B9809FA20C7178E5B56F466E14BA409E4878A3B01A066B0F67EF715C6FDACECBBD7C1568EB577541D50D550F54BD50F541D50FD526A80668EFC09B3F3754BCB4FF8F54FC72DDAF78E5C2CE695CD871CBC84B77C5C5FEB81A844AD03A5D18CF98EEE01DF9CF4AD943CEBA866BA7C5A5CBE27ACC4A775B5C23AFB836032AAED24C9DDBF6118F6A88F6D2DB4D5009512676069776DCD5F29A8DDDC1BADBD8DFE380ABF05317758D18A02A4E72320C95306CA3AC72E7448966295AC811CD2517E13DA29DAEE97093CC43A6918A75C2E032612D4CC8B3057029E5442B73BFB7FB331AB5B61145AEAE7BB47A767F0688FF1A8F16A4D673AC62AF8BE8F3ACE235F7FB50715B8D8A2F7C0C2A5E731F4DC56B6E5BC503ACE22D37D15156F10BCFFB50717B8D8A073F0615BFF01C4DC52F3CB68A3F6115FFD343F4A7ACE2095FB9FF1DABB883F4AB5D04ED123C759396CD4ED8EE22451304CFBFC7AC936F15251D820A9DF0873F68034CF88E6680099F6D801136C00D1FD1A36C80E7FE72E73B364037DD8C5863DC673B6E1F27C3A08D911623DA386ABCF5358DB731BF365BE33855DA59E3AD52E3ED47D078DB6134FEDC7F348D3FF7DB1A9F608DFFDD4FF4246B7C2E509E7DBF43BEC31EF21DACF44EDB249DFB98640ACC1EB2CA536A6BFA8336CC5CE06886990BD886F98C0D732F40F40C1BE665F043304C5B8D61663F1EC3BC0C1ECD302F83B6613E67C3FC2748F46536CC6EE843304C7B8D61BEF8780CB31B3A9A617643B661AEB061BE0F113DC786F95BA88EF1
97758CE72DC6BDB07D9CD9E5BE67560B0C6F0E63A5E515EB30B30835325CADB99AD696603FE97238431A77CDD6DFB53D5DBB5A0BE62BA165BA12E2735A2AA3E3D9F2FD5FD333F1E0A76F9FD9571E7C587C28F82639B595CF1BBABC97B822EF1846D5363A82B6D827CE4FD5563A06F225C7353A23DB971CB7303ABB944BD1E179E1BE7DBF123C7B33B19330D25B748D62949246AA984EEAC174B6902F9AD6CD46507FA2A7A2921E4A4E4D687A2AAFE9D1019663607090E3A142D9A0078DB89E4B21CDB23C967DF8F30F5C226EDFBDB376F70E9748A92E8BB47363FEB54172B3C4B93C1EEFF3A59CD6C725632A1D70DF99FCA353B6FCCC7D5C3DF62EB98F8FEDE13EA1F2C343CD5D3FD536BEFA05AD59571BB2392B3194C9273423CACA1C1CE41627D508F567E797B770912E542ED6768E3286A85FD94251378CD787423F8FDAFE414B8629B5876428BF4B19F6F6F80012792D89A448D36AEF6F249233DEFA5990CFD4E3BF9120CED09482CCA827ECB9D767D0F5D142A25C163BF95C5F9F7A92441C8583BD88DD2EE83971FF0ED303691A8AC617183CD1362FE5A968CB340BC6ECF070395F324B497D08051B4E6D2572393D337C7771BABCB93D9FBCBA5D4ADF9A9E5ADBBD9DB99E9D189937DAF99236612644A6B425922524FAFAA224B14A17C46A88823E92902EDF8A89C7F174AE5032D53058EF86D43413453DA1B1F3947AE0FA384ED04DA12EF80DB6A817328994AEB22F92BE1EBD2CDFEAA994547D36D36C229D930C46A72C069C1A1FAB61478A27ADF919C09661AE5A295B302C910A4526C8546693852163C91753B3A82A7693747919EDB22D95504F93408282537666523D63932979F5484648CA6BE7BCBD70A4533A33A48B536E9308D61255D338942FAD3A878F38DC645E464246C9D7EE30D5CF69BE533A43D595B0CBADD47F032EBF32E1EA410A63A54369555A380C2B41E53A86CD587201E396F75087CA4307E275506EF475FDAA35317685142FA6E417745109DB301EA721198FFF3A8FFB3FFE897E7F958FFB3C44556A93278EEAD9335DF8555DA529A03641A3BB6F8A1299FFC2AB977B17BDDCABFD94337740D7237F37F29BE99CF84A2FD7E40DBCDA117D227744241A57E2CC59839649D1F811491D07EB319E0072BB853B28DA283404C86DCE1F7FB4B66AECBC6EDC5E5EBD8555161797D6D7C5D0D0509A2718F92721CBAECDAFDE58BACADB9E827CA312644CDC3C45698950A72958A060CE5177B3A364D2997A8D82BD1E41A52D2B3B04758502F207EA2A05D7C17AAA6027C9CE9367AF35907289AC1E8FABBF73BCEB9A634B80461624D8E56C5E2B65F42BEC1DEE60B0A2B4B91B780077C01FF0047A03FF0829017F230F11E6F13D8A87911125C40F1F34CE65FC7F0AD2BB2E"))))
| 941.833333
| 5,558
| 0.99416
| 24
| 5,651
| 234.083333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.613269
| 0.002477
| 5,651
| 5
| 5,559
| 1,130.2
| 0.38336
| 0.010441
| 0
| 0
| 0
| 0
| 0.984076
| 0.984076
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
fd1d3d5c296ec44f755343fdadeeef20ad40c683
| 38,810
|
py
|
Python
|
py/tests/test_booking.py
|
samuelcolvin/nosht
|
9e4d9bea8ff6bfae86cae948cc3028ccc68d0188
|
[
"MIT"
] | 26
|
2018-07-28T23:11:27.000Z
|
2022-02-09T13:40:33.000Z
|
py/tests/test_booking.py
|
samuelcolvin/nosht
|
9e4d9bea8ff6bfae86cae948cc3028ccc68d0188
|
[
"MIT"
] | 336
|
2018-05-25T17:57:00.000Z
|
2022-03-11T23:24:36.000Z
|
py/tests/test_booking.py
|
samuelcolvin/nosht
|
9e4d9bea8ff6bfae86cae948cc3028ccc68d0188
|
[
"MIT"
] | 4
|
2018-07-18T08:37:19.000Z
|
2022-01-31T14:42:48.000Z
|
import pytest
from pytest_toolbox.comparison import AnyInt, RegexStr
from shared.actions import ActionTypes
from web.stripe import Reservation
from web.utils import decrypt_json, encrypt_json
from .conftest import Factory
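# End-to-end tests for the booking flow: booking-info endpoints (public,
# private, signed), ticket reservation, free/offline/Stripe booking,
# cancellation and refunds, ticket expiry, and the waiting list.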
async def test_booking_info(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(ticket_limit=20, status='published')
await login()
cat_slug, event_slug = await db_conn.fetchrow(
'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
factory.event_id,
)
r = await cli.get(url('event-booking-info-public', category=cat_slug, event=event_slug))
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'tickets_remaining': None,
'existing_tickets': 0,
'ticket_types': [{'id': AnyInt(), 'name': 'Standard', 'price': None}],
}
async def test_booking_info_limited(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(ticket_limit=8, status='published')
await login()
cat_slug, event_slug = await db_conn.fetchrow(
'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
factory.event_id,
)
r = await cli.get(url('event-booking-info-public', category=cat_slug, event=event_slug))
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'tickets_remaining': 8,
'existing_tickets': 0,
'ticket_types': [{'id': AnyInt(), 'name': 'Standard', 'price': None}],
}
async def test_booking_info_inactive(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published')
await login()
ticket_type2_id = await db_conn.fetchval(
"INSERT INTO ticket_types (event, name, price) VALUES ($1, 'Different', 42) RETURNING id", factory.event_id
)
cat_slug, event_slug = await db_conn.fetchrow(
'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
factory.event_id,
)
r = await cli.get(url('event-booking-info-public', category=cat_slug, event=event_slug))
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'tickets_remaining': None,
'existing_tickets': 0,
'ticket_types': [
{'id': factory.ticket_type_id, 'name': 'Standard', 'price': None},
{'id': ticket_type2_id, 'name': 'Different', 'price': 42},
],
}
await db_conn.execute('update ticket_types set active=false where id=$1', ticket_type2_id)
r = await cli.get(url('event-booking-info-public', category=cat_slug, event=event_slug))
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'tickets_remaining': None,
'existing_tickets': 0,
'ticket_types': [{'id': factory.ticket_type_id, 'name': 'Standard', 'price': None}],
}
async def test_booking_info_sig(cli, url, factory: Factory, login, settings, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(ticket_limit=20, status='published', public=False)
await login()
event_link = await db_conn.fetchval(
"""
SELECT event_link(cat.slug, e.slug, e.public, $2)
FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1
""",
factory.event_id,
settings.auth_key,
)
_, cat_slug, event_slug, sig = event_link.strip('/').split('/')
r = await cli.get(url('event-booking-info-private', category=cat_slug, event=event_slug, sig=sig))
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'tickets_remaining': None,
'existing_tickets': 0,
'ticket_types': [{'id': AnyInt(), 'name': 'Standard', 'price': None}],
}
async def test_booking_info_private(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(ticket_limit=20, status='published', public=False)
await login()
cat_slug, event_slug = await db_conn.fetchrow(
'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
factory.event_id,
)
r = await cli.get(url('event-booking-info-public', category=cat_slug, event=event_slug))
assert r.status == 404, await r.text()
async def test_booking_info_sig_wrong(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(ticket_limit=20, status='published', public=False)
await login()
cat_slug, event_slug = await db_conn.fetchrow(
'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
factory.event_id,
)
r = await cli.get(url('event-booking-info-private', category=cat_slug, event=event_slug, sig='xxx'))
assert r.status == 404, await r.text()
async def test_reserve_tickets(cli, url, db_conn, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user(first_name=None, last_name=None, email='ticket.buyer@example.org')
await factory.create_event(status='published', price=10)
await login(email='ticket.buyer@example.org')
data = {
'tickets': [
{
't': True,
'first_name': 'Ticket',
'last_name': 'Buyer',
'email': 'ticket.buyer@example.org',
'allow_marketing': True,
},
{
't': True,
'first_name': 'Other',
'last_name': 'Person',
'email': 'other.person@example.org',
'extra_info': 'I love to party',
'cover_costs': None,
'allow_marketing': None,
},
],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'booking_token': RegexStr(r'.+'),
'ticket_count': 2,
'extra_donated': None,
'item_price': 10.0,
'total_price': 20.0,
'timeout': AnyInt(),
'client_secret': RegexStr(r'payment_intent_secret_\d+'),
'action_id': AnyInt(),
}
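# The booking_token is opaque to the client; decrypt it server-side to verify its contents.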
booking_token = decrypt_json(cli.app['main_app'], data['booking_token'].encode())
reserve_action_id = await db_conn.fetchval("SELECT id FROM actions WHERE type='reserve-tickets'")
assert booking_token == {
'user_id': factory.user_id,
'action_id': reserve_action_id,
'event_id': factory.event_id,
'price_cent': 20_00,
'ticket_count': 2,
'event_name': 'The Event Name',
}
users = [
dict(r)
for r in await db_conn.fetch(
'SELECT first_name, last_name, email, role, allow_marketing FROM users ORDER BY id'
)
]
assert users == [
{
'first_name': None,
'last_name': None,
'email': 'ticket.buyer@example.org',
'role': 'admin',
'allow_marketing': True,
},
{
'first_name': None,
'last_name': None,
'email': 'other.person@example.org',
'role': 'guest',
'allow_marketing': False,
},
]
tickets = [
dict(r)
for r in await db_conn.fetch(
"""
SELECT event, user_id, first_name, last_name, reserve_action, booked_action, status, extra_info
FROM tickets
ORDER BY user_id
"""
)
]
assert tickets == [
{
'event': factory.event_id,
'user_id': factory.user_id,
'first_name': 'Ticket',
'last_name': 'Buyer',
'reserve_action': reserve_action_id,
'booked_action': None,
'status': 'reserved',
'extra_info': None,
},
{
'event': factory.event_id,
'user_id': await db_conn.fetchval('SELECT id FROM users WHERE email=$1', 'other.person@example.org'),
'first_name': 'Other',
'last_name': 'Person',
'reserve_action': reserve_action_id,
'booked_action': None,
'status': 'reserved',
'extra_info': 'I love to party',
},
]
async def test_reserve_tickets_no_name(cli, url, db_conn, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user(first_name='T', last_name='B', email='ticket.buyer@example.org')
await factory.create_event(status='published', price=10)
await login(email='ticket.buyer@example.org')
data = {
'tickets': [
{'t': True, 'first_name': 'TT', 'last_name': 'BB', 'email': 'ticket.buyer@example.org'},
{'t': True},
],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'booking_token': RegexStr(r'.+'),
'ticket_count': 2,
'extra_donated': None,
'item_price': 10.0,
'total_price': 20.0,
'timeout': AnyInt(),
'client_secret': RegexStr(r'payment_intent_secret_\d+'),
'action_id': AnyInt(),
}
users = [dict(r) for r in await db_conn.fetch('SELECT first_name, last_name, email, role FROM users ORDER BY id')]
assert users == [
{'first_name': 'T', 'last_name': 'B', 'email': 'ticket.buyer@example.org', 'role': 'admin'},
]
tickets = [
dict(r)
for r in await db_conn.fetch(
"""
SELECT event, user_id, first_name, last_name, reserve_action, booked_action, status, extra_info
FROM tickets
ORDER BY user_id
"""
)
]
reserve_action_id = await db_conn.fetchval("SELECT id FROM actions WHERE type='reserve-tickets'")
assert tickets == [
{
'event': factory.event_id,
'user_id': factory.user_id,
'first_name': 'TT',
'last_name': 'BB',
'reserve_action': reserve_action_id,
'booked_action': None,
'status': 'reserved',
'extra_info': None,
},
{
'event': factory.event_id,
'user_id': None,
'first_name': None,
'last_name': None,
'reserve_action': reserve_action_id,
'booked_action': None,
'status': 'reserved',
'extra_info': None,
},
]
async def test_reserve_tickets_cover_costs(cli, url, factory: Factory, login):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=12.5)
await factory.create_user(first_name=None, last_name=None, email='ticket.buyer@example.org')
await factory.create_event(status='published', price=10)
await login(email='ticket.buyer@example.org')
data = {
'tickets': [
{
't': True,
'first_name': 'Ticket',
'last_name': 'Buyer',
'email': 'ticket.buyer@example.org',
'cover_costs': True,
},
{'t': True},
],
'ticket_type': factory.ticket_type_id,
}
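# cover_costs_percentage=12.5 appears to apply to the whole order: 12.5% of the
# 20.00 ticket total is 2.50 extra donated, giving the 22.50 total asserted below.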
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'booking_token': RegexStr(r'.+'),
'ticket_count': 2,
'extra_donated': 2.5,
'item_price': 10.0,
'total_price': 22.50,
'timeout': AnyInt(),
'client_secret': RegexStr(r'payment_intent_secret_\d+'),
'action_id': AnyInt(),
}
assert decrypt_json(cli.app['main_app'], data['booking_token'].encode()) == {
'user_id': factory.user_id,
'action_id': AnyInt(),
'event_id': factory.event_id,
'price_cent': 22_50,
'ticket_count': 2,
'event_name': 'The Event Name',
}
async def test_reserve_tickets_free(cli, url, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published')
await login()
data = {
'tickets': [{'t': True, 'first_name': 'Ticket', 'last_name': 'Buyer', 'email': 'ticket.buyer@example.org'}],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 200, await r.text()
data = await r.json()
assert data == {
'booking_token': RegexStr(r'.+'),
'ticket_count': 1,
'extra_donated': None,
'item_price': None,
'total_price': None,
'timeout': AnyInt(),
'client_secret': None,
'action_id': AnyInt(),
}
assert decrypt_json(cli.app['main_app'], data['booking_token'].encode()) == {
'user_id': factory.user_id,
'action_id': AnyInt(),
'event_id': factory.event_id,
'price_cent': None,
'ticket_count': 1,
'event_name': 'The Event Name',
}
async def test_reserve_tickets_wrong_type(cli, url, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published')
await login()
data = {
'tickets': [{'t': True, 'first_name': 'Ticket', 'last_name': 'Buyer', 'email': 'ticket.buyer@example.org'}],
'ticket_type': 999,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {'message': 'Ticket type not found'}
async def test_reserve_tickets_externally_ticketed(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published')
await login()
await db_conn.execute('update events set external_ticket_url=$1', 'https://www.example.com/thing')
data = {
'tickets': [{'t': True}],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {'message': 'Cannot reserve ticket for an externally ticketed event'}
async def test_reserve_0_tickets(cli, url, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user(first_name='Ticket', last_name=None, email='ticket.buyer@example.org')
await factory.create_event(status='published', price=10)
await login(email='ticket.buyer@example.org')
data = {'tickets': []}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 400, await r.text()
async def test_reserve_tickets_none_left(cli, url, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user(first_name='Ticket', last_name=None, email='ticket.buyer@example.org')
await factory.create_event(status='published', price=10, ticket_limit=1)
await login(email='ticket.buyer@example.org')
data = {
'tickets': [{'t': True, 'email': 'foo1@example.org'}, {'t': True, 'email': 'foo2@example.org'}],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 470, await r.text()
data = await r.json()
assert data == {
'message': 'only 1 tickets remaining',
'tickets_remaining': 1,
}
async def test_reserve_tickets_none_left_no_precheck(cli, url, factory: Factory, login, settings):
settings.ticket_reservation_precheck = False
await factory.create_company()
await factory.create_cat()
await factory.create_user(first_name='Ticket', last_name=None, email='ticket.buyer@example.org')
await factory.create_event(status='published', price=10, ticket_limit=1)
await login(email='ticket.buyer@example.org')
data = {
'tickets': [{'t': True, 'email': 'foo1@example.org'}, {'t': True, 'email': 'foo2@example.org'}],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {
'message': 'insufficient tickets remaining',
}
async def test_reserve_tickets_too_many(cli, url, factory: Factory, login):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published', price=10)
await login()
data = {
'tickets': [{'t': True, 'email': f'foo{i}@example.org'} for i in range(30)],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {'message': 'Too many tickets reserved'}
async def test_cancel_reservation(cli, url, db_conn, factory: Factory):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=12.5)
res = await factory.create_reservation()
assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM tickets')
assert 1 == await db_conn.fetchval('SELECT tickets_taken FROM events')
booking_token = encrypt_json(cli.app['main_app'], res.dict())
r = await cli.json_post(url('event-cancel-reservation'), data={'booking_token': booking_token})
assert r.status == 200, await r.text()
assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM tickets')
assert 0 == await db_conn.fetchval('SELECT tickets_taken FROM events')
async def test_cancel_reservation_booked(cli, url, db_conn, factory: Factory):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=12.5)
res = await factory.create_reservation()
await db_conn.execute("UPDATE tickets SET status='booked'")
assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM tickets')
assert 1 == await db_conn.fetchval('SELECT tickets_taken FROM events')
booking_token = encrypt_json(cli.app['main_app'], res.dict())
r = await cli.json_post(url('event-cancel-reservation'), data={'booking_token': booking_token})
assert r.status == 400, await r.text()
assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM tickets')
assert 1 == await db_conn.fetchval('SELECT tickets_taken FROM events')
async def test_book_free(cli, url, dummy_server, factory: Factory, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=None)
res: Reservation = await factory.create_reservation()
app = cli.app['main_app']
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='book-free-tickets')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 200, await r.text()
assert dummy_server.app['log'] == [
(
'email_send_endpoint',
'Subject: "The Event Name Ticket Confirmation", To: "Frank Spencer <frank@example.org>"',
),
]
assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='book-free-tickets'")
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets-offline'")
async def test_book_free_with_price(cli, url, factory: Factory):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=10)
res: Reservation = await factory.create_reservation()
app = cli.app['main_app']
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='book-free-tickets')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {
'message': 'booking not free',
}
async def test_buy_offline(cli, url, dummy_server, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=10)
await login()
res: Reservation = await factory.create_reservation()
app = cli.app['main_app']
assert 10 == await db_conn.fetchval('SELECT price FROM tickets')
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='buy-tickets-offline')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 200, await r.text()
assert dummy_server.app['log'] == [
(
'email_send_endpoint',
'Subject: "The Event Name Ticket Confirmation", To: "Frank Spencer <frank@example.org>"',
),
]
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='book-free-tickets'")
assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets-offline'")
assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM tickets')
assert None is await db_conn.fetchval('SELECT price FROM tickets')
async def test_buy_offline_other_admin(cli, url, dummy_server, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=10)
u2 = await factory.create_user(email='other@example.org')
await login('other@example.org')
res: Reservation = await factory.create_reservation(u2)
app = cli.app['main_app']
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='buy-tickets-offline')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 200, await r.text()
assert dummy_server.app['log'] == [
(
'email_send_endpoint',
'Subject: "The Event Name Ticket Confirmation", To: "Frank Spencer <other@example.org>"',
),
]
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='book-free-tickets'")
assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets-offline'")
async def test_buy_offline_other_not_admin(cli, url, dummy_server, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=10)
u2 = await factory.create_user(email='other@example.org', role='host')
await login('other@example.org')
res: Reservation = await factory.create_reservation(u2)
app = cli.app['main_app']
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='buy-tickets-offline')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 400, await r.text()
assert {'message': 'to buy tickets offline you must be the host or an admin'} == await r.json()
assert dummy_server.app['log'] == []
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='book-free-tickets'")
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets-offline'")
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets'")
async def test_buy_offline_host(cli, url, factory: Factory, login, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user(role='host')
await factory.create_event(price=10)
await login()
res: Reservation = await factory.create_reservation()
app = cli.app['main_app']
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='buy-tickets-offline')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 200, await r.text()
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='book-free-tickets'")
assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets-offline'")
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets'")
async def test_free_repeat(factory: Factory, cli, url, login, db_conn):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(status='published')
await factory.create_user(email='ticket.buyer@example.org')
await login(email='ticket.buyer@example.org')
data = {
'tickets': [{'t': True, 'email': 'ticket.buyer@example.org'}],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 200, await r.text()
data = await r.json()
data = dict(booking_token=data['booking_token'], book_action='book-free-tickets')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 200, await r.text()
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {'message': 'invalid reservation'}
assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='book-free-tickets'")
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets-offline'")
assert 0 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='buy-tickets'")
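# Helper fixture: reserves one ticket (opting in to cover_costs) for a fresh
# user, then fires the stubbed Stripe webhook to complete the purchase.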
@pytest.fixture(name='buy_tickets')
def _fix_buy_tickets(cli, url, login):
async def run(factory: Factory):
await factory.create_user(email='ticket.buyer@example.org')
await login(email='ticket.buyer@example.org')
data = {
'tickets': [{'t': True, 'email': 'ticket.buyer@example.org', 'cover_costs': True}],
'ticket_type': factory.ticket_type_id,
}
r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
assert r.status == 200, await r.text()
action_id = (await r.json())['action_id']
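# Firing the (stubbed) Stripe webhook for this action books the reserved ticket.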
await factory.fire_stripe_webhook(action_id)
return run
async def test_cancel_ticket(factory: Factory, cli, url, buy_tickets, db_conn, dummy_server):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(status='published', price=100, ticket_limit=10)
tickets_remaining = await db_conn.fetchval('SELECT check_tickets_remaining($1, $2)', factory.event_id, 600)
assert tickets_remaining == 10
await buy_tickets(factory)
tickets_remaining = await db_conn.fetchval('SELECT check_tickets_remaining($1, $2)', factory.event_id, 600)
assert tickets_remaining == 9
assert 0 == await db_conn.fetchval('select count(*) from actions where type=$1', ActionTypes.cancel_booked_tickets)
assert 1 == await db_conn.fetchval('select tickets_taken from events where id=$1', factory.event_id)
ticket_id, status = await db_conn.fetchrow('select id, status from tickets')
assert status == 'booked'
r = await cli.json_post(url('event-tickets-cancel', id=factory.event_id, tid=ticket_id), data='{}')
assert r.status == 200, await r.text()
assert 0 == await db_conn.fetchval('select tickets_taken from events where id=$1', factory.event_id)
status = await db_conn.fetchval('select status from tickets where id=$1', ticket_id)
assert status == 'cancelled'
assert 1 == await db_conn.fetchval('select count(*) from actions where type=$1', ActionTypes.cancel_booked_tickets)
assert 'POST stripe_root_url/refunds' not in dummy_server.app['log']
async def test_cancel_ticket_refund(factory: Factory, cli, url, buy_tickets, db_conn, dummy_server):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(status='published', price=100)
await buy_tickets(factory)
ticket_id, status = await db_conn.fetchrow('select id, status from tickets')
assert status == 'booked'
data = {'refund_amount': 99}
r = await cli.json_post(url('event-tickets-cancel', id=factory.event_id, tid=ticket_id), data=data)
assert r.status == 200, await r.text()
assert 'POST stripe_root_url/refunds' in dummy_server.app['log']
async def test_cancel_ticket_wrong_ticket(factory: Factory, cli, url, buy_tickets, db_conn):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(status='published', price=100)
await buy_tickets(factory)
ticket_id = await db_conn.fetchval('select id from tickets')
event2_id = await factory.create_event(status='published', name='Another Event')
r = await cli.json_post(url('event-tickets-cancel', id=event2_id, tid=ticket_id), data='{}')
assert r.status == 404, await r.text()
data = await r.json()
assert data == {'message': 'Ticket not found'}
async def test_cancel_ticket_refund_free(factory: Factory, cli, url, buy_tickets, db_conn, dummy_server):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(status='published', price=100)
await buy_tickets(factory)
v = await db_conn.execute(
'update actions set type=$1 where type=$2', ActionTypes.book_free_tickets, ActionTypes.buy_tickets
)
assert v == 'UPDATE 1'
ticket_id, status = await db_conn.fetchrow('select id, status from tickets')
assert status == 'booked'
data = {'refund_amount': 99}
r = await cli.json_post(url('event-tickets-cancel', id=factory.event_id, tid=ticket_id), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {'message': 'Refund not possible unless ticket was bought through stripe.'}
assert 'POST stripe_root_url/refunds' not in dummy_server.app['log']
async def test_cancel_ticket_refund_too_much(factory: Factory, cli, url, buy_tickets, db_conn, dummy_server):
await factory.create_company()
await factory.create_cat(cover_costs_message='Help!', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(status='published', price=100)
await buy_tickets(factory)
ticket_id, status = await db_conn.fetchrow('select id, status from tickets')
assert status == 'booked'
data = {'refund_amount': 101}
r = await cli.json_post(url('event-tickets-cancel', id=factory.event_id, tid=ticket_id), data=data)
assert r.status == 400, await r.text()
data = await r.json()
assert data == {'message': 'Refund amount must not exceed 100.00.'}
assert 'POST stripe_root_url/refunds' not in dummy_server.app['log']
async def test_ticket_expiry(factory: Factory, db_conn, settings):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published', price=10, ticket_limit=2)
res = await factory.create_reservation()
assert await db_conn.fetchval('select count(*) from tickets') == 1
ticket_id = await db_conn.fetchval('select id from tickets where reserve_action=$1', res.action_id)
assert 1 == await db_conn.fetchval('select check_tickets_remaining($1, $2)', factory.event_id, settings.ticket_ttl)
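# Backdating the reservation past the ticket TTL frees the slot: the row still
# exists after one hour, and check_tickets_remaining appears to prune it
# entirely once it is days old (see the count assertions below).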
await db_conn.execute("update tickets set created_ts=now() - '3600 seconds'::interval where id=$1", ticket_id)
assert 2 == await db_conn.fetchval('select check_tickets_remaining($1, $2)', factory.event_id, settings.ticket_ttl)
assert await db_conn.fetchval('select count(*) from tickets') == 1
await db_conn.execute("update tickets set created_ts=now() - '10 days'::interval where id=$1", ticket_id)
assert 2 == await db_conn.fetchval('select check_tickets_remaining($1, $2)', factory.event_id, settings.ticket_ttl)
assert await db_conn.fetchval('select count(*) from tickets') == 0
async def test_index_sold_out(factory: Factory, cli, url, buy_tickets, db_conn):
await factory.create_company()
await factory.create_cat(slug='testing', cover_costs_percentage=5)
await factory.create_user()
await factory.create_event(highlight=True, status='published', price=100, ticket_limit=1)
assert await db_conn.fetchval('SELECT check_tickets_remaining($1, $2)', factory.event_id, 600) == 1
r = await cli.get(url('index'))
assert r.status == 200, await r.text()
data = await r.json()
assert data['highlight_events'][0]['sold_out'] is False
r = await cli.get(url('category', category='testing'))
assert r.status == 200, await r.text()
data = await r.json()
assert data['events'][0]['sold_out'] is False
await buy_tickets(factory)
assert await db_conn.fetchval('SELECT check_tickets_remaining($1, $2)', factory.event_id, 600) == 0
r = await cli.get(url('index'))
assert r.status == 200, await r.text()
data = await r.json()
assert data['highlight_events'][0]['sold_out'] is True
r = await cli.get(url('category', category='testing'))
assert r.status == 200, await r.text()
data = await r.json()
assert data['events'][0]['sold_out'] is True
async def test_waiting_list(cli, url, factory: Factory, login, db_conn, dummy_server):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event()
await login()
assert await db_conn.fetchval('select count(*) from waiting_list') == 0
assert len(dummy_server.app['emails']) == 0
r = await cli.json_post(url('event-waiting-list-add', id=factory.event_id))
assert r.status == 200, await r.text()
assert await db_conn.fetchval('select count(*) from waiting_list') == 1
assert len(dummy_server.app['emails']) == 1
email = dummy_server.app['emails'][0]
assert 'trigger=waiting-list-add' in email['X-SES-MESSAGE-TAGS']
event_id, user_id = await db_conn.fetchrow('select event, user_id from waiting_list')
assert event_id == factory.event_id
assert user_id == factory.user_id
r = await cli.json_post(url('event-waiting-list-add', id=factory.event_id))
assert r.status == 200, await r.text()
assert await db_conn.fetchval('select count(*) from waiting_list') == 1
assert len(dummy_server.app['emails']) == 1
async def test_waiting_list_book_free(cli, url, login, factory: Factory, db_conn):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=None, status='published')
await login()
assert await db_conn.fetchval('select count(*) from waiting_list') == 0
r = await cli.json_post(url('event-waiting-list-add', id=factory.event_id))
assert r.status == 200, await r.text()
assert await db_conn.fetchval('select count(*) from waiting_list') == 1
res: Reservation = await factory.create_reservation()
app = cli.app['main_app']
data = dict(booking_token=encrypt_json(app, res.dict()), book_action='book-free-tickets')
r = await cli.json_post(url('event-book-tickets'), data=data)
assert r.status == 200, await r.text()
assert await db_conn.fetchval('select count(*) from waiting_list') == 0
async def test_waiting_list_buy(cli, url, login, factory: Factory, db_conn, buy_tickets):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(price=100, status='published')
await login()
assert await db_conn.fetchval('select count(*) from waiting_list') == 0
r = await cli.json_post(url('event-waiting-list-add', id=factory.event_id))
assert r.status == 200, await r.text()
assert await db_conn.fetchval('select count(*) from waiting_list') == 1
await buy_tickets(factory)
assert await db_conn.fetchval('select count(*) from waiting_list') == 0
async def test_cancel_ticket_waiting_list(factory: Factory, cli, url, buy_tickets, db_conn, dummy_server):
await factory.create_company()
await factory.create_cat()
await factory.create_user()
await factory.create_event(status='published', price=100, ticket_limit=1)
assert await db_conn.fetchval('SELECT check_tickets_remaining($1, $2)', factory.event_id, 600) == 1
await buy_tickets(factory)
assert await db_conn.fetchval('SELECT check_tickets_remaining($1, $2)', factory.event_id, 600) == 0
ben = await factory.create_user(first_name='ben', last_name='ben', email='ben@example.org')
await db_conn.execute('insert into waiting_list (event, user_id) values ($1, $2)', factory.event_id, ben)
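# Cancelling the only booked ticket should notify the waiting list that tickets are available again.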
ticket_id, status = await db_conn.fetchrow('select id, status from tickets')
assert status == 'booked'
r = await cli.json_post(url('event-tickets-cancel', id=factory.event_id, tid=ticket_id), data='{}')
assert r.status == 200, await r.text()
assert 0 == await db_conn.fetchval('select tickets_taken from events where id=$1', factory.event_id)
assert len(dummy_server.app['emails']) == 3
email = next(e for e in dummy_server.app['emails'] if 'trigger=event-tickets-available' in e['X-SES-MESSAGE-TAGS'])
assert email['To'] == 'ben ben <ben@example.org>'
assert email['Subject'] == 'The Event Name - New Tickets Available'
| 39.48118
| 119
| 0.662793
| 5,270
| 38,810
| 4.696774
| 0.051233
| 0.078054
| 0.116354
| 0.046057
| 0.908331
| 0.879404
| 0.855163
| 0.84377
| 0.830074
| 0.826034
| 0
| 0.013609
| 0.201031
| 38,810
| 982
| 120
| 39.521385
| 0.784636
| 0
| 0
| 0.700127
| 0
| 0.011436
| 0.245088
| 0.05396
| 0
| 0
| 0
| 0
| 0.191868
| 1
| 0.001271
| false
| 0
| 0.007624
| 0
| 0.010165
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fd1e08c6235d3c3a90f315e4fd821a3f14acf54b
| 2,531
|
py
|
Python
|
budget_app/models.py
|
MikeTheCanuck/TB-playground
|
f063a4d198bae2f1164449d491a0d38c3d8e61be
|
[
"MIT"
] | null | null | null |
budget_app/models.py
|
MikeTheCanuck/TB-playground
|
f063a4d198bae2f1164449d491a0d38c3d8e61be
|
[
"MIT"
] | null | null | null |
budget_app/models.py
|
MikeTheCanuck/TB-playground
|
f063a4d198bae2f1164449d491a0d38c3d8e61be
|
[
"MIT"
] | null | null | null |
from django.db import models
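# Budget data models: OCRB and KPM rows keyed by service area/bureau,
# line-item budget history, and generic lookup codes.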
class OCRB(models.Model):
id = models.AutoField(primary_key=True)
source_document = models.CharField(max_length=255, default='')
service_area = models.CharField(max_length=255, default='')
bureau = models.CharField(max_length=255, default='')
budget_category = models.CharField(max_length=255, default='')
amount = models.IntegerField(blank=True, null=True)
fy = models.CharField(max_length=255, default='')
budget_type = models.CharField(max_length=255, default='')
class KPM(models.Model):
id = models.AutoField(primary_key=True)
source_document = models.CharField(max_length=255, default='')
service_area = models.CharField(max_length=255, default='')
bureau = models.CharField(max_length=255, default='')
key_performance_measures = models.CharField(max_length=255, default='')
fy = models.CharField(max_length=255, default='')
budget_type = models.CharField(max_length=255, default='')
amount = models.FloatField(blank=True, null=True)
units = models.CharField(max_length=255, default='')
class BudgetHistory(models.Model):
id = models.AutoField(primary_key=True)
fund_center_code = models.CharField(max_length=32, default='')
fund_code = models.CharField(max_length=32, default='')
functional_area_code = models.CharField(max_length=32, default='')
object_code = models.CharField(max_length=32, default='')
fund_center_name = models.CharField(max_length=255, default='')
fund_name = models.CharField(max_length=255, default='')
functional_area_name = models.CharField(max_length=255, default='')
accounting_object_name = models.CharField(max_length=255, default='')
service_area_code = models.CharField(max_length=32, default='')
program_code = models.CharField(max_length=32, default='')
sub_program_code = models.CharField(max_length=32, default='')
fund_center = models.CharField(max_length=32, default='')
division_code = models.CharField(max_length=32, default='')
bureau_code = models.CharField(max_length=32, default='')
bureau_name = models.CharField(max_length=255, default='')
fiscal_year = models.CharField(max_length=32, default='')
amount = models.IntegerField(blank=True, null=True)
class LookupCode(models.Model):
id = models.AutoField(primary_key=True)
code_type = models.CharField(max_length=32, default='')
code = models.CharField(max_length=32, default='')
description = models.CharField(max_length=255, default='')
| 49.627451
| 75
| 0.734492
| 323
| 2,531
| 5.541796
| 0.160991
| 0.268156
| 0.321788
| 0.42905
| 0.875978
| 0.873743
| 0.784358
| 0.627933
| 0.33743
| 0.284916
| 0
| 0.037642
| 0.128803
| 2,531
| 50
| 76
| 50.62
| 0.77415
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022727
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
fd29c259d35cd29943e324d13a2b795540a86baa
| 18,541
|
py
|
Python
|
DVCNN/dvcnn_model.py
|
MaybeShewill-CV/DVCNN_Lane_Detection
|
b66a1a856ba69b0a0a82c7b53dd192e4906a375b
|
[
"Apache-2.0"
] | 19
|
2018-06-19T05:07:47.000Z
|
2022-02-02T11:08:01.000Z
|
DVCNN/dvcnn_model.py
|
MaybeShewill-CV/DVCNN_Lane_Detection
|
b66a1a856ba69b0a0a82c7b53dd192e4906a375b
|
[
"Apache-2.0"
] | 2
|
2018-06-23T06:59:45.000Z
|
2019-12-29T13:10:40.000Z
|
DVCNN/dvcnn_model.py
|
MaybeShewill-CV/DVCNN_Lane_Detection
|
b66a1a856ba69b0a0a82c7b53dd192e4906a375b
|
[
"Apache-2.0"
] | 15
|
2018-06-19T05:07:48.000Z
|
2022-02-02T11:08:06.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : Luo Yao
# @Site : http://github.com/TJCVRS
# @File : dvcnn_model.py
"""
Construct the DVCNN model
"""
import tensorflow as tf
from DVCNN import cnn_util
class DVCNNBuilder(object):
def __init__(self, json_model_path):
self.__dvcnn_architecture = cnn_util.read_json_model(json_model_path=json_model_path)
return
@staticmethod
def __conv2d(_input, _conv_para, name, reuse=False):
"""
2-D convolution layer with truncated-normal weights and zero-initialized bias
:param _input: input tensor
:param _conv_para: layer config dict with 'ksize', 'strides', 'padding' and 'trainable'
:param name: variable scope name
:param reuse: whether to reuse variables in the scope
:return: convolution output with bias added
"""
with tf.variable_scope(name, reuse=reuse):
# truncated-normal weight initialization
init_w = tf.truncated_normal(shape=_conv_para['ksize'], mean=0, stddev=0.02)
weights = tf.get_variable(name='weights', dtype=tf.float32, initializer=init_w,
trainable=_conv_para['trainable'])
output = tf.nn.conv2d(_input, weights, _conv_para['strides'], _conv_para['padding'])
out_channels = _conv_para['ksize'][-1]
# zero bias initialization
init_b = tf.zeros([out_channels])
bias = tf.get_variable(name='bias', initializer=init_b, dtype=tf.float32, trainable=_conv_para['trainable'])
output = tf.nn.bias_add(output, bias)
return output
@staticmethod
def __activate(_input, _activate_para, name, reuse=False):
"""
Apply the configured activation (RELU, SIGMOID or TANH)
:param _input: input tensor
:param _activate_para: config dict whose 'method' key selects the activation
:param name: variable scope name
:param reuse: whether to reuse variables in the scope
:return: activated tensor
"""
with tf.variable_scope(name, reuse=reuse):
if _activate_para['method'] == 'RELU':
return tf.nn.relu(_input, name='Relu_activation')
elif _activate_para['method'] == 'SIGMOID':
return tf.nn.sigmoid(_input, name='Sigmoid_activation')
elif _activate_para['method'] == 'TANH':
return tf.nn.tanh(_input, name='Tanh_activation')
else:
raise NotImplementedError(_activate_para['method'])
@staticmethod
def __max_pool(_input, _max_pool_para, name, reuse=False):
"""
Max-pooling layer
:param _input: input tensor
:param _max_pool_para: config dict with 'ksize', 'strides' and 'padding'
:param name: variable scope name
:param reuse: whether to reuse variables in the scope
:return: pooled tensor
"""
with tf.variable_scope(name, reuse=reuse):
return tf.nn.max_pool(_input, _max_pool_para['ksize'], _max_pool_para['strides'], _max_pool_para['padding'])
@staticmethod
def __concat(_input, _concat_para, name):
"""
Concatenate the given tensors along the configured axis
:param _input: list of tensors to concatenate
:param _concat_para: config dict with the 'axis' to concatenate on
:param name: operation name
:return: concatenated tensor
"""
return tf.concat(values=_input, axis=_concat_para['axis'], name=name)
@staticmethod
def __fully_connect(_input, _fc_para, name, reuse=False):
"""
Fully-connected layer, implemented as a convolution over the feature map
:param _input: input tensor
:param _fc_para: layer config dict with 'ksize', 'strides', 'padding' and 'trainable'
:param name: variable scope name
:param reuse: whether to reuse variables in the scope
:return: layer output with bias added
"""
with tf.variable_scope(name, reuse=reuse):
# truncated-normal weight initialization
init_w = tf.truncated_normal(shape=_fc_para['ksize'], mean=0, stddev=0.02)
weights = tf.get_variable(name='weights', initializer=init_w, dtype=tf.float32,
trainable=_fc_para['trainable'])
output = tf.nn.conv2d(_input, weights, _fc_para['strides'], _fc_para['padding'])
out_channels = _fc_para['ksize'][-1]
# zero bias initialization
init_b = tf.zeros([out_channels])
bias = tf.get_variable(name='bias', initializer=init_b, dtype=tf.float32, trainable=_fc_para['trainable'])
output = tf.nn.bias_add(output, bias)
return output
@staticmethod
def __batch_norm(_input, name, reuse=False):
"""
Batch normalization layer
:param _input: input tensor
:param name: layer name
:param reuse: whether to reuse the layer's variables
:return: normalized tensor
"""
return tf.layers.batch_normalization(_input, name=name, reuse=reuse)
def build_dvcnn(self, top_view_input, front_view_input):
"""
Build dvcnn model
:param top_view_input: top view input tensor normalized into 64*64
:param front_view_input: front view input tensor normalized into 128*128
:return: softmax logits with 2 classes [not_road_line, is_road_line]
"""
# front view input begins at conv1 and top view input begins at conv2
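# conv2 has separate front/top weights; from conv3 onwards the two branches
# share weights (the top branch is built with reuse=True).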
# Stage 1
front_conv1 = self.__conv2d(_input=front_view_input, _conv_para=self.__dvcnn_architecture['conv1'],
name='conv1', reuse=False)
front_bn1 = self.__batch_norm(_input=front_conv1, name='bn1', reuse=False)
front_relu1 = self.__activate(_input=front_bn1, _activate_para=self.__dvcnn_architecture['relu1'],
name='relu1', reuse=False)
front_pool1 = self.__max_pool(_input=front_relu1, _max_pool_para=self.__dvcnn_architecture['pool1'],
name='pool1', reuse=False)
# Stage 2
front_conv2 = self.__conv2d(_input=front_pool1, _conv_para=self.__dvcnn_architecture['conv2_front'],
name='conv2_front', reuse=False)
front_bn2 = self.__batch_norm(_input=front_conv2, name='bn2_front', reuse=False)
front_relu2 = self.__activate(_input=front_bn2, _activate_para=self.__dvcnn_architecture['relu2'],
name='relu2', reuse=False)
front_pool2 = self.__max_pool(_input=front_relu2, _max_pool_para=self.__dvcnn_architecture['pool2'],
name='pool2', reuse=False)
top_conv2 = self.__conv2d(_input=top_view_input, _conv_para=self.__dvcnn_architecture['conv2_top'],
name='conv2_top', reuse=False)
top_bn2 = self.__batch_norm(_input=top_conv2, name='bn2_top', reuse=False)
top_relu2 = self.__activate(_input=top_bn2, _activate_para=self.__dvcnn_architecture['relu2'],
name='relu2', reuse=True)
top_pool2 = self.__max_pool(_input=top_relu2, _max_pool_para=self.__dvcnn_architecture['pool2'],
name='pool2', reuse=True)
# Stage 3
front_conv3 = self.__conv2d(_input=front_pool2, _conv_para=self.__dvcnn_architecture['conv3'],
name='conv3', reuse=False)
front_bn3 = self.__batch_norm(_input=front_conv3, name='bn3', reuse=False)
front_relu3 = self.__activate(_input=front_bn3, _activate_para=self.__dvcnn_architecture['relu3'],
name='relu3', reuse=False)
front_pool3 = self.__max_pool(_input=front_relu3, _max_pool_para=self.__dvcnn_architecture['pool3'],
name='pool3', reuse=False)
top_conv3 = self.__conv2d(_input=top_pool2, _conv_para=self.__dvcnn_architecture['conv3'],
name='conv3', reuse=True)
top_bn3 = self.__batch_norm(_input=top_conv3, name='bn3', reuse=True)
top_relu3 = self.__activate(_input=top_bn3, _activate_para=self.__dvcnn_architecture['relu3'],
name='relu3', reuse=True)
top_pool3 = self.__max_pool(_input=top_relu3, _max_pool_para=self.__dvcnn_architecture['pool3'],
name='pool3', reuse=True)
# Stage 4
front_conv4 = self.__conv2d(_input=front_pool3, _conv_para=self.__dvcnn_architecture['conv4'],
name='conv4', reuse=False)
front_bn4 = self.__batch_norm(_input=front_conv4, name='bn4', reuse=False)
front_relu4 = self.__activate(_input=front_bn4, _activate_para=self.__dvcnn_architecture['relu4'],
name='relu4', reuse=False)
front_pool4 = self.__max_pool(_input=front_relu4, _max_pool_para=self.__dvcnn_architecture['pool4'],
name='pool4', reuse=False)
top_conv4 = self.__conv2d(_input=top_pool3, _conv_para=self.__dvcnn_architecture['conv4'],
name='conv4', reuse=True)
top_bn4 = self.__batch_norm(_input=top_conv4, name='bn4', reuse=True)
top_relu4 = self.__activate(_input=top_bn4, _activate_para=self.__dvcnn_architecture['relu4'],
name='relu4', reuse=True)
top_pool4 = self.__max_pool(_input=top_relu4, _max_pool_para=self.__dvcnn_architecture['pool4'],
name='pool4', reuse=True)
# Stage 5
front_conv5 = self.__conv2d(_input=front_pool4, _conv_para=self.__dvcnn_architecture['conv5'],
name='conv5', reuse=False)
front_bn5 = self.__batch_norm(_input=front_conv5, name='bn5', reuse=False)
front_relu5 = self.__activate(_input=front_bn5, _activate_para=self.__dvcnn_architecture['relu5'],
name='relu5', reuse=False)
front_pool5 = self.__max_pool(_input=front_relu5, _max_pool_para=self.__dvcnn_architecture['pool5'],
name='pool5', reuse=False)
top_conv5 = self.__conv2d(_input=top_pool4, _conv_para=self.__dvcnn_architecture['conv5'],
name='conv5', reuse=True)
top_bn5 = self.__batch_norm(_input=top_conv5, name='bn5', reuse=True)
top_relu5 = self.__activate(_input=top_bn5, _activate_para=self.__dvcnn_architecture['relu5'],
name='relu5', reuse=True)
top_pool5 = self.__max_pool(_input=top_relu5, _max_pool_para=self.__dvcnn_architecture['pool5'],
name='pool5', reuse=True)
# Stage 6
front_fc6 = self.__fully_connect(_input=front_pool5, _fc_para=self.__dvcnn_architecture['fc6'],
name='fc6', reuse=False)
front_bn6 = self.__batch_norm(_input=front_fc6, name='bn6', reuse=False)
front_relu6 = self.__activate(_input=front_bn6, _activate_para=self.__dvcnn_architecture['relu6'],
name='relu6', reuse=False)
top_fc6 = self.__fully_connect(_input=top_pool5, _fc_para=self.__dvcnn_architecture['fc6'],
name='fc6', reuse=True)
top_bn6 = self.__batch_norm(_input=top_fc6, name='bn6', reuse=True)
top_relu6 = self.__activate(_input=top_bn6, _activate_para=self.__dvcnn_architecture['relu6'],
name='relu6', reuse=True)
# Stage 7
concat7 = self.__concat(_input=[front_relu6, top_relu6], _concat_para=self.__dvcnn_architecture['concat7'],
name='concat7')
# Stage 8
fc8 = self.__fully_connect(_input=concat7, _fc_para=self.__dvcnn_architecture['fc8'],
name='fc8', reuse=False)
# Reshape fc8 into a batch of 2-class logit vectors
output = tf.reshape(tensor=fc8, shape=[-1, self.__dvcnn_architecture['fc8']['ksize'][-1]])
return output
def build_dvcnn_val(self, top_view_input, front_view_input):
"""
Build dvcnn model for evaluation
:param top_view_input: top view input tensor normalized into 64*64
:param front_view_input: front view input tensor normalized into 128*128
:return: softmax logits with 2 classes [not_road_line, is_road_line]
"""
# front view input begins at conv1 and top view input begins at conv2
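# Same topology as build_dvcnn, but every layer is created with reuse=True so
# evaluation shares the variables built during training.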
# Stage 1
front_conv1 = self.__conv2d(_input=front_view_input, _conv_para=self.__dvcnn_architecture['conv1'],
name='conv1', reuse=True)
front_bn1 = self.__batch_norm(_input=front_conv1, name='bn1', reuse=True)
front_relu1 = self.__activate(_input=front_bn1, _activate_para=self.__dvcnn_architecture['relu1'],
name='relu1', reuse=True)
front_pool1 = self.__max_pool(_input=front_relu1, _max_pool_para=self.__dvcnn_architecture['pool1'],
name='pool1', reuse=True)
# Stage 2
front_conv2 = self.__conv2d(_input=front_pool1, _conv_para=self.__dvcnn_architecture['conv2_front'],
name='conv2_front', reuse=True)
front_bn2 = self.__batch_norm(_input=front_conv2, name='bn2_front', reuse=True)
front_relu2 = self.__activate(_input=front_bn2, _activate_para=self.__dvcnn_architecture['relu2'],
name='relu2', reuse=True)
front_pool2 = self.__max_pool(_input=front_relu2, _max_pool_para=self.__dvcnn_architecture['pool2'],
name='pool2', reuse=True)
top_conv2 = self.__conv2d(_input=top_view_input, _conv_para=self.__dvcnn_architecture['conv2_top'],
name='conv2_top', reuse=True)
top_bn2 = self.__batch_norm(_input=top_conv2, name='bn2_top', reuse=True)
top_relu2 = self.__activate(_input=top_bn2, _activate_para=self.__dvcnn_architecture['relu2'],
name='relu2', reuse=True)
top_pool2 = self.__max_pool(_input=top_relu2, _max_pool_para=self.__dvcnn_architecture['pool2'],
name='pool2', reuse=True)
# Stage 3
front_conv3 = self.__conv2d(_input=front_pool2, _conv_para=self.__dvcnn_architecture['conv3'],
name='conv3', reuse=True)
front_bn3 = self.__batch_norm(_input=front_conv3, name='bn3', reuse=True)
front_relu3 = self.__activate(_input=front_bn3, _activate_para=self.__dvcnn_architecture['relu3'],
name='relu3', reuse=True)
front_pool3 = self.__max_pool(_input=front_relu3, _max_pool_para=self.__dvcnn_architecture['pool3'],
name='pool3', reuse=True)
top_conv3 = self.__conv2d(_input=top_pool2, _conv_para=self.__dvcnn_architecture['conv3'],
name='conv3', reuse=True)
top_bn3 = self.__batch_norm(_input=top_conv3, name='bn3', reuse=True)
top_relu3 = self.__activate(_input=top_bn3, _activate_para=self.__dvcnn_architecture['relu3'],
name='relu3', reuse=True)
top_pool3 = self.__max_pool(_input=top_relu3, _max_pool_para=self.__dvcnn_architecture['pool3'],
name='pool3', reuse=True)
# Stage 4
front_conv4 = self.__conv2d(_input=front_pool3, _conv_para=self.__dvcnn_architecture['conv4'],
name='conv4', reuse=True)
front_bn4 = self.__batch_norm(_input=front_conv4, name='bn4', reuse=True)
front_relu4 = self.__activate(_input=front_bn4, _activate_para=self.__dvcnn_architecture['relu4'],
name='relu4', reuse=True)
front_pool4 = self.__max_pool(_input=front_relu4, _max_pool_para=self.__dvcnn_architecture['pool4'],
name='pool4', reuse=True)
top_conv4 = self.__conv2d(_input=top_pool3, _conv_para=self.__dvcnn_architecture['conv4'],
name='conv4', reuse=True)
top_bn4 = self.__batch_norm(_input=top_conv4, name='bn4', reuse=True)
top_relu4 = self.__activate(_input=top_bn4, _activate_para=self.__dvcnn_architecture['relu4'],
name='relu4', reuse=True)
top_pool4 = self.__max_pool(_input=top_relu4, _max_pool_para=self.__dvcnn_architecture['pool4'],
name='pool4', reuse=True)
# Stage 5
front_conv5 = self.__conv2d(_input=front_pool4, _conv_para=self.__dvcnn_architecture['conv5'],
name='conv5', reuse=True)
front_bn5 = self.__batch_norm(_input=front_conv5, name='bn5', reuse=True)
front_relu5 = self.__activate(_input=front_bn5, _activate_para=self.__dvcnn_architecture['relu5'],
name='relu5', reuse=True)
front_pool5 = self.__max_pool(_input=front_relu5, _max_pool_para=self.__dvcnn_architecture['pool5'],
name='pool5', reuse=True)
top_conv5 = self.__conv2d(_input=top_pool4, _conv_para=self.__dvcnn_architecture['conv5'],
name='conv5', reuse=True)
top_bn5 = self.__batch_norm(_input=top_conv5, name='bn5', reuse=True)
top_relu5 = self.__activate(_input=top_bn5, _activate_para=self.__dvcnn_architecture['relu5'],
name='relu5', reuse=True)
top_pool5 = self.__max_pool(_input=top_relu5, _max_pool_para=self.__dvcnn_architecture['pool5'],
name='pool5', reuse=True)
# Stage 6
front_fc6 = self.__fully_connect(_input=front_pool5, _fc_para=self.__dvcnn_architecture['fc6'],
name='fc6', reuse=True)
front_bn6 = self.__batch_norm(_input=front_fc6, name='bn6', reuse=True)
front_relu6 = self.__activate(_input=front_bn6, _activate_para=self.__dvcnn_architecture['relu6'],
name='relu6', reuse=True)
top_fc6 = self.__fully_connect(_input=top_pool5, _fc_para=self.__dvcnn_architecture['fc6'],
name='fc6', reuse=True)
top_bn6 = self.__batch_norm(_input=top_fc6, name='bn6', reuse=True)
top_relu6 = self.__activate(_input=top_bn6, _activate_para=self.__dvcnn_architecture['relu6'],
name='relu6', reuse=True)
# Stage 7
concat7 = self.__concat(_input=[front_relu6, top_relu6], _concat_para=self.__dvcnn_architecture['concat7'],
name='concat7')
# Stage 8
fc8 = self.__fully_connect(_input=concat7, _fc_para=self.__dvcnn_architecture['fc8'],
name='fc8', reuse=True)
# Reshape fc8 into a batch of 2-class logit vectors
output = tf.reshape(tensor=fc8, shape=[-1, self.__dvcnn_architecture['fc8']['ksize'][-1]])
return output
| 54.532353 | 120 | 0.602233 | 2,112 | 18,541 | 4.800189 | 0.079545 | 0.061255 | 0.142928 | 0.162754 | 0.869205 | 0.856579 | 0.841783 | 0.840107 | 0.834287 | 0.834287 | 0 | 0.031857 | 0.288927 | 18,541 | 339 | 121 | 54.693215 | 0.737106 | 0.08732 | 0 | 0.63981 | 0 | 0 | 0.060621 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042654 | false | 0 | 0.009479 | 0 | 0.113744 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fd38b96066e3216e5b5a1d89741241063a35ad08 | 11,179 | py | Python | tests/test_cloudformation.py | samjarrett/cfn-deployer | 45bb9864107a401f5e5f0f0c8215ad2cf0c79400 | ["MIT"] | 2 | 2020-05-15T11:08:42.000Z | 2021-07-02T20:38:17.000Z | tests/test_cloudformation.py | samjarrett/cfn-deployer | 45bb9864107a401f5e5f0f0c8215ad2cf0c79400 | ["MIT"] | 57 | 2020-04-03T19:25:16.000Z | 2022-03-30T04:06:46.000Z | tests/test_cloudformation.py | samjarrett/cfn-deployer | 45bb9864107a401f5e5f0f0c8215ad2cf0c79400 | ["MIT"] | null | null | null |
# pylint:disable=redefined-outer-name
from unittest.mock import MagicMock, patch
import pytest
from botocore.exceptions import ClientError # type: ignore
from cfn_sync import cloudformation
from .conftest import StubbedClient
from .stubs import (
stub_create_stack,
stub_create_stack_error,
stub_delete_stack,
stub_delete_stack_error,
stub_describe_stack,
stub_describe_stack_error,
stub_describe_stack_events,
stub_update_stack,
stub_update_stack_error,
)
@pytest.fixture
def stack(fake_cloudformation_client: StubbedClient) -> cloudformation.Stack:
"""Create a Stack object"""
return cloudformation.Stack(fake_cloudformation_client.client, "MyStack")
def test_status(fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack):
"""Tests Stack.status"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
assert stack.status == "UPDATE_COMPLETE"
# Subsequent calls use the ID
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "UPDATE_ROLLBACK_COMPLETE"
)
assert stack.status == "UPDATE_ROLLBACK_COMPLETE"
def test_exists(fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack):
"""Tests Stack.exists"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
assert stack.exists
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE")
assert stack.exists
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_IN_PROGRESS"
)
assert stack.exists
def test_exists_not_exists(
fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack
):
"""Tests Stack.exists with an error message"""
stub_describe_stack_error(fake_cloudformation_client.stub)
assert not stack.exists
def test_exists_different_error(
fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack
):
"""Tests Stack.exists with a non-stack does not exist message"""
stub_describe_stack_error(
fake_cloudformation_client.stub, "A general error occurred"
)
with pytest.raises(ClientError):
_ = stack.exists
def test_deploy_update_success(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy() update successful cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_update_stack(
fake_cloudformation_client.stub,
"MyStack",
demo_template,
[{"ParameterKey": "Hello", "ParameterValue": "You"}],
[{"Key": "MyTag", "Value": "TagValue"}],
)
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, False)
def test_deploy_update_capabilities_success(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy() update successful cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_update_stack(
fake_cloudformation_client.stub,
"MyStack",
demo_template,
[{"ParameterKey": "Hello", "ParameterValue": "You"}],
[{"Key": "MyTag", "Value": "TagValue"}],
["CAPABILITY_IAM"],
)
stack.set_capabilities(["CAPABILITY_IAM"])
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, False)
def test_deploy_update_failure(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy() update failure cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_update_stack_error(fake_cloudformation_client.stub)
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, False)
# Test some other kind of error
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_update_stack_error(fake_cloudformation_client.stub, "Template invalid")
with pytest.raises(ClientError):
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, False)
def test_deploy_create_success(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy() create successful cases"""
stub_describe_stack_error(
fake_cloudformation_client.stub
) # to trigger create workflow
stub_create_stack(
fake_cloudformation_client.stub,
"MyStack",
demo_template,
[{"ParameterKey": "Hello", "ParameterValue": "You"}],
[{"Key": "MyTag", "Value": "TagValue"}],
)
stack.deploy(
demo_template,
{"Hello": "You"},
{"MyTag": "TagValue"},
False,
)
def test_deploy_create_failure(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy() update failure cases"""
stub_describe_stack_error(
fake_cloudformation_client.stub
) # to trigger create workflow
stub_create_stack_error(fake_cloudformation_client.stub, "Template invalid")
with pytest.raises(ClientError):
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, False)
def test_delete_success(
fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack
):
"""Tests Stack.delete() successful cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE")
stub_delete_stack(fake_cloudformation_client.stub, "MyStack")
stack.delete(False)
def test_delete_failure(
fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack
):
"""Tests Stack.delete() failure cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE")
stub_delete_stack_error(fake_cloudformation_client.stub, "Can not delete")
with pytest.raises(ClientError):
stack.delete(False)
def test_delete_wait_success(
fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack
):
"""Tests Stack.delete(wait=True) successful cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_delete_stack(fake_cloudformation_client.stub, "MyStack")
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "DELETE_COMPLETE", True
)
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack", True)
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "DELETE_COMPLETE", True
)
stack.delete(True)
def test_delete_wait_failure(
fake_cloudformation_client: StubbedClient, stack: cloudformation.Stack
):
"""Tests Stack.delete(wait=True) failure cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_delete_stack(fake_cloudformation_client.stub, "MyStack")
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE", True
)
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack", True)
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "DELETE_FAILED", True
)
with pytest.raises(Exception):
stack.delete(True)
def test_deploy_wait_success(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy(wait=True) successful cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_update_stack(
fake_cloudformation_client.stub,
"MyStack",
demo_template,
[{"ParameterKey": "Hello", "ParameterValue": "You"}],
[{"Key": "MyTag", "Value": "TagValue"}],
)
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE", True
)
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack", True)
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE", True
)
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, True)
def test_deploy_wait_failure(
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
demo_template: str,
):
"""Tests Stack.deploy(wait=True) failure cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "UPDATE_COMPLETE")
stub_update_stack(
fake_cloudformation_client.stub,
"MyStack",
demo_template,
[{"ParameterKey": "Hello", "ParameterValue": "You"}],
[{"Key": "MyTag", "Value": "TagValue"}],
)
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE", True
)
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack", True)
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "ROLLBACK_COMPLETE", True
)
with pytest.raises(Exception):
stack.deploy(demo_template, {"Hello": "You"}, {"MyTag": "TagValue"}, True)
@patch("time.sleep")
def test_wait_delay(
patched_sleep: MagicMock,
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
):
"""Tests Stack.wait_delay and Stack.wait()"""
# test default is 5 sec
def perform_wait(
stack: cloudformation.Stack, fake_cloudformation_client: StubbedClient
):
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_IN_PROGRESS"
)
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack")
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack")
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE"
)
stack.wait()
perform_wait(stack, fake_cloudformation_client)
patched_sleep.assert_called_once_with(5)
patched_sleep.reset_mock()
stack.wait_delay = 30
perform_wait(stack, fake_cloudformation_client)
patched_sleep.assert_called_once_with(30)
patched_sleep.reset_mock()
stack.wait_delay = 300
perform_wait(stack, fake_cloudformation_client)
patched_sleep.assert_called_once_with(300)
@patch("time.sleep")
def test_wait_success(
patched_sleep: MagicMock,
fake_cloudformation_client: StubbedClient,
stack: cloudformation.Stack,
):
"""Tests Stack.wait() success cases"""
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE")
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack")
stack.wait()
patched_sleep.assert_not_called()
stub_describe_stack(
fake_cloudformation_client.stub, "MyStack", "CREATE_IN_PROGRESS"
)
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack")
stub_describe_stack_events(fake_cloudformation_client.stub, "MyStack")
stub_describe_stack(fake_cloudformation_client.stub, "MyStack", "CREATE_COMPLETE")
stack.wait()
patched_sleep.assert_called_once()
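# The StubbedClient fixture used throughout this file lives in .conftest and
# is not shown here. A minimal sketch of how such a fixture could be written
# with botocore's Stubber (an assumed implementation, for orientation only):
#
#     import boto3
#     from botocore.stub import Stubber
#
#     class StubbedClient:
#         def __init__(self, client, stub):
#             self.client = client
#             self.stub = stub
#
#     @pytest.fixture
#     def fake_cloudformation_client():
#         client = boto3.client("cloudformation", region_name="us-east-1")
#         with Stubber(client) as stub:
#             yield StubbedClient(client, stub)
#             stub.assert_no_pending_responses()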
| 34.717391 | 88 | 0.725825 | 1,232 | 11,179 | 6.234578 | 0.091721 | 0.178102 | 0.237469 | 0.193204 | 0.895456 | 0.835178 | 0.807968 | 0.798854 | 0.798854 | 0.73467 | 0 | 0.001288 | 0.166294 | 11,179 | 321 | 89 | 34.825545 | 0.822854 | 0.079614 | 0 | 0.649402 | 0 | 0 | 0.133438 | 0.00471 | 0 | 0 | 0 | 0 | 0.043825 | 1 | 0.075697 | false | 0 | 0.023904 | 0 | 0.103586 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fd3d65e0b5c411a724a61b19181d41e1750ce956 | 146 | py | Python | unisul_machine_learning/weka/__init__.py | Azganoth/unisul-machine-learning | c5c8dd65b0084521e4f5f679f53fedb03207a9a2 | ["MIT"] | null | null | null | unisul_machine_learning/weka/__init__.py | Azganoth/unisul-machine-learning | c5c8dd65b0084521e4f5f679f53fedb03207a9a2 | ["MIT"] | null | null | null | unisul_machine_learning/weka/__init__.py | Azganoth/unisul-machine-learning | c5c8dd65b0084521e4f5f679f53fedb03207a9a2 | ["MIT"] | null | null | null |
from .arff import Attributes, Instances, load_arff, save_arff
__all__ = [
'Attributes',
'Instances',
'load_arff',
'save_arff',
]
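# Hypothetical usage sketch of this package's re-exports (the real signatures
# live in .arff and are not shown here; argument names below are assumptions):
#
#     from unisul_machine_learning.weka import load_arff, save_arff
#
#     attributes, instances = load_arff('iris.arff')   # Attributes, Instances
#     save_arff('iris_copy.arff', attributes, instances)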
| 16.222222 | 61 | 0.650685 | 16 | 146 | 5.4375 | 0.5 | 0.436782 | 0.528736 | 0.62069 | 0.804598 | 0.804598 | 0 | 0 | 0 | 0 | 0 | 0 | 0.219178 | 146 | 8 | 62 | 18.25 | 0.763158 | 0 | 0 | 0 | 0 | 0 | 0.253425 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fd4d1cefb8d1a7e004bbe42fd48d20361cac8cd6 | 255,787 | py | Python | submissions/available/NNSlicer/NNSlicer/eval/common.py | ziqi-zhang/fse20 | f3998abda2e40d67989ec113340236f3460f0dc3 | ["MIT"] | null | null | null | submissions/available/NNSlicer/NNSlicer/eval/common.py | ziqi-zhang/fse20 | f3998abda2e40d67989ec113340236f3460f0dc3 | ["MIT"] | null | null | null | submissions/available/NNSlicer/NNSlicer/eval/common.py | ziqi-zhang/fse20 | f3998abda2e40d67989ec113340236f3460f0dc3 | ["MIT"] | 2 | 2020-07-24T20:43:34.000Z | 2020-09-08T07:10:14.000Z |
import itertools
import traceback
import uuid
from functools import partial, reduce
from typing import Any, Callable, Dict, Iterable, List, Tuple, Union
from pdb import set_trace as st
import numpy as np
import pandas as pd
import os
import tensorflow as tf
from nninst_graph import AttrMap, Graph, GraphAttrKey
import nninst_mode as mode
from dataset import cifar10
from dataset.mnist_transforms import *
from dataset.config import MNIST_PATH, CIFAR10_PATH
# from nninst.backend.tensorflow.dataset import imagenet, imagenet_raw
# from nninst.backend.tensorflow.dataset.imagenet_hierarchy import imagenet_class_tree
# from nninst.backend.tensorflow.dataset.imagenet_preprocessing import (
# alexnet_preprocess_image,
# )
from tf_graph import (
MaskWeightWithTraceHook,
model_fn_with_fetch_hook,
)
from model import LeNet
from model.resnet18cifar10 import ResNet18Cifar10
from model.resnet10cifar10 import ResNet10Cifar10
# from nninst.backend.tensorflow.model import AlexNet, LeNet, ResNet50
from model.config import ModelConfig
# from nninst.backend.tensorflow.model.config import (
# ALEXNET,
# RESNET_50,
# VGG_16,
# ModelConfig,
# )
from trace.common import (
get_predicted_value,
get_rank,
predict,
reconstruct_class_trace_from_tf,
reconstruct_trace_from_tf,
reconstruct_trace_from_tf_brute_force,
)
from trace.common import (
reconstruct_stat_from_tf,
reconstruct_trace_from_tf_v2,
)
# from nninst.dataset.envs import IMAGENET_RAW_DIR
from nninst_op import Conv2dOp
from nninst_path import (
get_trace_path_in_fc_layers,
get_trace_path_intersection_in_fc_layers,
)
from nninst_statistics import (
calc_trace_path_num,
calc_trace_size,
calc_trace_size_per_layer,
)
from nninst_trace import (
TraceKey,
compact_edge,
compact_trace,
merge_compact_trace,
merge_compact_trace_diff,
merge_compact_trace_intersect,
)
from nninst_utils import filter_value_not_null, merge_dict
from nninst_utils.fs import CsvIOAction, ImageIOAction, IOAction, abspath
from nninst_utils.numpy import arg_approx, arg_sorted_topk
from nninst_utils.ray import ray_iter
__all__ = [
"clean_overlap_ratio",
"overlap_ratio",
"get_overlay_summary",
"resnet_50_imagenet_overlap_ratio",
"alexnet_imagenet_overlap_ratio",
"resnet_50_imagenet_overlap_ratio_error",
"get_overlay_summary_one_side",
"resnet_50_imagenet_overlap_ratio_rand",
"alexnet_imagenet_overlap_ratio_top5",
"resnet_50_imagenet_overlap_ratio_top5_rand",
"resnet_50_imagenet_overlap_ratio_top5",
"alexnet_imagenet_overlap_ratio_error",
"alexnet_imagenet_overlap_ratio_rand",
"alexnet_imagenet_overlap_ratio_top5_rand",
"alexnet_imagenet_overlap_ratio_top5_diff",
]
def calc_all_overlap(
class_trace: AttrMap,
trace: AttrMap,
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
node_name: str = None,
compact: bool = False,
use_intersect_size: bool = False,
key: str = TraceKey.EDGE,
) -> Dict[str, float]:
if node_name is None:
if use_intersect_size:
overlap_ratio, intersect_size = overlap_fn(
class_trace, trace, key, return_size=True
)
return {key + "_size": intersect_size, key: overlap_ratio}
else:
return {
**{
key + "_size": calc_trace_size(trace, key, compact=compact)
for key in [
TraceKey.EDGE,
TraceKey.POINT,
TraceKey.WEIGHT
]
},
**{
key: overlap_fn(class_trace, trace, key)
for key in [
TraceKey.EDGE,
TraceKey.POINT,
TraceKey.WEIGHT
]
},
}
else:
all_overlap = {
key: overlap_fn(class_trace, trace, key, node_name)
for key in [
TraceKey.EDGE,
TraceKey.POINT,
TraceKey.WEIGHT
]
}
for key in [
TraceKey.EDGE,
TraceKey.POINT,
TraceKey.WEIGHT
]:
if node_name in trace.ops:
node_trace = trace.ops[node_name]
if key in node_trace:
if compact:
all_overlap[key + "_size"] = np.count_nonzero(
np.unpackbits(node_trace[key])
)
else:
all_overlap[key + "_size"] = TraceKey.to_array(
node_trace[key]
).size
return all_overlap
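# For orientation, the default return shape of calc_all_overlap (node_name
# None, use_intersect_size False) is one size and one ratio per trace key.
# Assuming TraceKey.EDGE/POINT/WEIGHT are the strings "edge"/"point"/"weight",
# a result looks like (numbers illustrative only):
#
#     {
#         "edge_size": 1024, "point_size": 256, "weight_size": 512,
#         "edge": 0.91, "point": 0.88, "weight": 0.85,
#     }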
# Compute mnist overlap ratio between the traces of clean test images and class traces
def clean_overlap_ratio(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
per_node: bool = False,
    num_gpus: float = 0.2,
images_per_class: int = 1,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = abspath(MNIST_PATH)
model_dir = abspath("result/lenet/model_dropout")
create_model = lambda: LeNet(data_format="channels_first")
graph = LeNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
predicted_label = predict(
create_model=create_model,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
model_dir=model_dir,
)
# print(class_id, predicted_label)
# st()
if predicted_label != class_id:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
if trace is None:
return [{}] if per_node else {}
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
row = {
"image_id": image_id,
**map_prefix(
calc_all_overlap(
class_trace_fn(class_id).load(), trace, overlap_fn
),
"original",
),
}
# st()
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, images_per_class)
for class_id in range(0, 10)
),
chunksize=1,
out_of_order=True,
            num_gpus=num_gpus,  # use the parameter rather than a hardcoded 0.2
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
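# Pattern note: clean_overlap_ratio (like the other *_overlap_ratio helpers
# below) does no work when called; it wraps get_overlap_ratio in a
# CsvIOAction, which runs init_fn lazily and persists the resulting DataFrame
# as a CSV at `path`. A usage sketch, assuming CsvIOAction exposes the same
# save()/load() protocol as IOAction used elsewhere in this file (the my_*
# arguments are placeholders):
#
#     action = clean_overlap_ratio(
#         class_trace_fn=my_class_trace_fn,
#         select_fn=my_select_fn,
#         overlap_fn=my_overlap_fn,
#         path="store/analysis/clean_overlap.csv",
#     )
#     action.save()  # computes the traces and writes the CSV on first use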
# Compute transformed (translation, rotation and scale)
# mnist overlap ratio between the traces of clean test images and class traces
def translation_overlap_ratio(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
per_node: bool = False,
images_per_class: int = 1,
transforms=None,
    name=None,
    num_gpus=0.2,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = abspath(MNIST_PATH)
model_dir = abspath("result/lenet/model_augmentation")
create_model = lambda: LeNet(data_format="channels_first")
graph = LeNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
            # Check the prediction on the clean, untransformed image,
            # so no transform is needed here
predicted_label = predict(
create_model=create_model,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
model_dir=model_dir,
)
# print(class_id, predicted_label)
# st()
if predicted_label != class_id:
return [{}] if per_node else {}
# Reconstruct regardless of the correctness of prediction
trace = reconstruct_trace_from_tf_brute_force(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: mnist.test(data_dir, transforms=transforms)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
if trace is None:
return [{}] if per_node else {}
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
row = calc_all_overlap(
class_trace_fn(class_id).load(), trace, overlap_fn
)
# row = {
# "image_id": image_id,
# **map_prefix(
# calc_all_overlap(
# class_trace_fn(class_id).load(), trace, overlap_fn
# ),
# "original",
# ),
# }
# st()
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
# for image_id in range(0, images_per_class)
for image_id in range(0, images_per_class)
for class_id in range(0, 10)
),
chunksize=1,
out_of_order=True,
num_gpus=num_gpus,
)
traces = [trace for trace in traces if len(trace) != 0]
acc = len(traces) / (images_per_class * 10)
traces = pd.DataFrame(traces).mean()
traces.loc['accuracy'] = acc
traces = traces.to_frame()
traces.columns = [name]
return traces
return CsvIOAction(path, init_fn=get_overlap_ratio)
# Compute overlap ratios between the traces of attacked images and class traces
def attack_overlap_ratio(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
per_node: bool = False,
images_per_class: int = 1,
num_gpus: float = 0.2,
model_dir = "result/lenet/model_augmentation",
transforms = None,
transform_name = "noop",
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
nonlocal model_dir
mode.check(False)
data_dir = abspath(MNIST_PATH)
model_dir = abspath(model_dir)
ckpt_dir = f"{model_dir}/ckpts"
create_model = lambda: LeNet(data_format="channels_first")
graph = LeNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook,
create_model=create_model, graph=graph
)
predicted_label = predict(
create_model=create_model,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
model_dir=ckpt_dir,
)
if predicted_label != class_id:
return [{}] if per_node else {}
adversarial_example = lenet_mnist_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
# model_dir not ckpt_dir
model_dir=model_dir,
                transforms=transforms,
                transform_name=transform_name,
                mode="test",
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=lambda: tf.data.Dataset.from_tensors(
mnist.normalize(adversarial_example)
),
model_dir=ckpt_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: mnist.test(data_dir, transforms=transforms)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
select_fn=select_fn,
model_dir=ckpt_dir,
per_channel=per_channel,
)[0]
if trace is None:
return [{}] if per_node else {}
adversarial_trace = reconstruct_trace_from_tf_brute_force(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
mnist.normalize(adversarial_example)
),
select_fn=select_fn,
model_dir=ckpt_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
row = {
"image_id": image_id,
"class_id": class_id,
**map_prefix(
calc_all_overlap(
class_trace_fn(class_id).load(), trace, overlap_fn
),
"original",
),
**map_prefix(
calc_all_overlap(
class_trace_fn(adversarial_label).load(),
adversarial_trace,
overlap_fn,
),
"adversarial",
),
}
# row = calc_all_overlap(
# class_trace_fn(class_id).load(), adversarial_trace, overlap_fn
# )
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, images_per_class)
for class_id in range(0, 10)
),
# ((-1, image_id) for image_id in range(mnist_info.test().size)),
chunksize=1,
out_of_order=True,
num_gpus=num_gpus,
)
traces = [trace for trace in traces if len(trace) != 0]
# acc = len(traces) / (images_per_class * 10)
# traces = pd.DataFrame(traces).mean()
# traces.loc['clean_accuracy'] = acc
# traces = traces.to_frame()
# traces.columns = [attack_name]
# return traces
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def lenet_mnist_example(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
model_dir: str,
    mode: str,
transform_name: str = "noop",
transforms: Transforms = None,
**kwargs,
) -> IOAction[np.ndarray]:
def get_example() -> np.ndarray:
data_dir = abspath(MNIST_PATH)
ckpt_dir = f"{model_dir}/ckpts"
ckpt_dir = abspath(ckpt_dir)
create_model = lambda: LeNet(data_format="channels_first")
if mode == "test":
dataset = mnist.test
elif mode == "train":
dataset = mnist.train
else:
raise RuntimeError("Dataset invalid")
input = dataset(data_dir,
normed=False,
transforms=transforms,
)
# st()
# input = input.filter(lambda image, label: tf.equal(tf.convert_to_tensor(class_id, dtype=tf.int32), label))
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: dataset(data_dir,
normed=False,
transforms=transforms,
)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=ckpt_dir,
**kwargs,
)
return adversarial_example
name = f"{attack_name}_{transform_name}"
result_dir = f"{model_dir}/attack/{mode}/{name}/{class_id}"
path = os.path.join(result_dir, f"{image_id}.pkl")
return IOAction(path, init_fn=get_example, cache=True, compress=True)
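# lenet_mnist_example returns an IOAction rather than an array: get_example
# runs only when the action is materialized, and the result is cached
# (compressed) under {model_dir}/attack/{mode}/{name}/{class_id}/{image_id}.pkl.
# Code elsewhere in this module calls .save() to generate and .load() to read
# back (which may return None if generation failed). A usage sketch, with
# placeholder attack/generate functions:
#
#     action = lenet_mnist_example(
#         attack_name="fgsm",
#         attack_fn=my_attack_fn,
#         generate_adversarial_fn=my_generate_adversarial_fn,
#         class_id=3,
#         image_id=0,
#         model_dir="result/lenet/model_augmentation",
#         mode="test",
#     )
#     action.save()              # generate and cache the adversarial example
#     example = action.load()    # np.ndarray, or None if generation failed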
def resnet18_cifar10_example(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
model_dir: str,
    dataset_mode: str,
transform_name: str = "noop",
transforms: Transforms = None,
**kwargs,
) -> IOAction[np.ndarray]:
def get_one_input_from_dataset(dataset):
input = (dataset
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1)
.make_one_shot_iterator()
.get_next()[0]
)
return input
def get_example() -> np.ndarray:
data_dir = abspath(CIFAR10_PATH)
ckpt_dir = f"{model_dir}/ckpts"
ckpt_dir = abspath(ckpt_dir)
# create_model = lambda: LeNet(data_format="channels_first")
create_model = lambda: partial(
ResNet18Cifar10(),
            training=False,
)
from dataset.cifar10_main import input_fn_for_adversarial_examples
# dataset = input_fn_for_adversarial_examples(
# is_training= False,
# data_dir=data_dir,
# num_parallel_batches=1,
# is_shuffle=False,
# transform_fn=None,
# )
# adversarial_example = generate_adversarial_fn(
# label=class_id,
# create_model=create_model,
# input_fn=lambda: get_one_input_from_dataset(
# dataset
# ),
# attack_fn=attack_fn,
# model_dir=ckpt_dir,
# **kwargs,
# )
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: (
input_fn_for_adversarial_examples(
                    is_training=False,
data_dir=data_dir,
num_parallel_batches=1,
is_shuffle=False,
transform_fn=None,
)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1)
.make_one_shot_iterator()
.get_next()[0]
),
attack_fn=attack_fn,
model_dir=ckpt_dir,
**kwargs,
)
return adversarial_example
name = f"{attack_name}_{transform_name}"
result_dir = f"{model_dir}/attack/{dataset_mode}/{name}/{class_id}"
path = os.path.join(result_dir, f"{image_id}.pkl")
return IOAction(path, init_fn=get_example, cache=True, compress=True)
def resnet10_cifar10_example(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
model_dir: str,
    dataset_mode: str,
transform_name: str = "noop",
transforms: Transforms = None,
**kwargs,
) -> IOAction[np.ndarray]:
def get_one_input_from_dataset(dataset):
input = (dataset
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1)
.make_one_shot_iterator()
.get_next()[0]
)
return input
def get_example() -> np.ndarray:
data_dir = abspath(CIFAR10_PATH)
ckpt_dir = f"{model_dir}/ckpts"
ckpt_dir = abspath(ckpt_dir)
# create_model = lambda: LeNet(data_format="channels_first")
create_model = lambda: partial(
ResNet10Cifar10(),
            training=False,
)
from dataset.cifar10_main import input_fn_for_adversarial_examples
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: (
input_fn_for_adversarial_examples(
                    is_training=(dataset_mode == "train"),
data_dir=data_dir,
num_parallel_batches=1,
is_shuffle=False,
transform_fn=None,
)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1)
.make_one_shot_iterator()
.get_next()[0]
),
attack_fn=attack_fn,
model_dir=ckpt_dir,
**kwargs,
)
return adversarial_example
name = f"{attack_name}"
result_dir = f"{model_dir}/attack/{dataset_mode}/{name}/{class_id}"
path = os.path.join(result_dir, f"{image_id}.pkl")
return IOAction(path, init_fn=get_example, cache=True, compress=True)
def adversarial_example_image(
example_io: IOAction[np.ndarray], cache: bool = True
) -> IOAction[np.ndarray]:
def get_example() -> np.ndarray:
example = example_io.load()
if example is None:
return None
return (np.squeeze(example, axis=0) * 255).astype(np.uint8)
path = example_io.path.replace(".pkl", ".png")
return ImageIOAction(path, init_fn=get_example, cache=cache)
def generate_examples(
example_fn: Callable[..., IOAction[np.ndarray]],
class_ids: Iterable[int],
image_ids: Iterable[int],
attack_name: str,
transform_name: str = "noop",
    transforms=None,
cache: bool = True,
num_gpus=0.2,
**kwargs,
):
def generate_examples_fn(
class_id: int, image_id: int
) -> Union[Tuple[int, int], Tuple[int, int, str]]:
try:
class_id = int(class_id)
image_id = int(image_id)
example_io = example_fn(
attack_name=attack_name,
class_id=class_id,
image_id=image_id,
cache=cache,
                transforms=transforms,
                transform_name=transform_name,
**kwargs,
)
example_io.save()
adversarial_example_image(example_io, cache=cache).save()
return class_id, image_id
except Exception:
return class_id, image_id, traceback.format_exc()
name = f"{attack_name}_{transform_name}"
print(f"begin {name}, num_gpu={num_gpus}")
if len(image_ids) > 99:
chunksize = 4
else:
chunksize = 1
results = ray_iter(
generate_examples_fn,
[(class_id, image_id) for image_id in image_ids for class_id in class_ids],
chunksize=chunksize,
out_of_order=True,
num_gpus=num_gpus,
# huge_task=True,
)
for result in results:
if len(result) == 3:
class_id, image_id, tb = result
print(f"## raise exception from class {class_id}, image {image_id}:")
print(tb)
else:
class_id, image_id = result
# print(f"finish class {class_id} image {image_id}")
print(f"finish {name}")
def get_overlay_summary(
overlap_ratios: pd.DataFrame, trace_key: str, threshold=1
) -> Dict[str, int]:
condition_positive = len(overlap_ratios)
if condition_positive == 0:
return {}
original_key = f"original.{trace_key}"
false_positive = np.count_nonzero(overlap_ratios[original_key] < threshold)
adversarial_key = f"adversarial.{trace_key}"
true_positive = np.count_nonzero(overlap_ratios[adversarial_key] < threshold)
predicted_condition_positive = true_positive + false_positive
recall = (true_positive / condition_positive) if condition_positive != 0 else 0
precision = (
(true_positive / predicted_condition_positive)
if predicted_condition_positive != 0
else 0
)
f1 = (2 / ((1 / recall) + (1 / precision))) if recall != 0 and precision != 0 else 0
return dict(
threshold=threshold,
condition_positive=condition_positive,
# predicted_condition_positive=predicted_condition_positive,
original_is_higher=np.count_nonzero(
(overlap_ratios[original_key] - overlap_ratios[adversarial_key]) > 0
),
# adversarial_is_higher=np.count_nonzero(
# (overlap_ratios[adversarial_key] - overlap_ratios[original_key]) > 0),
true_positive=true_positive,
false_positive=false_positive,
recall=recall,
precision=precision,
f1=f1,
)
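# Worked example of get_overlay_summary: with threshold=1 and two rows where
# original.<key> = [1.0, 0.8] and adversarial.<key> = [0.7, 0.6],
# condition_positive = 2, false_positive = 1 (one original ratio < 1) and
# true_positive = 2 (both adversarial ratios < 1), so recall = 2/2 = 1.0,
# precision = 2/3, and f1 = 2 / (1/1.0 + 1/(2/3)) = 0.8. Assuming
# TraceKey.EDGE is the string "edge", this can be checked directly:
#
#     import pandas as pd
#     df = pd.DataFrame({
#         "original.edge": [1.0, 0.8],
#         "adversarial.edge": [0.7, 0.6],
#     })
#     summary = get_overlay_summary(df, trace_key="edge", threshold=1)
#     assert summary["recall"] == 1.0  # summary["f1"] is approximately 0.8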
def overlap_ratio(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
per_node: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = abspath("/home/yxqiu/data/mnist/raw")
model_dir = abspath("tf/lenet/model_early")
create_model = lambda: LeNet(data_format="channels_first")
graph = LeNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
predicted_label = predict(
create_model=create_model,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
model_dir=model_dir,
)
if predicted_label != class_id:
return [{}] if per_node else {}
# adversarial_example = generate_adversarial_fn(
# label=class_id,
# create_model=create_model,
# input_fn=lambda: mnist.test(data_dir, normed=False)
# .filter(lambda image, label:
# tf.equal(
# tf.convert_to_tensor(class_id, dtype=tf.int32),
# label))
# .skip(image_id).take(1).batch(1)
# .make_one_shot_iterator().get_next()[0],
# attack_fn=attack_fn,
# model_dir=model_dir,
# **kwargs,
# )
            adversarial_example = lenet_mnist_example(
                attack_name=attack_name,
                attack_fn=attack_fn,
                generate_adversarial_fn=generate_adversarial_fn,
                class_id=class_id,
                image_id=image_id,
                # model_dir and mode are required by lenet_mnist_example but were
                # missing from this call; the values below assume the model_dir
                # used in the surrounding function and the test split.
                model_dir="tf/lenet/model_early",
                mode="test",
            ).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=lambda: tf.data.Dataset.from_tensors(
mnist.normalize(adversarial_example)
),
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
# class_id = mnist_info.test().label(image_id)
#
# if class_id != trace.attrs[GraphAttrKey.PREDICT]:
# return [{}] if per_node else {}
if trace is None:
return [{}] if per_node else {}
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
mnist.normalize(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
if class_id != adversarial_label:
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
row = {
"image_id": image_id,
**map_prefix(
calc_all_overlap(
class_trace_fn(class_id).load(), trace, overlap_fn
),
"original",
),
**map_prefix(
calc_all_overlap(
class_trace_fn(adversarial_label).load(),
adversarial_trace,
overlap_fn,
),
"adversarial",
),
}
return row
else:
return {}
# traces = ray_iter(get_row, (image_id for image_id in range(300, 350)),
# traces = ray_iter(get_row, (image_id for image_id in range(131, 300)),
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 100)
for class_id in range(0, 10)
),
# ((-1, image_id) for image_id in range(mnist_info.test().size)),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
# chunksize=1, out_of_order=False, num_gpus=1)
# count = 0
# result = []
# for trace in traces:
# result.append(trace)
# print(count)
# count += 1
# traces = [trace for trace in result if len(trace) != 0]
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def resnet_50_imagenet_overlap_ratio(
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/resnet-50-v2/model")
create_model = lambda: ResNet50()
graph = ResNet50.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
trace = reconstruct_class_trace_from_tf(
class_id,
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id),
model_dir=model_dir,
select_fn=select_fn,
per_channel=per_channel,
)
if trace is None:
return {}
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: imagenet_raw.test(
data_dir, class_id, image_id, normed=False
)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=model_dir,
**kwargs,
)
if adversarial_example is None:
return {}
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
if class_id != adversarial_label:
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
class_trace = class_trace_fn(class_id).load()
adversarial_class_trace = class_trace_fn(adversarial_label).load()
trace = compact_edge(trace, graph, per_channel=per_channel)
adversarial_trace = compact_edge(
adversarial_trace, graph, per_channel=per_channel
)
if per_node:
rows = []
for node_name in class_trace.nodes:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"node_name": node_name,
**map_prefix(
calc_all_overlap(
class_trace, trace, overlap_fn, node_name
),
"original",
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace,
adversarial_trace,
overlap_fn,
node_name,
),
"adversarial",
),
}
if (
row[f"original.{TraceKey.WEIGHT}"] is not None
or row[f"original.{TraceKey.EDGE}"] is not None
):
rows.append(row)
return rows
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace, adversarial_trace, overlap_fn
),
"adversarial",
),
}
print(row)
return row
else:
return [{}] if per_node else {}
# traces = ray_iter(get_row, (image_id for image_id in range(300, 350)),
# traces = ray_iter(get_row, (image_id for image_id in range(131, 300)),
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
# for image_id in range(0, 50)
for class_id in range(1, 1001)
),
# for class_id in range(1, 2)),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
# chunksize=1, out_of_order=False, num_gpus=1)
# count = 0
# result = []
# for trace in traces:
# result.append(trace)
# print(count)
# count += 1
# traces = [trace for trace in result if len(trace) != 0]
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def resnet_50_imagenet_overlap_ratio_top5(
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/resnet-50-v2/model")
create_model = lambda: ResNet50()
graph = ResNet50.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id),
select_fn=select_fn,
model_dir=model_dir,
top_5=True,
per_channel=per_channel,
)[0]
if trace is None:
return {}
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: imagenet_raw.test(
data_dir, class_id, image_id, normed=False
)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=model_dir,
**kwargs,
)
if adversarial_example is None:
return {}
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
top_5=True,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
if adversarial_label not in label_top5:
# if np.intersect1d(label_top5, adversarial_label_top5).size == 0:
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
class_trace = merge_compact_trace(
*[class_trace_fn(label).load() for label in label_top5]
)
adversarial_class_trace = merge_compact_trace(
*[class_trace_fn(label).load() for label in adversarial_label_top5]
)
trace = compact_edge(trace, graph, per_channel=per_channel)
adversarial_trace = compact_edge(
adversarial_trace, graph, per_channel=per_channel
)
if per_node:
rows = []
for node_name in class_trace.nodes:
row = {
"image_id": image_id,
"node_name": node_name,
"label": class_id,
"adversarial_label": adversarial_label,
**map_prefix(
calc_all_overlap(
class_trace, trace, overlap_fn, node_name
),
"original",
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace,
adversarial_trace,
overlap_fn,
node_name,
),
"adversarial",
),
}
if (
row[f"original.{TraceKey.WEIGHT}"] is not None
or row[f"original.{TraceKey.EDGE}"] is not None
):
rows.append(row)
return rows
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace, adversarial_trace, overlap_fn
),
"adversarial",
),
}
print(row)
return row
else:
return [{}] if per_node else {}
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(1, 1001)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def resnet_50_imagenet_overlap_ratio_error(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/resnet-50-v2/model")
create_model = lambda: ResNet50()
graph = ResNet50.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
if class_id == trace.attrs[GraphAttrKey.PREDICT]:
return {}
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
class_trace = class_trace_fn(class_id).load()
trace = compact_edge(trace, graph, per_channel=per_channel)
row = {
"image_id": image_id,
"label": class_id,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 3)
for class_id in range(1, 1001)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def resnet_50_imagenet_overlap_ratio_rand(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
model_dir = abspath("tf/resnet-50-v2/model")
create_model = lambda: ResNet50()
graph = ResNet50.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
example = np.random.random_sample((1, 224, 224, 3)).astype(np.float32)
trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize(example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
class_trace = class_trace_fn(class_id).load()
trace = compact_edge(trace, graph, per_channel=per_channel)
row = {
"image_id": image_id,
"label": class_id,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(1, 1001)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def resnet_50_imagenet_overlap_ratio_top5_rand(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
model_dir = abspath("tf/resnet-50-v2/model")
create_model = lambda: ResNet50()
graph = ResNet50.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
example = np.random.random_sample((1, 224, 224, 3)).astype(np.float32)
trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize(example)
),
select_fn=select_fn,
model_dir=model_dir,
top_5=True,
per_channel=per_channel,
)[0]
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
class_trace = merge_compact_trace(
*[
class_trace_fn(label).load()
for label in trace.attrs[GraphAttrKey.PREDICT_TOP5]
]
)
trace = compact_edge(trace, graph, per_channel=per_channel)
row = {
"image_id": image_id,
"label": class_id,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(1, 1001)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
def alexnet_imagenet_example(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
cache: bool = True,
**kwargs,
) -> IOAction[np.ndarray]:
return imagenet_example(
model_config=ALEXNET.with_model_dir("tf/alexnet/model_import"),
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
cache=cache,
**kwargs,
)
# deprecated
def alexnet_imagenet_example_trace_old(
attack_name: str, class_id: int, image_id: int, threshold: float
) -> IOAction[AttrMap]:
def get_example() -> AttrMap:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return None
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=lambda input: arg_approx(input, threshold),
model_dir=model_dir,
)[0]
return compact_trace(trace, graph)
name = "alexnet_imagenet"
path = f"store/analysis/example_trace/{name}/threshold={threshold:.3f}/{class_id}/{image_id}.pkl"
return IOAction(path, init_fn=get_example, cache=True, compress=True)
def alexnet_imagenet_example_trace_of_target_class(
attack_name: str, class_id: int, image_id: int, threshold: float
) -> IOAction[AttrMap]:
def get_example() -> AttrMap:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return None
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=None,
generate_adversarial_fn=None,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return None
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
trace_of_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=lambda input: arg_approx(input, threshold),
model_dir=model_dir,
select_seed_fn=lambda _: np.array([adversarial_label]),
)[0]
return compact_trace(trace_of_target_class, graph)
name = "alexnet_imagenet"
path = f"store/analysis/example_trace_of_target_class/{name}/attack={attack_name}/threshold={threshold:.3f}/{class_id}/{image_id}.pkl"
return IOAction(path, init_fn=get_example, cache=True, compress=True)
def alexnet_imagenet_adversarial_example_trace(
attack_name: str, class_id: int, image_id: int, threshold: float
) -> IOAction[AttrMap]:
def get_example() -> AttrMap:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return None
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=None,
generate_adversarial_fn=None,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return None
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return None
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=lambda input: arg_approx(input, threshold),
model_dir=model_dir,
)[0]
return compact_trace(adversarial_trace, graph)
name = "alexnet_imagenet"
path = f"store/analysis/adversarial_example_trace/{name}/attack={attack_name}/threshold={threshold:.3f}/{class_id}/{image_id}.pkl"
return IOAction(path, init_fn=get_example, cache=True, compress=True)
def alexnet_imagenet_adversarial_example_trace_of_original_class(
attack_name: str, class_id: int, image_id: int, threshold: float
) -> IOAction[AttrMap]:
def get_example() -> AttrMap:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return None
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=None,
generate_adversarial_fn=None,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return None
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return None
adversarial_trace_of_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=lambda input: arg_approx(input, threshold),
model_dir=model_dir,
select_seed_fn=lambda _: np.array([class_id]),
)[0]
return compact_trace(adversarial_trace_of_original_class, graph)
name = "alexnet_imagenet"
path = f"store/analysis/adversarial_example_trace_of_original_class/{name}/attack={attack_name}/threshold={threshold:.3f}/{class_id}/{image_id}.pkl"
return IOAction(path, init_fn=get_example, cache=True, compress=True)
def generate_traces(
trace_fn: Callable[..., IOAction[AttrMap]],
attack_name: str,
class_ids: Iterable[int],
image_ids: Iterable[int],
**kwargs,
):
def generate_traces_fn(
class_id: int, image_id: int
) -> Union[Tuple[int, int], Tuple[int, int, str]]:
try:
class_id = int(class_id)
image_id = int(image_id)
trace_fn(
attack_name=attack_name, class_id=class_id, image_id=image_id, **kwargs
).save()
return class_id, image_id
except Exception:
return class_id, image_id, traceback.format_exc()
results = ray_iter(
generate_traces_fn,
[(class_id, image_id) for image_id in image_ids for class_id in class_ids],
chunksize=1,
out_of_order=True,
num_gpus=0,
huge_task=True,
)
for result in results:
if len(result) == 3:
class_id, image_id, tb = result
print(f"## raise exception from class {class_id}, image {image_id}:")
print(tb)
else:
class_id, image_id = result
print(f"finish class {class_id} image {image_id}")
def resnet_50_imagenet_example(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
cache: bool = True,
**kwargs,
) -> IOAction[np.ndarray]:
return imagenet_example(
model_config=RESNET_50,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
cache=cache,
**kwargs,
)
def vgg_16_imagenet_example(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
cache: bool = True,
**kwargs,
) -> IOAction[np.ndarray]:
return imagenet_example(
model_config=VGG_16,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
cache=cache,
**kwargs,
)
def imagenet_example(
model_config: ModelConfig,
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
cache: bool = True,
**kwargs,
) -> IOAction[np.ndarray]:
def get_example() -> np.ndarray:
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
normed=False,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=model_dir,
**kwargs,
)
return adversarial_example
name = f"{model_config.name}_imagenet"
path = f"store/example/{attack_name}/{name}/{class_id}/{image_id}.pkl"
return IOAction(path, init_fn=get_example, cache=cache, compress=True)
def alexnet_imagenet_example_stat(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
stat_name: str = None,
cache: bool = True,
**kwargs,
) -> IOAction[Dict[str, np.ndarray]]:
return imagenet_example_stat(
model_config=ALEXNET.with_model_dir("tf/alexnet/model_import"),
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
stat_name=stat_name,
cache=cache,
**kwargs,
)
def resnet_50_imagenet_example_stat(
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
stat_name: str = None,
cache: bool = True,
**kwargs,
) -> IOAction[Dict[str, np.ndarray]]:
return imagenet_example_stat(
model_config=RESNET_50,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
stat_name=stat_name,
cache=cache,
**kwargs,
)
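# imagenet_example_trace reconstructs a per-example trace:
# 1. predict on the clean image and bail out if the model misclassifies it;
# 2. for attack_name == "original", trace the clean input directly;
# 3. otherwise load the cached adversarial example, bail out unless the
#    attack actually flipped the prediction, and trace the adversarial input.
# The trace is compacted against the graph before being cached as an IOAction.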
def imagenet_example_trace(
model_config: ModelConfig,
attack_name,
attack_fn,
generate_adversarial_fn,
trace_fn,
class_id: int,
image_id: int,
threshold: float,
per_channel: bool = False,
cache: bool = True,
train: bool = False,
**kwargs,
) -> IOAction[AttrMap]:
def get_example_trace() -> AttrMap:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: (imagenet_raw.train if train else imagenet_raw.test)(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return None
if attack_name == "original":
trace = reconstruct_trace_from_tf_v2(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
trace_fn=partial(
trace_fn, select_fn=lambda input: arg_approx(input, threshold)
),
model_dir=model_dir,
)[0]
trace = compact_trace(trace, graph, per_channel=per_channel)
return trace
adversarial_example = imagenet_example(
model_config=model_config,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return None
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
model_config.normalize_fn(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return None
adversarial_trace = reconstruct_trace_from_tf_v2(
model_fn=model_fn,
input_fn=adversarial_input_fn,
trace_fn=partial(
trace_fn, select_fn=lambda input: arg_approx(input, threshold)
),
model_dir=model_dir,
)[0]
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
return adversarial_trace
name = f"{model_config.name}_imagenet"
if train:
name = f"{name}_train"
if per_channel:
trace_name = "example_channel_trace"
else:
trace_name = "example_trace"
path = f"store/{trace_name}/approx_{threshold:.3f}/{attack_name}/{name}/{class_id}/{image_id}.pkl"
return IOAction(path, init_fn=get_example_trace, cache=cache, compress=True)
# alexnet_imagenet_example_trace = partial(
# imagenet_example_trace,
# model_config=ALEXNET.with_model_dir("tf/alexnet/model_import"),
# )
#
# resnet_50_imagenet_example_trace = partial(
# imagenet_example_trace, model_config=RESNET_50
# )
#
# vgg_16_imagenet_example_trace = partial(imagenet_example_trace, model_config=VGG_16)
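# imagenet_example_stat mirrors imagenet_example_trace but collects per-layer
# statistics (e.g. stat_name="avg") via reconstruct_stat_from_tf instead of
# reconstructing a structural trace.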
def imagenet_example_stat(
model_config: ModelConfig,
attack_name,
attack_fn,
generate_adversarial_fn,
class_id: int,
image_id: int,
stat_name: str = "avg",
cache: bool = True,
**kwargs,
) -> IOAction[Dict[str, np.ndarray]]:
def get_example_stat() -> Dict[str, np.ndarray]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
# Note: statistics are collected on the training split, not the test split.
input_fn = lambda: imagenet_raw.train(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
# Unlike imagenet_example_trace, misclassified clean examples are not
# skipped here; predicted_label is still used for the adversarial check below.
if attack_name == "original":
trace = reconstruct_stat_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
model_dir=model_dir,
stat_name=stat_name,
)[0]
return trace
adversarial_example = imagenet_example(
model_config=model_config,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return None
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
model_config.normalize_fn(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return None
adversarial_trace = reconstruct_stat_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
model_dir=model_dir,
stat_name=stat_name,
)[0]
return adversarial_trace
name = f"{model_config.name}_imagenet"
trace_name = "example_stat"
path = (
f"store/{trace_name}/{stat_name}/{attack_name}/{name}/{class_id}/{image_id}.pkl"
)
return IOAction(path, init_fn=get_example_stat, cache=cache, compress=True)
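# generate_example_traces drives example_trace_fn over all requested
# (class_id, image_id) pairs with ray_iter, mirroring generate_traces above.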
def generate_example_traces(
example_trace_fn: Callable[..., IOAction[AttrMap]],
class_ids: Iterable[int],
image_ids: Iterable[int],
attack_name: str,
attack_fn,
generate_adversarial_fn,
threshold: float,
per_channel: bool = False,
cache: bool = True,
select_seed_fn: Callable[[np.ndarray], np.ndarray] = None,
entry_points: List[int] = None,
train: bool = False,
**kwargs,
):
def generate_examples_fn(
class_id: int, image_id: int
) -> Union[Tuple[int, int], Tuple[int, int, str]]:
try:
class_id = int(class_id)
image_id = int(image_id)
example_trace_io = example_trace_fn(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
threshold=threshold,
per_channel=per_channel,
cache=cache,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
train=train,
**kwargs,
)
example_trace_io.save()
return class_id, image_id
except Exception:
# Report the failure instead of aborting the whole sweep; the result loop
# below prints the returned traceback.
return class_id, image_id, traceback.format_exc()
print(f"begin {attack_name}")
results = ray_iter(
generate_examples_fn,
[(class_id, image_id) for image_id in image_ids for class_id in class_ids],
chunksize=1,
out_of_order=True,
num_gpus=0,
huge_task=True,
)
for result in results:
if len(result) == 3:
class_id, image_id, tb = result
print(f"## raise exception from class {class_id}, image {image_id}:")
print(tb)
else:
class_id, image_id = result
# print(f"finish class {class_id} image {image_id}")
print(f"finish {attack_name}")
def generate_example_stats(
example_trace_fn: Callable[..., IOAction[Dict[str, np.ndarray]]],
class_ids: Iterable[int],
image_ids: Iterable[int],
attack_name: str,
attack_fn,
generate_adversarial_fn,
stat_name: str = None,
cache: bool = True,
**kwargs,
):
def generate_examples_fn(
class_id: int, image_id: int
) -> Union[Tuple[int, int], Tuple[int, int, str]]:
try:
class_id = int(class_id)
image_id = int(image_id)
example_trace_io = example_trace_fn(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
stat_name=stat_name,
cache=cache,
**kwargs,
)
example_trace_io.save()
return class_id, image_id
except Exception:
# Report the failure instead of aborting the whole sweep; the result loop
# below prints the returned traceback.
return class_id, image_id, traceback.format_exc()
print(f"begin {attack_name}")
results = ray_iter(
generate_examples_fn,
[(class_id, image_id) for image_id in image_ids for class_id in class_ids],
chunksize=1,
out_of_order=True,
num_gpus=0,
huge_task=True,
)
for result in results:
if len(result) == 3:
class_id, image_id, tb = result
print(f"## raise exception from class {class_id}, image {image_id}:")
print(tb)
else:
class_id, image_id = result
# print(f"finish class {class_id} image {image_id}")
print(f"finish {attack_name}")
def alexnet_imagenet_overlap_ratio(
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
trace = reconstruct_class_trace_from_tf(
class_id,
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
),
model_dir=model_dir,
select_fn=select_fn,
per_channel=per_channel,
)
if trace is None:
return [{}] if per_node else {}
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
normed=False,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=model_dir,
**kwargs,
)
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
if class_id != adversarial_label:
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
class_trace = class_trace_fn(class_id).load()
adversarial_class_trace = class_trace_fn(adversarial_label).load()
trace = compact_edge(trace, graph, per_channel=per_channel)
adversarial_trace = compact_edge(
adversarial_trace, graph, per_channel=per_channel
)
if per_node:
rows = []
for node_name in class_trace.nodes:
row = {
"image_id": image_id,
"node_name": node_name,
"label": class_id,
"adversarial_label": adversarial_label,
**map_prefix(
calc_all_overlap(
class_trace, trace, overlap_fn, node_name
),
"original",
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace,
adversarial_trace,
overlap_fn,
node_name,
),
"adversarial",
),
}
if (
f"original.{TraceKey.WEIGHT}" in row
and row[f"original.{TraceKey.WEIGHT}"] is not None
) or (
f"original.{TraceKey.EDGE}" in row
and row[f"original.{TraceKey.EDGE}"] is not None
):
rows.append(row)
return rows
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace, adversarial_trace, overlap_fn
),
"adversarial",
),
}
print(row)
return row
else:
return [{}] if per_node else {}
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
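# get_predicted_value_contribution measures how much of the logit for
# class_id survives when the network's weights are masked down to the given
# trace (via MaskWeightWithTraceHook), i.e. that trace's contribution to the
# predicted value.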
def get_predicted_value_contribution(
trace: AttrMap, graph: Graph, class_id: int, create_model, input_fn, model_dir
) -> float:
# print(calc_density_compact(trace, TraceKey.EDGE))
return get_predicted_value(
class_id=class_id,
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
prediction_hooks=[MaskWeightWithTraceHook(graph, trace)],
)
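# The *_top5_diff family contrasts, for each of the top-5 predicted classes,
# the example trace against that class's trace with the other four classes'
# traces subtracted out, recording overlap sizes and predicted-value
# contributions (pvc_*) for each region of the decomposition.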
def alexnet_imagenet_overlap_ratio_top5_diff(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# The adversarial example is loaded from the cache built by
# alexnet_imagenet_example instead of being regenerated with
# generate_adversarial_fn here.
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
with tf.Session() as sess:
original_example = sess.run(
imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
normed=False,
)
.make_one_shot_iterator()
.get_next()[0]
)
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
def get_overlap(
base_class_id: int, class_ids: List[int], trace: AttrMap, input_fn
):
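# Decompose the example trace by membership in the base class trace
# ("class") and in the merged traces of the other top-k classes ("rest"):
#   in_class_in_rest         = trace & class & rest
#   in_class_not_in_rest     = (trace & class) - rest
#   not_in_class_in_rest     = (trace & rest) - class
#   not_in_class_not_in_rest = trace - class - rest
# "share" drops the example-only remainder; "specific" drops the part shared
# with rest but not with the class.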
rest_class_ids = class_ids.copy()
rest_class_ids.remove(base_class_id)
rest_class_trace = merge_compact_trace(
*[get_class_trace(class_id) for class_id in rest_class_ids]
)
class_trace = get_class_trace(base_class_id)
class_specific_trace = merge_compact_trace_diff(
class_trace, rest_class_trace
)
example_specific_trace = merge_compact_trace_diff(
trace, rest_class_trace
)
example_trace_in_class_in_rest = merge_compact_trace_intersect(
class_trace, trace, rest_class_trace
)
example_trace_in_class_not_in_rest = merge_compact_trace_intersect(
class_specific_trace, example_specific_trace
)
example_trace_not_in_class_in_rest = merge_compact_trace_diff(
merge_compact_trace_intersect(trace, rest_class_trace), class_trace
)
example_trace_not_in_class_not_in_rest = merge_compact_trace_diff(
example_specific_trace, class_specific_trace
)
example_trace_share = merge_compact_trace_diff(
trace, example_trace_not_in_class_not_in_rest
)
example_trace_specific = merge_compact_trace_diff(
trace, example_trace_not_in_class_in_rest
)
predicted_value_contributions = {
key: get_predicted_value_contribution(
current_trace,
graph=graph,
class_id=base_class_id,
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
)
for key, current_trace in [
("pvc_total", trace),
("pvc_share", example_trace_share),
("pvc_specific", example_trace_specific),
("pvc_in_class_in_rest", example_trace_in_class_in_rest),
(
"pvc_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
# ("pvc_not_in_class_in_rest", example_trace_not_in_class_in_rest),
# ("pvc_not_in_class_not_in_rest", example_trace_not_in_class_not_in_rest),
]
}
overlap_sizes = {
key: calc_trace_size(current_trace, compact=True)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class_in_rest",
example_trace_in_class_in_rest,
),
(
"overlap_size_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
(
"overlap_size_not_in_class_in_rest",
example_trace_not_in_class_in_rest,
),
(
"overlap_size_not_in_class_not_in_rest",
example_trace_not_in_class_not_in_rest,
),
]
}
return {
**calc_all_overlap(
class_specific_trace,
example_specific_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
),
**predicted_value_contributions,
**overlap_sizes,
}
row = {}
for k, base_class_id in zip(range(1, topk_calc_range + 1), label_top5):
row = {
**row,
**map_prefix(
get_overlap(base_class_id, label_top5, trace, input_fn),
f"original.top{k}",
),
}
for k, base_class_id in zip(
range(1, topk_calc_range + 1), adversarial_label_top5
):
row = {
**row,
**map_prefix(
get_overlap(
base_class_id,
adversarial_label_top5,
adversarial_trace,
adversarial_input_fn,
),
f"adversarial.top{k}",
),
}
if per_node:
raise RuntimeError("per_node mode is not supported by this metric")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
"perturbation": np.linalg.norm(
adversarial_example - original_example
)
/ original_example.size,
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
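# The _uint8 variant evaluates the adversarial example after an 8-bit image
# round trip (adversarial_example_image appears to store uint8 pixels, which
# are rescaled to [0, 1] below), approximating what survives saving the
# attack output as an image file.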
def alexnet_imagenet_overlap_ratio_top5_diff_uint8(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# The adversarial example is loaded from the cache (as a uint8 image via
# adversarial_example_image) instead of being regenerated with
# generate_adversarial_fn here.
adversarial_example = adversarial_example_image(
alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
)
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_example = (
np.expand_dims(adversarial_example, axis=0).astype(np.float32) / 255
)
with tf.Session() as sess:
original_example = sess.run(
imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
normed=False,
)
.make_one_shot_iterator()
.get_next()[0]
)
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
def get_overlap(
base_class_id: int, class_ids: List[int], trace: AttrMap, input_fn
):
rest_class_ids = class_ids.copy()
rest_class_ids.remove(base_class_id)
rest_class_trace = merge_compact_trace(
*[get_class_trace(class_id) for class_id in rest_class_ids]
)
class_trace = get_class_trace(base_class_id)
class_specific_trace = merge_compact_trace_diff(
class_trace, rest_class_trace
)
example_specific_trace = merge_compact_trace_diff(
trace, rest_class_trace
)
example_trace_in_class_in_rest = merge_compact_trace_intersect(
class_trace, trace, rest_class_trace
)
example_trace_in_class_not_in_rest = merge_compact_trace_intersect(
class_specific_trace, example_specific_trace
)
example_trace_not_in_class_in_rest = merge_compact_trace_diff(
merge_compact_trace_intersect(trace, rest_class_trace), class_trace
)
example_trace_not_in_class_not_in_rest = merge_compact_trace_diff(
example_specific_trace, class_specific_trace
)
example_trace_share = merge_compact_trace_diff(
trace, example_trace_not_in_class_not_in_rest
)
example_trace_specific = merge_compact_trace_diff(
trace, example_trace_not_in_class_in_rest
)
predicted_value_contributions = {
key: get_predicted_value_contribution(
current_trace,
graph=graph,
class_id=base_class_id,
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
)
for key, current_trace in [
("pvc_total", trace),
("pvc_share", example_trace_share),
("pvc_specific", example_trace_specific),
("pvc_in_class_in_rest", example_trace_in_class_in_rest),
(
"pvc_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
# ("pvc_not_in_class_in_rest", example_trace_not_in_class_in_rest),
# ("pvc_not_in_class_not_in_rest", example_trace_not_in_class_not_in_rest),
]
}
overlap_sizes = {
key: calc_trace_size(current_trace, compact=True)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class_in_rest",
example_trace_in_class_in_rest,
),
(
"overlap_size_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
(
"overlap_size_not_in_class_in_rest",
example_trace_not_in_class_in_rest,
),
(
"overlap_size_not_in_class_not_in_rest",
example_trace_not_in_class_not_in_rest,
),
]
}
return {
**calc_all_overlap(
class_specific_trace,
example_specific_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
),
**predicted_value_contributions,
**overlap_sizes,
}
row = {}
for k, base_class_id in zip(range(1, topk_calc_range + 1), label_top5):
row = {
**row,
**map_prefix(
get_overlap(base_class_id, label_top5, trace, input_fn),
f"original.top{k}",
),
}
for k, base_class_id in zip(
range(1, topk_calc_range + 1), adversarial_label_top5
):
row = {
**row,
**map_prefix(
get_overlap(
base_class_id,
adversarial_label_top5,
adversarial_trace,
adversarial_input_fn,
),
f"adversarial.top{k}",
),
}
if per_node:
raise RuntimeError("per_node mode is not supported by this metric")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
"perturbation": np.linalg.norm(
adversarial_example - original_example
)
/ original_example.size,
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
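# The _logit_diff variant additionally reconstructs traces seeded from the
# non-predicted class (original.target and adversarial.origin), so each
# example contributes four trace/class comparisons instead of two.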
def alexnet_imagenet_overlap_ratio_logit_diff(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# The adversarial example is loaded from the cache built by
# alexnet_imagenet_example instead of being regenerated with
# generate_adversarial_fn here.
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
def get_overlap(
base_class_id: int, class_ids: List[int], trace: AttrMap, input_fn
):
rest_class_ids = class_ids.copy()
if base_class_id in rest_class_ids:
rest_class_ids.remove(base_class_id)
rest_class_trace = merge_compact_trace(
*[get_class_trace(class_id) for class_id in rest_class_ids]
)
class_trace = get_class_trace(base_class_id)
class_specific_trace = merge_compact_trace_diff(
class_trace, rest_class_trace
)
example_specific_trace = merge_compact_trace_diff(
trace, rest_class_trace
)
example_trace_in_class_in_rest = merge_compact_trace_intersect(
class_trace, trace, rest_class_trace
)
example_trace_in_class_not_in_rest = merge_compact_trace_intersect(
class_specific_trace, example_specific_trace
)
example_trace_not_in_class_in_rest = merge_compact_trace_diff(
merge_compact_trace_intersect(trace, rest_class_trace), class_trace
)
example_trace_not_in_class_not_in_rest = merge_compact_trace_diff(
example_specific_trace, class_specific_trace
)
example_trace_in_class = merge_compact_trace_intersect(
class_trace, trace
)
example_trace_share = merge_compact_trace_diff(
trace, example_trace_not_in_class_not_in_rest
)
example_trace_specific = merge_compact_trace_diff(
trace, example_trace_not_in_class_in_rest
)
predicted_value_contributions = {
key: get_predicted_value_contribution(
current_trace,
graph=graph,
class_id=base_class_id,
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
)
for key, current_trace in [
("pvc_total", trace),
("pvc_share", example_trace_share),
("pvc_specific", example_trace_specific),
# ("pvc_in_class_in_rest", example_trace_in_class_in_rest),
("pvc_in_class", example_trace_in_class),
(
"pvc_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
# ("pvc_not_in_class_in_rest", example_trace_not_in_class_in_rest),
# ("pvc_not_in_class_not_in_rest", example_trace_not_in_class_not_in_rest),
]
}
overlap_sizes = {
key: calc_trace_size(current_trace, compact=True)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class_in_rest",
example_trace_in_class_in_rest,
),
(
"overlap_size_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
(
"overlap_size_not_in_class_in_rest",
example_trace_not_in_class_in_rest,
),
(
"overlap_size_not_in_class_not_in_rest",
example_trace_not_in_class_not_in_rest,
),
]
}
return {
**calc_all_overlap(
class_specific_trace,
example_specific_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
),
**predicted_value_contributions,
**overlap_sizes,
}
# if (class_id not in adversarial_label_top5) or (adversarial_label not in label_top5):
# return [{}] if per_node else {}
row = {}
row = {
**row,
**map_prefix(
get_overlap(class_id, label_top5, trace, input_fn),
f"original.origin",
),
}
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
select_seed_fn=lambda _: np.array([adversarial_label]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label, label_top5, trace_target_class, input_fn
),
f"original.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label,
adversarial_label_top5,
adversarial_trace,
adversarial_input_fn,
),
f"adversarial.target",
),
}
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
topk=topk_share_range,
select_seed_fn=lambda _: np.array([class_id]),
)[0]
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
class_id,
adversarial_label_top5,
adversarial_trace_original_class,
adversarial_input_fn,
),
f"adversarial.origin",
),
}
if per_node:
raise RuntimeError("per_node mode is not supported by this metric")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
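# The "ideal" metrics assume the attack's target class is known: each overlap
# is computed against exactly one contrast class (the adversarial label for
# the original example, the original label for the adversarial one), and the
# rank of each class in the other example's prediction is recorded.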
def alexnet_imagenet_ideal_metrics(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# The adversarial example is loaded from the cache built by
# alexnet_imagenet_example instead of being regenerated with
# generate_adversarial_fn here.
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
def get_overlap(
base_class_id: int, rest_class_id: int, trace: AttrMap, input_fn
):
rest_class_trace = get_class_trace(rest_class_id)
class_trace = get_class_trace(base_class_id)
class_specific_trace = merge_compact_trace_diff(
class_trace, rest_class_trace
)
example_specific_trace = merge_compact_trace_diff(
trace, rest_class_trace
)
example_trace_in_class_in_rest = merge_compact_trace_intersect(
class_trace, trace, rest_class_trace
)
example_trace_in_class_not_in_rest = merge_compact_trace_intersect(
class_specific_trace, example_specific_trace
)
example_trace_not_in_class_in_rest = merge_compact_trace_diff(
merge_compact_trace_intersect(trace, rest_class_trace), class_trace
)
example_trace_not_in_class_not_in_rest = merge_compact_trace_diff(
example_specific_trace, class_specific_trace
)
example_trace_in_class = merge_compact_trace_intersect(
class_trace, trace
)
example_trace_share = merge_compact_trace_diff(
trace, example_trace_not_in_class_not_in_rest
)
example_trace_specific = merge_compact_trace_diff(
trace, example_trace_not_in_class_in_rest
)
predicted_value_contributions = {
key: get_predicted_value_contribution(
current_trace,
graph=graph,
class_id=base_class_id,
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
)
for key, current_trace in [
("pvc_total", trace),
("pvc_share", example_trace_share),
("pvc_specific", example_trace_specific),
# ("pvc_in_class_in_rest", example_trace_in_class_in_rest),
("pvc_in_class", example_trace_in_class),
(
"pvc_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
# ("pvc_not_in_class_in_rest", example_trace_not_in_class_in_rest),
# ("pvc_not_in_class_not_in_rest", example_trace_not_in_class_not_in_rest),
]
}
overlap_sizes = {
key: calc_trace_size(current_trace, compact=True)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class_in_rest",
example_trace_in_class_in_rest,
),
(
"overlap_size_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
(
"overlap_size_not_in_class_in_rest",
example_trace_not_in_class_in_rest,
),
(
"overlap_size_not_in_class_not_in_rest",
example_trace_not_in_class_not_in_rest,
),
]
}
return {
**calc_all_overlap(
class_specific_trace,
example_specific_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
),
**predicted_value_contributions,
**overlap_sizes,
}
row = {}
row = {
**row,
**map_prefix(
get_overlap(class_id, adversarial_label, trace, input_fn),
f"original.origin",
),
}
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([adversarial_label]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label, class_id, trace_target_class, input_fn
),
f"original.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label,
class_id,
adversarial_trace,
adversarial_input_fn,
),
f"adversarial.target",
),
}
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([class_id]),
)[0]
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
class_id,
adversarial_label,
adversarial_trace_original_class,
adversarial_input_fn,
),
f"adversarial.origin",
),
}
if per_node:
raise RuntimeError("per_node mode is not supported by this metric")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
"original_class_rank_in_adversarial_example": get_rank(
class_id=class_id,
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
),
"target_class_rank_in_original_example": get_rank(
class_id=adversarial_label,
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
),
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
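# This variant compares paths through the fully connected layers rather than
# whole traces: the overlap is the number of FC-layer paths at
# path_layer_name shared between the example's trace path and the class's
# trace path.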
def alexnet_imagenet_fc_layer_path_ideal_metrics(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
path_layer_name = graph.layers()[-11]
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# The adversarial example is loaded from the cache built by
# alexnet_imagenet_example instead of being regenerated with
# generate_adversarial_fn here.
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([adversarial_label]),
)[0]
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([class_id]),
)[0]
trace = compact_trace(trace, graph, per_channel=per_channel)
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
class_trace_paths = {}
def get_class_trace_path(class_id: int) -> AttrMap:
if class_id not in class_trace_paths:
class_trace = get_class_trace(class_id)
class_trace_paths[class_id] = get_trace_path_in_fc_layers(
graph, class_trace, compact=True
)
return class_trace_paths[class_id]
def get_overlap(base_class_id: int, trace: AttrMap):
class_trace = get_class_trace(base_class_id)
example_trace_path = get_trace_path_in_fc_layers(
graph, trace, compact=True
)
trace_path_intersection = get_trace_path_intersection_in_fc_layers(
trace, class_trace, graph=graph, compact=True
)
return {
"overlap_size": calc_trace_path_num(
trace_path_intersection, path_layer_name
),
"trace_path_size": calc_trace_path_num(
example_trace_path, path_layer_name
),
"class_trace_path_size": calc_trace_path_num(
get_class_trace_path(base_class_id), path_layer_name
),
}
row = {}
row = {
**row,
**map_prefix(get_overlap(class_id, trace), f"original.origin"),
}
row = {
**row,
**map_prefix(
get_overlap(adversarial_label, adversarial_trace),
f"adversarial.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(adversarial_label, trace_target_class),
f"original.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(class_id, adversarial_trace_original_class),
f"adversarial.origin",
),
}
if per_node:
raise RuntimeError("per_node mode is not supported by this metric")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
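# Per-layer refinement of alexnet_imagenet_ideal_metrics: overlap sizes are
# reported for every layer (via calc_trace_size_per_layer), and the
# "shared.*" rows measure the intersection of the two seeded traces.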
def alexnet_imagenet_ideal_metrics_per_layer(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# The adversarial example is loaded from the cache built by
# alexnet_imagenet_example instead of being regenerated with
# generate_adversarial_fn here.
adversarial_example = alexnet_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
def map_prefix(map: Dict[str, Any], prefix: str) -> Dict[str, Any]:
return {f"{prefix}.{key}": value for key, value in map.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
def get_overlap(
base_class_id: int, rest_class_id: int, trace: AttrMap, input_fn
):
rest_class_trace = get_class_trace(rest_class_id)
class_trace = get_class_trace(base_class_id)
class_specific_trace = merge_compact_trace_diff(
class_trace, rest_class_trace
)
example_specific_trace = merge_compact_trace_diff(
trace, rest_class_trace
)
example_trace_in_class_in_rest = merge_compact_trace_intersect(
class_trace, trace, rest_class_trace
)
example_trace_in_class_not_in_rest = merge_compact_trace_intersect(
class_specific_trace, example_specific_trace
)
example_trace_not_in_class_in_rest = merge_compact_trace_diff(
merge_compact_trace_intersect(trace, rest_class_trace), class_trace
)
example_trace_not_in_class_not_in_rest = merge_compact_trace_diff(
example_specific_trace, class_specific_trace
)
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace, layer_name, compact=True
)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class_in_rest",
example_trace_in_class_in_rest,
),
(
"overlap_size_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
(
"overlap_size_not_in_class_in_rest",
example_trace_not_in_class_in_rest,
),
(
"overlap_size_not_in_class_not_in_rest",
example_trace_not_in_class_not_in_rest,
),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
return {
**calc_all_overlap(
class_specific_trace,
example_specific_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
),
**overlap_sizes,
}
row = {}
row = {
**row,
**map_prefix(
get_overlap(class_id, adversarial_label, trace, input_fn),
f"original.origin",
),
}
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([adversarial_label]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label, class_id, trace_target_class, input_fn
),
f"original.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label,
class_id,
adversarial_trace,
adversarial_input_fn,
),
f"adversarial.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label,
class_id,
merge_compact_trace_intersect(
trace_target_class, adversarial_trace
),
adversarial_input_fn,
),
f"shared.target",
),
}
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([class_id]),
)[0]
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
class_id,
adversarial_label,
adversarial_trace_original_class,
adversarial_input_fn,
),
f"adversarial.origin",
),
}
row = {
**row,
**map_prefix(
get_overlap(
class_id,
adversarial_label,
merge_compact_trace_intersect(
adversarial_trace_original_class, trace
),
adversarial_input_fn,
),
f"shared.origin",
),
}
if per_node:
raise RuntimeError("per_node mode is not supported by this metric")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
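# Dispatches to the per-rank implementation when a rank is given, otherwise
# to the v2 implementation; both receive the AlexNet model configuration.
# Note that rank is tested for truthiness, so rank=0 selects the v2 branch.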
def alexnet_imagenet_real_metrics_per_layer(rank: int = None, **kwargs):
return (
imagenet_real_metrics_per_layer_per_rank
if rank
else imagenet_real_metrics_per_layer_v2
)(
model_config=ALEXNET.with_model_dir("tf/alexnet/model_import"),
rank=rank,
**kwargs,
)
def imagenet_real_metrics_per_layer(
model_config: ModelConfig,
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
path: str,
select_seed_fn: Callable[[np.ndarray], np.ndarray] = None,
entry_points: List[int] = None,
per_node: bool = False,
per_channel: bool = False,
use_weight: bool = False,
support_diff: bool = True,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# adversarial_example = generate_adversarial_fn(
# label=class_id,
# create_model=create_model,
# input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id, normed=False,
# class_from_zero=model_config.class_from_zero,
# preprocessing_fn=model_config.preprocessing_fn)
# .make_one_shot_iterator().get_next()[0],
# attack_fn=attack_fn,
# model_dir=model_dir,
# **kwargs,
# )
adversarial_example = imagenet_example(
model_config=model_config,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
model_config.normalize_fn(adversarial_example)
)
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
# if class_id not in class_traces:
# class_traces[class_id] = class_trace_fn(class_id).load()
# return class_traces[class_id]
return class_trace_fn(class_id).load()
def get_overlap(base_class_id: int, trace: AttrMap):
class_trace = get_class_trace(base_class_id)
example_trace_in_class = merge_compact_trace_intersect(
class_trace, trace
)
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace,
layer_name,
compact=True,
key=TraceKey.WEIGHT
if use_weight
else TraceKey.EDGE,
)
for key, current_trace in [
("overlap_size_total", trace),
("overlap_size_in_class", example_trace_in_class),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
return overlap_sizes
row = {}
row = {
**row,
**map_prefix(get_overlap(class_id, trace), f"original.origin"),
}
row = {
**row,
**map_prefix(
get_overlap(adversarial_label, adversarial_trace),
f"adversarial.target",
),
}
if support_diff:
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([label_top5[1]]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(label_top5[1], trace_target_class),
f"original.target",
),
}
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([adversarial_label_top5[1]]),
)[0]
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label_top5[1], adversarial_trace_original_class
),
f"adversarial.origin",
),
}
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
**row,
}
print(row)
return row
images = (
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
)
images = map(
lambda class_with_image: (
class_with_image[0]
if model_config.class_from_zero
else class_with_image[0] + 1,
class_with_image[1],
),
images,
)
traces = ray_iter(get_row, images, chunksize=1, out_of_order=True, num_gpus=0)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
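# Variant of imagenet_real_metrics_per_layer that loads precomputed example
# traces (imagenet_example_trace, filtered by `threshold`) for the clean and
# adversarial inputs instead of reconstructing them; only the support_diff
# traces seeded with the runner-up label are still reconstructed on the fly.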
def imagenet_real_metrics_per_layer_v2(
model_config: ModelConfig,
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
path: str,
select_seed_fn: Callable[[np.ndarray], np.ndarray] = None,
entry_points: List[int] = None,
per_node: bool = False,
per_channel: bool = False,
use_weight: bool = False,
support_diff: bool = True,
threshold: float = None,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
assert threshold is not None
trace = imagenet_example_trace(
model_config=model_config,
attack_name="original",
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
threshold=threshold,
per_channel=per_channel,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
).load()
if trace is None:
return [{}] if per_node else {}
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = imagenet_example_trace(
model_config=model_config,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
threshold=threshold,
per_channel=per_channel,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
).load()
if adversarial_trace is None:
return [{}] if per_node else {}
adversarial_example = imagenet_example(
model_config=model_config,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
model_config.normalize_fn(adversarial_example)
)
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
# if class_id not in class_traces:
# class_traces[class_id] = class_trace_fn(class_id).load()
# return class_traces[class_id]
return class_trace_fn(class_id).load()
def get_overlap(base_class_id: int, trace: AttrMap):
class_trace = get_class_trace(base_class_id)
example_trace_in_class = merge_compact_trace_intersect(
class_trace, trace
)
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace,
layer_name,
compact=True,
key=TraceKey.WEIGHT
if use_weight
else TraceKey.EDGE,
)
for key, current_trace in [
("overlap_size_total", trace),
("overlap_size_in_class", example_trace_in_class),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
return overlap_sizes
row = {}
row = {
**row,
**map_prefix(get_overlap(class_id, trace), f"original.origin"),
}
row = {
**row,
**map_prefix(
get_overlap(adversarial_label, adversarial_trace),
f"adversarial.target",
),
}
if support_diff:
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([label_top5[1]]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(label_top5[1], trace_target_class),
f"original.target",
),
}
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([adversarial_label_top5[1]]),
)[0]
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label_top5[1], adversarial_trace_original_class
),
f"adversarial.origin",
),
}
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
**row,
}
print(row)
return row
images = (
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
)
images = map(
lambda class_with_image: (
class_with_image[0]
if model_config.class_from_zero
else class_with_image[0] + 1,
class_with_image[1],
),
images,
)
traces = ray_iter(get_row, images, chunksize=1, out_of_order=True, num_gpus=0)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
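# Rank-based variant: seeds trace reconstruction with the `rank`-th prediction
# (via arg_sorted_topk) instead of the argmax. For attack_name == "normal" the
# clean input is traced; otherwise the stored adversarial example is traced.
# Each row records the per-layer overlap of that trace with the class trace of
# the seeded label, using points, weights, or edges depending on the flags.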
def imagenet_real_metrics_per_layer_per_rank(
model_config: ModelConfig,
attack_name: str,
attack_fn,
generate_adversarial_fn,
trace_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
path: str,
rank: int,
use_weight: bool = False,
threshold: float = None,
use_point: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
assert threshold is not None
if attack_name == "normal":
trace = reconstruct_trace_from_tf_v2(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
trace_fn=partial(
trace_fn,
select_seed_fn=lambda output: arg_sorted_topk(output, rank)[
rank - 1 : rank
],
),
model_dir=model_dir,
rank=rank,
)[0]
else:
adversarial_example = imagenet_example(
model_config=model_config,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return {}
adversarial_input_fn = lambda: tf.data.Dataset.from_tensors(
model_config.normalize_fn(adversarial_example)
)
trace = reconstruct_trace_from_tf_v2(
model_fn=model_fn,
input_fn=adversarial_input_fn,
trace_fn=partial(
trace_fn,
select_seed_fn=lambda output: arg_sorted_topk(output, rank)[
rank - 1 : rank
],
),
model_dir=model_dir,
rank=rank,
)[0]
if trace is None:
return {}
label = trace.attrs[GraphAttrKey.SEED]
def get_class_trace(class_id: int) -> AttrMap:
return class_trace_fn(class_id).load()
def get_overlap(base_class_id: int, trace: AttrMap):
class_trace = get_class_trace(base_class_id)
example_trace_in_class = merge_compact_trace_intersect(
class_trace, trace
)
if use_point:
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace,
graph.op(graph.id(layer_name))
.output_nodes[0]
.name,
compact=True,
key=TraceKey.POINT,
)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class",
example_trace_in_class,
),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
else:
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace,
layer_name,
compact=True,
key=TraceKey.WEIGHT
if use_weight
else TraceKey.EDGE,
)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class",
example_trace_in_class,
),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
return overlap_sizes
trace = compact_trace(trace, graph, per_channel=per_channel)
row = {}
row = {**row, **get_overlap(label, trace)}
row = {"class_id": class_id, "image_id": image_id, "label": label, **row}
# print(row)
return row
images = (
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
)
images = map(
lambda class_with_image: (
class_with_image[0]
if model_config.class_from_zero
else class_with_image[0] + 1,
class_with_image[1],
),
images,
)
traces = list(
ray_iter(get_row, images, chunksize=1, out_of_order=True, num_gpus=0)
)
assert len(traces) == 1000
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces).sort_values(by=["class_id", "image_id"])
return CsvIOAction(path, init_fn=get_overlap_ratio)
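# Model-specific dispatchers mirroring alexnet_imagenet_real_metrics_per_layer.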
def resnet_50_imagenet_real_metrics_per_layer(rank: int = None, **kwargs):
return (
imagenet_real_metrics_per_layer_per_rank
if rank
else imagenet_real_metrics_per_layer_v2
)(model_config=RESNET_50, rank=rank, **kwargs)
def vgg_16_imagenet_real_metrics_per_layer(rank: int = None, **kwargs):
return (
imagenet_real_metrics_per_layer_per_rank
if rank
else imagenet_real_metrics_per_layer_v2
)(model_config=VGG_16, rank=rank, **kwargs)
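# Targeted-metric factories: each binds a fixed target_class (and model config)
# and returns a metrics_fn with the same signature as the untargeted entry
# points above.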
def alexnet_imagenet_real_metrics_per_layer_targeted(target_class: int):
def metrics_fn(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
select_seed_fn: Callable[[np.ndarray], np.ndarray] = None,
entry_points: List[int] = None,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
use_weight: bool = False,
support_diff: bool = True,
**kwargs,
):
return imagenet_real_metrics_per_layer_targeted(
target_class=target_class,
model_config=ALEXNET.with_model_dir("tf/alexnet/model_import"),
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_trace_fn=class_trace_fn,
select_fn=select_fn,
path=path,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
per_node=per_node,
per_channel=per_channel,
use_weight=use_weight,
support_diff=support_diff,
**kwargs,
)
return metrics_fn
def resnet_50_imagenet_real_metrics_per_layer_targeted(target_class: int):
def metrics_fn(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
select_seed_fn: Callable[[np.ndarray], np.ndarray] = None,
entry_points: List[int] = None,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
use_weight: bool = False,
support_diff: bool = True,
**kwargs,
):
return imagenet_real_metrics_per_layer_targeted(
target_class=target_class,
model_config=RESNET_50,
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_trace_fn=class_trace_fn,
select_fn=select_fn,
path=path,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
per_node=per_node,
per_channel=per_channel,
use_weight=use_weight,
support_diff=support_diff,
**kwargs,
)
return metrics_fn
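# Targeted variant: iterates over 40 images of `target_class` plus one image
# per remaining class (image_id == -1 triggers a scan for the first correctly
# classified image of that class), and records the overlap of the natural
# trace ("original.origin") and of a trace seeded with `target_class`
# ("original.target").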
def imagenet_real_metrics_per_layer_targeted(
target_class: int,
model_config: ModelConfig,
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
path: str,
select_seed_fn: Callable[[np.ndarray], np.ndarray] = None,
entry_points: List[int] = None,
per_node: bool = False,
per_channel: bool = False,
use_weight: bool = False,
support_diff: bool = True,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
if image_id == -1:
image_id = 0
while True:
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
try:
predicted_label = predict(
create_model=create_model,
input_fn=input_fn,
model_dir=model_dir,
)
if predicted_label != class_id:
image_id += 1
else:
break
except IndexError:
return [{}] if per_node else {}
else:
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=select_seed_fn,
entry_points=entry_points,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
trace = compact_trace(trace, graph, per_channel=per_channel)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
# if class_id not in class_traces:
# class_traces[class_id] = class_trace_fn(class_id).load()
# return class_traces[class_id]
return class_trace_fn(class_id).load()
def get_overlap(base_class_id: int, trace: AttrMap):
class_trace = get_class_trace(base_class_id)
example_trace_in_class = merge_compact_trace_intersect(
class_trace, trace
)
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace,
layer_name,
compact=True,
key=TraceKey.WEIGHT
if use_weight
else TraceKey.EDGE,
)
for key, current_trace in [
("overlap_size_total", trace),
("overlap_size_in_class", example_trace_in_class),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
return overlap_sizes
row = {}
row = {
**row,
**map_prefix(get_overlap(class_id, trace), f"original.origin"),
}
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([target_class]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
            row = {
                **row,
                **map_prefix(
                    # measured against the class trace of the seeded target class
                    get_overlap(target_class, trace_target_class),
                    "original.target",
                ),
            }
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"label_top5": label_top5,
"label_top5_value": label_top5_value,
"label_value": label_top5_value[0],
**row,
}
print(row)
return row
images = [(target_class, image_id) for image_id in range(0, 40)] + [
(class_id, -1) for class_id in range(0, 1000) if class_id != target_class
]
images = map(
lambda class_with_image: (
class_with_image[0]
if model_config.class_from_zero
else class_with_image[0] + 1,
class_with_image[1],
),
images,
)
traces = ray_iter(get_row, images, chunksize=1, out_of_order=True, num_gpus=0)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
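# "Ideal" negative-example metrics: instead of a crafted adversarial input,
# the comparison example is the next image of the same class whose predicted
# rank for the original label satisfies the attack_name policy (misranked at
# all, misranked within the top 5, or pushed out of the top 5).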
def alexnet_imagenet_negative_example_ideal_metrics_per_layer(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
topk_share_range: int = 5,
topk_calc_range: int = 5,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
predicted_label = predict(
create_model=create_model, input_fn=input_fn, model_dir=model_dir
)
if predicted_label != class_id:
return [{}] if per_node else {}
# adversarial_example = generate_adversarial_fn(
# label=class_id,
# create_model=create_model,
# input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id, normed=False,
# class_from_zero=True, preprocessing_fn=alexnet_preprocess_image)
# .make_one_shot_iterator().get_next()[0],
# attack_fn=attack_fn,
# model_dir=model_dir,
# **kwargs,
# )
adversarial_image_id = image_id + 1
while True:
adversarial_input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
adversarial_image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
try:
adversarial_predicted_label_rank = get_rank(
class_id=predicted_label,
create_model=create_model,
input_fn=adversarial_input_fn,
model_dir=model_dir,
)
except IndexError:
return [{}] if per_node else {}
if adversarial_predicted_label_rank == 0:
adversarial_image_id += 1
else:
if attack_name == "negative_example":
stop = True
elif attack_name == "negative_example_top5":
if adversarial_predicted_label_rank < 5:
stop = True
else:
stop = False
elif attack_name == "negative_example_out_of_top5":
if adversarial_predicted_label_rank >= 5:
stop = True
else:
stop = False
else:
                        raise RuntimeError(f"unsupported attack_name: {attack_name}")
if stop:
break
else:
adversarial_image_id += 1
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
# return class_trace_fn(class_id).load()
def get_overlap(
base_class_id: int, rest_class_id: int, trace: AttrMap, input_fn
):
rest_class_trace = get_class_trace(rest_class_id)
class_trace = get_class_trace(base_class_id)
class_specific_trace = merge_compact_trace_diff(
class_trace, rest_class_trace
)
example_specific_trace = merge_compact_trace_diff(
trace, rest_class_trace
)
example_trace_in_class_in_rest = merge_compact_trace_intersect(
class_trace, trace, rest_class_trace
)
example_trace_in_class_not_in_rest = merge_compact_trace_intersect(
class_specific_trace, example_specific_trace
)
example_trace_not_in_class_in_rest = merge_compact_trace_diff(
merge_compact_trace_intersect(trace, rest_class_trace), class_trace
)
example_trace_not_in_class_not_in_rest = merge_compact_trace_diff(
example_specific_trace, class_specific_trace
)
overlap_sizes = merge_dict(
*[
filter_value_not_null(
{
f"{layer_name}.{key}": calc_trace_size_per_layer(
current_trace, layer_name, compact=True
)
for key, current_trace in [
("overlap_size_total", trace),
(
"overlap_size_in_class_in_rest",
example_trace_in_class_in_rest,
),
(
"overlap_size_in_class_not_in_rest",
example_trace_in_class_not_in_rest,
),
(
"overlap_size_not_in_class_in_rest",
example_trace_not_in_class_in_rest,
),
(
"overlap_size_not_in_class_not_in_rest",
example_trace_not_in_class_not_in_rest,
),
]
}
)
for layer_name in graph.ops_in_layers()
]
)
return {
**calc_all_overlap(
class_specific_trace,
example_specific_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
),
**overlap_sizes,
}
row = {}
row = {
**row,
**map_prefix(
get_overlap(class_id, adversarial_label, trace, input_fn),
f"original.origin",
),
}
trace_target_class = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([adversarial_label]),
)[0]
trace_target_class = compact_trace(
trace_target_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label, class_id, trace_target_class, input_fn
),
f"original.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label,
class_id,
adversarial_trace,
adversarial_input_fn,
),
f"adversarial.target",
),
}
row = {
**row,
**map_prefix(
get_overlap(
adversarial_label,
class_id,
merge_compact_trace_intersect(
trace_target_class, adversarial_trace
),
adversarial_input_fn,
),
f"shared.target",
),
}
adversarial_trace_original_class = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=adversarial_input_fn,
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
select_seed_fn=lambda _: np.array([class_id]),
)[0]
adversarial_trace_original_class = compact_trace(
adversarial_trace_original_class, graph, per_channel=per_channel
)
row = {
**row,
**map_prefix(
get_overlap(
class_id,
adversarial_label,
adversarial_trace_original_class,
adversarial_input_fn,
),
f"adversarial.origin",
),
}
row = {
**row,
**map_prefix(
get_overlap(
class_id,
adversarial_label,
merge_compact_trace_intersect(
adversarial_trace_original_class, trace
),
adversarial_input_fn,
),
f"shared.origin",
),
}
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
"label_value": label_top5_value[0],
"adversarial_label_value": adversarial_label_top5_value[0],
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
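# Measures weight-level overlap (calc_all_overlap with TraceKey.WEIGHT)
# between the example trace and the class trace of each top-5 prediction,
# for the clean and adversarial inputs; adversarial examples are generated
# inline from the training split rather than loaded from disk.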
def alexnet_imagenet_overlap_ratio_top5_unique(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
predicted_label = predict(
create_model=create_model,
# input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id,
input_fn=lambda: imagenet_raw.train(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
),
model_dir=model_dir,
)
if predicted_label != class_id:
return [{}] if per_node else {}
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
# input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id, normed=False,
input_fn=lambda: imagenet_raw.train(
data_dir,
class_id,
image_id,
normed=False,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=model_dir,
**kwargs,
)
# adversarial_example = alexnet_imagenet_example(
# attack_name=attack_name,
# attack_fn=attack_fn,
# generate_adversarial_fn=generate_adversarial_fn,
# class_id=class_id,
# image_id=image_id,
# ).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
),
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
# input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id,
input_fn=lambda: imagenet_raw.train(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
class_traces = {}
def get_class_trace(class_id: int) -> AttrMap:
if class_id not in class_traces:
class_traces[class_id] = class_trace_fn(class_id).load()
return class_traces[class_id]
def get_overlap(base_class_id: int, class_ids: List[int], trace: AttrMap):
class_trace = get_class_trace(base_class_id)
return calc_all_overlap(
trace,
class_trace,
overlap_fn,
compact=True,
use_intersect_size=True,
key=TraceKey.WEIGHT,
# key=TraceKey.EDGE,
)
row = {}
for k, base_class_id in zip(range(1, 6), label_top5):
row = {
**row,
**map_prefix(
get_overlap(base_class_id, label_top5, trace),
f"original.top{k}",
),
}
for k, base_class_id in zip(range(1, 6), adversarial_label_top5):
row = {
**row,
**map_prefix(
get_overlap(
base_class_id, adversarial_label_top5, adversarial_trace
),
f"adversarial.top{k}",
),
}
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
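# Class-specific ("diff") overlap for ResNet-50: for the top-2 predicted
# classes, the union of the remaining top-5 class traces is subtracted from
# both the base class trace and the example trace before the overlap is
# computed.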
def resnet_50_imagenet_overlap_ratio_top5_diff(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/resnet-50-v2/model")
create_model = lambda: ResNet50()
graph = ResNet50.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
predicted_label = predict(
create_model=create_model,
input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id),
model_dir=model_dir,
)
if predicted_label != class_id:
return [{}] if per_node else {}
# adversarial_example = generate_adversarial_fn(
# label=class_id,
# create_model=create_model,
# input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id, normed=False,
# class_from_zero=True, preprocessing_fn=alexnet_preprocess_image)
# .make_one_shot_iterator().get_next()[0],
# attack_fn=attack_fn,
# model_dir=model_dir,
# **kwargs,
# )
adversarial_example = resnet_50_imagenet_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize(adversarial_example)
),
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(data_dir, class_id, image_id),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
assert trace is not None
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
assert class_id != adversarial_label
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
def get_overlap(base_class_id: int, class_ids: List[int], trace: AttrMap):
rest_class_ids = class_ids.copy()
rest_class_ids.remove(base_class_id)
rest_class_trace = merge_compact_trace(
*[class_trace_fn(class_id).load() for class_id in rest_class_ids]
)
class_trace = merge_compact_trace_diff(
class_trace_fn(base_class_id).load(), rest_class_trace
)
trace = merge_compact_trace_diff(trace, rest_class_trace)
return calc_all_overlap(
class_trace,
trace,
overlap_fn,
compact=True,
use_intersect_size=True,
)
row = {}
for k, base_class_id in zip(range(1, 3), label_top5):
row = {
**row,
**map_prefix(
get_overlap(base_class_id, label_top5, trace),
f"original.top{k}",
),
}
for k, base_class_id in zip(range(1, 3), adversarial_label_top5):
row = {
**row,
**map_prefix(
get_overlap(
base_class_id, adversarial_label_top5, adversarial_trace
),
f"adversarial.top{k}",
),
}
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(1, 1001)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
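# LeNet/MNIST counterpart of the top-5 diff metric above, iterating over 100
# test images per digit class.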
def lenet_mnist_overlap_ratio_top5_diff(
attack_name: str,
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = abspath("/home/yxqiu/data/mnist/raw")
model_dir = abspath("tf/lenet/model_early")
create_model = lambda: LeNet(data_format="channels_first")
graph = LeNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
predicted_label = predict(
create_model=create_model,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
model_dir=model_dir,
)
if predicted_label != class_id:
return [{}] if per_node else {}
# adversarial_example = generate_adversarial_fn(
# label=class_id,
# create_model=create_model,
# input_fn=lambda: mnist.test(data_dir, normed=False)
# .filter(lambda image, label:
# tf.equal(
# tf.convert_to_tensor(class_id, dtype=tf.int32),
# label)).skip(image_id).take(1).batch(1)
# .make_one_shot_iterator().get_next()[0],
# attack_fn=attack_fn,
# model_dir=model_dir,
# **kwargs,
# )
adversarial_example = lenet_mnist_example(
attack_name=attack_name,
attack_fn=attack_fn,
generate_adversarial_fn=generate_adversarial_fn,
class_id=class_id,
image_id=image_id,
).load()
if adversarial_example is None:
return [{}] if per_node else {}
adversarial_predicted_label = predict(
create_model=create_model,
input_fn=lambda: tf.data.Dataset.from_tensors(
mnist.normalize(adversarial_example)
),
model_dir=model_dir,
)
if predicted_label == adversarial_predicted_label:
return [{}] if per_node else {}
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: mnist.test(data_dir)
.filter(
lambda image, label: tf.equal(
tf.convert_to_tensor(class_id, dtype=tf.int32), label
)
)
.skip(image_id)
.take(1)
.batch(1),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
if trace is None:
return [{}] if per_node else {}
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
label_top5_value = trace.attrs[GraphAttrKey.PREDICT_TOP5_VALUE]
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
mnist.normalize(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_label_top5_value = adversarial_trace.attrs[
GraphAttrKey.PREDICT_TOP5_VALUE
]
if class_id == adversarial_label:
return [{}] if per_node else {}
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
assert (
class_id == label_top5[0]
and adversarial_label == adversarial_label_top5[0]
)
trace = compact_trace(trace, graph, per_channel=per_channel)
adversarial_trace = compact_trace(
adversarial_trace, graph, per_channel=per_channel
)
def get_overlap(base_class_id: int, class_ids: List[int], trace: AttrMap):
rest_class_ids = class_ids.copy()
rest_class_ids.remove(base_class_id)
rest_class_trace = merge_compact_trace(
*[class_trace_fn(class_id).load() for class_id in rest_class_ids]
)
class_trace = merge_compact_trace_diff(
class_trace_fn(base_class_id).load(), rest_class_trace
)
trace = merge_compact_trace_diff(trace, rest_class_trace)
return calc_all_overlap(
class_trace,
trace,
overlap_fn,
compact=True,
use_intersect_size=True,
)
row = {}
for k, base_class_id in zip(range(1, 3), label_top5):
row = {
**row,
**map_prefix(
get_overlap(base_class_id, label_top5, trace),
f"original.top{k}",
),
}
for k, base_class_id in zip(range(1, 3), adversarial_label_top5):
row = {
**row,
**map_prefix(
get_overlap(
base_class_id, adversarial_label_top5, adversarial_trace
),
f"adversarial.top{k}",
),
}
if per_node:
                raise RuntimeError("per_node mode is not supported")
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
"label_top5_value": label_top5_value,
"adversarial_label_top5_value": adversarial_label_top5_value,
**row,
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 100)
for class_id in range(0, 10)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
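# Merges the class traces of all top-5 predictions into a single trace before
# measuring overlap, and only keeps examples whose adversarial label falls
# outside the original top-5; per_node mode emits one row per graph node.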
def alexnet_imagenet_overlap_ratio_top5(
attack_fn,
generate_adversarial_fn,
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_node: bool = False,
per_channel: bool = False,
**kwargs,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
trace = reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
),
select_fn=select_fn,
model_dir=model_dir,
top_5=True,
per_channel=per_channel,
)[0]
if trace is None:
return {}
label_top5 = trace.attrs[GraphAttrKey.PREDICT_TOP5]
adversarial_example = generate_adversarial_fn(
label=class_id,
create_model=create_model,
input_fn=lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
normed=False,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
)
.make_one_shot_iterator()
.get_next()[0],
attack_fn=attack_fn,
model_dir=model_dir,
**kwargs,
)
if adversarial_example is None:
return {}
adversarial_trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(adversarial_example)
),
select_fn=select_fn,
model_dir=model_dir,
top_5=True,
per_channel=per_channel,
)[0]
adversarial_label = adversarial_trace.attrs[GraphAttrKey.PREDICT]
adversarial_label_top5 = adversarial_trace.attrs[GraphAttrKey.PREDICT_TOP5]
if adversarial_label not in label_top5:
# if np.intersect1d(label_top5, adversarial_label_top5).size == 0:
                def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                    return {f"{prefix}.{key}": value for key, value in mapping.items()}
class_trace = merge_compact_trace(
*[class_trace_fn(label).load() for label in label_top5]
)
adversarial_class_trace = merge_compact_trace(
*[class_trace_fn(label).load() for label in adversarial_label_top5]
)
trace = compact_edge(trace, graph, per_channel=per_channel)
adversarial_trace = compact_edge(
adversarial_trace, graph, per_channel=per_channel
)
if per_node:
rows = []
for node_name in class_trace.nodes:
row = {
"image_id": image_id,
"node_name": node_name,
"label": class_id,
"adversarial_label": adversarial_label,
**map_prefix(
calc_all_overlap(
class_trace, trace, overlap_fn, node_name
),
"original",
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace,
adversarial_trace,
overlap_fn,
node_name,
),
"adversarial",
),
}
if (
row[f"original.{TraceKey.WEIGHT}"] is not None
or row[f"original.{TraceKey.EDGE}"] is not None
):
rows.append(row)
return rows
else:
row = {
"image_id": image_id,
"label": class_id,
"adversarial_label": adversarial_label,
"label_top5": label_top5,
"adversarial_label_top5": adversarial_label_top5,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
**map_prefix(
calc_all_overlap(
adversarial_class_trace, adversarial_trace, overlap_fn
),
"adversarial",
),
}
print(row)
return row
else:
return [{}] if per_node else {}
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
if per_node:
traces = list(itertools.chain.from_iterable(traces))
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
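# Baseline: overlap between class traces and traces of misclassified natural
# images (rows are only emitted when the prediction differs from the label).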
def alexnet_imagenet_overlap_ratio_error(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=True,
preprocessing_fn=alexnet_preprocess_image,
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
if class_id == trace.attrs[GraphAttrKey.PREDICT]:
return {}
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
class_trace = class_trace_fn(class_id).load()
trace = compact_edge(trace, graph, per_channel=per_channel)
row = {
"image_id": image_id,
"label": class_id,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
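# Baseline: overlap between class traces and the trace of a uniform-random
# noise image.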
def alexnet_imagenet_overlap_ratio_rand(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
example = np.random.random_sample((1, 224, 224, 3)).astype(np.float32)
trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(example)
),
select_fn=select_fn,
model_dir=model_dir,
per_channel=per_channel,
)[0]
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
class_trace = class_trace_fn(class_id).load()
trace = compact_edge(trace, graph, per_channel=per_channel)
row = {
"image_id": image_id,
"label": class_id,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
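# Random-noise baseline against the merged class traces of the top-5
# predictions for the noise input.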
def alexnet_imagenet_overlap_ratio_top5_rand(
class_trace_fn: Callable[[int], IOAction[AttrMap]],
select_fn: Callable[[np.ndarray], np.ndarray],
overlap_fn: Callable[[AttrMap, AttrMap, str], float],
path: str,
per_channel: bool = False,
):
def get_overlap_ratio() -> pd.DataFrame:
def get_row(class_id: int, image_id: int) -> Dict[str, Any]:
mode.check(False)
model_dir = abspath("tf/alexnet/model_import")
create_model = lambda: AlexNet()
graph = AlexNet.graph().load()
model_fn = partial(
model_fn_with_fetch_hook, create_model=create_model, graph=graph
)
example = np.random.random_sample((1, 224, 224, 3)).astype(np.float32)
trace = reconstruct_trace_from_tf(
model_fn=model_fn,
input_fn=lambda: tf.data.Dataset.from_tensors(
imagenet.normalize_alexnet(example)
),
select_fn=select_fn,
model_dir=model_dir,
top_5=True,
per_channel=per_channel,
)[0]
            def map_prefix(mapping: Dict[str, Any], prefix: str) -> Dict[str, Any]:
                return {f"{prefix}.{key}": value for key, value in mapping.items()}
class_trace = merge_compact_trace(
*[
class_trace_fn(label).load()
for label in trace.attrs[GraphAttrKey.PREDICT_TOP5]
]
)
trace = compact_edge(trace, graph, per_channel=per_channel)
row = {
"image_id": image_id,
"label": class_id,
**map_prefix(
calc_all_overlap(class_trace, trace, overlap_fn), "original"
),
}
print(row)
return row
traces = ray_iter(
get_row,
(
(class_id, image_id)
for image_id in range(0, 1)
for class_id in range(0, 1000)
),
chunksize=1,
out_of_order=True,
num_gpus=0,
)
traces = [trace for trace in traces if len(trace) != 0]
return pd.DataFrame(traces)
return CsvIOAction(path, init_fn=get_overlap_ratio)
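# Detection summary based on top-1 overlap alone: rows whose original overlap
# falls below `threshold` count as false positives, rows whose adversarial
# overlap falls below it count as true positives; precision/recall/F1 follow.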
def get_overlay_summary_top1(
    overlap_ratios: pd.DataFrame, trace_key: str, threshold=1
) -> Dict[str, Any]:
condition_positive = len(overlap_ratios)
if condition_positive == 0:
return {}
original_key = f"original.top1.{trace_key}"
false_positive = np.count_nonzero(overlap_ratios[original_key] < threshold)
adversarial_key = f"adversarial.top1.{trace_key}"
true_positive = np.count_nonzero(overlap_ratios[adversarial_key] < threshold)
predicted_condition_positive = true_positive + false_positive
recall = (true_positive / condition_positive) if condition_positive != 0 else 0
precision = (
(true_positive / predicted_condition_positive)
if predicted_condition_positive != 0
else 0
)
f1 = (2 / ((1 / recall) + (1 / precision))) if recall != 0 and precision != 0 else 0
return dict(
condition_positive=condition_positive,
# predicted_condition_positive=predicted_condition_positive,
original_is_higher=np.count_nonzero(
(overlap_ratios[original_key] - overlap_ratios[adversarial_key]) > 0
),
# adversarial_is_higher=np.count_nonzero(
# (overlap_ratios[adversarial_key] - overlap_ratios[original_key]) > 0),
true_positive=true_positive,
false_positive=false_positive,
recall=recall,
precision=precision,
f1=f1,
)
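# Detection summary based on a logit-weighted confidence score: top-1 overlap
# times the top-1 logit, minus the largest runner-up overlap/logit product.
# Inputs whose score falls below `threshold` are flagged as adversarial.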
def get_overlay_summary_compare(
    overlap_ratios: pd.DataFrame, trace_key: str, threshold=0
) -> Dict[str, Any]:
condition_positive = len(overlap_ratios)
if condition_positive == 0:
return {}
def confidence_score(kind: str, key: str) -> np.ndarray:
current_logits = logits if kind == "original" else adversarial_logits
return overlap_ratios[f"{kind}.top1.{key}"] * current_logits[:, 0] - np.max(
[
overlap_ratios[f"{kind}.top{k}.{key}"] * current_logits[:, k - 1]
for k in range(2, 3)
],
axis=0,
)
logits = np.array(
list(
map(
lambda line: list(map(lambda x: float(x), line[1:-1].split(","))),
overlap_ratios["label_top5_value"],
)
)
)
adversarial_logits = np.array(
list(
map(
lambda line: list(map(lambda x: float(x), line[1:-1].split(","))),
overlap_ratios["adversarial_label_top5_value"],
)
)
)
false_positive = condition_positive - np.count_nonzero(
confidence_score("original", trace_key) >= threshold
)
true_positive = condition_positive - np.count_nonzero(
confidence_score("adversarial", trace_key) >= threshold
)
# false_positive = (condition_positive -
# np.count_nonzero(reduce(np.logical_and,
# [(overlap_ratios[f"original.top1.{trace_key}"] -
# overlap_ratios[f"original.top{k}.{trace_key}"]) >= threshold
# for k in range(2, 3)])))
# true_positive = (condition_positive -
# np.count_nonzero(reduce(np.logical_and,
# [(overlap_ratios[f"adversarial.top1.{trace_key}"] -
# overlap_ratios[f"adversarial.top{k}.{trace_key}"]) >= threshold
# for k in range(2, 3)])))
predicted_condition_positive = true_positive + false_positive
recall = (true_positive / condition_positive) if condition_positive != 0 else 0
precision = (
(true_positive / predicted_condition_positive)
if predicted_condition_positive != 0
else 0
)
f1 = (2 / ((1 / recall) + (1 / precision))) if recall != 0 and precision != 0 else 0
return dict(
threshold=threshold,
condition_positive=condition_positive,
true_positive=true_positive,
false_positive=false_positive,
recall=recall,
precision=precision,
f1=f1,
diff=true_positive - false_positive,
)
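# Expands the comparison above into a per-example DataFrame: overlap- and
# size-based confidence scores, raw top-1 overlaps, logit margins, and
# WordNet-tree distances (imagenet_class_tree) between the true label and the
# adversarial / lower-ranked labels.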
def get_overlay_summary_compare_detail(
path: str, overlap_ratios: pd.DataFrame, from_zero: bool = True
) -> CsvIOAction:
def init_fn() -> pd.DataFrame:
trace_key = TraceKey.EDGE
def confidence_score(kind: str, key: str) -> np.ndarray:
current_logits = logits if kind == "original" else adversarial_logits
return overlap_ratios[f"{kind}.top1.{key}"] * current_logits[0] - np.max(
[
overlap_ratios[f"{kind}.top{k}.{key}"] * current_logits[k - 1]
for k in range(2, 3)
],
axis=0,
)
# return overlap_ratios[f"{kind}.top1.{key}"] - overlap_ratios[f"{kind}.top2.{key}"]
def top1(kind: str, key: str) -> np.ndarray:
return overlap_ratios[f"{kind}.top1.{key}"]
logits = np.array(
list(
map(
lambda line: list(map(lambda x: float(x), line[1:-1].split(","))),
overlap_ratios["label_top5_value"],
)
)
).transpose()
adversarial_logits = np.array(
list(
map(
lambda line: list(map(lambda x: float(x), line[1:-1].split(","))),
overlap_ratios["adversarial_label_top5_value"],
)
)
).transpose()
        # logits has shape (5, num_examples) after the transpose above, so the
        # per-example runner-up maximum is taken along axis=0
        logit_confidence_score = logits[0] - np.max(logits[1:2], axis=0)
        adversarial_logit_confidence_score = adversarial_logits[0] - np.max(
            adversarial_logits[1:2], axis=0
        )
# label_top5 = np.array(list(map(lambda line: list(map(lambda x: int(x), line[1:-1].split(","))),
# overlap_ratios["label_top5"])))
# logit_distance = []
# logit_distance_mask = []
# for index in range(len(overlap_ratios)):
# adversarial_label = overlap_ratios["adversarial_label"][index]
# if adversarial_label in label_top5[index]:
# logit_distance_mask.append(True)
# logit_distance.append(logits[index][0] -
# logits[index][np.where(label_top5[index] == adversarial_label)][0])
# else:
# logit_distance_mask.append(False)
#
# logit_distance_mask = np.array(logit_distance_mask)
# logit_distance = np.array(logit_distance)
        label_top5 = np.array(
            [
                [int(x) for x in line[1:-1].split(",")]
                for line in overlap_ratios["label_top5"]
            ]
        ).transpose()
# logit_distance_mask = []
# for index in range(len(overlap_ratios)):
# adversarial_label = overlap_ratios["adversarial_label"][index]
# if adversarial_label in label_top5[index][:2]:
# logit_distance_mask.append(True)
# else:
# logit_distance_mask.append(False)
#
# logit_distance_mask = np.array(logit_distance_mask)
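        # Normalize labels to zero-based indexing when the model reports
        # one-based ImageNet labels.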
if from_zero:
labels = overlap_ratios["label"]
adversarial_labels = overlap_ratios["adversarial_label"].values
else:
labels = overlap_ratios["label"] - 1
adversarial_labels = overlap_ratios["adversarial_label"].values - 1
label_top5 = label_top5 - 1
class_tree = imagenet_class_tree().load()
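        # Semantic distance in the ImageNet class hierarchy between the true
        # label and the adversarial label, and between the top-1 label and
        # each lower-ranked prediction.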
distance = np.array(
[
class_tree.distance_of(
class_tree.imagenet_labels[label],
class_tree.imagenet_labels[adversarial_label],
)
for label, adversarial_label in zip(labels, adversarial_labels)
]
)
        # Distance from the top-1 label to each of the rank-2..5 predictions.
        distance_rank_2, distance_rank_3, distance_rank_4, distance_rank_5 = (
            np.array(
                [
                    class_tree.distance_of(
                        class_tree.imagenet_labels[label],
                        class_tree.imagenet_labels[label_rank_k],
                    )
                    for label, label_rank_k in zip(label_top5[0], label_top5[k])
                ]
            )
            for k in range(1, 5)
        )
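        # How much farther (semantically) the rank-3/4/5 predictions are than
        # the rank-2 prediction, plus the analogous logit margins.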
distance_diff_5 = distance_rank_5 - distance_rank_2
distance_diff_4 = distance_rank_4 - distance_rank_2
distance_diff_3 = distance_rank_3 - distance_rank_2
logits_distance_rank_2 = logits[0] - logits[1]
logits_distance_rank_5 = logits[0] - logits[4]
logits_distance_diff = logits_distance_rank_5 - logits_distance_rank_2
# return pd.DataFrame(dict(
# original_overlap=confidence_score("original", trace_key)[logit_distance_mask],
# adversarial_overlap=confidence_score("adversarial", trace_key)[logit_distance_mask],
# original_size=confidence_score("original", trace_key + "_size")[logit_distance_mask],
# adversarial_size=confidence_score("adversarial", trace_key + "_size")[logit_distance_mask],
# original_top1=top1("original", trace_key)[logit_distance_mask],
# adversarial_top1=top1("adversarial", trace_key)[logit_distance_mask],
# # distance=distance[logit_distance_mask],
# logit_confidence_score=logit_confidence_score[logit_distance_mask],
# adversarial_logit_confidence_score=adversarial_logit_confidence_score[logit_distance_mask],
# # logit_distance=logit_distance,
# ))
return pd.DataFrame(
dict(
original_overlap=confidence_score("original", trace_key),
adversarial_overlap=confidence_score("adversarial", trace_key),
original_size=confidence_score("original", trace_key + "_size"),
adversarial_size=confidence_score("adversarial", trace_key + "_size"),
original_top1=top1("original", trace_key),
adversarial_top1=top1("adversarial", trace_key),
distance=distance,
distance_rank_2=distance_rank_2,
distance_rank_5=distance_rank_5,
logits_distance_rank_2=logits_distance_rank_2,
logits_distance_rank_5=logits_distance_rank_5,
distance_diff_3=distance_diff_3,
distance_diff_4=distance_diff_4,
distance_diff_5=distance_diff_5,
logits_distance_diff=logits_distance_diff,
logit_confidence_score=logit_confidence_score,
adversarial_logit_confidence_score=adversarial_logit_confidence_score,
)
)
    return CsvIOAction(path, init_fn=init_fn)


def get_overlay_summary_compare_filter(
overlap_ratios: pd.DataFrame, trace_key: str, threshold=0
) -> Dict[str, int]:
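    """Variant of the thresholded summary (intent inferred from the body):
    first keep only samples whose original top-1 overlap dominates every lower
    rank, then flag a sample when any top-1-vs-top-k margin (k = 2..5) falls
    below `threshold`."""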
overlap_ratios = overlap_ratios[
reduce(
np.logical_and,
[
(
overlap_ratios[f"original.top1.{trace_key}"]
- overlap_ratios[f"original.top{k}.{trace_key}"]
)
>= 0
for k in range(2, 6)
],
)
]
condition_positive = len(overlap_ratios)
if condition_positive == 0:
return {}
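    # False positives: filtered originals that still get flagged; true
    # positives: adversarial inputs that get flagged.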
false_positive = condition_positive - np.count_nonzero(
reduce(
np.logical_and,
[
(
overlap_ratios[f"original.top1.{trace_key}"]
- overlap_ratios[f"original.top{k}.{trace_key}"]
)
>= threshold
for k in range(2, 6)
],
)
)
true_positive = condition_positive - np.count_nonzero(
reduce(
np.logical_and,
[
(
overlap_ratios[f"adversarial.top1.{trace_key}"]
- overlap_ratios[f"adversarial.top{k}.{trace_key}"]
)
>= threshold
for k in range(2, 6)
],
)
)
predicted_condition_positive = true_positive + false_positive
recall = (true_positive / condition_positive) if condition_positive != 0 else 0
precision = (
(true_positive / predicted_condition_positive)
if predicted_condition_positive != 0
else 0
)
f1 = (2 / ((1 / recall) + (1 / precision))) if recall != 0 and precision != 0 else 0
return dict(
condition_positive=condition_positive,
true_positive=true_positive,
false_positive=false_positive,
recall=recall,
precision=precision,
f1=f1,
diff=true_positive - false_positive,
    )


def get_overlay_summary_one_side(
overlap_ratios: pd.DataFrame, trace_key: str, threshold=1
) -> Dict[str, int]:
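    """One-sided summary (intent inferred from the body): recall of flagging
    samples whose original-trace overlap ratio falls below `threshold`; no
    adversarial counterpart is consulted."""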
condition_positive = len(overlap_ratios)
if condition_positive == 0:
return {}
original_key = f"original.{trace_key}"
true_positive = np.count_nonzero(overlap_ratios[original_key] < threshold)
recall = (true_positive / condition_positive) if condition_positive != 0 else 0
return dict(
condition_positive=condition_positive,
true_positive=true_positive,
recall=recall,
    )


def benchmark_trace():
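    # Benchmark trace reconstruction on a single ImageNet image with VGG-16,
    # stopping after the first two Conv2d ops (see stop_hook below).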
class_id = 1
image_id = 0
threshold = 0.5
per_channel = False
# model_config = ALEXNET.with_model_dir("tf/alexnet/model_import")
# model_config = RESNET_50
model_config = VGG_16
mode.check(False)
data_dir = IMAGENET_RAW_DIR
model_dir = abspath(model_config.model_dir)
create_model = lambda: model_config.network_class()
graph = model_config.network_class.graph().load()
model_fn = partial(model_fn_with_fetch_hook, create_model=create_model, graph=graph)
input_fn = lambda: imagenet_raw.test(
data_dir,
class_id,
image_id,
class_from_zero=model_config.class_from_zero,
preprocessing_fn=model_config.preprocessing_fn,
)
# predicted_label = predict(
# create_model=create_model,
# input_fn=input_fn,
# model_dir=model_dir,
# )
#
# if predicted_label != class_id:
# return None
conv_op_count = 0
    def stop_hook(op):
        # Stop tracing once two Conv2d ops have been visited.
        nonlocal conv_op_count
        if isinstance(op, Conv2dOp):
            conv_op_count += 1
            return conv_op_count >= 2
        return False
reconstruct_trace_from_tf(
class_id=class_id,
model_fn=model_fn,
input_fn=input_fn,
select_fn=lambda input: arg_approx(input, threshold),
model_dir=model_dir,
per_channel=per_channel,
stop_hook=stop_hook,
    )


if __name__ == "__main__":
# with tf.Graph().as_default():
# input_dataset = (mnist.test(abspath("/home/yxqiu/data/mnist/raw"))
# .filter(lambda image, label:
# tf.equal(
# tf.convert_to_tensor(5, dtype=tf.int32),
# label)).skip(891).make_one_shot_iterator().get_next())
# with tf.Session() as sess:
# while True:
# try:
# result = sess.run(input_dataset)[1]
# print(result)
# except tf.errors.OutOfRangeError:
# break
# print("check")
# for attack_name in [
# "DeepFool",
# "FGSM",
# "BIM",
# "JSMA",
# # "DeepFool_full",
# # "CWL2",
# ]:
# try:
# for class_id in range(1, 1001):
# adversarial_example = resnet_50_imagenet_example(
# # adversarial_example = alexnet_imagenet_example(
# attack_name=attack_name,
# attack_fn=None,
# generate_adversarial_fn=None,
# class_id=class_id,
# image_id=0,
# ).load()
# except:
# print(f"attack {attack_name} class {class_id}")
benchmark_trace()