hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7aeaf9e1923d41e232994ea25295ce1754031792 | 17,332 | py | Python | components/ai.py | TheNicGard/DungeonStar | 525aeb53217166d2ce83e4e91a3b8c1b102f0dcb | [
"MIT"
] | 3 | 2019-07-11T17:54:42.000Z | 2021-03-09T10:58:13.000Z | components/ai.py | BandW2011/DungeonStar | 525aeb53217166d2ce83e4e91a3b8c1b102f0dcb | [
"MIT"
] | 1 | 2019-07-11T17:55:38.000Z | 2020-05-03T06:34:56.000Z | components/ai.py | TheNicGard/DungeonStar | 525aeb53217166d2ce83e4e91a3b8c1b102f0dcb | [
"MIT"
] | null | null | null | import tcod as libtcod
from entity import get_blocking_entities_at_location, get_entities_at_location
from game_messages import Message
from random import randint, random
from rpg_mechanics import attack_success, get_modifier
def check_for_traps(monster, entities, game_map, fov_map):
    """Trigger any traps on the monster's tile and collect their results.

    Whether a trap fires is decided by attack_success() using the monster's
    dexterity modifier against a fixed difficulty of 10. A trap that fires
    while the tile is inside the FOV is also revealed to the player.
    """
    results = []
    for entity in get_entities_at_location(entities, monster.x, monster.y):
        if entity.trap and attack_success(get_modifier(monster.fighter.dexterity), 10):
            if fov_map.fov[monster.y][monster.x]:
                entity.trap.set_reveal(True)
            results.extend(entity.trap.trap_function(
                monster,
                game_map=game_map,
                entities=entities,
                fov_map=fov_map,
            ))
    return results
class BasicMonster:
    """AI that hunts the closest target whenever the monster is in the FOV."""

    def __str__(self):
        return "Basic monster AI. Hunts closest target when in FOV."

    def take_turn(self, target, fov_map, game_map, entities):
        """Chase/attack the target when visible, wander randomly otherwise.

        Returns a list of result dicts produced by attacks and triggered traps.
        """
        results = []
        monster = self.owner
        if fov_map.fov[monster.y][monster.x]:
            if monster.distance_to(target) >= 2:
                if not self.owner.fighter.is_effect("stuck"):
                    if target.fighter.is_effect("invisible"):
                        # Target can't be seen: stagger to a random
                        # neighbouring tile instead of pathing to it.
                        random_x = self.owner.x + randint(0, 2) - 1
                        random_y = self.owner.y + randint(0, 2) - 1
                        # Bug fix: the old `random_x != x and random_y != y`
                        # test only ever allowed diagonal steps; only staying
                        # in place should be skipped (matches the wander
                        # branch below).
                        if not (random_x == self.owner.x and random_y == self.owner.y):
                            self.owner.move_towards(random_x, random_y, game_map, entities)
                    else:
                        monster.move_astar(target, entities, game_map)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
            elif target.fighter.hp > 0:
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        else:
            # Out of FOV: wander one random step (may stay in place).
            if not self.owner.fighter.is_effect("stuck"):
                random_x = self.owner.x + randint(0, 2) - 1
                random_y = self.owner.y + randint(0, 2) - 1
                if not (random_x == self.owner.x and random_y == self.owner.y):
                    self.owner.move_towards(random_x, random_y, game_map, entities)
                results.extend(check_for_traps(monster, entities, game_map, fov_map))
        return results
class AggressiveMonster:
    """AI that hunts the closest target while its patience lasts.

    Seeing the target refills `current_patience`; while the target is out of
    sight the patience ticks down and the monster keeps chasing until it
    reaches zero, after which it wanders and slowly regains patience.
    """

    def __init__(self, patience=20):
        self.max_patience = patience
        self.current_patience = 0
        # True while actively chasing a target that left the FOV.
        self.seeking = False

    def __str__(self):
        return "Aggressive monster AI. Hunts closest target until patience runs out."

    def take_turn(self, target, fov_map, game_map, entities):
        """Chase, attack, or wander depending on visibility and patience.

        Returns a list of result dicts produced by attacks and triggered traps.
        """
        results = []
        monster = self.owner
        if fov_map.fov[monster.y][monster.x]:
            self.seeking = True
            self.current_patience = self.max_patience
            if monster.distance_to(target) >= 2:
                if not self.owner.fighter.is_effect("stuck"):
                    if target.fighter.is_effect("invisible"):
                        random_x = monster.x + randint(0, 2) - 1
                        random_y = monster.y + randint(0, 2) - 1
                        # Bug fix: the old `!= and !=` test only allowed
                        # diagonal steps; only staying put should be skipped.
                        if not (random_x == self.owner.x and random_y == self.owner.y):
                            monster.move_towards(random_x, random_y, game_map, entities)
                    else:
                        monster.move_astar(target, entities, game_map)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
            elif target.fighter.hp > 0:
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        elif self.current_patience > 0 and self.seeking:
            # Target out of sight: keep chasing while patience lasts.
            self.current_patience -= 1
            if self.current_patience <= 0:
                self.seeking = False
            if monster.distance_to(target) >= 2:
                if not self.owner.fighter.is_effect("stuck"):
                    monster.move_astar(target, entities, game_map)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
            elif target.fighter.hp > 0:
                if self.current_patience < self.max_patience:
                    self.current_patience += 1
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        else:
            # Gave up: wander and slowly regain patience.
            if self.current_patience < self.max_patience:
                self.current_patience += 1
            if not monster.fighter.is_effect("stuck"):
                random_x = monster.x + randint(0, 2) - 1
                random_y = monster.y + randint(0, 2) - 1
                # Bug fix: same diagonal-only condition corrected here.
                if not (random_x == self.owner.x and random_y == self.owner.y):
                    monster.move_towards(random_x, random_y, game_map, entities)
                results.extend(check_for_traps(monster, entities, game_map, fov_map))
        return results
class IntelligentMonster:
    """AI that hunts like AggressiveMonster but flees when badly hurt.

    Below `health_threshold` (fraction of max HP) it runs from the target
    until it is at least `safe_range` tiles away.
    """

    def __init__(self, patience=20, health_threshold=0.10, safe_range=12):
        self.max_patience = patience
        self.current_patience = 0
        self.seeking = False
        self.health_threshold = health_threshold
        self.safe_range = safe_range

    def __str__(self):
        return "Intelligent monster AI. Hunts closest target until patience runs out. Runs away if health is too low."

    def take_turn(self, target, fov_map, game_map, entities):
        """Flee when hurt; otherwise chase/attack/wander by visibility and patience.

        Returns a list of result dicts produced by attacks and triggered traps.
        """
        results = []
        monster = self.owner
        if (monster.fighter.hp / monster.fighter.max_hp) <= self.health_threshold:
            if not monster.fighter.is_effect("stuck"):
                if monster.distance(target.x, target.y) < self.safe_range and not target.fighter.is_effect("invisible"):
                    # Bug fix: `safe_range` was an unresolved bare name here
                    # (NameError at runtime); it must be the instance attribute.
                    monster.flee_astar(target, entities, game_map, self.safe_range)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
                else:
                    random_x = monster.x + randint(0, 2) - 1
                    random_y = monster.y + randint(0, 2) - 1
                    # Bug fix: the old `!= and !=` test only allowed diagonal
                    # steps; only staying in place should be skipped.
                    if not (random_x == self.owner.x and random_y == self.owner.y):
                        monster.move_towards(random_x, random_y, game_map, entities)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
        elif fov_map.fov[monster.y][monster.x]:
            self.seeking = True
            self.current_patience = self.max_patience
            if monster.distance_to(target) >= 2:
                if not self.owner.fighter.is_effect("stuck"):
                    if target.fighter.is_effect("invisible"):
                        random_x = monster.x + randint(0, 2) - 1
                        random_y = monster.y + randint(0, 2) - 1
                        if not (random_x == self.owner.x and random_y == self.owner.y):
                            monster.move_towards(random_x, random_y, game_map, entities)
                    else:
                        monster.move_astar(target, entities, game_map)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
            elif target.fighter.hp > 0:
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        elif self.current_patience > 0 and self.seeking:
            # Target out of sight: keep chasing while patience lasts.
            self.current_patience -= 1
            if self.current_patience <= 0:
                self.seeking = False
            if monster.distance_to(target) >= 2:
                if not self.owner.fighter.is_effect("stuck"):
                    monster.move_astar(target, entities, game_map)
                    results.extend(check_for_traps(monster, entities, game_map, fov_map))
            elif target.fighter.hp > 0:
                if self.current_patience < self.max_patience:
                    self.current_patience += 1
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        else:
            # Gave up: wander and slowly regain patience.
            if self.current_patience < self.max_patience:
                self.current_patience += 1
            if not monster.fighter.is_effect("stuck"):
                random_x = monster.x + randint(0, 2) - 1
                random_y = monster.y + randint(0, 2) - 1
                if not (random_x == self.owner.x and random_y == self.owner.y):
                    monster.move_towards(random_x, random_y, game_map, entities)
                results.extend(check_for_traps(monster, entities, game_map, fov_map))
        return results
class ConfusedMonster:
    """Temporary AI: the monster stumbles randomly until confusion wears off,
    then its previous AI is restored."""

    def __init__(self, previous_ai, number_of_turns=10):
        self.previous_ai = previous_ai
        self.number_of_turns = number_of_turns

    def __str__(self):
        return "Confused monster AI. Walks in a random direction until no longer confused."

    def take_turn(self, target, fov_map, game_map, entities):
        """Stagger one random step per turn; restore the old AI when done."""
        results = []
        monster = self.owner
        if self.number_of_turns > 0:
            if not monster.fighter.is_effect("stuck"):
                random_x = monster.x + randint(0, 2) - 1
                random_y = monster.y + randint(0, 2) - 1
                # Bug fix: the old `!= and !=` test only ever allowed diagonal
                # steps; only staying in place should be skipped.
                if not (random_x == self.owner.x and random_y == self.owner.y):
                    self.owner.move_towards(random_x, random_y, game_map, entities)
                results.extend(check_for_traps(monster, entities, game_map, fov_map))
            self.number_of_turns -= 1
        else:
            self.owner.ai = self.previous_ai
            results.append({'message': Message('The {0} is no longer confused!'.format(self.owner.name),
                                               libtcod.red)})
        return results
class DummyMonster:
    """Inert AI: the monster takes no action at all."""

    def __str__(self):
        return "Dummy monster AI. Does nothing."

    def take_turn(self, target, fov_map, game_map, entities):
        """Do nothing and report no results."""
        return []
class HardStoppedMonster:
    """AI for a monster frozen in place for a fixed number of turns.

    Once the timer runs out, the previous AI is restored and a message is
    reported.
    """

    def __init__(self, previous_ai, number_of_turns=10, resume_text="stopped"):
        self.previous_ai = previous_ai
        self.number_of_turns = number_of_turns
        # Word used in the "no longer ..." resume message.
        self.resume_text = resume_text

    def __str__(self):
        return "Hard stopped monster AI. Resumes previous AI after x turns."

    def take_turn(self, target, fov_map, game_map, entities):
        """Burn one turn; restore the previous AI once the timer expires."""
        results = []
        monster = self.owner
        if self.number_of_turns <= 0:
            monster.ai = self.previous_ai
            results.append({'message': Message(
                'The {0} is no longer {1}!'.format(monster.name, self.resume_text),
                libtcod.red)})
        else:
            self.number_of_turns -= 1
        return results
class SoftStoppedMonster:
    """AI for a temporarily stopped monster that may resume early.

    Each turn after the first, the monster has `chance_to_resume` odds of
    returning to its previous AI; it resumes unconditionally once
    `number_of_turns` reaches zero.
    """

    def __init__(self, previous_ai, number_of_turns=10, chance_to_resume=0.2, resume_text="stopped"):
        self.previous_ai = previous_ai
        self.number_of_turns = number_of_turns
        # Word used in the "no longer ..." resume message.
        self.resume_text = resume_text
        self.chance_to_resume = chance_to_resume
        # The turn the effect is applied does not count against the timer.
        self.first_turn = True

    def __str__(self):
        # Bug fix: corrected the "sttopped" typo in the description.
        return "Soft stopped monster AI. Resumes previous AI after x turns, or with a chance to resume action."

    def take_turn(self, target, fov_map, game_map, entities):
        """Tick the timer and maybe resume the previous AI early."""
        results = []
        monster = self.owner
        if self.first_turn:
            self.first_turn = False
        elif self.number_of_turns != 0:
            self.number_of_turns -= 1
            if random() < self.chance_to_resume:
                monster.ai = self.previous_ai
                results.append({'message': Message(
                    'The {0} is no longer {1}!'.format(monster.name, self.resume_text),
                    libtcod.red)})
        else:
            monster.ai = self.previous_ai
            results.append({'message': Message(
                'The {0} is no longer {1}!'.format(monster.name, self.resume_text),
                libtcod.red)})
        return results
class StaticMonster:
    """AI for an immobile monster: attacks nearby targets, never moves."""

    def __str__(self):
        # Bug fix: corrected the "Statuc" typo in the description.
        return "Static monster AI. Attacks nearby targets, but does not move."

    def take_turn(self, target, fov_map, game_map, entities):
        """Attack the target when the monster's tile is in the FOV."""
        results = []
        monster = self.owner
        if fov_map.fov[monster.y][monster.x]:
            # NOTE(review): attacking only when the target IS invisible looks
            # inverted (`not ... is_effect("invisible")` would be expected);
            # kept as-is to preserve behavior — confirm intent.
            if target.fighter.is_effect("invisible"):
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        return results
class MotherDoughAI(StaticMonster):
    """AI for the Mother Dough: a static attacker that spawns a sourdough
    starter on a free adjacent tile every 40 turns, healing itself when it
    does."""

    def __init__(self):
        # Turns remaining until the next spawn attempt.
        self.turns_to_spawn = 40

    def __str__(self):
        return "AI for the Mother Dough. Attacks nearby targets, and spreads sourdough starters every few turns."

    def _free_adjacent_tile(self, game_map, entities):
        """Return the first unblocked adjacent (x, y) tile, or None.

        Scans rows top-to-bottom, columns left-to-right, skipping the
        monster's own tile.
        """
        monster = self.owner
        for y in [monster.y - 1, monster.y, monster.y + 1]:
            for x in [monster.x - 1, monster.x, monster.x + 1]:
                if (x == monster.x and y == monster.y) or game_map.is_blocked(x, y):
                    continue
                if get_blocking_entities_at_location(entities, x, y) is None:
                    return x, y
        return None

    def take_turn(self, target, fov_map, game_map, entities):
        """Spawn a starter when the timer expires; attack like StaticMonster."""
        results = []
        monster = self.owner
        if self.turns_to_spawn <= 0:
            tile = self._free_adjacent_tile(game_map, entities)
            if tile is not None:
                spawn_x, spawn_y = tile
                results.append({"spawn_enemy": {"name": "sourdough_starter",
                                                "x": spawn_x, "y": spawn_y,
                                                "mother": monster}})
                monster.fighter.heal(10)
                self.turns_to_spawn = 40
        else:
            self.turns_to_spawn -= 1
        if fov_map.fov[monster.y][monster.x]:
            if target.fighter.is_effect("invisible"):
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        return results
class SourdoughAI(StaticMonster):
    """AI for a Sourdough Starter: a static attacker that occasionally spreads.

    Dies when its mother dough's AI is gone; otherwise spawns a new starter
    on a free adjacent tile when its spread timer expires.
    """

    def __init__(self, min_spread_time, max_spread_time):
        self.min_spread_time = min_spread_time
        self.max_spread_time = max_spread_time
        # Random spread timer within the configured bounds.
        self.turns_to_spawn = randint(min_spread_time, max_spread_time)
        # Set by the spawner; a starter dies when its mother's AI is gone.
        self.mother = None

    def reroll(self):
        """Re-randomize the spread timer within the configured bounds."""
        self.turns_to_spawn = randint(self.min_spread_time, self.max_spread_time)

    def __str__(self):
        return "AI for the Sourdough Starter. Attacks nearby targets, and spreads sourdough starters more rarely than the mother dough."

    def take_turn(self, target, fov_map, game_map, entities):
        """Spread (or die with the mother); attack like StaticMonster."""
        results = []
        monster = self.owner
        done = False
        if self.turns_to_spawn <= 0:
            if self.mother and not self.mother.ai:
                # The mother dough is gone: the starter dies with it.
                results.append({"dead": monster})
            else:
                for y in [monster.y - 1, monster.y, monster.y + 1]:
                    for x in [monster.x - 1, monster.x, monster.x + 1]:
                        if not (x == monster.x and y == monster.y) and not game_map.is_blocked(x, y):
                            blocking_entities = get_blocking_entities_at_location(entities, x, y)
                            if blocking_entities is None:
                                results.append({"spawn_enemy": {"name": "sourdough_starter",
                                                                "x": x, "y": y, "mother": monster}})
                                monster.fighter.heal(10)
                                # Bug fix: reset via reroll() (random within the
                                # configured min/max bounds) instead of the
                                # mother dough's hard-coded 40-turn timer, which
                                # left reroll() dead code.
                                self.reroll()
                                done = True
                                break
                    if done:
                        break
        else:
            self.turns_to_spawn -= 1
        if fov_map.fov[monster.y][monster.x]:
            if target.fighter.is_effect("invisible"):
                attack_results = monster.fighter.attack(target)
                results.extend(attack_results)
        return results
class NeutralMonster:
    """AI that wanders peacefully until provoked into its aggressive AI."""

    def __init__(self, aggressive_ai):
        # AI instance to switch to when the monster is provoked.
        self.aggressive_ai = aggressive_ai

    def __str__(self):
        return "Neutral monster AI. Will remain neutral to the player unless attacked."

    def take_turn(self, target, fov_map, game_map, entities):
        """Wander one random step (may stay in place) and trigger traps."""
        results = []
        monster = self.owner
        if not self.owner.fighter.is_effect("stuck"):
            random_x = self.owner.x + randint(0, 2) - 1
            random_y = self.owner.y + randint(0, 2) - 1
            monster.move_towards(random_x, random_y, game_map, entities)
            results.extend(check_for_traps(monster, entities, game_map, fov_map))
        return results

    def become_aggressive(self):
        """Swap this monster's AI for its aggressive counterpart."""
        results = []
        self.owner.ai = self.aggressive_ai
        self.aggressive_ai.owner = self.owner
        # Bug fix: corrected the player-facing message typo
        # "become agressive" -> "becomes aggressive".
        results.append({'message': Message('The {0} becomes aggressive!'.format(self.owner.name),
                                           libtcod.red)})
        return results
| 43.767677 | 136 | 0.56012 | 2,036 | 17,332 | 4.533399 | 0.086444 | 0.045829 | 0.034128 | 0.019502 | 0.818527 | 0.793716 | 0.776273 | 0.74442 | 0.724919 | 0.698592 | 0 | 0.011824 | 0.351027 | 17,332 | 395 | 137 | 43.878481 | 0.808766 | 0.0015 | 0 | 0.771341 | 0 | 0.003049 | 0.071024 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.10061 | false | 0 | 0.015244 | 0.036585 | 0.222561 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
24794915f172061a0b0cea4fc7a2be6199d4d517 | 125 | py | Python | codecademy/io/read_line.py | haozai309/hello_python | deaa0621f549ff786bc36ccee48430d1d130690b | [
"Apache-2.0"
] | null | null | null | codecademy/io/read_line.py | haozai309/hello_python | deaa0621f549ff786bc36ccee48430d1d130690b | [
"Apache-2.0"
] | null | null | null | codecademy/io/read_line.py | haozai309/hello_python | deaa0621f549ff786bc36ccee48430d1d130690b | [
"Apache-2.0"
] | null | null | null | my_file = open("text.txt", "r")
print my_file.readline()
print my_file.readline()
print my_file.readline()
my_file.close()
| 15.625 | 31 | 0.728 | 21 | 125 | 4.095238 | 0.428571 | 0.348837 | 0.383721 | 0.662791 | 0.662791 | 0.662791 | 0.662791 | 0.662791 | 0 | 0 | 0 | 0 | 0.104 | 125 | 7 | 32 | 17.857143 | 0.767857 | 0 | 0 | 0.6 | 0 | 0 | 0.072 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.6 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
249b237b6a7378bcf0c89485e1fd1e94b674439f | 27,375 | py | Python | backend/frontend/tests/test_views.py | IINamelessII/YesOrNo | 0ebbdfbae73f0be7c807a8f6ca0ec7c2040cca19 | [
"Apache-2.0"
] | 3 | 2019-02-17T01:25:19.000Z | 2019-04-01T12:57:00.000Z | backend/frontend/tests/test_views.py | IINamelessII/YesOrNo | 0ebbdfbae73f0be7c807a8f6ca0ec7c2040cca19 | [
"Apache-2.0"
] | 3 | 2021-03-08T23:44:34.000Z | 2022-02-12T05:07:13.000Z | backend/frontend/tests/test_views.py | IINamelessII/YesOrNo | 0ebbdfbae73f0be7c807a8f6ca0ec7c2040cca19 | [
"Apache-2.0"
] | 2 | 2018-12-12T19:24:59.000Z | 2018-12-14T20:01:42.000Z | from importlib import import_module
from json import dumps
from random import randint
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.models import User
from django.http.request import HttpRequest
from django.test import TestCase
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.utils.encoding import force_bytes
from django.contrib.auth.tokens import default_token_generator
from model_mommy import mommy
from rest_framework.test import APIRequestFactory, force_authenticate, APIClient
from frontend import views
from frontend.models import Profile
from polls.models import Poll
class TestIndexView(TestCase):
    """Smoke test: the index page renders successfully."""

    def test_index_status_code(self):
        self.assertEquals(self.client.get('/').status_code, 200)
class TestLogoutView(TestCase):
    """Tests for the logout view with and without an active session."""

    def setUp(self):
        self.view = views.logout
        self.factory = APIRequestFactory()
        self.user_model = mommy.make('User')

    def _request_with_session(self):
        """Build a GET request carrying a fresh (empty) session store."""
        request = self.factory.get('/')
        engine = import_module(settings.SESSION_ENGINE)
        request.session = engine.SessionStore(None)
        return request

    def test_user_was_authenticated(self):
        request = self._request_with_session()
        force_authenticate(request, user=self.user_model)
        self.assertEquals(self.view(request).status_code, 200)

    def test_user_wasnt_authenticated(self):
        request = self._request_with_session()
        self.assertEquals(self.view(request).status_code, 200)

    def test_user_wasnt_authenticated_without_session(self):
        # No session attached at all: the view fails server-side.
        request = self.factory.get('/')
        self.assertEquals(self.view(request).status_code, 500)
class TestVoteYes(TestCase):
    """Tests for the voteYes view."""

    def setUp(self):
        self.user_model = mommy.make('User')
        self.poll_model = mommy.make('Poll')
        self.view = views.voteYes

        class TestRequest(HttpRequest):
            # Minimal request pre-authenticated as the test user.
            def __init__(request_self):
                super().__init__()
                request_self.user = self.user_model

        self.request = TestRequest

    def _request_for(self, poll_id):
        """Build a request whose JSON body targets the given poll id."""
        request = self.request()
        request._body = bytes(dumps({'id': poll_id}), 'utf-8')
        return request

    def test_id_exists(self):
        response = self.view(self._request_for(self.poll_model.id))
        self.assertEquals(response.status_code, 204)

    def test_id_dont_exist(self):
        # Pick a random id guaranteed not to belong to the existing poll.
        missing_id = randint(1, 2147483647)
        while missing_id == self.poll_model.id:
            missing_id = randint(1, 2147483647)
        response = self.view(self._request_for(missing_id))
        self.assertEquals(response.status_code, 500)

    def test_user_is_none(self):
        request = self._request_for(self.poll_model.id)
        request.user = None
        self.assertEquals(self.view(request).status_code, 500)
class TestVoteNo(TestCase):
    """Tests for the voteNo view."""

    def setUp(self):
        self.user_model = mommy.make('User')
        self.poll_model = mommy.make('Poll')
        self.view = views.voteNo

        class TestRequest(HttpRequest):
            # Minimal request pre-authenticated as the test user.
            def __init__(request_self):
                super().__init__()
                request_self.user = self.user_model

        self.request = TestRequest

    def _request_for(self, poll_id):
        """Build a request whose JSON body targets the given poll id."""
        request = self.request()
        request._body = bytes(dumps({'id': poll_id}), 'utf-8')
        return request

    def test_id_exists(self):
        response = self.view(self._request_for(self.poll_model.id))
        self.assertEquals(response.status_code, 204)

    def test_id_dont_exist(self):
        # Pick a random id guaranteed not to belong to the existing poll.
        missing_id = randint(1, 2147483647)
        while missing_id == self.poll_model.id:
            missing_id = randint(1, 2147483647)
        response = self.view(self._request_for(missing_id))
        self.assertEquals(response.status_code, 500)

    def test_user_is_none(self):
        request = self._request_for(self.poll_model.id)
        request.user = None
        self.assertEquals(self.view(request).status_code, 500)
class TestRateLike(TestCase):
    """Tests for the rateLike view."""

    def setUp(self):
        self.user_model = mommy.make('User')
        self.poll_model = mommy.make('Poll')
        self.view = views.rateLike

        class TestRequest(HttpRequest):
            # Minimal request pre-authenticated as the test user.
            def __init__(request_self):
                super().__init__()
                request_self.user = self.user_model

        self.request = TestRequest

    def _request_for(self, poll_id):
        """Build a request whose JSON body targets the given poll id."""
        request = self.request()
        request._body = bytes(dumps({'id': poll_id}), 'utf-8')
        return request

    def test_id_exists(self):
        response = self.view(self._request_for(self.poll_model.id))
        self.assertEquals(response.status_code, 204)

    def test_id_dont_exist(self):
        # Pick a random id guaranteed not to belong to the existing poll.
        missing_id = randint(1, 2147483647)
        while missing_id == self.poll_model.id:
            missing_id = randint(1, 2147483647)
        response = self.view(self._request_for(missing_id))
        self.assertEquals(response.status_code, 500)

    def test_user_is_none(self):
        request = self._request_for(self.poll_model.id)
        request.user = None
        self.assertEquals(self.view(request).status_code, 500)
class TestRateDislike(TestCase):
    """Tests for the rateDislike view."""

    def setUp(self):
        self.user_model = mommy.make('User')
        self.poll_model = mommy.make('Poll')
        self.view = views.rateDislike

        class TestRequest(HttpRequest):
            # Minimal request pre-authenticated as the test user.
            def __init__(request_self):
                super().__init__()
                request_self.user = self.user_model

        self.request = TestRequest

    def _request_for(self, poll_id):
        """Build a request whose JSON body targets the given poll id."""
        request = self.request()
        request._body = bytes(dumps({'id': poll_id}), 'utf-8')
        return request

    def test_id_exists(self):
        response = self.view(self._request_for(self.poll_model.id))
        self.assertEquals(response.status_code, 204)

    def test_id_dont_exist(self):
        # Pick a random id guaranteed not to belong to the existing poll.
        missing_id = randint(1, 2147483647)
        while missing_id == self.poll_model.id:
            missing_id = randint(1, 2147483647)
        response = self.view(self._request_for(missing_id))
        self.assertEquals(response.status_code, 500)

    def test_user_is_none(self):
        request = self._request_for(self.poll_model.id)
        request.user = None
        self.assertEquals(self.view(request).status_code, 500)
class TestAddPoll(TestCase):
    """Tests for the addPoll view: statement-length and flow validation."""

    def setUp(self):
        self.user_model = mommy.make('User')
        self.flow_model = mommy.make('Flow')
        self.view = views.addPoll

        class TestRequest(HttpRequest):
            # Minimal request pre-authenticated as the test user.
            def __init__(request_self):
                super().__init__()
                request_self.user = self.user_model

        self.request = TestRequest

    def _post_poll(self, flow, statement):
        """Send an addPoll request and return the response."""
        request = self.request()
        request._body = bytes(dumps({
            'flow': flow,
            'statement': statement
        }), 'utf-8')
        return self.view(request)

    def _assert_rejected(self, flow, statement):
        """Assert the request is rejected and no poll was created."""
        response = self._post_poll(flow, statement)
        self.assertEquals(response.status_code, 404)
        self.assertEquals(Poll.objects.filter(statement=statement).exists(), False)

    def test_OK(self):
        # Statement length within the accepted 10..500 range.
        text = 'This statement is OK'
        response = self._post_poll(self.flow_model.name, text)
        self.assertEquals(response.status_code, 204)
        self.assertEquals(Poll.objects.filter(statement=text).exists(), True)

    def test_length_of_statement_less_than_10(self):
        # Statement shorter than 10 characters must be rejected.
        self._assert_rejected(self.flow_model.name, 'Just 6')

    def test_length_of_statement_more_than_500(self):
        # Statement longer than 500 characters must be rejected.
        self._assert_rejected(self.flow_model.name, 'A' * 501)

    def test_flow_does_not_exist(self):
        # Unknown flow name must be rejected.
        # Bug fix: the original had `text = '...',` — the stray trailing comma
        # made the statement a tuple instead of a string, corrupting both the
        # request body and the Poll.objects.filter check.
        self._assert_rejected('Incorrect flow name', 'This statement is OK')
class TestSignin(TestCase):
    """Tests for the signin view."""

    def setUp(self):
        self.user_model = mommy.make('User')
        self.view = views.signin
        self.factory = APIRequestFactory()
        # Example of a correct, strong password. It is set manually because
        # the raw password of a mommy-made user cannot be recovered.
        self.password = '9Re5ghsS@^*zw?Pd'
        self.user_model.set_password(self.password)
        self.user_model.save()

    def _signin(self, payload):
        """Issue a signin request carrying `payload` as its JSON body.

        Returns the (request, response) pair so tests can inspect the session.
        """
        request = self.factory.get('/')
        engine = import_module(settings.SESSION_ENGINE)
        request.session = engine.SessionStore(None)
        request._body = bytes(dumps(payload), 'utf-8')
        return request, self.view(request)

    def test_OK_user_was_not_authenticated(self):
        # Valid credentials: success and no error message in the session.
        request, response = self._signin({
            'username': self.user_model.username,
            'password': self.password
        })
        self.assertEquals(response.status_code, 200)
        self.assertEquals(request.session.get('message'), None)

    def test_wrong_password_user_was_not_authenticated(self):
        # Wrong password: still 200, but an error message is queued.
        request, response = self._signin({
            'username': self.user_model.username,
            'password': self.password + 'It is wrong'
        })
        message = 'Wrong Username or Password, please try again or reset your Password'
        self.assertEquals(response.status_code, 200)
        self.assertEquals(request.session.get('message'), message)
        self.assertEquals(request.session.get('message_was_showed'), False)

    def test_data_is_wrong_user_was_not_authenticated(self):
        # Malformed payload (missing 'username' key): rejected outright.
        request, response = self._signin({
            'not an username': self.user_model.username,
            'password': self.password
        })
        self.assertEquals(response.status_code, 404)
        self.assertEquals(request.session.get('message'), None)
class TestSignup(TestCase):
def setUp(self):
self.user_model = mommy.make('User')
self.view = views.signup
self.factory = APIRequestFactory()
#Example of correct and strong password
self.password = '9Re5ghsS@^*zw?Pd'
#Set known password manually
#because we can not get raw password of self.user_model
self.user_model.set_password(self.password)
self.user_model.save()
#Not registred in system username
self.username = 'NewUser'
#Not registred in system email
self.email = 'newemail@gmail.com'
def test_all_data_is_OK(self):
#All data is OK
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'username': self.username,
'password': self.password,
'email': self.email
}), 'utf-8')
response = self.view(request)
message = 'Please follow the link received in the email to confirm registration of your account'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_data_is_wrong(self):
#Wrong data
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'not an username': self.username,
'password': self.password,
'email': self.email
}), 'utf-8')
response = self.view(request)
self.assertEquals(response.status_code, 404)
self.assertEquals(request.session.get('message'), None)
def test_email_is_exist_in_system(self):
#User with this email is exist in system
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'username': self.username,
'password': self.password,
'email': self.user_model.email
}), 'utf-8')
response = self.view(request)
message = 'Account has already been registered to this email'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_username_is_exist_in_system(self):
#User with this username is exist in system
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'username': self.user_model.username,
'password': self.password,
'email': self.email
}), 'utf-8')
response = self.view(request)
message = 'Account with this username already exists'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_length_of_password_is_less_than_8(self):
#Incorrect password, its length less than 8
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'username': self.username,
'password': 'Just 6',
'email': self.email
}), 'utf-8')
response = self.view(request)
message = 'Password must be at least 8 characters'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_incorrect_email(self):
#Incorrect email format
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'username': self.username,
'password': self.password,
'email': 'not an email adress'
}), 'utf-8')
response = self.view(request)
message = 'This email is incorrect'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_email_server_does_not_work(self):
#Some problems with email server
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'username': self.username,
'password': self.password,
'email': self.email
}), 'utf-8')
with self.settings(EMAIL_BACKEND='Not a backend actually'):
response = self.view(request)
message = 'Something went wrong, please try again'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
class TestActivation(TestCase):
def setUp(self):
self.user_model = mommy.make('User')
self.view = views.activation
self.factory = APIRequestFactory()
#Correct uid64
self.uid = urlsafe_base64_encode(force_bytes(self.user_model.pk)).decode()
#Correct token
self.token = default_token_generator.make_token(self.user_model)
def test_uid_and_token_are_OK_and_user_is_not_active(self):
self.user_model.is_active = False
self.user_model.save()
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, self.uid, self.token)
message = 'Registration successfully completed'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
def test_uid_and_token_are_OK_and_user_is_active(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, self.uid, self.token)
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
def test_uid_is_not_correct_token_is_OK_and_user_is_not_active(self):
self.user_model.is_active = False
self.user_model.save()
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, 'not a correct uid', self.token)
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
def test_uid_is_OK_token_is_incorrect_and_user_is_not_active(self):
self.user_model.is_active = False
self.user_model.save()
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, self.uid, 'Not a correct token')
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
def test_uid_and_token_was_not_passed_and_user_is_not_active(self):
self.user_model.is_active = False
self.user_model.save()
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request)
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
class TestResetPassword(TestCase):
def setUp(self):
self.user_model = mommy.make('User')
self.view = views.reset_password
self.factory = APIRequestFactory()
self.email = self.user_model.email
def test_email_exists_in_system_and_user_is_active(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'email': self.email
}), 'utf-8')
response = self.view(request)
message = 'Please follow the link in the email to restore access to your account'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_email_exists_in_system_and_user_is_not_active(self):
self.user_model.is_active = False
self.user_model.save()
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'email': self.email
}), 'utf-8')
response = self.view(request)
message = 'Please follow the link in the email to complete the registration of your account.'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_email_does_not_exist_in_system(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'email': 'not an email actually'
}), 'utf-8')
response = self.view(request)
message = 'Account with this email not found'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_passed_wrong_data(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'not an email': self.email
}), 'utf-8')
response = self.view(request)
self.assertEquals(response.status_code, 404)
class TestResetPasswordLink(TestCase):
def setUp(self):
self.user_model = mommy.make('User')
self.view = views.reset_password_link
self.factory = APIRequestFactory()
#Correct uid64
self.uid = urlsafe_base64_encode(force_bytes(self.user_model.pk)).decode()
#Correct token
self.token = default_token_generator.make_token(self.user_model)
def test_uid_and_token_are_OK(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, self.uid, self.token)
message = None
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_uid_is_OK_and_token_is_not_correct(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, self.uid, 'Not a token actually')
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
def test_uid_is_not_correct_and_token_is_OK(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request, 'Not an uid actually', self.token)
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
def test_uid_and_token_were_not_passed(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
response = self.view(request)
message = 'Sorry, this link is not valid'
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), message)
class TestResetPasswordForm(TestCase):
def setUp(self):
self.user_model = mommy.make('User')
self.view = views.reset_password_form
self.factory = APIRequestFactory()
#Correct uid64
self.uid = urlsafe_base64_encode(force_bytes(self.user_model.pk)).decode()
#Correct token
self.token = default_token_generator.make_token(self.user_model)
#Example of correct and strong password
self.password = '9Re5ghsS@^*zw?Pd'
def test_uid_and_token_and_password_are_OK_user_is_active(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'password': self.password
}), 'utf-8')
response = self.view(request, self.uid, self.token)
self.assertEquals(response.status_code, 302)
self.assertEquals(request.session.get('message'), None)
def test_uid_and_token_and_password_are_OK_user_is_not_active(self):
self.user_model.is_active = False
self.user_model.save()
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'password': self.password
}), 'utf-8')
response = self.view(request, self.uid, self.token)
message = 'Please follow the link in the email to complete the registration of your account.'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
def test_uid_and_token_are_OK_passed_wrong_data_user_is_active(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'Not a password': self.password
}), 'utf-8')
response = self.view(request, self.uid, self.token)
self.assertEquals(response.status_code, 404)
self.assertEquals(request.session.get('message'), None)
def test_uid_and_token_are_OK_length_of_password_less_than_8_user_is_active(self):
request = self.factory.get('/')
engine = import_module(settings.SESSION_ENGINE)
session_key = None
request.session = engine.SessionStore(session_key)
request._body = bytes(dumps({
'password': 'Just 6'
}), 'utf-8')
response = self.view(request, self.uid, self.token)
message = 'Password must be at least 8 characters'
self.assertEquals(response.status_code, 200)
self.assertEquals(request.session.get('message'), message)
| 39.789244 | 104 | 0.646685 | 3,231 | 27,375 | 5.283813 | 0.066543 | 0.056701 | 0.036551 | 0.082591 | 0.878866 | 0.860415 | 0.849637 | 0.843135 | 0.836164 | 0.830483 | 0 | 0.015096 | 0.247452 | 27,375 | 687 | 105 | 39.847162 | 0.813601 | 0.034557 | 0 | 0.802426 | 0 | 0 | 0.075682 | 0 | 0 | 0 | 0 | 0 | 0.135182 | 1 | 0.110919 | false | 0.064125 | 0.07799 | 0 | 0.220104 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
24c780cf7b8ec9591b6bb5d5ca292f38e30f0585 | 38,520 | py | Python | scripts/graph_creation/graph_one_experiment.py | mrjojo11/malpaca-pub | 26fd3a7045288bed66d624e0f5593067ff05952d | [
"MIT"
] | null | null | null | scripts/graph_creation/graph_one_experiment.py | mrjojo11/malpaca-pub | 26fd3a7045288bed66d624e0f5593067ff05952d | [
"MIT"
] | null | null | null | scripts/graph_creation/graph_one_experiment.py | mrjojo11/malpaca-pub | 26fd3a7045288bed66d624e0f5593067ff05952d | [
"MIT"
] | null | null | null | import csv
import glob
import math
import os
import sys
from random import random, seed
from timeit import default_timer as timer
import time
from statistics import mean
from pathlib import Path
import networkx as nx
import numpy as np
from adjustText import adjust_text
from scapy.layers.inet import IP, UDP
from scapy.utils import PcapWriter, PcapReader
import tkinter as tk
from tkinter import filedialog
import zat
from zat.log_to_dataframe import LogToDataFrame
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
from matplotlib.pyplot import cm
import matplotlib.transforms as mtrans
from PIL import Image
class Graph_One_Experiment():
@staticmethod
def creating_graphs_for_one_experiment():
path_to_csv_file = r"C:\Users\Johannes\iCloudDrive\Uni\CSE\Year 3\Q4\Code\Dataset\Results\20_none_enriched\Experiment 1 -\15_fixed_threshold/summary_15_fixed_threshold_15.csv"
#path_to_csv_file = r"C:\Users\Johannes\iCloudDrive\Uni\CSE\Year 3\Q4\Code\Dataset\Results\20_none_enriched\Experiment 1 -\5_fixed_threshold/summary_5_fixed_threshold_5.csv"
#path_to_csv_file = r"C:\Users\Johannes\iCloudDrive\Uni\CSE\Year 3\Q4\Code\Dataset\Results\20_none_enriched\Experiment 1 -\100_fixed_threshold_5_skip\summary_100_fixed_threshold_5_skip_100.csv"
summary_csv_df = pd.read_csv(path_to_csv_file)
application_name_graph = "application_name_graph.png"
path_to_application_name_legend_storage = "application_name_legend.png"
path_to_application_name_combined = 'application_name_combined.png'
application_category_name_graph = "application_category_name_graph.png"
path_to_application_category_name_legend_storage = "application_category_name_legend.png"
path_to_application_category_name_combined = 'application_category_name_combined.png'
label_distribution_graph = "label_graph.png"
path_to_label_legend_storage = "label_legend.png"
path_to_label_combined = 'label_combined.png'
detailed_label_distribution_graph = "detailed_label_graph.png"
path_to_detailed_label_legend_storage = "detailed_label_legend.png"
path_to_detailed_label_combined = 'detailed_label_combined.png'
name_distribution_graph = "name_graph.png"
path_to_name_legend_storage = "name_legend.png"
path_to_name_combined = 'name_combined.png'
overall_detailed_label_df = summary_csv_df.groupby("clusnum")["application_name"].value_counts().to_frame()
overall_detailed_label_df = overall_detailed_label_df.rename(columns={"application_name": "count"})
overall_detailed_label_df = overall_detailed_label_df.reset_index()
clusters = overall_detailed_label_df["clusnum"].unique().tolist()
if len(clusters) < 4:
ncols = len(clusters)
else:
ncols = 4
nrows = math.ceil(len(clusters) / 4)
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(7, 7))
list_of_names_dfs = []
for cluster in clusters:
cluster_df = overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["application_name", "count"]]
cluster_df["application_name"] = np.where(cluster_df["count"] <= 4, "Other", cluster_df.application_name)
cluster_df = cluster_df.groupby("application_name")["count"].aggregate(sum).reset_index().sort_values(
by=["count"], ascending=False)
list_of_names_dfs.append(cluster_df)
detailed_label_name_df = list_of_names_dfs.pop()
for name_df in list_of_names_dfs:
detailed_label_name_df = detailed_label_name_df.append(name_df)
detailed_label_name_df = detailed_label_name_df.groupby("application_name")["count"].aggregate(
sum).reset_index().sort_values(by=["count"])
unique_application_category_names = detailed_label_name_df["application_name"].tolist()
colors = {}
cmap = cm.get_cmap('viridis', len(unique_application_category_names))
for index, color in enumerate(cmap.colors):
application_name = unique_application_category_names.pop()
colors[application_name] = color
if len(clusters) == 1:
cluster_df["relative_count"] = round((cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
ax.pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["application_name"]])
ax.set_title("Cluster " + str(cluster))
else:
for index, cluster in enumerate(clusters):
cluster_df = overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["application_name", "count"]]
cluster_df["application_name"] = np.where(cluster_df["count"] <= 4, "Other", cluster_df.application_name)
cluster_df = cluster_df.groupby("application_name")["count"].aggregate(sum).reset_index().sort_values(
by=["count"])
cluster_df["relative_count"] = round((cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
if (len(cluster_df.index) > 7):
cluster_df["relative_count"] = np.where(cluster_df["relative_count"] <= 5, "", cluster_df["relative_count"])
ax[math.floor(index / 4), index % 4].pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["application_name"]], labeldistance=1.25)
ax[math.floor(index / 4), index % 4].set_title("Cluster " + str(cluster))
if len(clusters) % 4 != 0:
if len(clusters) > 1:
for missing_axis in range(4 - len(clusters) % 4, 4):
ax[nrows-1, missing_axis].axis('off')
markers = [plt.Line2D([0, 0], [0, 0], color=color, marker='o', linestyle='') for color in colors.values()]
plt.suptitle("Application Name Distribution per Cluster", y=0.985, x=0.5)
fig.tight_layout()
fig.canvas.draw()
fig.savefig(application_name_graph, dpi=1200)
legend = plt.legend(handles=markers, labels=colors.keys(), loc=3, framealpha=1, frameon=True, bbox_to_anchor=(2, 0))
separate_legend = legend.figure
separate_legend.canvas.draw()
bbox = legend.get_window_extent()
bbox = bbox.from_extents(*(bbox.extents + np.array([-4 ,-4 ,4 ,4])))
bbox = bbox.transformed(fig.dpi_scale_trans.inverted())
fig.savefig(path_to_application_name_legend_storage, dpi=1200, bbox_inches=bbox)
legend.remove()
plt.close()
plt.clf()
graph_img = Image.open(application_name_graph)
legend_im = Image.open(path_to_application_name_legend_storage)
widths_graph = graph_img.width
heights_graph = graph_img.height
widths_legend = legend_im.width
heights_legend = legend_im.height
if heights_legend > heights_graph:
resize_percentage = heights_graph / heights_legend
new_width = int(resize_percentage * widths_legend)
legend_im = legend_im.resize((new_width, heights_graph), Image.ANTIALIAS)
total_width = widths_graph + widths_legend
y_offset = int((heights_graph - heights_legend) / 2)
combined_im = Image.new('RGB', (total_width, heights_graph), color=(255, 255, 255, 1))
combined_im.paste(graph_img, (0, 0))
combined_im.paste(legend_im, (widths_graph, y_offset))
combined_im.save(path_to_application_name_combined)
overall_detailed_label_df = summary_csv_df.groupby("clusnum")[
"application_category_name"].value_counts().to_frame()
overall_detailed_label_df = overall_detailed_label_df.rename(columns={"application_category_name": "count"})
overall_detailed_label_df = overall_detailed_label_df.reset_index()
clusters = overall_detailed_label_df["clusnum"].unique().tolist()
if len(clusters) < 4:
ncols = len(clusters)
else:
ncols = 4
nrows = math.ceil(len(clusters) / 4)
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(7, 7))
list_of_names_dfs = []
for cluster in clusters:
cluster_df = overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["application_category_name", "count"]]
cluster_df = cluster_df.groupby("application_category_name")["count"].aggregate(
sum).reset_index().sort_values(
by=["count"], ascending=False)
list_of_names_dfs.append(cluster_df)
detailed_label_name_df = list_of_names_dfs.pop()
for name_df in list_of_names_dfs:
detailed_label_name_df = detailed_label_name_df.append(name_df)
detailed_label_name_df = detailed_label_name_df.groupby("application_category_name")["count"].aggregate(
sum).reset_index().sort_values(by=["count"])
unique_application_category_names = detailed_label_name_df["application_category_name"].tolist()
colors = {}
cmap = cm.get_cmap('cividis', len(unique_application_category_names))
for index, color in enumerate(cmap.colors):
application_name = unique_application_category_names.pop()
colors[application_name] = color
for index, cluster in enumerate(clusters):
cluster_df = overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["application_category_name", "count"]]
cluster_df = cluster_df.groupby("application_category_name")["count"].aggregate(
sum).reset_index().sort_values(
by=["count"])
cluster_df["relative_count"] = round((cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
if (len(cluster_df.index) > 7):
cluster_df["relative_count"] = np.where(cluster_df["relative_count"] <= 5, "",
cluster_df["relative_count"])
if len(clusters) == 1:
ax.pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["application_category_name"]])
ax.set_title("Cluster " + str(cluster))
else:
ax[math.floor(index / 4), index % 4].pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["application_category_name"]], labeldistance=1.25)
ax[math.floor(index / 4), index % 4].set_title("Cluster " + str(cluster))
if len(clusters) % 4 != 0:
if len(clusters) > 1:
for missing_axis in range(4 - len(clusters) % 4, 4):
ax[nrows-1, missing_axis].axis('off')
markers = [plt.Line2D([0, 0], [0, 0], color=color, marker='o', linestyle='') for color in colors.values()]
fig.subplots_adjust(bottom=0.25)
plt.suptitle("Application Category Name Distribution per Cluster", y=0.985, x=0.5)
fig.tight_layout()
fig.canvas.draw()
fig.savefig(application_category_name_graph, dpi=1200)
legend = plt.legend(handles=markers, labels=colors.keys(), loc=3, framealpha=1, frameon=True, bbox_to_anchor=(2, 0))
separate_legend = legend.figure
separate_legend.canvas.draw()
bbox = legend.get_window_extent()
bbox = bbox.from_extents(*(bbox.extents + np.array([-4 ,-4 ,4 ,4])))
bbox = bbox.transformed(fig.dpi_scale_trans.inverted())
fig.savefig(path_to_application_category_name_legend_storage, dpi=1200, bbox_inches=bbox)
legend.remove()
plt.close()
plt.clf()
graph_img = Image.open(application_category_name_graph)
legend_im = Image.open(path_to_application_category_name_legend_storage)
widths_graph = graph_img.width
heights_graph = graph_img.height
widths_legend = legend_im.width
heights_legend = legend_im.height
if heights_legend > heights_graph:
resize_percentage = heights_graph / heights_legend
new_width = int(resize_percentage * widths_legend)
legend_im = legend_im.resize((new_width, heights_graph), Image.ANTIALIAS)
total_width = widths_graph + widths_legend
y_offset = int((heights_graph - heights_legend) / 2)
combined_im = Image.new('RGB', (total_width, heights_graph), color=(255, 255, 255, 1))
combined_im.paste(graph_img, (0, 0))
combined_im.paste(legend_im, (widths_graph, y_offset))
combined_im.save(path_to_application_category_name_combined)
overall_detailed_label_df = summary_csv_df.groupby("clusnum")["label"].value_counts().to_frame()
overall_detailed_label_df = overall_detailed_label_df.rename(columns={"label": "count"})
overall_detailed_label_df = overall_detailed_label_df.reset_index()
clusters = overall_detailed_label_df["clusnum"].unique().tolist()
if len(clusters) < 4:
ncols = len(clusters)
else:
ncols = 4
nrows = math.ceil(len(clusters) / 4)
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(7, 7))
colors = {}
colors["Malicious"] = "r"
colors["Benign"] = "g"
colors["Unknown"] = "grey"
for index, cluster in enumerate(clusters):
cluster_df = \
overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["label", "count"]]
cluster_df = cluster_df.groupby("label")["count"].aggregate(
sum).reset_index().sort_values(
by=["count"])
cluster_df["relative_count"] = round((cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
if (len(cluster_df.index) > 7):
cluster_df["relative_count"] = np.where(cluster_df["relative_count"] <= 5, "",
cluster_df["relative_count"])
if len(clusters) == 1:
ax.pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["label"]])
ax.set_title("Cluster " + str(cluster))
else:
ax[math.floor(index / 4), index % 4].pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["label"]], labeldistance=1.25)
ax[math.floor(index / 4), index % 4].set_title("Cluster " + str(cluster))
if len(clusters) % 4 != 0:
if len(clusters) > 1:
for missing_axis in range(4 - len(clusters) % 4, 4):
ax[nrows-1, missing_axis].axis('off')
markers = [plt.Line2D([0, 0], [0, 0], color=color, marker='o', linestyle='') for color in colors.values()]
fig.subplots_adjust(bottom=0.25)
plt.suptitle("Label Distribution per Cluster", y=0.985, x=0.5)
fig.tight_layout()
fig.canvas.draw()
fig.savefig(label_distribution_graph, dpi=1200)
legend = plt.legend(handles=markers, labels=colors.keys(), loc=3, framealpha=1, frameon=True,
bbox_to_anchor=(2, 0))
separate_legend = legend.figure
separate_legend.canvas.draw()
bbox = legend.get_window_extent()
bbox = bbox.from_extents(*(bbox.extents + np.array([-4, -4, 4, 4])))
bbox = bbox.transformed(fig.dpi_scale_trans.inverted())
fig.savefig(path_to_label_legend_storage, dpi=1200, bbox_inches=bbox)
legend.remove()
plt.close()
plt.clf()
graph_img = Image.open(label_distribution_graph)
legend_im = Image.open(path_to_label_legend_storage)
widths_graph = graph_img.width
heights_graph = graph_img.height
widths_legend = legend_im.width
heights_legend = legend_im.height
if heights_legend > heights_graph:
resize_percentage = heights_graph / heights_legend
new_width = int(resize_percentage * widths_legend)
legend_im = legend_im.resize((new_width, heights_graph), Image.ANTIALIAS)
total_width = widths_graph + widths_legend
y_offset = int((heights_graph - heights_legend) / 2)
combined_im = Image.new('RGB', (total_width, heights_graph), color=(255, 255, 255, 1))
combined_im.paste(graph_img, (0, 0))
combined_im.paste(legend_im, (widths_graph, y_offset))
combined_im.save(path_to_label_combined)
overall_detailed_label_df = summary_csv_df.groupby("clusnum")["detailed_label"].value_counts().to_frame()
overall_detailed_label_df = overall_detailed_label_df.rename(columns={"detailed_label": "count"})
overall_detailed_label_df = overall_detailed_label_df.reset_index()
clusters = overall_detailed_label_df["clusnum"].unique().tolist()
if len(clusters) < 4:
ncols = len(clusters)
else:
ncols = 4
nrows = math.ceil(len(clusters) / 4)
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(7, 7))
list_of_names_dfs = []
for cluster in clusters:
cluster_df = overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["detailed_label", "count"]]
cluster_df["detailed_label"] = np.where(cluster_df["detailed_label"] == "-", "Unknown",
cluster_df.detailed_label)
cluster_df = cluster_df.groupby("detailed_label")["count"].aggregate(sum).reset_index().sort_values(
by=["count"], ascending=False)
list_of_names_dfs.append(cluster_df)
detailed_label_name_df = list_of_names_dfs.pop()
for name_df in list_of_names_dfs:
detailed_label_name_df = detailed_label_name_df.append(name_df)
detailed_label_name_df = detailed_label_name_df.groupby("detailed_label")["count"].aggregate(
sum).reset_index().sort_values(by=["count"])
unique_application_category_names = detailed_label_name_df["detailed_label"].tolist()
colors = {}
cmap = cm.get_cmap('plasma', len(unique_application_category_names))
for index, color in enumerate(cmap.colors):
application_name = unique_application_category_names.pop()
colors[application_name] = color
for index, cluster in enumerate(clusters):
cluster_df = overall_detailed_label_df[overall_detailed_label_df["clusnum"] == cluster][
["detailed_label", "count"]]
cluster_df["detailed_label"] = np.where(cluster_df["detailed_label"] == "-", "Unknown",
cluster_df.detailed_label)
cluster_df = cluster_df.groupby("detailed_label")["count"].aggregate(sum).reset_index().sort_values(
by=["count"])
cluster_df["relative_count"] = round((cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
if (len(cluster_df.index) > 7):
cluster_df["relative_count"] = np.where(cluster_df["relative_count"] <= 5, "",
cluster_df["relative_count"])
if len(clusters) == 1:
ax.pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["detailed_label"]])
ax.set_title("Cluster " + str(cluster))
else:
ax[math.floor(index / 4), index % 4].pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["detailed_label"]], labeldistance=1.25)
ax[math.floor(index / 4), index % 4].set_title("Cluster " + str(cluster))
if len(clusters) % 4 != 0:
if len(clusters) > 1:
for missing_axis in range(4 - len(clusters) % 4, 4):
ax[nrows-1, missing_axis].axis('off')
markers = [plt.Line2D([0, 0], [0, 0], color=color, marker='o', linestyle='') for color in colors.values()]
fig.subplots_adjust(bottom=0.25)
plt.suptitle("Detailed Label Distribution per Cluster", y=0.985, x=0.5)
fig.tight_layout()
fig.canvas.draw()
fig.savefig(detailed_label_distribution_graph, dpi=1200)
legend = plt.legend(handles=markers, labels=colors.keys(), loc=3, framealpha=1, frameon=True,
bbox_to_anchor=(2, 0))
separate_legend = legend.figure
separate_legend.canvas.draw()
bbox = legend.get_window_extent()
bbox = bbox.from_extents(*(bbox.extents + np.array([-4, -4, 4, 4])))
bbox = bbox.transformed(fig.dpi_scale_trans.inverted())
fig.savefig(path_to_detailed_label_legend_storage, dpi=1200, bbox_inches=bbox)
legend.remove()
plt.close()
plt.clf()
graph_img = Image.open(detailed_label_distribution_graph)
legend_im = Image.open(path_to_detailed_label_legend_storage)
widths_graph = graph_img.width
heights_graph = graph_img.height
widths_legend = legend_im.width
heights_legend = legend_im.height
if heights_legend > heights_graph:
resize_percentage = heights_graph / heights_legend
new_width = int(resize_percentage * widths_legend)
legend_im = legend_im.resize((new_width, heights_graph), Image.ANTIALIAS)
total_width = widths_graph + widths_legend
y_offset = int((heights_graph - heights_legend) / 2)
combined_im = Image.new('RGB', (total_width, heights_graph), color=(255, 255, 255, 1))
combined_im.paste(graph_img, (0, 0))
combined_im.paste(legend_im, (widths_graph, y_offset))
combined_im.save(path_to_detailed_label_combined)
overall_name_df = summary_csv_df.groupby("clusnum")["name"].value_counts().to_frame()
overall_name_df = overall_name_df.rename(columns={"name": "count"})
overall_name_df = overall_name_df.reset_index()
clusters = overall_name_df["clusnum"].unique().tolist()
if len(clusters) < 4:
ncols = len(clusters)
else:
ncols = 4
nrows = math.ceil(len(clusters) / 4)
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(7, 7))
list_of_names_dfs = []
for cluster in clusters:
cluster_df = overall_name_df[overall_name_df["clusnum"] == cluster][
["name", "count"]]
cluster_df = cluster_df.groupby("name")["count"].aggregate(sum).reset_index().sort_values(
by=["count"], ascending=False)
list_of_names_dfs.append(cluster_df)
detailed_label_name_df = list_of_names_dfs.pop()
for name_df in list_of_names_dfs:
detailed_label_name_df = detailed_label_name_df.append(name_df)
detailed_label_name_df = detailed_label_name_df.groupby("name")["count"].aggregate(
sum).reset_index().sort_values(by=["count"])
unique_application_category_names = detailed_label_name_df["name"].tolist()
colors = {}
cmap = cm.get_cmap('inferno', len(unique_application_category_names))
for index, color in enumerate(cmap.colors):
application_name = unique_application_category_names.pop()
colors[application_name] = color
for index, cluster in enumerate(clusters):
cluster_df = overall_name_df[overall_name_df["clusnum"] == cluster][
["name", "count"]]
cluster_df = cluster_df.groupby("name")["count"].aggregate(sum).reset_index().sort_values(
by=["count"])
cluster_df["relative_count"] = round((cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
if (len(cluster_df.index) >= 7):
cluster_df["relative_count"] = np.where(cluster_df["relative_count"] <= 7, "",
cluster_df["relative_count"])
if len(clusters) == 1:
ax.pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["name"]])
ax.set_title("Cluster " + str(cluster))
else:
ax[math.floor(index / 4), index % 4].pie(cluster_df["count"], labels=cluster_df["relative_count"],
colors=[colors[key] for key in cluster_df["name"]], labeldistance=1.25)
ax[math.floor(index / 4), index % 4].set_title("Cluster " + str(cluster))
if len(clusters) % 4 != 0:
if len(clusters) > 1:
for missing_axis in range(4 - len(clusters) % 4, 4):
ax[nrows-1, missing_axis].axis('off')
markers = [plt.Line2D([0, 0], [0, 0], color=color, marker='o', linestyle='') for color in colors.values()]
fig.subplots_adjust(bottom=0.25)
plt.suptitle("Device / Malware Distribution per Cluster", y=0.985, x=0.5)
fig.tight_layout()
fig.canvas.draw()
fig.savefig(name_distribution_graph, dpi=1200)
legend = plt.legend(handles=markers, labels=colors.keys(), loc=3, framealpha=1, frameon=True,
bbox_to_anchor=(2, 0))
separate_legend = legend.figure
separate_legend.canvas.draw()
bbox = legend.get_window_extent()
bbox = bbox.from_extents(*(bbox.extents + np.array([-4, -4, 4, 4])))
bbox = bbox.transformed(fig.dpi_scale_trans.inverted())
fig.savefig(path_to_name_legend_storage, dpi=1200, bbox_inches=bbox)
legend.remove()
plt.close()
plt.clf()
graph_img = Image.open(name_distribution_graph)
legend_im = Image.open(path_to_name_legend_storage)
widths_graph = graph_img.width
heights_graph = graph_img.height
widths_legend = legend_im.width
heights_legend = legend_im.height
if heights_legend > heights_graph:
resize_percentage = heights_graph / heights_legend
new_width = int(resize_percentage * widths_legend)
legend_im = legend_im.resize((new_width, heights_graph), Image.ANTIALIAS)
total_width = widths_graph + widths_legend
y_offset = int((heights_graph - heights_legend) / 2)
combined_im = Image.new('RGB', (total_width, heights_graph), color=(255, 255, 255, 1))
combined_im.paste(graph_img, (0, 0))
combined_im.paste(legend_im, (widths_graph, y_offset))
combined_im.save(path_to_name_combined)
@staticmethod
def per_cluster_get_application_pie_chart(path_to_csv_file=None):
    """Draw four per-cluster pie-chart figures from a clustering summary CSV.

    One figure each for the ``application_name``, ``application_category_name``,
    ``label`` and ``detailed_label`` columns.  Every cluster gets one pie whose
    wedges are labelled with their relative percentage, plus a shared legend
    anchored under the central axis.  Figures are shown interactively.

    :param path_to_csv_file: summary CSV containing a ``clusnum`` column and
        the four label columns above; defaults to the original hard-coded
        result file for backward compatibility.
    """
    if path_to_csv_file is None:
        path_to_csv_file = (
            "C:/Users/Johannes/iCloudDrive/Uni/CSE/Year 3/Q4/Code/Results/"
            "Results 2 - Split Connection Into X Clusters/20_threshold_1_part/"
            "summary_20_threshold_1_part_20.csv")

    def _counts_per_cluster(csv_df, column):
        # Frequency table of `column` values per cluster, with a generic
        # "count" column so the helpers below are column-agnostic.
        counts = csv_df.groupby("clusnum")[column].value_counts().to_frame()
        counts = counts.rename(columns={column: "count"})
        return counts.reset_index()

    def _cluster_slice(counts, cluster, column, other_threshold, rename_dash):
        # Rows of one cluster, with rare values folded into "Other" and the
        # "-" placeholder renamed to "Unknown" when requested.
        cluster_df = counts[counts["clusnum"] == cluster][[column, "count"]]
        if other_threshold is not None:
            cluster_df[column] = np.where(
                cluster_df["count"] <= other_threshold, "Other",
                cluster_df[column])
        if rename_dash:
            cluster_df[column] = np.where(
                cluster_df[column] == "-", "Unknown", cluster_df[column])
        return cluster_df.groupby(column)["count"].aggregate(
            "sum").reset_index()

    def _build_colors(counts, clusters, column, cmap_name, other_threshold,
                      rename_dash):
        # One fixed color per distinct (post-folding) value across all
        # clusters, assigned so the most frequent value gets the first
        # colormap entry.  pd.concat replaces the deprecated
        # DataFrame.append (removed in pandas 2.0).
        merged = pd.concat(
            [_cluster_slice(counts, c, column, other_threshold, rename_dash)
             for c in clusters])
        merged = merged.groupby(column)["count"].aggregate(
            "sum").reset_index().sort_values(by=["count"])
        names = merged[column].tolist()
        colors = {}
        # NOTE(review): cm.get_cmap is deprecated in newer matplotlib;
        # kept for compatibility with the version this project uses.
        cmap = cm.get_cmap(cmap_name, len(names))
        for color in cmap.colors:
            colors[names.pop()] = color
        return colors

    def _plot(counts, column, colors, title, anchor_y,
              other_threshold=None, rename_dash=False):
        # One pie per cluster, labelled with relative percentages, plus a
        # shared legend anchored below the central axis.
        clusters = counts["clusnum"].unique().tolist()
        fig, ax = plt.subplots(nrows=1, ncols=len(clusters))
        for index, cluster in enumerate(clusters):
            cluster_df = _cluster_slice(
                counts, cluster, column, other_threshold,
                rename_dash).sort_values(by=["count"])
            cluster_df["relative_count"] = round(
                (cluster_df["count"] / cluster_df["count"].sum()) * 100, 2)
            ax[index].pie(cluster_df["count"],
                          labels=cluster_df["relative_count"],
                          colors=[colors[key] for key in cluster_df[column]])
            ax[index].set_title("Cluster " + str(cluster))
        markers = [plt.Line2D([0, 0], [0, 0], color=color, marker='o',
                              linestyle='') for color in colors.values()]
        fig.subplots_adjust(bottom=0.25)
        central_axis = int(len(clusters) / 2)
        ax[central_axis].legend(markers, colors.keys(), numpoints=1,
                                loc="lower center",
                                bbox_to_anchor=(0.5, anchor_y))
        plt.suptitle(title, y=0.9, x=0.5)
        plt.show()
        plt.close()

    csv_df = pd.read_csv(path_to_csv_file)

    counts = _counts_per_cluster(csv_df, "application_name")
    clusters = counts["clusnum"].unique().tolist()
    colors = _build_colors(counts, clusters, "application_name", 'viridis',
                           4, False)
    _plot(counts, "application_name", colors,
          "Application Name Distribution per Cluster", -1, other_threshold=4)

    counts = _counts_per_cluster(csv_df, "application_category_name")
    clusters = counts["clusnum"].unique().tolist()
    colors = _build_colors(counts, clusters, "application_category_name",
                           'cividis', 4, False)
    _plot(counts, "application_category_name", colors,
          "Application Name Category Distribution per Cluster", -1,
          other_threshold=4)

    counts = _counts_per_cluster(csv_df, "label")
    # Labels use fixed, semantically meaningful colors instead of a colormap.
    colors = {"Malicious": "r", "Benign": "g", "Unknown": "grey"}
    _plot(counts, "label", colors, "Label Distribution per Cluster", -0.6)

    counts = _counts_per_cluster(csv_df, "detailed_label")
    clusters = counts["clusnum"].unique().tolist()
    colors = _build_colors(counts, clusters, "detailed_label", 'plasma',
                           3, True)
    _plot(counts, "detailed_label", colors,
          "Detailed Label Distribution per Cluster", -1.3,
          other_threshold=3, rename_dash=True)
701ab8a6e4b11c683caf0c00a7ebda4908f70cdd | 82 | py | Python | mowl/datasets/__init__.py | bio-ontology-research-group/OntoML | 4cdc17dc7ee26464db96c67838c3e77dba5318f9 | [
"BSD-3-Clause"
] | null | null | null | mowl/datasets/__init__.py | bio-ontology-research-group/OntoML | 4cdc17dc7ee26464db96c67838c3e77dba5318f9 | [
"BSD-3-Clause"
] | null | null | null | mowl/datasets/__init__.py | bio-ontology-research-group/OntoML | 4cdc17dc7ee26464db96c67838c3e77dba5318f9 | [
"BSD-3-Clause"
] | null | null | null | from .ppi_yeast import PPIYeastDataset
from .ppi_yeast import PPIYeastSlimDataset
| 27.333333 | 42 | 0.878049 | 10 | 82 | 7 | 0.6 | 0.2 | 0.342857 | 0.514286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 82 | 2 | 43 | 41 | 0.945946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
701cf5a6ce68965a05fb3a4e4d39014adaabec0c | 113 | py | Python | tenning/utils/__init__.py | guilherme9820/Tenning | c0fe7695ef3dd791ea1083f39d6b312266fb0512 | [
"MIT"
] | null | null | null | tenning/utils/__init__.py | guilherme9820/Tenning | c0fe7695ef3dd791ea1083f39d6b312266fb0512 | [
"MIT"
] | null | null | null | tenning/utils/__init__.py | guilherme9820/Tenning | c0fe7695ef3dd791ea1083f39d6b312266fb0512 | [
"MIT"
] | null | null | null | from .generic_utils import *
from .data_utils import *
from .rotation_utils import *
from .linalg_utils import *
| 22.6 | 29 | 0.787611 | 16 | 113 | 5.3125 | 0.4375 | 0.517647 | 0.529412 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.141593 | 113 | 4 | 30 | 28.25 | 0.876289 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
70858b4c844bcaaf8c3bf24d220b8296fdb45a63 | 1,382 | py | Python | analysis/otherFiles/test.py | Sage-Bionetworks/JHU-biobank | 2e0dbdb64d756beba3fc215c6997ddf81916cd59 | [
"Apache-2.0"
] | 1 | 2020-07-13T15:52:22.000Z | 2020-07-13T15:52:22.000Z | analysis/otherFiles/test.py | Sage-Bionetworks/JHU-biobank | 2e0dbdb64d756beba3fc215c6997ddf81916cd59 | [
"Apache-2.0"
] | 11 | 2019-09-21T23:04:21.000Z | 2019-11-08T01:06:01.000Z | analysis/otherFiles/test.py | Sage-Bionetworks/JHU-biobank | 2e0dbdb64d756beba3fc215c6997ddf81916cd59 | [
"Apache-2.0"
] | 2 | 2019-09-22T16:04:04.000Z | 2019-09-27T22:12:59.000Z | #!/usr/bin/python
import sys
import os
import time
import re
import synapseclient
syn = synapseclient.login()
# fastq
tbl = syn.tableQuery("select * from syn17025501 where individualID is not null")
df = tbl.asDataFrame()
format = lambda x: x.replace(' ','_')
df['group'] = df['specimenID'].map(format)
grouped = df.groupby('specimenID')
for name, group in grouped:
os.system("mkdir "+name)
for index,row in group.iterrows():
temp = syn.get(row['id'],downloadLocation="./")
file_path = row['name']
os.rename(temp.path, file_path)
while not os.path.exists(file_path):
time.sleep(1)
qcmd = "~/FastQC/fastqc -o "+name+" "+file_path
os.system(qcmd)
os.remove(file_path)
# bam
tbl = syn.tableQuery("select * from syn17038362 where individualID is not null")
df = tbl.asDataFrame()
format = lambda x: x.replace(' ','_')
df['group'] = df['specimenID'].map(format)
grouped = df.groupby('specimenID')
for name, group in grouped:
os.system("mkdir "+name)
for index,row in group.iterrows():
temp = syn.get(row['id'],downloadLocation="./")
file_path = row['name']
os.rename(temp.path, file_path)
while not os.path.exists(file_path):
time.sleep(1)
qcmd = "~/FastQC/fastqc -o "+name+" "+file_path
os.system(qcmd)
os.remove(file_path)
| 26.576923 | 80 | 0.631693 | 186 | 1,382 | 4.629032 | 0.311828 | 0.092915 | 0.037166 | 0.051103 | 0.864112 | 0.803717 | 0.803717 | 0.803717 | 0.803717 | 0.803717 | 0 | 0.016544 | 0.212735 | 1,382 | 51 | 81 | 27.098039 | 0.774816 | 0.018813 | 0 | 0.789474 | 0 | 0 | 0.172949 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.131579 | 0 | 0.131579 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5668a60185dbd8e9ca07ca25f87af2628f9214ba | 20,427 | py | Python | f5/bigip/tm/asm/policies/test/functional/test_blocking_settings.py | nghia-tran/f5-common-python | acb23a6e5830a119b460c19a578654113419f5c3 | [
"Apache-2.0"
] | 272 | 2016-02-23T06:05:44.000Z | 2022-02-20T02:09:32.000Z | f5/bigip/tm/asm/policies/test/functional/test_blocking_settings.py | nghia-tran/f5-common-python | acb23a6e5830a119b460c19a578654113419f5c3 | [
"Apache-2.0"
] | 1,103 | 2016-02-11T17:48:03.000Z | 2022-02-15T17:13:37.000Z | f5/bigip/tm/asm/policies/test/functional/test_blocking_settings.py | nghia-tran/f5-common-python | acb23a6e5830a119b460c19a578654113419f5c3 | [
"Apache-2.0"
] | 167 | 2016-02-11T17:48:21.000Z | 2022-01-17T20:13:05.000Z | # Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from distutils.version import LooseVersion
from f5.bigip.tm.asm.policies.blocking_settings import Evasion
from f5.bigip.tm.asm.policies.blocking_settings import Evasions_s
from f5.bigip.tm.asm.policies.blocking_settings import Http_Protocol
from f5.bigip.tm.asm.policies.blocking_settings import Http_Protocols_s
from f5.bigip.tm.asm.policies.blocking_settings import Violation
from f5.bigip.tm.asm.policies.blocking_settings import Violations_s
from f5.bigip.tm.asm.policies.blocking_settings import Web_Services_Securities_s
from f5.bigip.tm.asm.policies.blocking_settings import Web_Services_Security
from f5.sdk_exception import UnsupportedMethod
from f5.sdk_exception import UnsupportedOperation
from requests.exceptions import HTTPError
class TestBlockingSettings(object):
    """Blocking-settings collection: create/delete are unsupported; load
    exposes the four sub-collection references and attribute registry."""

    def test_create_raises(self, policy):
        """The collection is fixed on the device; create() must raise."""
        with pytest.raises(UnsupportedMethod):
            policy.blocking_settings.create()

    def test_delete_raises(self, policy):
        """The collection is fixed on the device; delete() must raise."""
        with pytest.raises(UnsupportedMethod):
            policy.blocking_settings.delete()

    def test_load(self, policy):
        """load() returns the version-appropriate kind, the four reference
        attributes, and registers exactly the four sub-collection classes."""
        settings = policy.blocking_settings.load()
        registry = settings._meta_data['attribute_registry']
        expected_classes = {
            Evasions_s, Http_Protocols_s, Violations_s,
            Web_Services_Securities_s,
        }
        release = LooseVersion(pytest.config.getoption('--release'))
        # The kind string gained a suffix in TMOS 12.0.
        if release < LooseVersion('12.0.0'):
            assert settings.kind == 'tm:asm:policies:blocking-settings'
        else:
            assert settings.kind == \
                'tm:asm:policies:blocking-settings:blocking-settingcollectionstate'
        for attr in ('httpProtocolReference', 'webServicesSecurityReference',
                     'evasionReference', 'violationReference'):
            assert hasattr(settings, attr)
        assert expected_classes == set(registry.values())
# NOTE(review): ``pytest.config`` was removed in pytest 5.0; this skipif
# style only works with older pytest — confirm the pinned test dependencies.
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) >= LooseVersion('13.0.0'),
    reason='Needs TMOS version less than v13.0.0 to pass.'
)
class TestEvasions(object):
    """Evasion blocking-settings on TMOS < 13.0, where ``enabled`` starts True.

    Functional tests against a live BIG-IP: ``modify`` calls change
    persistent policy state, so each test presumably receives a fresh
    ``policy`` fixture — TODO confirm fixture scope.
    """

    def test_create_raises(self, policy):
        """Evasions are fixed device resources; create() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.evasions_s.evasion.create()

    def test_delete_raises(self, policy):
        """Evasions are fixed device resources; delete() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.evasions_s.evasion.delete()

    def test_refresh(self, policy):
        """refresh() picks up a change made through a second handle."""
        coll = policy.blocking_settings.evasions_s.get_collection()
        hashid = str(coll[0].id)
        eva1 = policy.blocking_settings.evasions_s.evasion.load(id=hashid)
        eva2 = policy.blocking_settings.evasions_s.evasion.load(id=hashid)
        assert eva1.kind == eva2.kind
        assert eva1.description == eva2.description
        assert eva1.enabled == eva2.enabled
        eva2.modify(enabled=False)
        # eva1 still holds the stale value until refreshed.
        assert eva1.enabled is True
        assert eva2.enabled is False
        eva1.refresh()
        assert eva1.enabled is False

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        with pytest.raises(HTTPError) as err:
            policy.blocking_settings.evasions_s.evasion.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        coll = policy.blocking_settings.evasions_s.get_collection()
        hashid = str(coll[0].id)
        eva1 = policy.blocking_settings.evasions_s.evasion.load(id=hashid)
        assert eva1.kind == 'tm:asm:policies:blocking-settings:evasions:evasionstate'
        assert eva1.enabled is True
        eva1.modify(enabled=False)
        assert eva1.enabled is False
        eva2 = policy.blocking_settings.evasions_s.evasion.load(id=eva1.id)
        assert eva1.selfLink == eva2.selfLink
        assert eva1.kind == eva2.kind
        assert eva1.enabled == eva2.enabled

    def test_evasions_subcollection(self, policy):
        """get_collection() returns a non-empty list of Evasion objects."""
        coll = policy.blocking_settings.evasions_s.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Evasion)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
    reason='Needs TMOS version greater than or equal to v13.0.0 to pass.'
)
class TestEvasionsV13(object):
    """Evasion blocking-settings on TMOS >= 13.0, where ``enabled`` starts
    False (the polarity of every assertion is flipped vs. TestEvasions)."""

    def test_create_raises(self, policy):
        """Evasions are fixed device resources; create() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.evasions_s.evasion.create()

    def test_delete_raises(self, policy):
        """Evasions are fixed device resources; delete() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.evasions_s.evasion.delete()

    def test_refresh(self, policy):
        """refresh() picks up a change made through a second handle."""
        coll = policy.blocking_settings.evasions_s.get_collection()
        hashid = str(coll[0].id)
        eva1 = policy.blocking_settings.evasions_s.evasion.load(id=hashid)
        eva2 = policy.blocking_settings.evasions_s.evasion.load(id=hashid)
        assert eva1.kind == eva2.kind
        assert eva1.description == eva2.description
        assert eva1.enabled == eva2.enabled
        eva2.modify(enabled=True)
        # eva1 still holds the stale value until refreshed.
        assert eva1.enabled is False
        assert eva2.enabled is True
        eva1.refresh()
        assert eva1.enabled is True

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        with pytest.raises(HTTPError) as err:
            policy.blocking_settings.evasions_s.evasion.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        coll = policy.blocking_settings.evasions_s.get_collection()
        hashid = str(coll[0].id)
        eva1 = policy.blocking_settings.evasions_s.evasion.load(id=hashid)
        assert eva1.kind == 'tm:asm:policies:blocking-settings:evasions:evasionstate'
        assert eva1.enabled is False
        eva1.modify(enabled=True)
        assert eva1.enabled is True
        eva2 = policy.blocking_settings.evasions_s.evasion.load(id=eva1.id)
        assert eva1.selfLink == eva2.selfLink
        assert eva1.kind == eva2.kind
        assert eva1.enabled == eva2.enabled

    def test_evasions_subcollection(self, policy):
        """get_collection() returns a non-empty list of Evasion objects."""
        coll = policy.blocking_settings.evasions_s.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Evasion)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) >= LooseVersion('13.0.0'),
    reason='Needs TMOS version less than v13.0.0 to pass.'
)
class TestViolations(object):
    """Violation blocking-settings on TMOS < 13.0, where ``learn`` starts
    True.  Functional tests against a live BIG-IP."""

    def test_create_raises(self, policy):
        """Violations are fixed device resources; create() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.violations_s.violation.create()

    def test_delete_raises(self, policy):
        """Violations are fixed device resources; delete() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.violations_s.violation.delete()

    def test_refresh(self, policy):
        """refresh() picks up a ``learn`` change made via a second handle."""
        coll = policy.blocking_settings.violations_s.get_collection()
        hashid = str(coll[0].id)
        vio1 = policy.blocking_settings.violations_s.violation.load(id=hashid)
        vio2 = policy.blocking_settings.violations_s.violation.load(id=hashid)
        assert vio1.kind == vio2.kind
        assert vio1.description == vio2.description
        assert vio1.learn == vio2.learn
        vio2.modify(learn=False)
        # vio1 still holds the stale value until refreshed.
        assert vio1.learn is True
        assert vio2.learn is False
        vio1.refresh()
        assert vio1.learn is False

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        with pytest.raises(HTTPError) as err:
            policy.blocking_settings.violations_s.violation.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        coll = policy.blocking_settings.violations_s.get_collection()
        hashid = str(coll[0].id)
        vio1 = policy.blocking_settings.violations_s.violation.load(id=hashid)
        assert vio1.kind == 'tm:asm:policies:blocking-settings:violations:violationstate'
        assert vio1.learn is True
        vio1.modify(learn=False)
        assert vio1.learn is False
        vio2 = policy.blocking_settings.violations_s.violation.load(id=vio1.id)
        assert vio1.selfLink == vio2.selfLink
        assert vio1.kind == vio2.kind
        assert vio1.learn == vio2.learn

    def test_violations_subcollection(self, policy):
        """get_collection() returns a non-empty list of Violation objects."""
        coll = policy.blocking_settings.violations_s.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Violation)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
    reason='Needs TMOS version greater than or equal to v13.0.0 to pass.'
)
class TestViolationsV13(object):
    """Violation blocking-settings on TMOS >= 13.0, where ``learn`` starts
    False (assertion polarity flipped vs. TestViolations)."""

    def test_create_raises(self, policy):
        """Violations are fixed device resources; create() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.violations_s.violation.create()

    def test_delete_raises(self, policy):
        """Violations are fixed device resources; delete() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.violations_s.violation.delete()

    def test_refresh(self, policy):
        """refresh() picks up a ``learn`` change made via a second handle."""
        coll = policy.blocking_settings.violations_s.get_collection()
        hashid = str(coll[0].id)
        vio1 = policy.blocking_settings.violations_s.violation.load(id=hashid)
        vio2 = policy.blocking_settings.violations_s.violation.load(id=hashid)
        assert vio1.kind == vio2.kind
        assert vio1.description == vio2.description
        assert vio1.learn == vio2.learn
        vio2.modify(learn=True)
        # vio1 still holds the stale value until refreshed.
        assert vio1.learn is False
        assert vio2.learn is True
        vio1.refresh()
        assert vio1.learn is True

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        with pytest.raises(HTTPError) as err:
            policy.blocking_settings.violations_s.violation.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        coll = policy.blocking_settings.violations_s.get_collection()
        hashid = str(coll[0].id)
        vio1 = policy.blocking_settings.violations_s.violation.load(id=hashid)
        assert vio1.kind == 'tm:asm:policies:blocking-settings:violations:violationstate'
        assert vio1.learn is False
        vio1.modify(learn=True)
        assert vio1.learn is True
        vio2 = policy.blocking_settings.violations_s.violation.load(id=vio1.id)
        assert vio1.selfLink == vio2.selfLink
        assert vio1.kind == vio2.kind
        assert vio1.learn == vio2.learn

    def test_violations_subcollection(self, policy):
        """get_collection() returns a non-empty list of Violation objects."""
        coll = policy.blocking_settings.violations_s.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Violation)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) >= LooseVersion('13.0.0'),
    reason='Needs TMOS version less than v13.0.0 to pass.'
)
class TestHTTPProtocols(object):
    """HTTP-protocol blocking-settings on TMOS < 13.0 (``enabled`` starts
    True).  Note these tests use coll[1], not coll[0]."""

    def test_create_raises(self, policy):
        """HTTP protocols are fixed resources; create() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.http_protocols_s.http_protocol.create()

    def test_delete_raises(self, policy):
        """HTTP protocols are fixed resources; delete() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.http_protocols_s.http_protocol.delete()

    def test_refresh(self, policy):
        """refresh() picks up a change made through a second handle."""
        coll = policy.blocking_settings.http_protocols_s.get_collection()
        hashid = str(coll[1].id)
        http1 = policy.blocking_settings.http_protocols_s.http_protocol.load(id=hashid)
        http2 = policy.blocking_settings.http_protocols_s.http_protocol.load(id=hashid)
        assert http1.kind == http2.kind
        assert http1.description == http2.description
        assert http1.enabled == http2.enabled
        http2.modify(enabled=False)
        # http1 still holds the stale value until refreshed.
        assert http1.enabled is True
        assert http2.enabled is False
        http1.refresh()
        assert http1.enabled is False

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        with pytest.raises(HTTPError) as err:
            policy.blocking_settings.http_protocols_s.http_protocol.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        coll = policy.blocking_settings.http_protocols_s.get_collection()
        hashid = str(coll[1].id)
        http1 = policy.blocking_settings.http_protocols_s.http_protocol.load(
            id=hashid)
        assert http1.kind == 'tm:asm:policies:blocking-settings:http-protocols:http-protocolstate'
        assert http1.enabled is True
        http1.modify(enabled=False)
        assert http1.enabled is False
        http2 = policy.blocking_settings.http_protocols_s.http_protocol.load(id=http1.id)
        assert http1.selfLink == http2.selfLink
        assert http1.kind == http2.kind
        assert http1.enabled == http2.enabled

    def test_httpprotocols_subcollection(self, policy):
        """get_collection() returns a non-empty list of Http_Protocol objects."""
        coll = policy.blocking_settings.http_protocols_s.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Http_Protocol)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
    reason='Needs TMOS version greater than or equal to v13.0.0 to pass.'
)
class TestHTTPProtocolsV13(object):
    """HTTP-protocol blocking-settings on TMOS >= 13.0 (``enabled`` starts
    False; assertion polarity flipped vs. TestHTTPProtocols)."""

    def test_create_raises(self, policy):
        """HTTP protocols are fixed resources; create() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.http_protocols_s.http_protocol.create()

    def test_delete_raises(self, policy):
        """HTTP protocols are fixed resources; delete() is unsupported."""
        with pytest.raises(UnsupportedOperation):
            policy.blocking_settings.http_protocols_s.http_protocol.delete()

    def test_refresh(self, policy):
        """refresh() picks up a change made through a second handle."""
        coll = policy.blocking_settings.http_protocols_s.get_collection()
        hashid = str(coll[1].id)
        http1 = policy.blocking_settings.http_protocols_s.http_protocol.load(id=hashid)
        http2 = policy.blocking_settings.http_protocols_s.http_protocol.load(id=hashid)
        assert http1.kind == http2.kind
        assert http1.description == http2.description
        assert http1.enabled == http2.enabled
        http2.modify(enabled=True)
        # http1 still holds the stale value until refreshed.
        assert http1.enabled is False
        assert http2.enabled is True
        http1.refresh()
        assert http1.enabled is True

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        with pytest.raises(HTTPError) as err:
            protocols = policy.blocking_settings.http_protocols_s
            protocols.http_protocol.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        coll = policy.blocking_settings.http_protocols_s.get_collection()
        hashid = str(coll[1].id)
        http1 = policy.blocking_settings.http_protocols_s.http_protocol.load(
            id=hashid
        )
        assert http1.kind == 'tm:asm:policies:blocking-settings:http-protocols:http-protocolstate'
        assert http1.enabled is False
        http1.modify(enabled=True)
        assert http1.enabled is True
        http2 = policy.blocking_settings.http_protocols_s. \
            http_protocol.load(id=http1.id)
        assert http1.selfLink == http2.selfLink
        assert http1.kind == http2.kind
        assert http1.enabled == http2.enabled

    def test_httpprotocols_subcollection(self, policy):
        """get_collection() returns a non-empty list of Http_Protocol objects."""
        coll = policy.blocking_settings.http_protocols_s.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Http_Protocol)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) >= LooseVersion('13.0.0'),
    reason='Needs TMOS version less than v13.0.0 to pass.'
)
class TestWebServicesSecurities(object):
    """Web-services-security blocking-settings on TMOS < 13.0 (``enabled``
    starts True).  Functional tests against a live BIG-IP."""

    def test_create_raises(self, policy):
        """Entries are fixed device resources; create() is unsupported."""
        wsc = policy.blocking_settings.web_services_securities_s
        with pytest.raises(UnsupportedOperation):
            wsc.web_services_security.create()

    def test_delete_raises(self, policy):
        """Entries are fixed device resources; delete() is unsupported."""
        wsc = policy.blocking_settings.web_services_securities_s
        with pytest.raises(UnsupportedOperation):
            wsc.web_services_security.delete()

    def test_refresh(self, policy):
        """refresh() picks up a change made through a second handle."""
        wsc = policy.blocking_settings.web_services_securities_s
        coll = wsc.get_collection()
        hashid = str(coll[1].id)
        ws1 = wsc.web_services_security.load(id=hashid)
        ws2 = wsc.web_services_security.load(id=hashid)
        assert ws1.kind == ws2.kind
        assert ws1.description == ws2.description
        assert ws1.enabled == ws2.enabled
        ws2.modify(enabled=False)
        # ws1 still holds the stale value until refreshed.
        assert ws1.enabled is True
        assert ws2.enabled is False
        ws1.refresh()
        assert ws1.enabled is False

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        wsc = policy.blocking_settings.web_services_securities_s
        with pytest.raises(HTTPError) as err:
            wsc.web_services_security.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind and round-trips a modify()."""
        wsc = policy.blocking_settings.web_services_securities_s
        coll = wsc.get_collection()
        hashid = str(coll[1].id)
        ws1 = wsc.web_services_security.load(id=hashid)
        assert ws1.kind == 'tm:asm:policies:blocking-settings:web-services-securities:web-services-securitystate'
        assert ws1.enabled is True
        ws1.modify(enabled=False)
        assert ws1.enabled is False
        ws2 = wsc.web_services_security.load(id=ws1.id)
        assert ws1.selfLink == ws2.selfLink
        assert ws1.kind == ws2.kind
        assert ws1.enabled == ws2.enabled

    def test_webservicessecurities_subcollection(self, policy):
        """get_collection() returns a non-empty list of
        Web_Services_Security objects."""
        wsc = policy.blocking_settings.web_services_securities_s
        coll = wsc.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Web_Services_Security)
@pytest.mark.skipif(
    LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
    reason='Needs TMOS version greater than or equal to v13.0.0 to pass.'
)
class TestWebServicesSecuritiesV13(object):
    """Web-services-security blocking-settings on TMOS >= 13.0 (``enabled``
    starts False; unlike the pre-13 class, test_load does not modify)."""

    def test_create_raises(self, policy):
        """Entries are fixed device resources; create() is unsupported."""
        wsc = policy.blocking_settings.web_services_securities_s
        with pytest.raises(UnsupportedOperation):
            wsc.web_services_security.create()

    def test_delete_raises(self, policy):
        """Entries are fixed device resources; delete() is unsupported."""
        wsc = policy.blocking_settings.web_services_securities_s
        with pytest.raises(UnsupportedOperation):
            wsc.web_services_security.delete()

    def test_refresh(self, policy):
        """refresh() picks up a change made through a second handle."""
        wsc = policy.blocking_settings.web_services_securities_s
        coll = wsc.get_collection()
        hashid = str(coll[1].id)
        ws1 = wsc.web_services_security.load(id=hashid)
        ws2 = wsc.web_services_security.load(id=hashid)
        assert ws1.kind == ws2.kind
        assert ws1.description == ws2.description
        assert ws1.enabled == ws2.enabled
        ws2.modify(enabled=True)
        # ws1 still holds the stale value until refreshed.
        assert ws1.enabled is False
        assert ws2.enabled is True
        ws1.refresh()
        assert ws1.enabled is True

    def test_load_no_object(self, policy):
        """Loading a non-existent hash id yields HTTP 404."""
        wsc = policy.blocking_settings.web_services_securities_s
        with pytest.raises(HTTPError) as err:
            wsc.web_services_security.load(id='Lx3553-321')
        assert err.value.response.status_code == 404

    def test_load(self, policy):
        """load() returns the expected kind with ``enabled`` defaulted False."""
        wsc = policy.blocking_settings.web_services_securities_s
        coll = wsc.get_collection()
        hash_id = coll[0].id
        ws1 = wsc.web_services_security.load(id=hash_id)
        assert ws1.kind == 'tm:asm:policies:blocking-settings:web-services-securities:web-services-securitystate'
        assert ws1.enabled is False

    def test_webservicessecurities_subcollection(self, policy):
        """get_collection() returns a non-empty list of
        Web_Services_Security objects."""
        wsc = policy.blocking_settings.web_services_securities_s
        coll = wsc.get_collection()
        assert isinstance(coll, list)
        assert len(coll)
        assert isinstance(coll[0], Web_Services_Security)
| 42.030864 | 113 | 0.698291 | 2,543 | 20,427 | 5.453401 | 0.075501 | 0.107297 | 0.118979 | 0.046005 | 0.890179 | 0.871359 | 0.852827 | 0.817854 | 0.815474 | 0.805524 | 0 | 0.024992 | 0.206687 | 20,427 | 485 | 114 | 42.117526 | 0.830793 | 0.027023 | 0 | 0.792176 | 0 | 0.00489 | 0.068681 | 0.034089 | 0 | 0 | 0 | 0 | 0.320293 | 1 | 0.124694 | false | 0.01956 | 0.031785 | 0 | 0.178484 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
56988b3cd3edbaccaac7d6dd1625ac43a36c7466 | 78 | py | Python | src/jodel_api/protos/__init__.py | hackspace-marburg/jodel_api | 9e8094d11b6f90dbbf7d13d85425f3a3b7430860 | [
"MIT"
] | 175 | 2016-09-29T23:33:41.000Z | 2022-03-08T11:28:24.000Z | src/jodel_api/protos/__init__.py | hackspace-marburg/jodel_api | 9e8094d11b6f90dbbf7d13d85425f3a3b7430860 | [
"MIT"
] | 78 | 2016-09-24T22:08:04.000Z | 2021-12-03T15:44:21.000Z | src/jodel_api/protos/__init__.py | hackspace-marburg/jodel_api | 9e8094d11b6f90dbbf7d13d85425f3a3b7430860 | [
"MIT"
] | 78 | 2016-09-12T20:45:23.000Z | 2022-03-23T18:27:49.000Z | from jodel_api.protos import checkin_pb2
from jodel_api.protos import mcs_pb2
| 26 | 40 | 0.871795 | 14 | 78 | 4.571429 | 0.571429 | 0.28125 | 0.375 | 0.5625 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028571 | 0.102564 | 78 | 2 | 41 | 39 | 0.885714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
56a66055c0222828a3d046aa380f1a38070ad92a | 271 | py | Python | autopycli/__init__.py | zrd/py-cli-autoconf | e02de0570d83930ee6dd6e4480b1bb327c633ba9 | [
"MIT"
] | null | null | null | autopycli/__init__.py | zrd/py-cli-autoconf | e02de0570d83930ee6dd6e4480b1bb327c633ba9 | [
"MIT"
] | null | null | null | autopycli/__init__.py | zrd/py-cli-autoconf | e02de0570d83930ee6dd6e4480b1bb327c633ba9 | [
"MIT"
] | null | null | null | from autopycli.arguments import ArgumentParser
from autopycli.arguments import ArgumentsError
from autopycli.configuration import Configuration
from autopycli.environment import Environment
from autopycli.cli_runtime import CliRuntime
from autopycli.logger import logger
| 38.714286 | 49 | 0.889299 | 31 | 271 | 7.741935 | 0.387097 | 0.325 | 0.183333 | 0.233333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088561 | 271 | 6 | 50 | 45.166667 | 0.97166 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3b104f4a130a0af40029266c5939186951e72c23 | 197 | py | Python | src/field_schnet/__init__.py | atomistic-machine-learning/field_schnet | 0dcc72a91eaa6eb9d65183a8b6fb98a4330d1e5b | [
"MIT"
] | 4 | 2021-06-19T01:21:41.000Z | 2021-08-21T01:47:29.000Z | src/field_schnet/__init__.py | atomistic-machine-learning/field_schnet | 0dcc72a91eaa6eb9d65183a8b6fb98a4330d1e5b | [
"MIT"
] | null | null | null | src/field_schnet/__init__.py | atomistic-machine-learning/field_schnet | 0dcc72a91eaa6eb9d65183a8b6fb98a4330d1e5b | [
"MIT"
] | null | null | null | from field_schnet import nn
from field_schnet import atomistic
from field_schnet import interfaces
from field_schnet import md
from field_schnet import representation
from field_schnet import utils | 32.833333 | 39 | 0.883249 | 30 | 197 | 5.6 | 0.333333 | 0.321429 | 0.535714 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116751 | 197 | 6 | 40 | 32.833333 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3b1e959d31dde00deedb915b3cd12a948094aab0 | 4,238 | py | Python | model.py | yangyiben/PCE | 0248aae74328998cb734da1123afe8dd72c2510e | [
"Apache-2.0"
] | 8 | 2018-12-02T14:11:44.000Z | 2020-12-17T07:52:25.000Z | model.py | yangyiben/PCE | 0248aae74328998cb734da1123afe8dd72c2510e | [
"Apache-2.0"
] | null | null | null | model.py | yangyiben/PCE | 0248aae74328998cb734da1123afe8dd72c2510e | [
"Apache-2.0"
] | 2 | 2019-07-19T04:25:44.000Z | 2019-10-24T02:22:58.000Z | import torch
from torch.autograd import Variable
class PCE_onePole(torch.nn.Module):
    """Pole-centered embedding model with a single pole relation.

    All three relation vectors scored against the concatenated (x, y)
    pair are derived from the first relation embedding ``r1`` via three
    learned linear maps (encoderp / encoders / encodern) — hence
    "one pole". Columns 3 and 4 of the input are therefore unused.

    Fix: removed the dead ``r2``/``r3`` selects in forward(), whose
    values were immediately overwritten and never read.
    """

    def __init__(self, nhid, embeddings):
        """
        :param nhid: hidden size; must equal ``embeddings.size(1)``.
        :param embeddings: pre-trained embedding matrix, kept frozen.
        """
        super(PCE_onePole, self).__init__()
        self.nhid = nhid
        self.activation = torch.nn.ReLU()  # kept for interface parity; unused in forward
        self.drop = torch.nn.Dropout()
        embedding_temp = torch.nn.Embedding(embeddings.size(0), embeddings.size(1))
        embedding_temp.weight = torch.nn.Parameter(embeddings)
        embedding_temp.weight.requires_grad = False  # embeddings stay fixed
        self.embedding = embedding_temp
        self.encoder2 = torch.nn.Linear(nhid * 2, nhid)
        self.encoderp = torch.nn.Linear(nhid, nhid)
        self.encodern = torch.nn.Linear(nhid, nhid)
        self.encoders = torch.nn.Linear(nhid, nhid)

    def reset(self):
        """Re-initialize every Linear layer; embedding weights are untouched."""
        def init_weights(m):
            if type(m) == torch.nn.Linear:
                m.reset_parameters()
        self.apply(init_weights)

    def forward(self, input_data):
        """Score the (x, y) pair against three relations derived from r1.

        :param input_data: LongTensor of shape (batch, 5) with embedding
            indices [x, y, r1, r2, r3]; columns 3 and 4 are ignored.
        :return: tensor of shape (batch, 3), one score per relation.
        """
        input_data = self.embedding(input_data)
        x = input_data.select(1, 0)
        y = input_data.select(1, 1)
        r1 = input_data.select(1, 2)
        # NOTE: the r2/r3 input columns are intentionally not used; the
        # s/n/p relation vectors below are all computed from r1.
        input_embedding = torch.cat([x, y], 1)
        h = self.drop(self.encoder2(input_embedding))
        r2 = self.drop(self.encoders(r1))
        r3 = self.drop(self.encodern(r1))
        r1 = self.drop(self.encoderp(r1))  # reassign r1 last: later maps read the original r1
        h = torch.unsqueeze(h, 1)
        r1 = torch.unsqueeze(r1, 2)
        r2 = torch.unsqueeze(r2, 2)
        r3 = torch.unsqueeze(r3, 2)
        out1 = torch.bmm(h, r1).squeeze()
        out2 = torch.bmm(h, r2).squeeze()
        out3 = torch.bmm(h, r3).squeeze()
        out = torch.stack([out1, out2, out3], 1)
        return out
class PCE_four_way(torch.nn.Module):
    """Pole-centered embedding model producing four scores per example.

    The concatenated (x, y) pair is scored against the three relation
    embeddings; a fourth score combines the individually encoded x and y
    with r1 and r3.
    """

    def __init__(self, nhid, embeddings):
        super(PCE_four_way, self).__init__()
        self.nhid = nhid
        self.activation = torch.nn.ReLU()
        self.drop = torch.nn.Dropout()
        lookup = torch.nn.Embedding(embeddings.size(0), embeddings.size(1))
        lookup.weight = torch.nn.Parameter(embeddings)
        lookup.weight.requires_grad = False  # frozen pre-trained embeddings
        self.embedding = lookup
        self.encoder1 = torch.nn.Linear(nhid, nhid)
        self.encoder2 = torch.nn.Linear(nhid * 2, nhid)

    def reset(self):
        """Re-initialize all Linear layers; the embedding table is untouched."""
        def _reinit(module):
            if type(module) == torch.nn.Linear:
                module.reset_parameters()
        self.apply(_reinit)

    def forward(self, input_data):
        """Return a (batch, 4) score tensor for index rows [x, y, r1, r2, r3]."""
        vecs = self.embedding(input_data)
        x, y = vecs[:, 0], vecs[:, 1]
        r1, r2, r3 = vecs[:, 2], vecs[:, 3], vecs[:, 4]
        pair = torch.cat([x, y], 1)
        h1 = self.drop(self.encoder1(x))
        h2 = self.drop(self.encoder1(y))
        h = self.drop(self.encoder2(pair))
        h = h.unsqueeze(1)
        h1 = h1.unsqueeze(1)
        h2 = h2.unsqueeze(1)
        r1 = r1.unsqueeze(2)
        r2 = r2.unsqueeze(2)
        r3 = r3.unsqueeze(2)
        out1 = torch.bmm(h, r1).squeeze()
        out2 = torch.bmm(h, r2).squeeze()
        out3 = torch.bmm(h, r3).squeeze()
        out4 = (torch.bmm(h1, r1).squeeze() + torch.bmm(h1, r3).squeeze()
                + torch.bmm(h2, r1).squeeze() + torch.bmm(h2, r3).squeeze())
        return torch.stack([out1, out2, out3, out4], 1)
class PCE_three_way(torch.nn.Module):
    """Pole-centered embedding model scoring the (x, y) pair against the
    three relation embeddings taken directly from the input row."""

    def __init__(self, nhid, embeddings):
        super(PCE_three_way, self).__init__()
        self.nhid = nhid
        self.activation = torch.nn.ReLU()
        self.drop = torch.nn.Dropout()
        lookup = torch.nn.Embedding(embeddings.size(0), embeddings.size(1))
        lookup.weight = torch.nn.Parameter(embeddings)
        lookup.weight.requires_grad = False  # frozen pre-trained embeddings
        self.embedding = lookup
        self.encoder2 = torch.nn.Linear(nhid * 2, nhid)

    def reset(self):
        """Re-initialize all Linear layers; embedding weights stay frozen."""
        def _reinit(module):
            if type(module) == torch.nn.Linear:
                module.reset_parameters()
        self.apply(_reinit)

    def forward(self, input_data):
        """Return a (batch, 3) score tensor for index rows [x, y, r1, r2, r3]."""
        vecs = self.embedding(input_data)
        x, y = vecs[:, 0], vecs[:, 1]
        relations = [vecs[:, 2], vecs[:, 3], vecs[:, 4]]
        h = self.drop(self.encoder2(torch.cat([x, y], 1))).unsqueeze(1)
        scores = [torch.bmm(h, r.unsqueeze(2)).squeeze() for r in relations]
        return torch.stack(scores, 1)
| 25.076923 | 125 | 0.691364 | 678 | 4,238 | 4.19174 | 0.106195 | 0.061576 | 0.07917 | 0.084448 | 0.870514 | 0.855384 | 0.830401 | 0.830401 | 0.818438 | 0.818438 | 0 | 0.042897 | 0.152902 | 4,238 | 168 | 126 | 25.22619 | 0.748747 | 0 | 0 | 0.790323 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096774 | false | 0 | 0.016129 | 0 | 0.16129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3b4328b749b75b306b7672d9aa005f8f95fc1565 | 34,718 | py | Python | sdk/python/pulumi_opsgenie/notification_policy.py | pulumi/pulumi-opsgenie | 4bc7d0cbb5b0437c59422a5977a61468baa2c4a7 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-12-01T20:57:42.000Z | 2021-12-01T20:57:42.000Z | sdk/python/pulumi_opsgenie/notification_policy.py | pulumi/pulumi-opsgenie | 4bc7d0cbb5b0437c59422a5977a61468baa2c4a7 | [
"ECL-2.0",
"Apache-2.0"
] | 34 | 2021-02-03T20:15:32.000Z | 2022-03-25T19:57:20.000Z | sdk/python/pulumi_opsgenie/notification_policy.py | pulumi/pulumi-opsgenie | 4bc7d0cbb5b0437c59422a5977a61468baa2c4a7 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-07-08T15:16:09.000Z | 2021-07-20T11:12:44.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['NotificationPolicyArgs', 'NotificationPolicy']
@pulumi.input_type
class NotificationPolicyArgs:
def __init__(__self__, *,
filters: pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]],
team_id: pulumi.Input[str],
auto_close_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]]] = None,
auto_restart_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]]] = None,
de_duplication_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]]] = None,
delay_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
policy_description: Optional[pulumi.Input[str]] = None,
suppress: Optional[pulumi.Input[bool]] = None,
time_restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]]] = None):
"""
The set of arguments for constructing a NotificationPolicy resource.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]] filters: A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
:param pulumi.Input[str] team_id: Id of team that this policy belons to.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]] auto_close_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]] auto_restart_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]] de_duplication_actions: Deduplication Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]] delay_actions: Delay notifications. This is a block, structure is documented below.
:param pulumi.Input[bool] enabled: If policy should be enabled. Default: `true`
:param pulumi.Input[str] name: Name of the notification policy
:param pulumi.Input[str] policy_description: Description of the policy. This can be max 512 characters.
:param pulumi.Input[bool] suppress: Suppress value of the policy. Values are: `true`, `false`. Default: `false`
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]] time_restrictions: Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
pulumi.set(__self__, "filters", filters)
pulumi.set(__self__, "team_id", team_id)
if auto_close_actions is not None:
pulumi.set(__self__, "auto_close_actions", auto_close_actions)
if auto_restart_actions is not None:
pulumi.set(__self__, "auto_restart_actions", auto_restart_actions)
if de_duplication_actions is not None:
pulumi.set(__self__, "de_duplication_actions", de_duplication_actions)
if delay_actions is not None:
pulumi.set(__self__, "delay_actions", delay_actions)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if name is not None:
pulumi.set(__self__, "name", name)
if policy_description is not None:
pulumi.set(__self__, "policy_description", policy_description)
if suppress is not None:
pulumi.set(__self__, "suppress", suppress)
if time_restrictions is not None:
pulumi.set(__self__, "time_restrictions", time_restrictions)
@property
@pulumi.getter
def filters(self) -> pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]]:
"""
A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
"""
return pulumi.get(self, "filters")
@filters.setter
def filters(self, value: pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]]):
pulumi.set(self, "filters", value)
@property
@pulumi.getter(name="teamId")
def team_id(self) -> pulumi.Input[str]:
"""
Id of team that this policy belons to.
"""
return pulumi.get(self, "team_id")
@team_id.setter
def team_id(self, value: pulumi.Input[str]):
pulumi.set(self, "team_id", value)
@property
@pulumi.getter(name="autoCloseActions")
def auto_close_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]]]:
"""
Auto Restart Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "auto_close_actions")
@auto_close_actions.setter
def auto_close_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]]]):
pulumi.set(self, "auto_close_actions", value)
@property
@pulumi.getter(name="autoRestartActions")
def auto_restart_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]]]:
"""
Auto Restart Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "auto_restart_actions")
@auto_restart_actions.setter
def auto_restart_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]]]):
pulumi.set(self, "auto_restart_actions", value)
@property
@pulumi.getter(name="deDuplicationActions")
def de_duplication_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]]]:
"""
Deduplication Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "de_duplication_actions")
@de_duplication_actions.setter
def de_duplication_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]]]):
pulumi.set(self, "de_duplication_actions", value)
@property
@pulumi.getter(name="delayActions")
def delay_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]]]:
"""
Delay notifications. This is a block, structure is documented below.
"""
return pulumi.get(self, "delay_actions")
@delay_actions.setter
def delay_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]]]):
pulumi.set(self, "delay_actions", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
If policy should be enabled. Default: `true`
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the notification policy
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="policyDescription")
def policy_description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the policy. This can be max 512 characters.
"""
return pulumi.get(self, "policy_description")
@policy_description.setter
def policy_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_description", value)
@property
@pulumi.getter
def suppress(self) -> Optional[pulumi.Input[bool]]:
"""
Suppress value of the policy. Values are: `true`, `false`. Default: `false`
"""
return pulumi.get(self, "suppress")
@suppress.setter
def suppress(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "suppress", value)
@property
@pulumi.getter(name="timeRestrictions")
def time_restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]]]:
"""
Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
return pulumi.get(self, "time_restrictions")
@time_restrictions.setter
def time_restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]]]):
pulumi.set(self, "time_restrictions", value)
@pulumi.input_type
class _NotificationPolicyState:
def __init__(__self__, *,
auto_close_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]]] = None,
auto_restart_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]]] = None,
de_duplication_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]]] = None,
delay_actions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
filters: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
policy_description: Optional[pulumi.Input[str]] = None,
suppress: Optional[pulumi.Input[bool]] = None,
team_id: Optional[pulumi.Input[str]] = None,
time_restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]]] = None):
"""
Input properties used for looking up and filtering NotificationPolicy resources.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]] auto_close_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]] auto_restart_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]] de_duplication_actions: Deduplication Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]] delay_actions: Delay notifications. This is a block, structure is documented below.
:param pulumi.Input[bool] enabled: If policy should be enabled. Default: `true`
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]] filters: A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
:param pulumi.Input[str] name: Name of the notification policy
:param pulumi.Input[str] policy_description: Description of the policy. This can be max 512 characters.
:param pulumi.Input[bool] suppress: Suppress value of the policy. Values are: `true`, `false`. Default: `false`
:param pulumi.Input[str] team_id: Id of team that this policy belons to.
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]] time_restrictions: Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
if auto_close_actions is not None:
pulumi.set(__self__, "auto_close_actions", auto_close_actions)
if auto_restart_actions is not None:
pulumi.set(__self__, "auto_restart_actions", auto_restart_actions)
if de_duplication_actions is not None:
pulumi.set(__self__, "de_duplication_actions", de_duplication_actions)
if delay_actions is not None:
pulumi.set(__self__, "delay_actions", delay_actions)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if filters is not None:
pulumi.set(__self__, "filters", filters)
if name is not None:
pulumi.set(__self__, "name", name)
if policy_description is not None:
pulumi.set(__self__, "policy_description", policy_description)
if suppress is not None:
pulumi.set(__self__, "suppress", suppress)
if team_id is not None:
pulumi.set(__self__, "team_id", team_id)
if time_restrictions is not None:
pulumi.set(__self__, "time_restrictions", time_restrictions)
@property
@pulumi.getter(name="autoCloseActions")
def auto_close_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]]]:
"""
Auto Restart Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "auto_close_actions")
@auto_close_actions.setter
def auto_close_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionArgs']]]]):
pulumi.set(self, "auto_close_actions", value)
@property
@pulumi.getter(name="autoRestartActions")
def auto_restart_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]]]:
"""
Auto Restart Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "auto_restart_actions")
@auto_restart_actions.setter
def auto_restart_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionArgs']]]]):
pulumi.set(self, "auto_restart_actions", value)
@property
@pulumi.getter(name="deDuplicationActions")
def de_duplication_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]]]:
"""
Deduplication Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "de_duplication_actions")
@de_duplication_actions.setter
def de_duplication_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionArgs']]]]):
pulumi.set(self, "de_duplication_actions", value)
@property
@pulumi.getter(name="delayActions")
def delay_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]]]:
"""
Delay notifications. This is a block, structure is documented below.
"""
return pulumi.get(self, "delay_actions")
@delay_actions.setter
def delay_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionArgs']]]]):
pulumi.set(self, "delay_actions", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
If policy should be enabled. Default: `true`
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]]]:
"""
A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
"""
return pulumi.get(self, "filters")
@filters.setter
def filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterArgs']]]]):
pulumi.set(self, "filters", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the notification policy
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="policyDescription")
def policy_description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the policy. This can be max 512 characters.
"""
return pulumi.get(self, "policy_description")
@policy_description.setter
def policy_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_description", value)
@property
@pulumi.getter
def suppress(self) -> Optional[pulumi.Input[bool]]:
"""
Suppress value of the policy. Values are: `true`, `false`. Default: `false`
"""
return pulumi.get(self, "suppress")
@suppress.setter
def suppress(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "suppress", value)
@property
@pulumi.getter(name="teamId")
def team_id(self) -> Optional[pulumi.Input[str]]:
"""
Id of team that this policy belons to.
"""
return pulumi.get(self, "team_id")
@team_id.setter
def team_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "team_id", value)
@property
@pulumi.getter(name="timeRestrictions")
def time_restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]]]:
"""
Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
return pulumi.get(self, "time_restrictions")
@time_restrictions.setter
def time_restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionArgs']]]]):
pulumi.set(self, "time_restrictions", value)
class NotificationPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auto_close_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoCloseActionArgs']]]]] = None,
auto_restart_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoRestartActionArgs']]]]] = None,
de_duplication_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDeDuplicationActionArgs']]]]] = None,
delay_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDelayActionArgs']]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
filters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyFilterArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
policy_description: Optional[pulumi.Input[str]] = None,
suppress: Optional[pulumi.Input[bool]] = None,
team_id: Optional[pulumi.Input[str]] = None,
time_restrictions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyTimeRestrictionArgs']]]]] = None,
__props__=None):
"""
Manages a Notification Policy within Opsgenie.
## Import
Notification policies can be imported using the `team_id` and `notification_policy_id`, e.g.
```sh
$ pulumi import opsgenie:index/notificationPolicy:NotificationPolicy test team_id/notification_policy_id`
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoCloseActionArgs']]]] auto_close_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoRestartActionArgs']]]] auto_restart_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDeDuplicationActionArgs']]]] de_duplication_actions: Deduplication Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDelayActionArgs']]]] delay_actions: Delay notifications. This is a block, structure is documented below.
:param pulumi.Input[bool] enabled: If policy should be enabled. Default: `true`
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyFilterArgs']]]] filters: A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
:param pulumi.Input[str] name: Name of the notification policy
:param pulumi.Input[str] policy_description: Description of the policy. This can be max 512 characters.
:param pulumi.Input[bool] suppress: Suppress value of the policy. Values are: `true`, `false`. Default: `false`
:param pulumi.Input[str] team_id: Id of team that this policy belons to.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyTimeRestrictionArgs']]]] time_restrictions: Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NotificationPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Notification Policy within Opsgenie.
## Import
Notification policies can be imported using the `team_id` and `notification_policy_id`, e.g.
```sh
$ pulumi import opsgenie:index/notificationPolicy:NotificationPolicy test team_id/notification_policy_id`
```
:param str resource_name: The name of the resource.
:param NotificationPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NotificationPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auto_close_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoCloseActionArgs']]]]] = None,
auto_restart_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoRestartActionArgs']]]]] = None,
de_duplication_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDeDuplicationActionArgs']]]]] = None,
delay_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDelayActionArgs']]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
filters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyFilterArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
policy_description: Optional[pulumi.Input[str]] = None,
suppress: Optional[pulumi.Input[bool]] = None,
team_id: Optional[pulumi.Input[str]] = None,
time_restrictions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyTimeRestrictionArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NotificationPolicyArgs.__new__(NotificationPolicyArgs)
__props__.__dict__["auto_close_actions"] = auto_close_actions
__props__.__dict__["auto_restart_actions"] = auto_restart_actions
__props__.__dict__["de_duplication_actions"] = de_duplication_actions
__props__.__dict__["delay_actions"] = delay_actions
__props__.__dict__["enabled"] = enabled
if filters is None and not opts.urn:
raise TypeError("Missing required property 'filters'")
__props__.__dict__["filters"] = filters
__props__.__dict__["name"] = name
__props__.__dict__["policy_description"] = policy_description
__props__.__dict__["suppress"] = suppress
if team_id is None and not opts.urn:
raise TypeError("Missing required property 'team_id'")
__props__.__dict__["team_id"] = team_id
__props__.__dict__["time_restrictions"] = time_restrictions
super(NotificationPolicy, __self__).__init__(
'opsgenie:index/notificationPolicy:NotificationPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
auto_close_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoCloseActionArgs']]]]] = None,
auto_restart_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoRestartActionArgs']]]]] = None,
de_duplication_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDeDuplicationActionArgs']]]]] = None,
delay_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDelayActionArgs']]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
filters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyFilterArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
policy_description: Optional[pulumi.Input[str]] = None,
suppress: Optional[pulumi.Input[bool]] = None,
team_id: Optional[pulumi.Input[str]] = None,
time_restrictions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyTimeRestrictionArgs']]]]] = None) -> 'NotificationPolicy':
"""
Get an existing NotificationPolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoCloseActionArgs']]]] auto_close_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyAutoRestartActionArgs']]]] auto_restart_actions: Auto Restart Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDeDuplicationActionArgs']]]] de_duplication_actions: Deduplication Action of the policy. This is a block, structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyDelayActionArgs']]]] delay_actions: Delay notifications. This is a block, structure is documented below.
:param pulumi.Input[bool] enabled: If policy should be enabled. Default: `true`
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyFilterArgs']]]] filters: A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
:param pulumi.Input[str] name: Name of the notification policy
:param pulumi.Input[str] policy_description: Description of the policy. This can be max 512 characters.
:param pulumi.Input[bool] suppress: Suppress value of the policy. Values are: `true`, `false`. Default: `false`
:param pulumi.Input[str] team_id: Id of team that this policy belons to.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NotificationPolicyTimeRestrictionArgs']]]] time_restrictions: Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NotificationPolicyState.__new__(_NotificationPolicyState)
__props__.__dict__["auto_close_actions"] = auto_close_actions
__props__.__dict__["auto_restart_actions"] = auto_restart_actions
__props__.__dict__["de_duplication_actions"] = de_duplication_actions
__props__.__dict__["delay_actions"] = delay_actions
__props__.__dict__["enabled"] = enabled
__props__.__dict__["filters"] = filters
__props__.__dict__["name"] = name
__props__.__dict__["policy_description"] = policy_description
__props__.__dict__["suppress"] = suppress
__props__.__dict__["team_id"] = team_id
__props__.__dict__["time_restrictions"] = time_restrictions
return NotificationPolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="autoCloseActions")
def auto_close_actions(self) -> pulumi.Output[Optional[Sequence['outputs.NotificationPolicyAutoCloseAction']]]:
"""
Auto Restart Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "auto_close_actions")
@property
@pulumi.getter(name="autoRestartActions")
def auto_restart_actions(self) -> pulumi.Output[Optional[Sequence['outputs.NotificationPolicyAutoRestartAction']]]:
"""
Auto Restart Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "auto_restart_actions")
@property
@pulumi.getter(name="deDuplicationActions")
def de_duplication_actions(self) -> pulumi.Output[Optional[Sequence['outputs.NotificationPolicyDeDuplicationAction']]]:
"""
Deduplication Action of the policy. This is a block, structure is documented below.
"""
return pulumi.get(self, "de_duplication_actions")
@property
@pulumi.getter(name="delayActions")
def delay_actions(self) -> pulumi.Output[Optional[Sequence['outputs.NotificationPolicyDelayAction']]]:
"""
Delay notifications. This is a block, structure is documented below.
"""
return pulumi.get(self, "delay_actions")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
"""
If policy should be enabled. Default: `true`
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def filters(self) -> pulumi.Output[Sequence['outputs.NotificationPolicyFilter']]:
"""
A notification filter which will be applied. This filter can be empty: `filter {}` - this means `match-all`. This is a block, structure is documented below.
"""
return pulumi.get(self, "filters")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the notification policy
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="policyDescription")
def policy_description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the policy. This can be max 512 characters.
"""
return pulumi.get(self, "policy_description")
@property
@pulumi.getter
def suppress(self) -> pulumi.Output[Optional[bool]]:
"""
Suppress value of the policy. Values are: `true`, `false`. Default: `false`
"""
return pulumi.get(self, "suppress")
@property
@pulumi.getter(name="teamId")
def team_id(self) -> pulumi.Output[str]:
"""
Id of team that this policy belons to.
"""
return pulumi.get(self, "team_id")
@property
@pulumi.getter(name="timeRestrictions")
def time_restrictions(self) -> pulumi.Output[Optional[Sequence['outputs.NotificationPolicyTimeRestriction']]]:
"""
Time restrictions specified in this field must be met for this policy to work. This is a block, structure is documented below.
"""
return pulumi.get(self, "time_restrictions")
| 54.416928 | 267 | 0.691486 | 3,792 | 34,718 | 6.137658 | 0.05327 | 0.106342 | 0.075922 | 0.083784 | 0.91179 | 0.902466 | 0.89194 | 0.87935 | 0.874667 | 0.86646 | 0 | 0.000796 | 0.203554 | 34,718 | 637 | 268 | 54.502355 | 0.84091 | 0.31416 | 0 | 0.815789 | 1 | 0 | 0.182576 | 0.109055 | 0 | 0 | 0 | 0 | 0 | 1 | 0.163158 | false | 0.002632 | 0.018421 | 0 | 0.278947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3b4ce49074ac85fed443e1c7a5f0422445b88da8 | 5,537 | py | Python | benchmarks/feature/bench_feature.py | jakeKonrad/torch-quiver | 16e01b8b61459ae41b7386b6a57ef9d20dfb6606 | [
"Apache-2.0"
] | 196 | 2021-10-30T23:40:27.000Z | 2022-03-28T03:43:18.000Z | benchmarks/feature/bench_feature.py | jakeKonrad/torch-quiver | 16e01b8b61459ae41b7386b6a57ef9d20dfb6606 | [
"Apache-2.0"
] | 32 | 2021-11-03T15:07:50.000Z | 2022-03-07T09:03:33.000Z | benchmarks/feature/bench_feature.py | jakeKonrad/torch-quiver | 16e01b8b61459ae41b7386b6a57ef9d20dfb6606 | [
"Apache-2.0"
] | 24 | 2021-10-31T12:28:34.000Z | 2022-03-19T03:03:13.000Z | import torch
from ogb.nodeproppred import PygNodePropPredDataset
from torch_geometric.datasets import Reddit
from torch_geometric.loader import NeighborSampler
import time
import numpy as np
import os.path as osp
import quiver
def bench_on_ogbproduct():
print("=" * 20 + "OGBn-Product" + "=" * 20)
root = "/data/data/products"
dataset = PygNodePropPredDataset('ogbn-products', root)
train_idx = dataset.get_idx_split()["train"]
train_loader = torch.utils.data.DataLoader(train_idx,
batch_size=1024,
pin_memory=True,
shuffle=True)
csr_topo = quiver.CSRTopo(dataset[0].edge_index)
quiver_sampler = quiver.pyg.GraphSageSampler(csr_topo, [15, 10, 5],
device=0,
mode="UVA")
quiver_feature = quiver.Feature(rank=0,
device_list=[0, 1],
device_cache_size="200M",
cache_policy="device_replicate",
csr_topo=csr_topo)
feature = torch.zeros(dataset[0].x.shape)
feature[:] = dataset[0].x
quiver_feature.from_cpu_tensor(feature)
accessed_feature_size = 0
feature_time = 0
for seeds in train_loader:
nid, _, _ = quiver_sampler.sample(seeds)
torch.cuda.synchronize()
feature_start = time.time()
res = quiver_feature[nid]
torch.cuda.synchronize()
feature_time += time.time() - feature_start
accessed_feature_size += res.numel() * 4
torch.cuda.synchronize()
print(
f"Feature Collection Throughput {accessed_feature_size / feature_time / 1024 / 1024 / 1024} GB/s"
)
def bench_on_ogbproduct_cpu():
print("=" * 20 + "OGBn-Product CPU" + "=" * 20)
root = "/data/data/products"
dataset = PygNodePropPredDataset('ogbn-products', root)
feature = dataset[0].x
train_idx = dataset.get_idx_split()["train"]
train_loader = NeighborSampler(dataset[0].edge_index,
node_idx=train_idx,
sizes=[15, 10, 5],
batch_size=1024,
shuffle=True)
accessed_feature_size = 0
feature_time = 0
for batch_size, n_id, adjs in train_loader:
feature_start = time.time()
res = feature[n_id].to(0)
torch.cuda.synchronize()
feature_time += time.time() - feature_start
accessed_feature_size += res.numel() * 4
torch.cuda.synchronize()
print(
f"Feature Collection Throughput {accessed_feature_size / feature_time / 1024 / 1024 / 1024} GB/s"
)
def bench_on_reddit():
print("=" * 20 + "Reddit" + "=" * 20)
dataset = Reddit('/data/data/Reddit')
train_mask = dataset[0].train_mask
train_idx = train_mask.nonzero(as_tuple=False).view(-1)
train_loader = torch.utils.data.DataLoader(train_idx,
batch_size=1024,
pin_memory=True,
shuffle=True)
csr_topo = quiver.CSRTopo(dataset[0].edge_index)
quiver_sampler = quiver.pyg.GraphSageSampler(csr_topo, [25, 10],
device=0,
mode="UVA")
quiver_feature = quiver.Feature(rank=0,
device_list=[0, 1],
device_cache_size="110M",
cache_policy="device_replicate",
csr_topo=csr_topo)
quiver_feature.from_cpu_tensor(dataset[0].x)
accessed_feature_size = 0
feature_time = 0
for seeds in train_loader:
nid, _, _ = quiver_sampler.sample(seeds)
torch.cuda.synchronize()
feature_start = time.time()
res = quiver_feature[nid]
torch.cuda.synchronize()
feature_time += time.time() - feature_start
accessed_feature_size += res.numel() * 4
torch.cuda.synchronize()
print(
f"Feature Collection Throughput {accessed_feature_size / feature_time / 1024 / 1024 / 1024} GB/s"
)
def bench_on_reddit_cpu():
print("=" * 20 + "Reddit CPU" + "=" * 20)
root = '/data/data/Reddit'
dataset = Reddit(root)
feature = dataset[0].x
train_mask = dataset[0].train_mask
train_idx = train_mask.nonzero(as_tuple=False).view(-1)
train_loader = NeighborSampler(dataset[0].edge_index,
node_idx=train_idx,
sizes=[25, 10],
batch_size=1024,
shuffle=True)
accessed_feature_size = 0
feature_time = 0
for batch_size, n_id, adjs in train_loader:
feature_start = time.time()
res = feature[n_id].to(0)
torch.cuda.synchronize()
feature_time += time.time() - feature_start
accessed_feature_size += res.numel() * 4
torch.cuda.synchronize()
print(
f"Feature Collection Throughput {accessed_feature_size / feature_time / 1024 / 1024 / 1024} GB/s"
)
if __name__ == "__main__":
quiver.init_p2p([0, 1])
#bench_on_ogbproduct()
#bench_on_ogbproduct_cpu()
bench_on_reddit()
#bench_on_reddit_cpu()
| 39.269504 | 105 | 0.553729 | 599 | 5,537 | 4.856427 | 0.178631 | 0.061877 | 0.078377 | 0.055689 | 0.820901 | 0.79615 | 0.783774 | 0.783774 | 0.756274 | 0.73496 | 0 | 0.039259 | 0.346758 | 5,537 | 140 | 106 | 39.55 | 0.764999 | 0.0121 | 0 | 0.714286 | 0 | 0 | 0.10792 | 0.016097 | 0.031746 | 0 | 0 | 0 | 0 | 1 | 0.031746 | false | 0 | 0.063492 | 0 | 0.095238 | 0.063492 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3b99c72fee26c0cd79cb05620b286b20a86caa18 | 78,709 | py | Python | tests/codegen.py | izgzhen/cozy | fc57fdccdd52c5ecf4c4ae4e8b80af97e8119b77 | [
"Apache-2.0"
] | null | null | null | tests/codegen.py | izgzhen/cozy | fc57fdccdd52c5ecf4c4ae4e8b80af97e8119b77 | [
"Apache-2.0"
] | null | null | null | tests/codegen.py | izgzhen/cozy | fc57fdccdd52c5ecf4c4ae4e8b80af97e8119b77 | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict, defaultdict
import io
import os
import subprocess
import tempfile
import unittest
from cozy.target_syntax import *
from cozy.structures.heaps import *
from cozy.syntax_tools import pprint, mk_lambda, fresh_var
from cozy.codegen import CxxPrinter, JavaPrinter
class TestCodegen(unittest.TestCase):
def trove_path(self):
dir = "/tmp"
path = os.path.join(dir, "trove-3.0.3.jar")
if not os.path.exists(path):
subprocess.run(["curl", "-LO", "https://bitbucket.org/trove4j/trove/downloads/trove-3.0.3.tar.gz"], cwd=dir)
subprocess.run(["tar", "xf", "trove-3.0.3.tar.gz"], cwd=dir)
subprocess.run(["ln", "3.0.3/lib/trove-3.0.3.jar", path], cwd=dir)
return path
def check(self, impl, state_map, share_info, codegen):
with io.StringIO() as f:
codegen = codegen(f)
codegen.visit(impl, state_map, share_info)
code = f.getvalue()
ext = "java" if isinstance(codegen, JavaPrinter) else "cpp"
compile = ["javac"] if isinstance(codegen, JavaPrinter) else ["c++", "-std=c++11", "-w", "-c", "-o", "/dev/null"]
dir = tempfile.mkdtemp()
print("Writing impls to {}".format(dir))
filename = os.path.join(dir, "{}.{}".format(impl.name, ext))
args = compile + [filename]
print("Running `{}`".format(" ".join(args)))
with open(filename, "w") as f:
f.write(code)
res = subprocess.run(args)
assert res.returncode == 0
def test_regression01(self):
impl = Spec('Q', [], [], [('_var29', TBag(TInt())), ('_var724', TInt()), ('_var1831', TMap(TInt(), TBool())), ('_var6402', TBool()), ('_var10567', TMinHeap(TInt(), TInt())), ('_var10570', TInt()), ('_var22411', TBool()), ('_var32734', TBag(TInt())), ('_var51043', TInt()), ('_var80254', TMap(TInt(), TInt())), ('_var109036', TMinHeap(TInt(), TInt())), ('_var146605', TInt())], [], [Query('min_elt', 'public', [], (), EVar('_var724').with_type(TInt()), ''), Query('_query44', 'internal', [('n', TInt())], (), ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), '[add] additions to _var29'), Query('_query45', 'internal', [('n', TInt())], (), EEmptyList().with_type(TBag(TInt())), '[add] deletions from _var29'), Query('_query94', 'internal', [], (), EEmptyList().with_type(TBag(TInt())), '[extract_min] additions to _var29'), Query('_query95', 'internal', [], (), EBinOp(ECond(EHasKey(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var724').with_type(TInt())).with_type(TBool()), ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', EBinOp(ECond(EHasKey(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var724').with_type(TInt())).with_type(TBool()), ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EHasKey(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var724').with_type(TInt())).with_type(TBool()), ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), '[extract_min] deletions from _var29'), Query('_query763', 'internal', [('n', TInt())], (), ECond(EVar('_var6402').with_type(TBool()), 
EArgMin(EBinOp(ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), EVar('n').with_type(TInt())).with_type(TInt()), '[add] new value for _var724'), Query('_query866', 'internal', [], (), EHeapPeek2(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), EVar('_var10570').with_type(TInt())).with_type(TInt()), '[extract_min] new value for _var724'), Query('_query2688', 'internal', [('n', TInt())], (), EFilter(ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), ELambda(EVar('_var2466').with_type(TInt()), EUnaryOp('not', EHasKey(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var2466').with_type(TInt())).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), '[add] new or modified keys from _var1831'), Query('_query6243', 'internal', [], (), EFilter(EVar('_var32734').with_type(TBag(TInt())), ELambda(EVar('_var61201').with_type(TInt()), EUnaryOp('not', ECond(EBool(True).with_type(TBool()), EBinOp(EMapGet(EVar('_var80254').with_type(TMap(TInt(), TInt())), EVar('_var61201').with_type(TInt())).with_type(TInt()), '>', ENum(1).with_type(TInt())).with_type(TBool()), EHasKey(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var61201').with_type(TInt())).with_type(TBool())).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), '[extract_min] keys removed from _var1831'), Query('_query7077', 'internal', [('n', TInt())], (), EBool(True).with_type(TBool()), '[add] new value for _var6402'), Query('_query7187', 'internal', [], (), EVar('_var22411').with_type(TBool()), '[extract_min] new value for _var6402'), Query('_query11276', 'internal', [], (), EVar('_var10570').with_type(TInt()), '[add] None'), Query('_query11282', 'internal', [('n', TInt())], (), ENum(0).with_type(TInt()), '[add] None'), Query('_query11300', 'internal', 
[('_var11293', TInt()), ('n', TInt())], (), ENum(0).with_type(TInt()), '[add] None'), Query('_query17696', 'internal', [('n', TInt())], (), EBinOp(EVar('_var10570').with_type(TInt()), '+', ENum(1).with_type(TInt())).with_type(TInt()), '[add] new value for _var10570'), Query('_query17927', 'internal', [], (), EUnaryOp('len', EBinOp(EHeapElems(EVar('_var109036').with_type(TMinHeap(TInt(), TInt()))).with_type(TBag(TInt())), '-', EHeapElems(EMakeMinHeap(EBinOp(EVar('_var32734').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var32734').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '[extract_min] None'), Query('_query17964', 'internal', [('_var17933', TInt())], (), ENum(0).with_type(TInt()), '[extract_min] None'), Query('_query23035', 'internal', [], (), EVar('_var51043').with_type(TInt()), '[extract_min] new value for _var10570'), Query('_query35412', 'internal', [('n', TInt())], (), EBinOp(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), EBinOp(EBinOp(EUnaryOp('len', EVar('_var29').with_type(TBag(TInt()))).with_type(TInt()), '+', 
ENum(1).with_type(TInt())).with_type(TInt()), '-', ENum(1).with_type(TInt())).with_type(TInt()), EBinOp(EBinOp(EUnaryOp('len', EVar('_var29').with_type(TBag(TInt()))).with_type(TInt()), '+', ENum(1).with_type(TInt())).with_type(TInt()), '-', ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt()), '>', ENum(0).with_type(TInt())).with_type(TBool()), '[add] new value for _var22411'), Query('_query36133', 'internal', [], (), EBinOp(ECond(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var35638').with_type(TInt()), EVar('_var35638').with_type(TInt()))).with_type(TInt()), 'in', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EUnaryOp('len', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '-', ENum(1).with_type(TInt())).with_type(TInt()), EBinOp(EUnaryOp('len', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '-', ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt()), '>', ENum(0).with_type(TInt())).with_type(TBool()), '[extract_min] new value for _var22411'), Query('_query38230', 'internal', [('n', TInt())], (), ECond(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', 
ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), EBinOp(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), ECond(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 
EBinOp(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), EBinOp(ESingleton(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), 
ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), EBinOp(ESingleton(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), '-', EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ECond(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 
EBinOp(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), EBinOp(EEmptyList().with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), EBinOp(EEmptyList().with_type(TBag(TInt())), '-', EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), '[add] additions to _var32734'), Query('_query38240', 'internal', [('n', TInt())], (), ECond(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), 
EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), ECond(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), EBinOp(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', 
ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), EBinOp(ESingleton(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), 
EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), EBinOp(ESingleton(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), '-', EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ECond(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), 
EBinOp(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), EBinOp(EEmptyList().with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), 
ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), EBinOp(EEmptyList().with_type(TBag(TInt())), '-', EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), '[add] deletions from _var32734'), Query('_query45518', 'internal', [], (), ECond(ECond(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44087').with_type(TInt()), EVar('_var44087').with_type(TInt()))).with_type(TInt()), 'in', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EArgMin(ECond(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44097').with_type(TInt()), EVar('_var44097').with_type(TInt()))).with_type(TInt()), 'in', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', 
ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44109').with_type(TInt()), EVar('_var44109').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44117').with_type(TInt()), EVar('_var44117').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44122').with_type(TInt()), EVar('_var44122').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), ECond(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', 
EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), EBinOp(ESingleton(EArgMin(ECond(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44128').with_type(TInt()), EVar('_var44128').with_type(TInt()))).with_type(TInt()), 'in', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44153').with_type(TInt()), EVar('_var44153').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44158').with_type(TInt()), EVar('_var44158').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), 
EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), EBinOp(ESingleton(EArgMin(ECond(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44128').with_type(TInt()), EVar('_var44128').with_type(TInt()))).with_type(TInt()), 'in', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44153').with_type(TInt()), EVar('_var44153').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var44158').with_type(TInt()), EVar('_var44158').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), '-', EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ECond(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), 
EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), EBinOp(EEmptyList().with_type(TBag(TInt())), '-', ESingleton(EArgMin(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), EBinOp(EEmptyList().with_type(TBag(TInt())), '-', EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), '[extract_min] additions to _var32734'), Query('_query58222', 'internal', [('n', TInt())], (), ECond(EBool(True).with_type(TBool()), EVar('_var10570').with_type(TInt()), EBinOp(EVar('_var146605').with_type(TInt()), '-', ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt()), '[add] new value for _var51043'), Query('_query58811', 'internal', [], (), ECond(EUnaryOp('exists', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EUnaryOp('len', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '-', 
ENum(1).with_type(TInt())).with_type(TInt()), EBinOp(EUnaryOp('len', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EVar('_var724').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '-', ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt()), '[extract_min] new value for _var51043'), Query('_query84333', 'internal', [('n', TInt()), ('_var83413', TInt()), ('_var83414', TInt())], (), EBinOp(EUnaryOp('len', EFilter(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('_var61214').with_type(TInt()), EBinOp(EVar('_var83413').with_type(TInt()), '==', EVar('_var61214').with_type(TInt())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TInt()), '+', ECond(EBinOp(EVar('_var83413').with_type(TInt()), '==', EVar('n').with_type(TInt())).with_type(TBool()), ENum(1).with_type(TInt()), ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt()), '[add] new value for _var83414'), Query('_query97908', 'internal', [('_var96625', TInt()), ('_var96632', TInt())], (), EUnaryOp('len', EFilter(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var61214').with_type(TInt()), EBinOp(EVar('_var96625').with_type(TInt()), '==', EVar('_var61214').with_type(TInt())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TInt()), '[extract_min] new value for _var96632'), Query('_query97917', 'internal', [], (), EFilter(EUnaryOp('distinct', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var96625').with_type(TInt()), EBinOp(EUnaryOp('not', 
EBinOp(EVar('_var96625').with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), 'or', EUnaryOp('not', EBinOp(EUnaryOp('len', EFilter(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('_var61214').with_type(TInt()), EBinOp(EVar('_var96625').with_type(TInt()), '==', EVar('_var61214').with_type(TInt())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TInt()), '==', EUnaryOp('len', EFilter(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var61214').with_type(TInt()), EBinOp(EVar('_var96625').with_type(TInt()), '==', EVar('_var61214').with_type(TInt())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TInt())).with_type(TBool())).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), '[extract_min] new or modified keys from _var80254'), Query('_query109248', 'internal', [('n', TInt())], (), EUnaryOp('len', EBinOp(EHeapElems(EMakeMinHeap(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))).with_type(TBag(TInt())), '-', EHeapElems(EMakeMinHeap(ECond(EBinOp(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), 
EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), 'or', EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '[add] None'), Query('_query109256', 'internal', [('_var109253', TInt()), ('n', TInt())], (), ENum(0).with_type(TInt()), '[add] None'), Query('_query121063', 'internal', [], (), EUnaryOp('len', EBinOp(EHeapElems(EMakeMinHeap(ECond(EBinOp(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), 'in', EVar('_var29').with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))).with_type(TBag(TInt())), '-', EHeapElems(EMakeMinHeap(ECond(EBinOp(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), 
ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var121024').with_type(TInt()), EVar('_var121024').with_type(TInt()))).with_type(TInt()), 'in', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), ESingleton(EArgMin(EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var121024').with_type(TInt()), EVar('_var121024').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '[extract_min] None'), Query('_query121109', 'internal', [('_var121085', TInt())], (), ENum(0).with_type(TInt()), '[extract_min] None'), Query('_query146856', 'internal', [('n', TInt())], (), EBinOp(EBinOp(EUnaryOp('len', EVar('_var29').with_type(TBag(TInt()))).with_type(TInt()), '+', ENum(1).with_type(TInt())).with_type(TInt()), '+', ENum(1).with_type(TInt())).with_type(TInt()), '[add] new value for _var146605'), Query('_query147362', 'internal', [], (), EBinOp(EUnaryOp('len', EBinOp(EVar('_var29').with_type(TBag(TInt())), '-', ESingleton(EArgMin(EVar('_var29').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TInt()), '+', ENum(1).with_type(TInt())).with_type(TInt()), '[extract_min] new value for _var146605'), 
Op('add', [('n', TInt())], [], SSeq(SSeq(SSeq(SSeq(SDecl(EVar('_var164049'), ECall('_query763', (EVar('n').with_type(TInt()),)).with_type(TInt())), SDecl(EVar('_var164050'), ECall('_query2688', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())))), SSeq(SDecl(EVar('_var164051'), ECall('_query17927', ()).with_type(TInt())), SDecl(EVar('_var164052'), EBinOp(ECall('_query17927', ()).with_type(TInt()), '-', ECall('_query109248', (EVar('n').with_type(TInt()),)).with_type(TInt())).with_type(TInt())))), SSeq(SSeq(SDecl(EVar('_var164053'), ECall('_query17696', (EVar('n').with_type(TInt()),)).with_type(TInt())), SAssign(EVar('_var6402').with_type(TBool()), ECall('_query7077', (EVar('n').with_type(TInt()),)).with_type(TBool()))), SSeq(SSeq(SCall(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), 'remove_all', (ECall('_query11276', ()).with_type(TInt()), ECall('_query45', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())))), SSeq(SCall(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), 'add_all', (EBinOp(ECall('_query11276', ()).with_type(TInt()), '-', ECall('_query11282', (EVar('n').with_type(TInt()),)).with_type(TInt())).with_type(TInt()), ECall('_query44', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())))), SForEach(EVar('_var11293').with_type(TInt()), ECall('_query45', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), 'update', (EVar('_var11293').with_type(TInt()), ECall('_query11300', (EVar('_var11293').with_type(TInt()), EVar('n').with_type(TInt()))).with_type(TInt())))))), SAssign(EVar('_var146605').with_type(TInt()), ECall('_query146856', (EVar('n').with_type(TInt()),)).with_type(TInt()))))), SSeq(SSeq(SSeq(SAssign(EVar('_var22411').with_type(TBool()), ECall('_query35412', (EVar('n').with_type(TInt()),)).with_type(TBool())), SAssign(EVar('_var10570').with_type(TInt()), EVar('_var164053').with_type(TInt()))), SSeq(SSeq(SCall(EVar('_var109036').with_type(TMinHeap(TInt(), 
TInt())), 'remove_all', (EVar('_var164051').with_type(TInt()), ECall('_query38240', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())))), SSeq(SCall(EVar('_var109036').with_type(TMinHeap(TInt(), TInt())), 'add_all', (EVar('_var164052').with_type(TInt()), ECall('_query38230', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())))), SForEach(EVar('_var109253').with_type(TInt()), ECall('_query45', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var109036').with_type(TMinHeap(TInt(), TInt())), 'update', (EVar('_var109253').with_type(TInt()), ECall('_query109256', (EVar('_var109253').with_type(TInt()), EVar('n').with_type(TInt()))).with_type(TInt())))))), SAssign(EVar('_var51043').with_type(TInt()), ECall('_query58222', (EVar('n').with_type(TInt()),)).with_type(TInt())))), SSeq(SSeq(SSeq(SForEach(EVar('_var83413').with_type(TInt()), ECall('_query45', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SMapDel(EVar('_var80254').with_type(TMap(TInt(), TInt())), EVar('_var83413').with_type(TInt()))), SForEach(EVar('_var83413').with_type(TInt()), ECall('_query44', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SMapUpdate(EVar('_var80254').with_type(TMap(TInt(), TInt())), EVar('_var83413').with_type(TInt()), EVar('_var83414').with_type(TInt()), SAssign(EVar('_var83414').with_type(TInt()), ECall('_query84333', (EVar('n').with_type(TInt()), EVar('_var83413').with_type(TInt()), EVar('_var83414').with_type(TInt()))).with_type(TInt()))))), SSeq(SForEach(EVar('_var38247').with_type(TInt()), ECall('_query38240', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var32734').with_type(TBag(TInt())), 'remove', (EVar('_var38247').with_type(TInt()),))), SForEach(EVar('_var38247').with_type(TInt()), ECall('_query38230', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var32734').with_type(TBag(TInt())), 'add', (EVar('_var38247').with_type(TInt()),))))), SSeq(SSeq(SForEach(EVar('_var46').with_type(TInt()), 
ECall('_query45', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var29').with_type(TBag(TInt())), 'remove', (EVar('_var46').with_type(TInt()),))), SForEach(EVar('_var46').with_type(TInt()), ECall('_query44', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var29').with_type(TBag(TInt())), 'add', (EVar('_var46').with_type(TInt()),)))), SSeq(SAssign(EVar('_var724').with_type(TInt()), EVar('_var164049').with_type(TInt())), SSeq(SForEach(EVar('_var2466').with_type(TInt()), ECall('_query45', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SMapDel(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var2466').with_type(TInt()))), SForEach(EVar('_var2466').with_type(TInt()), EVar('_var164050').with_type(TBag(TInt())), SMapUpdate(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var2466').with_type(TInt()), EVar('_var2467').with_type(TBool()), SNoOp())))))))), ''), Op('extract_min', [], [], SSeq(SSeq(SSeq(SSeq(SDecl(EVar('_var164054'), ECall('_query17927', ()).with_type(TInt())), SDecl(EVar('_var164055'), EBinOp(ECall('_query17927', ()).with_type(TInt()), '-', ECall('_query121063', ()).with_type(TInt())).with_type(TInt()))), SSeq(SDecl(EVar('_var164056'), ECall('_query6243', ()).with_type(TBag(TInt()))), SDecl(EVar('_var164057'), ECall('_query6243', ()).with_type(TBag(TInt()))))), SSeq(SSeq(SDecl(EVar('_var164058'), ECall('_query866', ()).with_type(TInt())), SDecl(EVar('_var164059'), ECall('_query6243', ()).with_type(TBag(TInt())))), SSeq(SAssign(EVar('_var6402').with_type(TBool()), ECall('_query7187', ()).with_type(TBool())), SSeq(SSeq(SCall(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), 'remove_all', (ECall('_query11276', ()).with_type(TInt()), ECall('_query95', ()).with_type(TBag(TInt())))), SSeq(SCall(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), 'add_all', (EBinOp(ECall('_query11276', ()).with_type(TInt()), '-', ECall('_query17927', ()).with_type(TInt())).with_type(TInt()), 
ECall('_query94', ()).with_type(TBag(TInt())))), SForEach(EVar('_var17933').with_type(TInt()), ECall('_query94', ()).with_type(TBag(TInt())), SCall(EVar('_var10567').with_type(TMinHeap(TInt(), TInt())), 'update', (EVar('_var17933').with_type(TInt()), ECall('_query17964', (EVar('_var17933').with_type(TInt()),)).with_type(TInt())))))), SAssign(EVar('_var146605').with_type(TInt()), ECall('_query147362', ()).with_type(TInt())))))), SSeq(SSeq(SSeq(SAssign(EVar('_var22411').with_type(TBool()), ECall('_query36133', ()).with_type(TBool())), SAssign(EVar('_var10570').with_type(TInt()), ECall('_query23035', ()).with_type(TInt()))), SSeq(SSeq(SCall(EVar('_var109036').with_type(TMinHeap(TInt(), TInt())), 'remove_all', (EVar('_var164054').with_type(TInt()), ECall('_query6243', ()).with_type(TBag(TInt())))), SSeq(SCall(EVar('_var109036').with_type(TMinHeap(TInt(), TInt())), 'add_all', (EVar('_var164055').with_type(TInt()), ECall('_query45518', ()).with_type(TBag(TInt())))), SForEach(EVar('_var121085').with_type(TInt()), ECall('_query94', ()).with_type(TBag(TInt())), SCall(EVar('_var109036').with_type(TMinHeap(TInt(), TInt())), 'update', (EVar('_var121085').with_type(TInt()), ECall('_query121109', (EVar('_var121085').with_type(TInt()),)).with_type(TInt())))))), SAssign(EVar('_var51043').with_type(TInt()), ECall('_query58811', ()).with_type(TInt())))), SSeq(SSeq(SSeq(SForEach(EVar('_var96625').with_type(TInt()), EVar('_var164056').with_type(TBag(TInt())), SMapDel(EVar('_var80254').with_type(TMap(TInt(), TInt())), EVar('_var96625').with_type(TInt()))), SForEach(EVar('_var96625').with_type(TInt()), ECall('_query97917', ()).with_type(TBag(TInt())), SMapUpdate(EVar('_var80254').with_type(TMap(TInt(), TInt())), EVar('_var96625').with_type(TInt()), EVar('_var96632').with_type(TInt()), SAssign(EVar('_var96632').with_type(TInt()), ECall('_query97908', (EVar('_var96625').with_type(TInt()), EVar('_var96632').with_type(TInt()))).with_type(TInt()))))), 
SSeq(SForEach(EVar('_var45536').with_type(TInt()), EVar('_var164057').with_type(TBag(TInt())), SCall(EVar('_var32734').with_type(TBag(TInt())), 'remove', (EVar('_var45536').with_type(TInt()),))), SForEach(EVar('_var45536').with_type(TInt()), ECall('_query45518', ()).with_type(TBag(TInt())), SCall(EVar('_var32734').with_type(TBag(TInt())), 'add', (EVar('_var45536').with_type(TInt()),))))), SSeq(SSeq(SForEach(EVar('_var96').with_type(TInt()), ECall('_query95', ()).with_type(TBag(TInt())), SCall(EVar('_var29').with_type(TBag(TInt())), 'remove', (EVar('_var96').with_type(TInt()),))), SForEach(EVar('_var96').with_type(TInt()), ECall('_query94', ()).with_type(TBag(TInt())), SCall(EVar('_var29').with_type(TBag(TInt())), 'add', (EVar('_var96').with_type(TInt()),)))), SSeq(SAssign(EVar('_var724').with_type(TInt()), EVar('_var164058').with_type(TInt())), SSeq(SForEach(EVar('_var6241').with_type(TInt()), EVar('_var164059').with_type(TBag(TInt())), SMapDel(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var6241').with_type(TInt()))), SForEach(EVar('_var6241').with_type(TInt()), ECall('_query94', ()).with_type(TBag(TInt())), SMapUpdate(EVar('_var1831').with_type(TMap(TInt(), TBool())), EVar('_var6241').with_type(TInt()), EVar('_var6242').with_type(TBool()), SNoOp())))))))), '')], '', '', '')
print(pprint(impl))
for codegen in (CxxPrinter, JavaPrinter):
state_map = OrderedDict([('_var29', EVar('l').with_type(TBag(TInt()))), ('_var724', EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())), ('_var1831', EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var657').with_type(TInt()), EBool(True).with_type(TBool()))).with_type(TMap(TInt(), TBool()))), ('_var6402', EUnaryOp('exists', EVar('l').with_type(TBag(TInt()))).with_type(TBool())), ('_var10567', EMakeMinHeap(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))), ('_var10570', EUnaryOp('len', EVar('l').with_type(TBag(TInt()))).with_type(TInt())), ('_var22411', EBinOp(EBinOp(EUnaryOp('len', EVar('l').with_type(TBag(TInt()))).with_type(TInt()), '-', ECond(EHasKey(EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var19933').with_type(TInt()), EBool(True).with_type(TBool()))).with_type(TMap(TInt(), TBool())), EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), ENum(1).with_type(TInt()), ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt()), '>', ENum(0).with_type(TInt())).with_type(TBool())), ('_var32734', ECond(ECond(EHasKey(EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var657').with_type(TInt()), EBool(True).with_type(TBool()))).with_type(TMap(TInt(), TBool())), EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBinOp(EArgMin(ECond(EHasKey(EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var657').with_type(TInt()), EBool(True).with_type(TBool()))).with_type(TMap(TInt(), TBool())), EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), 
ESingleton(EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt()), '==', EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), ESingleton(EArgMin(ECond(EHasKey(EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var657').with_type(TInt()), EBool(True).with_type(TBool()))).with_type(TMap(TInt(), TBool())), EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBool()), ESingleton(EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))), ('_var51043', EBinOp(EUnaryOp('len', EVar('l').with_type(TBag(TInt()))).with_type(TInt()), '-', ECond(EUnaryOp('exists', EVar('l').with_type(TBag(TInt()))).with_type(TBool()), ENum(1).with_type(TInt()), ENum(0).with_type(TInt())).with_type(TInt())).with_type(TInt())), ('_var80254', EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var61207').with_type(TInt()), EUnaryOp('len', EFilter(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var61214').with_type(TInt()), EBinOp(EVar('_var61207').with_type(TInt()), '==', EVar('_var61214').with_type(TInt())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TInt()))).with_type(TMap(TInt(), TInt()))), ('_var109036', 
EMakeMinHeap(EMapGet(EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_key54901').with_type(TInt()), ESingleton(EVar('_key54901').with_type(TInt())).with_type(TBag(TInt())))).with_type(TMap(TInt(), TBag(TInt()))), EArgMin(EVar('l').with_type(TBag(TInt())), ELambda(EVar('x').with_type(TInt()), EVar('x').with_type(TInt()))).with_type(TInt())).with_type(TBag(TInt())), ELambda(EVar('_var790').with_type(TInt()), EVar('_var790').with_type(TInt()))).with_type(TMinHeap(TInt(), TInt()))), ('_var146605', EBinOp(EUnaryOp('len', EVar('l').with_type(TBag(TInt()))).with_type(TInt()), '+', ENum(1).with_type(TInt())).with_type(TInt()))])
share_info = defaultdict(list, {})
self.check(impl, state_map, share_info, lambda out: codegen(out=out))
def test_regression04(self):
    """Regression test: generate C++ and Java code for a hand-written
    implementation of the 'Basic' spec and validate it with ``self.check``.

    The Spec and state_map ASTs below are regression data — presumably
    captured from a synthesizer run (the ``_varNNN``/``_nameNNN``
    identifiers are machine-generated) — not hand-maintained code, so
    they are left exactly as recorded.
    """
    # Concrete implementation: four state variables (_var12, _var895,
    # _var9841, _var10947), a public 'elems' query plus internal helper
    # queries, and 'add'/'remove' update operations expressed over them.
    impl = Spec('Basic', [], [], [('_var12', TList(TInt())), ('_var895', TMap(TInt(), TList(TInt()))), ('_var9841', TMap(TInt(), TList(TInt()))), ('_var10947', TMap(TInt(), TBool()))], [], [Query('elems', 'public', [], (), EVar('_var12').with_type(TList(TInt())), ""), Query('_name13', 'internal', [('n', TInt())], (), ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), ""), Query('_name14', 'internal', [('n', TInt())], (), EEmptyList().with_type(TBag(TInt())), ""), Query('_name35', 'internal', [('n', TInt())], (), EMapGet(EVar('_var895').with_type(TMap(TInt(), TList(TInt()))), EVar('n').with_type(TInt())).with_type(TList(TInt())), ""), Query('_name911', 'internal', [('_var905', TInt()), ('n', TInt())], (), ESingleton(EVar('_var905').with_type(TInt())).with_type(TBag(TInt())), ""), Query('_name912', 'internal', [('_var905', TInt()), ('n', TInt())], (), EEmptyList().with_type(TBag(TInt())), ""), Query('_name914', 'internal', [('n', TInt())], (), EFilter(ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), ELambda(EVar('_var1498').with_type(TInt()), EUnaryOp('not', EMapGet(EVar('_var10947').with_type(TMap(TInt(), TBool())), EVar('_var1498').with_type(TInt())).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), ""), Query('_name1497', 'internal', [('_var1492', TInt()), ('n', TInt())], (), EEmptyList().with_type(TBag(TInt())), ""), Query('_name1506', 'internal', [('_var1492', TInt()), ('n', TInt())], (), ESingleton(EVar('_var1492').with_type(TInt())).with_type(TBag(TInt())), ""), Query('_name1519', 'internal', [('n', TInt())], (), EMapGet(EVar('_var9841').with_type(TMap(TInt(), TList(TInt()))), EVar('n').with_type(TInt())).with_type(TList(TInt())), ""), Query('_name9848', 'internal', [('_var9842', TInt()), ('n', TInt())], (), EBinOp(ECond(EBinOp(EVar('_var9842').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '+',
ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), '+', EFilter(ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ECond(EBinOp(EVar('_var9842').with_type(TInt()), 'in', EVar('_var12').with_type(TList(TInt()))).with_type(TBool()), EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ""), Query('_name9855', 'internal', [('_var9842', TInt()), ('n', TInt())], (), EBinOp(ECond(EBinOp(EVar('_var9842').with_type(TInt()), 'in', EVar('_var12').with_type(TList(TInt()))).with_type(TBool()),
EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ECond(EBinOp(EVar('_var9842').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), '+', EFilter(ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ""), Query('_name9863', 'internal', [('n', TInt())], (), EFilter(EUnaryOp('distinct', EBinOp(EUnaryOp('distinct', EVar('_var12').with_type(TList(TInt()))).with_type(TList(TInt())), '+', EUnaryOp('distinct',
EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var9842').with_type(TInt()), EUnaryOp('not', EBinOp(ECond(EBinOp(EVar('_var9842').with_type(TInt()), 'in', EVar('_var12').with_type(TList(TInt()))).with_type(TBool()), EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '==', ECond(EBinOp(EVar('_var9842').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), '+', EFilter(ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-',
ESingleton(EVar('_var9842').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), ""), Query('_name16354', 'internal', [('_var16321', TInt()), ('n', TInt())], (), EBinOp(ECond(EBinOp(EVar('_var16321').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EFilter(EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var16321').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ECond(EBinOp(EVar('_var16321').with_type(TInt()), 'in', EVar('_var12').with_type(TList(TInt()))).with_type(TBool()), EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('_var16321').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ""), Query('_name16357', 'internal', [('_var16321', TInt()), ('n', TInt())], (), EBinOp(ECond(EBinOp(EVar('_var16321').with_type(TInt()), 'in',
EVar('_var12').with_type(TList(TInt()))).with_type(TBool()), EFilter(EVar('_var12').with_type(TList(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('_var16321').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ECond(EBinOp(EVar('_var16321').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EFilter(EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt())), '-', ESingleton(EVar('_var16321').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())), EEmptyList().with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBag(TInt())), ""), Query('_name24791', 'internal', [('_var24789', TInt()), ('n', TInt())], (), ECond(EBinOp(EVar('_var24789').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EVar('_var24789').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '+', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), ""), Query('_name28311', 'internal',
[('_var28307', TInt()), ('n', TInt())], (), ECond(EBinOp(EVar('_var28307').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBinOp(EVar('_var28307').with_type(TInt()), 'in', EBinOp(EVar('_var12').with_type(TList(TInt())), '-', ESingleton(EVar('n').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool()), EBool(False).with_type(TBool())).with_type(TBool()), ""), Op('add', [('n', TInt())], [], SSeq(SSeq(SSeq(SSeq(SForEach(EVar('_var15').with_type(TInt()), ECall('_name14', [EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var12').with_type(TList(TInt())), 'remove', [EVar('_var15').with_type(TInt())])), SForEach(EVar('_var15').with_type(TInt()), ECall('_name13', [EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var12').with_type(TList(TInt())), 'add', [EVar('_var15').with_type(TInt())]))), SForEach(EVar('_var905').with_type(TInt()), ECall('_name914', [EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SMapUpdate(EVar('_var895').with_type(TMap(TInt(), TList(TInt()))), EVar('_var905').with_type(TInt()), EVar('_var906').with_type(TList(TInt())), SSeq(SForEach(EVar('_var913').with_type(TInt()), ECall('_name912', [EVar('_var905').with_type(TInt()), EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var906').with_type(TList(TInt())), 'remove', [EVar('_var913').with_type(TInt())])), SForEach(EVar('_var913').with_type(TInt()), ECall('_name911', [EVar('_var905').with_type(TInt()), EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var906').with_type(TList(TInt())), 'add', [EVar('_var913').with_type(TInt())])))))), SForEach(EVar('_var9842').with_type(TInt()), ECall('_name9863', [EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SMapUpdate(EVar('_var9841').with_type(TMap(TInt(), TList(TInt()))), EVar('_var9842').with_type(TInt()),
EVar('_var9843').with_type(TList(TInt())), SSeq(SForEach(EVar('_var9856').with_type(TInt()), ECall('_name9855', [EVar('_var9842').with_type(TInt()), EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var9843').with_type(TList(TInt())), 'remove', [EVar('_var9856').with_type(TInt())])), SForEach(EVar('_var9856').with_type(TInt()), ECall('_name9848', [EVar('_var9842').with_type(TInt()), EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var9843').with_type(TList(TInt())), 'add', [EVar('_var9856').with_type(TInt())])))))), SForEach(EVar('_var24789').with_type(TInt()), ECall('_name914', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SMapUpdate(EVar('_var10947').with_type(TMap(TInt(), TBool())), EVar('_var24789').with_type(TInt()), EVar('_var24790').with_type(TBool()), SAssign(EVar('_var24790').with_type(TBool()), ECall('_name24791', (EVar('_var24789').with_type(TInt()), EVar('n').with_type(TInt()))).with_type(TBool()))))), ""), Op('remove', [('n', TInt())], [], SSeq(SSeq(SSeq(SSeq(SForEach(EVar('_var36').with_type(TInt()), ECall('_name35', (EVar('n').with_type(TInt()),)).with_type(TList(TInt())), SCall(EVar('_var12').with_type(TList(TInt())), 'remove', [EVar('_var36').with_type(TInt())])), SForEach(EVar('_var36').with_type(TInt()), ECall('_name14', (EVar('n').with_type(TInt()),)).with_type(TBag(TInt())), SCall(EVar('_var12').with_type(TList(TInt())), 'add', [EVar('_var36').with_type(TInt())]))), SForEach(EVar('_var1492').with_type(TInt()), ECall('_name1519', [EVar('n').with_type(TInt())]).with_type(TList(TInt())), SMapUpdate(EVar('_var895').with_type(TMap(TInt(), TList(TInt()))), EVar('_var1492').with_type(TInt()), EVar('_var1493').with_type(TList(TInt())), SSeq(SForEach(EVar('_var1507').with_type(TInt()), ECall('_name1506', [EVar('_var1492').with_type(TInt()), EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var1493').with_type(TList(TInt())), 'remove', [EVar('_var1507').with_type(TInt())])),
SForEach(EVar('_var1507').with_type(TInt()), ECall('_name1497', [EVar('_var1492').with_type(TInt()), EVar('n').with_type(TInt())]).with_type(TBag(TInt())), SCall(EVar('_var1493').with_type(TList(TInt())), 'add', [EVar('_var1507').with_type(TInt())])))))), SForEach(EVar('_var16321').with_type(TInt()), ECall('_name35', (EVar('n').with_type(TInt()),)).with_type(TList(TInt())), SMapUpdate(EVar('_var9841').with_type(TMap(TInt(), TList(TInt()))), EVar('_var16321').with_type(TInt()), EVar('_var16322').with_type(TList(TInt())), SSeq(SForEach(EVar('_var16358').with_type(TInt()), ECall('_name16357', (EVar('_var16321').with_type(TInt()), EVar('n').with_type(TInt()))).with_type(TBag(TInt())), SCall(EVar('_var16322').with_type(TList(TInt())), 'remove', [EVar('_var16358').with_type(TInt())])), SForEach(EVar('_var16358').with_type(TInt()), ECall('_name16354', (EVar('_var16321').with_type(TInt()), EVar('n').with_type(TInt()))).with_type(TBag(TInt())), SCall(EVar('_var16322').with_type(TList(TInt())), 'add', [EVar('_var16358').with_type(TInt())])))))), SForEach(EVar('_var28307').with_type(TInt()), ECall('_name1519', (EVar('n').with_type(TInt()),)).with_type(TList(TInt())), SMapUpdate(EVar('_var10947').with_type(TMap(TInt(), TBool())), EVar('_var28307').with_type(TInt()), EVar('_var28309').with_type(TBool()), SAssign(EVar('_var28309').with_type(TBool()), ECall('_name28311', (EVar('_var28307').with_type(TInt()), EVar('n').with_type(TInt()))).with_type(TBool()))))), "")], "", "", "")
    # Echo the implementation under test; helpful when self.check fails.
    print(pprint(impl))
    for codegen in (CxxPrinter, JavaPrinter):
        # Maps each concrete state variable to its defining expression
        # over the abstract state variable ``l`` (a bag of ints).
        state_map = {'_var12': EVar('l').with_type(TBag(TInt())), '_var895': EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var116').with_type(TInt()), ESingleton(EVar('_var116').with_type(TInt())).with_type(TBag(TInt())))).with_type(TMap(TInt(), TBag(TInt()))), '_var9841': EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var2516').with_type(TInt()), EFilter(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var2515').with_type(TInt()), EUnaryOp('not', EBinOp(EVar('_var2515').with_type(TInt()), 'in', EBinOp(EVar('l').with_type(TBag(TInt())), '-', ESingleton(EVar('_var2516').with_type(TInt())).with_type(TBag(TInt()))).with_type(TBag(TInt()))).with_type(TBool())).with_type(TBool()))).with_type(TBag(TInt())))).with_type(TMap(TInt(), TBag(TInt()))), '_var10947': EMakeMap2(EVar('l').with_type(TBag(TInt())), ELambda(EVar('_var1498').with_type(TInt()), EBinOp(EVar('_var1498').with_type(TInt()), 'in', EVar('l').with_type(TBag(TInt()))).with_type(TBool()))).with_type(TMap(TInt(), TBool()))}
        # No shared representations between state variables in this case.
        share_info = {}
        self.check(impl, state_map, share_info, lambda out: codegen(out=out))
def test_construct_concrete_list(self):
    """Smoke test: each printer can materialize a filtered bag into a
    concrete list via ``construct_concrete`` and render the result."""
    with io.StringIO() as sink:
        for printer in (CxxPrinter(out=sink), JavaPrinter(out=sink)):
            # Filter the abstract bag ``v`` down to its positive elements.
            positives = EFilter(
                EVar("v").with_type(TBag(INT)),
                mk_lambda(INT, lambda elem: EBinOp(elem, ">", ZERO)),
            ).with_type(TBag(INT))
            target = EVar("out").with_type(TList(INT))
            setup = printer.construct_concrete(TList(INT), positives, target)
            printer.visit(setup)
def test_construct_concrete_map(self):
    """Smoke test: each printer can materialize a map (built from a
    filtered bag) into a concrete TMap via ``construct_concrete``.

    Fix: the intermediate previously named ``map`` shadowed the builtin;
    renamed to ``mapping``.
    """
    with io.StringIO() as f:
        for codegen in (CxxPrinter(out=f), JavaPrinter(out=f)):
            # Positive elements of the abstract bag ``v``.
            bag = EFilter(EVar("v").with_type(TBag(INT)), mk_lambda(INT, lambda x: EBinOp(x, ">", ZERO))).with_type(TBag(INT))
            # Identity-keyed map over the filtered bag.
            mapping = EMakeMap2(bag, mk_lambda(INT, lambda k: k)).with_type(TMap(INT, INT))
            stm = codegen.construct_concrete(TMap(INT, INT), mapping, EVar("out").with_type(TMap(INT, INT)))
            codegen.visit(stm)
def test_distinct_foreach(self):
    """Smoke test: render a for-each over the distinct elements of a
    filtered bag with both printers."""
    with io.StringIO() as sink:
        for printer in (CxxPrinter(out=sink), JavaPrinter(out=sink)):
            # Positive elements of the abstract bag ``v``.
            filtered = EFilter(
                EVar("v").with_type(TBag(INT)),
                mk_lambda(INT, lambda elem: EBinOp(elem, ">", ZERO)),
            ).with_type(TBag(INT))
            loop_var = fresh_var(INT)
            assigned = fresh_var(INT)
            distinct = EUnaryOp(UOp.Distinct, filtered).with_type(TSet(INT))
            printer.visit(SForEach(loop_var, distinct, SAssign(assigned, loop_var)))
def test_distinct(self):
    """Smoke test: print each printer's rendering of ``distinct`` applied
    to a filtered bag."""
    with io.StringIO() as sink:
        for printer in (CxxPrinter(out=sink), JavaPrinter(out=sink)):
            # Positive elements of the abstract bag ``v``.
            filtered = EFilter(
                EVar("v").with_type(TBag(INT)),
                mk_lambda(INT, lambda elem: EBinOp(elem, ">", ZERO)),
            ).with_type(TBag(INT))
            rendered = printer.visit(EUnaryOp(UOp.Distinct, filtered).with_type(TSet(INT)))
            print(rendered)
def test_len(self):
    """Smoke test: print each printer's rendering of ``len`` applied to a
    filtered bag.

    Fix: the Length expression was annotated ``TSet(INT)`` — evidently a
    copy-paste from test_distinct; a length is a scalar, so it is now
    annotated ``INT``.
    """
    with io.StringIO() as f:
        for codegen in (CxxPrinter(out=f), JavaPrinter(out=f)):
            # Positive elements of the abstract bag ``v``.
            bag = EFilter(EVar("v").with_type(TBag(INT)), mk_lambda(INT, lambda x: EBinOp(x, ">", ZERO))).with_type(TBag(INT))
            print(codegen.visit(EUnaryOp(UOp.Length, bag).with_type(INT)))
def test_all(self):
    """Smoke test: print each printer's rendering of ``all`` over a bag of
    booleans.

    Fixes (mirroring the sibling test_any): the comparison lambda now
    carries ``.with_type(BOOL)`` like test_any's does, and the ``All``
    expression is annotated ``BOOL`` instead of the incorrect
    ``TSet(INT)`` — an all-reduction yields a single boolean.
    """
    with io.StringIO() as f:
        for codegen in (CxxPrinter(out=f), JavaPrinter(out=f)):
            # Map each element of ``v`` to the boolean (x > 0).
            bag = EMap(EVar("v").with_type(TBag(INT)), mk_lambda(INT, lambda x: EBinOp(x, ">", ZERO).with_type(BOOL))).with_type(TBag(BOOL))
            print(codegen.visit(EUnaryOp(UOp.All, bag).with_type(BOOL)))
def test_any(self):
    """Smoke test: print each printer's rendering of ``any`` over a bag of
    booleans.

    Fix: the ``Any`` expression was annotated ``TSet(INT)``; an
    any-reduction yields a single boolean, so it is now annotated
    ``BOOL``.
    """
    with io.StringIO() as f:
        for codegen in (CxxPrinter(out=f), JavaPrinter(out=f)):
            # Map each element of ``v`` to the boolean (x > 0).
            bag = EMap(EVar("v").with_type(TBag(INT)), mk_lambda(INT, lambda x: EBinOp(x, ">", ZERO).with_type(BOOL))).with_type(TBag(BOOL))
            print(codegen.visit(EUnaryOp(UOp.Any, bag).with_type(BOOL)))
def test_argmin(self):
    """Smoke test: print each printer's rendering of argmin with a
    negation key over a mapped bag.

    NOTE(review): the bag here is typed TBag(BOOL) (the element lambda
    maps ints to comparisons), yet the argmin key lambda is declared
    over INT and the whole expression is annotated INT — this looks
    copy-pasted from test_any; confirm whether an int-typed bag was
    intended before changing it.
    """
    with io.StringIO() as f:
        for codgen in (CxxPrinter(out=f), JavaPrinter(out=f)):
            bag = EMap(EVar("v").with_type(TBag(INT)), mk_lambda(INT, lambda x: EBinOp(x, ">", ZERO).with_type(BOOL))).with_type(TBag(BOOL))
            # argmin with key -x, i.e. selects the maximum under negation.
            print(codgen.visit(EArgMin(bag, mk_lambda(INT, lambda x: EUnaryOp("-", x).with_type(x.type))).with_type(INT)))
| 709.09009 | 50,752 | 0.668335 | 11,152 | 78,709 | 4.47274 | 0.028425 | 0.303929 | 0.21243 | 0.214916 | 0.931556 | 0.896271 | 0.874018 | 0.843264 | 0.827386 | 0.811808 | 0 | 0.038155 | 0.042676 | 78,709 | 110 | 50,753 | 715.536364 | 0.623822 | 0 | 0 | 0.347368 | 0 | 0.010526 | 0.109517 | 0.000318 | 0.021053 | 0 | 0 | 0 | 0.010526 | 1 | 0.126316 | false | 0 | 0.105263 | 0 | 0.252632 | 0.105263 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
8e6f182f7798974911b5052d2ca3fbafc22a0d96 | 114,905 | py | Python | sympy/integrals/rubi/rubi_tests/tests/test_1_3.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | sympy/integrals/rubi/rubi_tests/tests/test_1_3.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | sympy/integrals/rubi/rubi_tests/tests/test_1_3.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
"""Header for a Rubi integration test module.

Everything below only sets up the environment for the ``test_*``
functions defined later in the file: it disables the tests (via the
``disabled`` flag, which SymPy's test collection honours) when
``matchpy`` is unavailable or Python is older than 3.6, imports the
Rubi integrator plus the functions appearing in the expected
antiderivatives, and declares the real symbols used throughout.
"""
import sys
from sympy.external import import_module

# matchpy is an optional dependency required by the Rubi rule matcher.
matchpy = import_module("matchpy")
if not matchpy:
    # bin/test will not execute any tests now
    disabled = True
if sys.version_info[:2] < (3, 6):
    # Tests are likewise disabled on Python versions before 3.6.
    disabled = True

from sympy.integrals.rubi.rubi import rubi_integrate
from sympy.functions import log, sqrt, exp, cos, sin, tan, sec, csc, cot
# The reference antiderivatives use the arc* spellings of the inverse
# trig/hyperbolic functions, so alias SymPy's a* names accordingly.
from sympy.functions.elementary.hyperbolic import atanh as arctanh
from sympy.functions.elementary.hyperbolic import asinh as arcsinh
from sympy.functions.elementary.hyperbolic import acosh as arccosh
from sympy.functions.elementary.trigonometric import atan as arctan
from sympy.functions.elementary.trigonometric import asin as arcsin
from sympy.functions.elementary.trigonometric import acos as arccos
from sympy.integrals.rubi.utility_function import (
    EllipticE,
    EllipticF,
    hypergeom,
    rubi_test,
)
from sympy import pi as Pi
from sympy import S, hyper, I, simplify, exp_polar, symbols
from sympy.testing.pytest import slow, skip, ON_TRAVIS

# Real-valued symbols shared by every test in this module.
A, B, C, D, a, b, c, d, e, f, m, n, p, x, u = symbols(
    "A B C D a b c d e f m n p x u", real=True, imaginary=False
)
@slow
def test_1():
if ON_TRAVIS:
skip("Too slow for travis.")
test = [
[
x ** S(2) * (a + b * x) * (a * c - b * c * x) ** S(3),
x,
S(2),
S(1) / S(3) * a ** S(4) * c ** S(3) * x ** S(3)
- S(1) / S(2) * a ** S(3) * b * c ** S(3) * x ** S(4)
+ S(1) / S(3) * a * b ** S(3) * c ** S(3) * x ** S(6)
- S(1) / S(7) * b ** S(4) * c ** S(3) * x ** S(7),
],
[
x * (a + b * x) * (a * c - b * c * x) ** S(3),
x,
S(2),
S(1) / S(2) * a ** S(4) * c ** S(3) * x ** S(2)
- S(2) / S(3) * a ** S(3) * b * c ** S(3) * x ** S(3)
+ S(2) / S(5) * a * b ** S(3) * c ** S(3) * x ** S(5)
- S(1) / S(6) * b ** S(4) * c ** S(3) * x ** S(6),
],
[
x ** S(3) * (a + b * x) * (A + B * x),
x,
S(2),
S(1) / S(4) * a * A * x ** S(4)
+ S(1) / S(5) * (A * b + a * B) * x ** S(5)
+ S(1) / S(6) * b * B * x ** S(6),
],
[
x ** S(4) * (A + B * x) / (a + b * x),
x,
S(2),
-(a ** S(3)) * (A * b - a * B) * x / b ** S(5)
+ S(1) / S(2) * a ** S(2) * (A * b - a * B) * x ** S(2) / b ** S(4)
- S(1) / S(3) * a * (A * b - a * B) * x ** S(3) / b ** S(3)
+ S(1) / S(4) * (A * b - a * B) * x ** S(4) / b ** S(2)
+ S(1) / S(5) * B * x ** S(5) / b
+ a ** S(4) * (A * b - a * B) * log(a + b * x) / b ** S(6),
],
[
x ** S(2) * (c + d * x) / (a + b * x),
x,
S(2),
-a * (b * c - a * d) * x / b ** S(3)
+ S(1) / S(2) * (b * c - a * d) * x ** S(2) / b ** S(2)
+ S(1) / S(3) * d * x ** S(3) / b
+ a ** S(2) * (b * c - a * d) * log(a + b * x) / b ** S(4),
],
[
x ** S(3) * (c + d * x) ** S(2) / (a + b * x) ** S(2),
x,
S(2),
-S(2) * a * (b * c - S(2) * a * d) * (b * c - a * d) * x / b ** S(5)
+ S(1)
/ S(2)
* (b * c - S(3) * a * d)
* (b * c - a * d)
* x ** S(2)
/ b ** S(4)
+ S(2) / S(3) * d * (b * c - a * d) * x ** S(3) / b ** S(3)
+ S(1) / S(4) * d ** S(2) * x ** S(4) / b ** S(2)
+ a ** S(3) * (b * c - a * d) ** S(2) / (b ** S(6) * (a + b * x))
+ a ** S(2)
* (S(3) * b * c - S(5) * a * d)
* (b * c - a * d)
* log(a + b * x)
/ b ** S(6),
],
[
x ** S(2) * (c + d * x) ** S(3) / (a + b * x) ** S(3),
x,
S(2),
S(3) * d * (b * c - S(2) * a * d) * (b * c - a * d) * x / b ** S(5)
+ S(3) / S(2) * d ** S(2) * (b * c - a * d) * x ** S(2) / b ** S(4)
+ S(1) / S(3) * d ** S(3) * x ** S(3) / b ** S(3)
- S(1)
/ S(2)
* a ** S(2)
* (b * c - a * d) ** S(3)
/ (b ** S(6) * (a + b * x) ** S(2))
+ a
* (S(2) * b * c - S(5) * a * d)
* (b * c - a * d) ** S(2)
/ (b ** S(6) * (a + b * x))
+ (b * c - a * d)
* (
b ** S(2) * c ** S(2)
- S(8) * a * b * c * d
+ S(10) * a ** S(2) * d ** S(2)
)
* log(a + b * x)
/ b ** S(6),
],
[
x ** (S(5) / S(2)) * (A + B * x) / (a + b * x),
x,
S(6),
-S(2) / S(3) * a * (A * b - a * B) * x ** (S(3) / S(2)) / b ** S(3)
+ S(2) / S(5) * (A * b - a * B) * x ** (S(5) / S(2)) / b ** S(2)
+ S(2) / S(7) * B * x ** (S(7) / S(2)) / b
- S(2)
* a ** (S(5) / S(2))
* (A * b - a * B)
* arctan(sqrt(b) * sqrt(x) / sqrt(a))
/ b ** (S(9) / S(2))
+ S(2) * a ** S(2) * (A * b - a * B) * sqrt(x) / b ** S(4),
],
[
x ** m * (a + b * x) ** S(3) * (A + B * x),
x,
S(2),
a ** S(3) * A * x ** (S(1) + m) / (S(1) + m)
+ a ** S(2) * (S(3) * A * b + a * B) * x ** (S(2) + m) / (S(2) + m)
+ S(3) * a * b * (A * b + a * B) * x ** (S(3) + m) / (S(3) + m)
+ b ** S(2) * (A * b + S(3) * a * B) * x ** (S(4) + m) / (S(4) + m)
+ b ** S(3) * B * x ** (S(5) + m) / (S(5) + m),
],
[
x ** m * (c + d * x) ** S(3) / (a + b * x),
x,
S(7),
d
* (
S(3) * b ** S(2) * c ** S(2)
- S(3) * a * b * c * d
+ a ** S(2) * d ** S(2)
)
* x ** (S(1) + m)
/ (b ** S(3) * (S(1) + m))
+ d ** S(2)
* (S(3) * b * c - a * d)
* x ** (S(2) + m)
/ (b ** S(2) * (S(2) + m))
+ d ** S(3) * x ** (S(3) + m) / (b * (S(3) + m))
+ (b * c - a * d) ** S(3)
* x ** (S(1) + m)
* hypergeom([S(1), S(1)], [S(1) - m], a / (a + b * x))
/ (b ** S(3) * m * (a + b * x)),
c ** S(2) * d * x ** (S(1) + m) / (b * (S(1) + m))
+ c * d * (b * c - a * d) * x ** (S(1) + m) / (b ** S(2) * (S(1) + m))
+ d * (b * c - a * d) ** S(2) * x ** (S(1) + m) / (b ** S(3) * (S(1) + m))
+ S(2) * c * d ** S(2) * x ** (S(2) + m) / (b * (S(2) + m))
+ d ** S(2) * (b * c - a * d) * x ** (S(2) + m) / (b ** S(2) * (S(2) + m))
+ d ** S(3) * x ** (S(3) + m) / (b * (S(3) + m))
+ (b * c - a * d) ** S(3)
* x ** (S(1) + m)
* hypergeom([S(1), S(1) + m], [S(2) + m], -b * x / a)
/ (a * b ** S(3) * (S(1) + m)),
],
[
x ** m * (c + d * x) ** S(2) / (a + b * x),
x,
S(5),
c * d * x ** (S(1) + m) / (b * (S(1) + m))
+ d * (b * c - a * d) * x ** (S(1) + m) / (b ** S(2) * (S(1) + m))
+ d ** S(2) * x ** (S(2) + m) / (b * (S(2) + m))
+ (b * c - a * d) ** S(2)
* x ** (S(1) + m)
* hypergeom([S(1), S(1) + m], [S(2) + m], -b * x / a)
/ (a * b ** S(2) * (S(1) + m)),
],
[
b ** S(2) * x ** m / (b + a * x ** S(2)) ** S(2),
x,
S(2),
x ** (S(1) + m)
* hypergeom(
[S(2), S(1) / S(2) * (S(1) + m)],
[S(1) / S(2) * (S(3) + m)],
-a * x ** S(2) / b,
)
/ (S(1) + m),
],
[
x ** m
/ (
(S(1) - x * sqrt(a) / sqrt(-b)) ** S(2)
* (S(1) + x * sqrt(a) / sqrt(-b)) ** S(2)
),
x,
S(2),
x ** (S(1) + m)
* hypergeom(
[S(2), S(1) / S(2) * (S(1) + m)],
[S(1) / S(2) * (S(3) + m)],
-a * x ** S(2) / b,
)
/ (S(1) + m),
],
[
x ** S(3) * (A + B * x) * sqrt(a + b * x),
x,
S(2),
-S(2)
/ S(3)
* a ** S(3)
* (A * b - a * B)
* (a + b * x) ** (S(3) / S(2))
/ b ** S(5)
+ S(2)
/ S(5)
* a ** S(2)
* (S(3) * A * b - S(4) * a * B)
* (a + b * x) ** (S(5) / S(2))
/ b ** S(5)
- S(6)
/ S(7)
* a
* (A * b - S(2) * a * B)
* (a + b * x) ** (S(7) / S(2))
/ b ** S(5)
+ S(2)
/ S(9)
* (A * b - S(4) * a * B)
* (a + b * x) ** (S(9) / S(2))
/ b ** S(5)
+ S(2) / S(11) * B * (a + b * x) ** (S(11) / S(2)) / b ** S(5),
],
[
x ** S(3) * (A + B * x) / sqrt(a + b * x),
x,
S(2),
S(2)
/ S(3)
* a ** S(2)
* (S(3) * A * b - S(4) * a * B)
* (a + b * x) ** (S(3) / S(2))
/ b ** S(5)
- S(6)
/ S(5)
* a
* (A * b - S(2) * a * B)
* (a + b * x) ** (S(5) / S(2))
/ b ** S(5)
+ S(2)
/ S(7)
* (A * b - S(4) * a * B)
* (a + b * x) ** (S(7) / S(2))
/ b ** S(5)
+ S(2) / S(9) * B * (a + b * x) ** (S(9) / S(2)) / b ** S(5)
- S(2) * a ** S(3) * (A * b - a * B) * sqrt(a + b * x) / b ** S(5),
],
[
x ** (S(5) / S(2)) * (A + B * x) * sqrt(a + b * x),
x,
S(7),
S(1) / S(5) * B * x ** (S(7) / S(2)) * (a + b * x) ** (S(3) / S(2)) / b
- S(1)
/ S(128)
* a ** S(4)
* (S(10) * A * b - S(7) * a * B)
* arctanh(sqrt(b) * sqrt(x) / sqrt(a + b * x))
/ b ** (S(9) / S(2))
- S(1)
/ S(192)
* a ** S(2)
* (S(10) * A * b - S(7) * a * B)
* x ** (S(3) / S(2))
* sqrt(a + b * x)
/ b ** S(3)
+ S(1)
/ S(240)
* a
* (S(10) * A * b - S(7) * a * B)
* x ** (S(5) / S(2))
* sqrt(a + b * x)
/ b ** S(2)
+ S(1)
/ S(40)
* (S(10) * A * b - S(7) * a * B)
* x ** (S(7) / S(2))
* sqrt(a + b * x)
/ b
+ S(1)
/ S(128)
* a ** S(3)
* (S(10) * A * b - S(7) * a * B)
* sqrt(x)
* sqrt(a + b * x)
/ b ** S(4),
],
[
x ** (S(3) / S(2)) * (A + B * x) * sqrt(a + b * x),
x,
S(6),
S(1) / S(4) * B * x ** (S(5) / S(2)) * (a + b * x) ** (S(3) / S(2)) / b
+ S(1)
/ S(64)
* a ** S(3)
* (S(8) * A * b - S(5) * a * B)
* arctanh(sqrt(b) * sqrt(x) / sqrt(a + b * x))
/ b ** (S(7) / S(2))
+ S(1)
/ S(96)
* a
* (S(8) * A * b - S(5) * a * B)
* x ** (S(3) / S(2))
* sqrt(a + b * x)
/ b ** S(2)
+ S(1)
/ S(24)
* (S(8) * A * b - S(5) * a * B)
* x ** (S(5) / S(2))
* sqrt(a + b * x)
/ b
- S(1)
/ S(64)
* a ** S(2)
* (S(8) * A * b - S(5) * a * B)
* sqrt(x)
* sqrt(a + b * x)
/ b ** S(3),
],
[
x ** (S(7) / S(2)) * (A + B * x) / sqrt(a + b * x),
x,
S(7),
S(7)
/ S(128)
* a ** S(4)
* (S(10) * A * b - S(9) * a * B)
* arctanh(sqrt(b) * sqrt(x) / sqrt(a + b * x))
/ b ** (S(11) / S(2))
+ S(7)
/ S(192)
* a ** S(2)
* (S(10) * A * b - S(9) * a * B)
* x ** (S(3) / S(2))
* sqrt(a + b * x)
/ b ** S(4)
- S(7)
/ S(240)
* a
* (S(10) * A * b - S(9) * a * B)
* x ** (S(5) / S(2))
* sqrt(a + b * x)
/ b ** S(3)
+ S(1)
/ S(40)
* (S(10) * A * b - S(9) * a * B)
* x ** (S(7) / S(2))
* sqrt(a + b * x)
/ b ** S(2)
+ S(1) / S(5) * B * x ** (S(9) / S(2)) * sqrt(a + b * x) / b
- S(7)
/ S(128)
* a ** S(3)
* (S(10) * A * b - S(9) * a * B)
* sqrt(x)
* sqrt(a + b * x)
/ b ** S(5),
],
[
x ** (S(5) / S(2)) * (A + B * x) / sqrt(a + b * x),
x,
S(6),
-S(5)
/ S(64)
* a ** S(3)
* (S(8) * A * b - S(7) * a * B)
* arctanh(sqrt(b) * sqrt(x) / sqrt(a + b * x))
/ b ** (S(9) / S(2))
- S(5)
/ S(96)
* a
* (S(8) * A * b - S(7) * a * B)
* x ** (S(3) / S(2))
* sqrt(a + b * x)
/ b ** S(3)
+ S(1)
/ S(24)
* (S(8) * A * b - S(7) * a * B)
* x ** (S(5) / S(2))
* sqrt(a + b * x)
/ b ** S(2)
+ S(1) / S(4) * B * x ** (S(7) / S(2)) * sqrt(a + b * x) / b
+ S(5)
/ S(64)
* a ** S(2)
* (S(8) * A * b - S(7) * a * B)
* sqrt(x)
* sqrt(a + b * x)
/ b ** S(4),
],
[
x ** S(3) * sqrt(a + b * x) * sqrt(c + d * x),
x,
S(6),
S(1)
/ S(5)
* x ** S(2)
* (a + b * x) ** (S(3) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b * d)
+ S(1)
/ S(240)
* (a + b * x) ** (S(3) / S(2))
* (c + d * x) ** (S(3) / S(2))
* (
S(35) * b ** S(2) * c ** S(2)
+ S(38) * a * b * c * d
+ S(35) * a ** S(2) * d ** S(2)
- S(42) * b * d * (b * c + a * d) * x
)
/ (b ** S(3) * d ** S(3))
+ S(1)
/ S(128)
* (b * c - a * d) ** S(2)
* (b * c + a * d)
* (
S(7) * b ** S(2) * c ** S(2)
+ S(2) * a * b * c * d
+ S(7) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(9) / S(2)) * d ** (S(9) / S(2)))
- S(1)
/ S(64)
* (b * c + a * d)
* (
S(7) * b ** S(2) * c ** S(2)
+ S(2) * a * b * c * d
+ S(7) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(4) * d ** S(3))
- S(1)
/ S(128)
* (
S(7) * b ** S(4) * c ** S(4)
+ S(2) * a * b ** S(3) * c ** S(3) * d
- S(2) * a ** S(3) * b * c * d ** S(3)
- S(7) * a ** S(4) * d ** S(4)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(4) * d ** S(4)),
],
[
x ** S(2) * sqrt(a + b * x) * sqrt(c + d * x),
x,
S(6),
-S(5)
/ S(24)
* (b * c + a * d)
* (a + b * x) ** (S(3) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b ** S(2) * d ** S(2))
+ S(1)
/ S(4)
* x
* (a + b * x) ** (S(3) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b * d)
+ S(1)
/ S(64)
* (b * c - a * d) ** S(2)
* (S(4) * a * b * c * d - S(5) * (b * c + a * d) ** S(2))
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(7) / S(2)))
- S(1)
/ S(32)
* (S(4) * a * b * c * d - S(5) * (b * c + a * d) ** S(2))
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(2))
- S(1)
/ S(64)
* (b * c - a * d)
* (S(4) * a * b * c * d - S(5) * (b * c + a * d) ** S(2))
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(3)),
],
[
x ** S(3) * sqrt(a + b * x) / sqrt(c + d * x),
x,
S(5),
S(1)
/ S(64)
* (b * c - a * d)
* (
S(35) * b ** S(3) * c ** S(3)
+ S(15) * a * b ** S(2) * c ** S(2) * d
+ S(9) * a ** S(2) * b * c * d ** S(2)
+ S(5) * a ** S(3) * d ** S(3)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(9) / S(2)))
+ S(1)
/ S(4)
* x ** S(2)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b * d)
+ S(1)
/ S(96)
* (a + b * x) ** (S(3) / S(2))
* (
S(35) * b ** S(2) * c ** S(2)
+ S(22) * a * b * c * d
+ S(15) * a ** S(2) * d ** S(2)
- S(4) * b * d * (S(7) * b * c + S(5) * a * d) * x
)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(3))
- S(1)
/ S(64)
* (
S(35) * b ** S(3) * c ** S(3)
+ S(15) * a * b ** S(2) * c ** S(2) * d
+ S(9) * a ** S(2) * b * c * d ** S(2)
+ S(5) * a ** S(3) * d ** S(3)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(4)),
],
[
x ** S(2) * sqrt(a + b * x) / sqrt(c + d * x),
x,
S(5),
-S(1)
/ S(8)
* (b * c - a * d)
* (
S(5) * b ** S(2) * c ** S(2)
+ S(2) * a * b * c * d
+ a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(7) / S(2)))
- S(1)
/ S(12)
* (S(5) * b * c + S(3) * a * d)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(2))
+ S(1) / S(3) * x * (a + b * x) ** (S(3) / S(2)) * sqrt(c + d * x) / (b * d)
+ S(1)
/ S(8)
* (
S(5) * b ** S(2) * c ** S(2)
+ S(2) * a * b * c * d
+ a ** S(2) * d ** S(2)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(3)),
],
[
x ** S(2) * (a + b * x) ** (S(3) / S(2)) * sqrt(c + d * x),
x,
S(7),
-S(1)
/ S(40)
* (S(7) * b * c + S(5) * a * d)
* (a + b * x) ** (S(5) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b ** S(2) * d ** S(2))
+ S(1)
/ S(5)
* x
* (a + b * x) ** (S(5) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b * d)
+ S(1)
/ S(128)
* (b * c - a * d) ** S(3)
* (
S(7) * b ** S(2) * c ** S(2)
+ S(6) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(9) / S(2)))
+ S(1)
/ S(192)
* (b * c - a * d)
* (
S(7) * b ** S(2) * c ** S(2)
+ S(6) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(3))
+ S(1)
/ S(48)
* (
S(7) * b ** S(2) * c ** S(2)
+ S(6) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(5) / S(2))
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(2))
- S(1)
/ S(128)
* (b * c - a * d) ** S(2)
* (
S(7) * b ** S(2) * c ** S(2)
+ S(6) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(4)),
],
[
x * (a + b * x) ** (S(3) / S(2)) * sqrt(c + d * x),
x,
S(6),
S(1)
/ S(4)
* (a + b * x) ** (S(5) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b * d)
- S(1)
/ S(64)
* (b * c - a * d) ** S(3)
* (S(5) * b * c + S(3) * a * d)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(7) / S(2)))
- S(1)
/ S(96)
* (b * c - a * d)
* (S(5) * b * c + S(3) * a * d)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(2))
- S(1)
/ S(24)
* (S(5) * b * c + S(3) * a * d)
* (a + b * x) ** (S(5) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d)
+ S(1)
/ S(64)
* (b * c - a * d) ** S(2)
* (S(5) * b * c + S(3) * a * d)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(3)),
],
[
x ** S(2) * (a + b * x) ** (S(3) / S(2)) / sqrt(c + d * x),
x,
S(6),
S(1)
/ S(64)
* (b * c - a * d) ** S(2)
* (
S(35) * b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(9) / S(2)))
+ S(1)
/ S(96)
* (
S(35) * b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(3))
- S(1)
/ S(24)
* (S(7) * b * c + S(3) * a * d)
* (a + b * x) ** (S(5) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(2))
+ S(1) / S(4) * x * (a + b * x) ** (S(5) / S(2)) * sqrt(c + d * x) / (b * d)
- S(1)
/ S(64)
* (b * c - a * d)
* (
S(35) * b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(4)),
],
[
x * (a + b * x) ** (S(3) / S(2)) / sqrt(c + d * x),
x,
S(5),
-S(1)
/ S(8)
* (b * c - a * d) ** S(2)
* (S(5) * b * c + a * d)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(3) / S(2)) * d ** (S(7) / S(2)))
- S(1)
/ S(12)
* (S(5) * b * c + a * d)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b * d ** S(2))
+ S(1) / S(3) * (a + b * x) ** (S(5) / S(2)) * sqrt(c + d * x) / (b * d)
+ S(1)
/ S(8)
* (b * c - a * d)
* (S(5) * b * c + a * d)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b * d ** S(3)),
],
[
x ** S(2) * (a + b * x) ** (S(5) / S(2)) * sqrt(c + d * x),
x,
S(8),
-S(1)
/ S(60)
* (S(9) * b * c + S(5) * a * d)
* (a + b * x) ** (S(7) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b ** S(2) * d ** S(2))
+ S(1)
/ S(6)
* x
* (a + b * x) ** (S(7) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b * d)
- S(1)
/ S(512)
* (b * c - a * d) ** S(4)
* (
S(21) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(11) / S(2)))
- S(1)
/ S(768)
* (b * c - a * d) ** S(2)
* (
S(21) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(4))
+ S(1)
/ S(960)
* (b * c - a * d)
* (
S(21) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(5) / S(2))
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(3))
+ S(1)
/ S(160)
* (
S(21) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(7) / S(2))
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(2))
+ S(1)
/ S(512)
* (b * c - a * d) ** S(3)
* (
S(21) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(5)),
],
[
x * (a + b * x) ** (S(5) / S(2)) * sqrt(c + d * x),
x,
S(7),
S(1)
/ S(5)
* (a + b * x) ** (S(7) / S(2))
* (c + d * x) ** (S(3) / S(2))
/ (b * d)
+ S(1)
/ S(128)
* (b * c - a * d) ** S(4)
* (S(7) * b * c + S(3) * a * d)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(9) / S(2)))
+ S(1)
/ S(192)
* (b * c - a * d) ** S(2)
* (S(7) * b * c + S(3) * a * d)
* (a + b * x) ** (S(3) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(3))
- S(1)
/ S(240)
* (b * c - a * d)
* (S(7) * b * c + S(3) * a * d)
* (a + b * x) ** (S(5) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(2))
- S(1)
/ S(40)
* (S(7) * b * c + S(3) * a * d)
* (a + b * x) ** (S(7) / S(2))
* sqrt(c + d * x)
/ (b ** S(2) * d)
- S(1)
/ S(128)
* (b * c - a * d) ** S(3)
* (S(7) * b * c + S(3) * a * d)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(4)),
],
[
x ** S(2) * sqrt(c + d * x) / sqrt(a + b * x),
x,
S(5),
S(1)
/ S(8)
* (b * c - a * d)
* (
b ** S(2) * c ** S(2)
+ S(2) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(5) / S(2)))
- S(1)
/ S(12)
* (S(3) * b * c + S(5) * a * d)
* (c + d * x) ** (S(3) / S(2))
* sqrt(a + b * x)
/ (b ** S(2) * d ** S(2))
+ S(1) / S(3) * x * (c + d * x) ** (S(3) / S(2)) * sqrt(a + b * x) / (b * d)
+ S(1)
/ S(8)
* (
b ** S(2) * c ** S(2)
+ S(2) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(2)),
],
[
x * sqrt(c + d * x) / sqrt(a + b * x),
x,
S(4),
-S(1)
/ S(4)
* (b * c - a * d)
* (b * c + S(3) * a * d)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(3) / S(2)))
+ S(1) / S(2) * (c + d * x) ** (S(3) / S(2)) * sqrt(a + b * x) / (b * d)
- S(1)
/ S(4)
* (b * c + S(3) * a * d)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(2) * d),
],
[
x ** S(3) / (sqrt(a + b * x) * sqrt(c + d * x)),
x,
S(4),
-S(1)
/ S(8)
* (b * c + a * d)
* (
S(5) * b ** S(2) * c ** S(2)
- S(2) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(7) / S(2)))
+ S(1) / S(3) * x ** S(2) * sqrt(a + b * x) * sqrt(c + d * x) / (b * d)
+ S(1)
/ S(24)
* (
S(15) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
+ S(15) * a ** S(2) * d ** S(2)
- S(10) * b * d * (b * c + a * d) * x
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(3)),
],
[
x ** S(2) / (sqrt(a + b * x) * sqrt(c + d * x)),
x,
S(4),
-S(1)
/ S(4)
* (S(4) * a * b * c * d - S(3) * (b * c + a * d) ** S(2))
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(5) / S(2)))
- S(3)
/ S(4)
* (b * c + a * d)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(2) * d ** S(2))
+ S(1) / S(2) * x * sqrt(a + b * x) * sqrt(c + d * x) / (b * d),
],
[
x ** S(4) / ((a + b * x) ** (S(3) / S(2)) * (c + d * x) ** (S(3) / S(2))),
x,
S(5),
S(3)
/ S(4)
* (
S(5) * b ** S(2) * c ** S(2)
+ S(6) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(7) / S(2)) * d ** (S(7) / S(2)))
+ S(2)
* a
* x ** S(3)
/ (b * (b * c - a * d) * sqrt(a + b * x) * sqrt(c + d * x))
- S(2)
* c
* (b * c + a * d)
* x ** S(2)
* sqrt(a + b * x)
/ (b * d * (b * c - a * d) ** S(2) * sqrt(c + d * x))
- S(1)
/ S(4)
* (
(b * c + a * d)
* (
S(15) * b ** S(2) * c ** S(2)
- S(22) * a * b * c * d
+ S(15) * a ** S(2) * d ** S(2)
)
- S(2)
* b
* d
* (
S(5) * b ** S(2) * c ** S(2)
- S(2) * a * b * c * d
+ S(5) * a ** S(2) * d ** S(2)
)
* x
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(3) * d ** S(3) * (b * c - a * d) ** S(2)),
],
[
x ** S(3) / ((a + b * x) ** (S(3) / S(2)) * (c + d * x) ** (S(3) / S(2))),
x,
S(4),
-S(3)
* (b * c + a * d)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * d ** (S(5) / S(2)))
+ S(2)
* a
* x ** S(2)
/ (b * (b * c - a * d) * sqrt(a + b * x) * sqrt(c + d * x))
+ (
c
* (
S(3) * b ** S(2) * c ** S(2)
- S(2) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
+ d * (b * c - S(3) * a * d) * (b * c - a * d) * x
)
* sqrt(a + b * x)
/ (b ** S(2) * d ** S(2) * (b * c - a * d) ** S(2) * sqrt(c + d * x)),
],
[
x ** S(3) * (a + b * x) ** (S(1) / S(4)) / (c + d * x) ** (S(1) / S(4)),
x,
S(7),
-S(1)
/ S(512)
* (
S(195) * b ** S(3) * c ** S(3)
+ S(135) * a * b ** S(2) * c ** S(2) * d
+ S(105) * a ** S(2) * b * c * d ** S(2)
+ S(77) * a ** S(3) * d ** S(3)
)
* (a + b * x) ** (S(1) / S(4))
* (c + d * x) ** (S(3) / S(4))
/ (b ** S(3) * d ** S(4))
+ S(1)
/ S(4)
* x ** S(2)
* (a + b * x) ** (S(5) / S(4))
* (c + d * x) ** (S(3) / S(4))
/ (b * d)
+ S(1)
/ S(384)
* (a + b * x) ** (S(5) / S(4))
* (c + d * x) ** (S(3) / S(4))
* (
S(117) * b ** S(2) * c ** S(2)
+ S(94) * a * b * c * d
+ S(77) * a ** S(2) * d ** S(2)
- S(8) * b * d * (S(13) * b * c + S(11) * a * d) * x
)
/ (b ** S(3) * d ** S(3))
+ S(1)
/ S(1024)
* (b * c - a * d)
* (
S(195) * b ** S(3) * c ** S(3)
+ S(135) * a * b ** S(2) * c ** S(2) * d
+ S(105) * a ** S(2) * b * c * d ** S(2)
+ S(77) * a ** S(3) * d ** S(3)
)
* arctan(
d ** (S(1) / S(4))
* (a + b * x) ** (S(1) / S(4))
/ (b ** (S(1) / S(4)) * (c + d * x) ** (S(1) / S(4)))
)
/ (b ** (S(15) / S(4)) * d ** (S(17) / S(4)))
+ S(1)
/ S(1024)
* (b * c - a * d)
* (
S(195) * b ** S(3) * c ** S(3)
+ S(135) * a * b ** S(2) * c ** S(2) * d
+ S(105) * a ** S(2) * b * c * d ** S(2)
+ S(77) * a ** S(3) * d ** S(3)
)
* arctanh(
d ** (S(1) / S(4))
* (a + b * x) ** (S(1) / S(4))
/ (b ** (S(1) / S(4)) * (c + d * x) ** (S(1) / S(4)))
)
/ (b ** (S(15) / S(4)) * d ** (S(17) / S(4))),
],
[
x ** S(2) * (a + b * x) ** (S(1) / S(4)) / (c + d * x) ** (S(1) / S(4)),
x,
S(7),
S(1)
/ S(32)
* (
S(15) * b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
+ S(7) * a ** S(2) * d ** S(2)
)
* (a + b * x) ** (S(1) / S(4))
* (c + d * x) ** (S(3) / S(4))
/ (b ** S(2) * d ** S(3))
- S(1)
/ S(24)
* (S(9) * b * c + S(7) * a * d)
* (a + b * x) ** (S(5) / S(4))
* (c + d * x) ** (S(3) / S(4))
/ (b ** S(2) * d ** S(2))
+ S(1)
/ S(3)
* x
* (a + b * x) ** (S(5) / S(4))
* (c + d * x) ** (S(3) / S(4))
/ (b * d)
- S(1)
/ S(64)
* (b * c - a * d)
* (
S(15) * b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
+ S(7) * a ** S(2) * d ** S(2)
)
* arctan(
d ** (S(1) / S(4))
* (a + b * x) ** (S(1) / S(4))
/ (b ** (S(1) / S(4)) * (c + d * x) ** (S(1) / S(4)))
)
/ (b ** (S(11) / S(4)) * d ** (S(13) / S(4)))
- S(1)
/ S(64)
* (b * c - a * d)
* (
S(15) * b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
+ S(7) * a ** S(2) * d ** S(2)
)
* arctanh(
d ** (S(1) / S(4))
* (a + b * x) ** (S(1) / S(4))
/ (b ** (S(1) / S(4)) * (c + d * x) ** (S(1) / S(4)))
)
/ (b ** (S(11) / S(4)) * d ** (S(13) / S(4))),
],
[
x * (a + b * x) ** n * (c + d * x),
x,
S(2),
-a * (b * c - a * d) * (a + b * x) ** (S(1) + n) / (b ** S(3) * (S(1) + n))
+ (b * c - S(2) * a * d)
* (a + b * x) ** (S(2) + n)
/ (b ** S(3) * (S(2) + n))
+ d * (a + b * x) ** (S(3) + n) / (b ** S(3) * (S(3) + n)),
],
[
x ** S(2) * (a + b * x) ** n / (c + d * x),
x,
S(3),
-(b * c + a * d)
* (a + b * x) ** (S(1) + n)
/ (b ** S(2) * d ** S(2) * (S(1) + n))
+ (a + b * x) ** (S(2) + n) / (b ** S(2) * d * (S(2) + n))
+ c ** S(2)
* (a + b * x) ** (S(1) + n)
* hypergeom(
[S(1), S(1) + n], [S(2) + n], -d * (a + b * x) / (b * c - a * d)
)
/ (d ** S(2) * (b * c - a * d) * (S(1) + n)),
],
[
x * (a + b * x) ** n / (c + d * x),
x,
S(2),
(a + b * x) ** (S(1) + n) / (b * d * (S(1) + n))
- c
* (a + b * x) ** (S(1) + n)
* hypergeom(
[S(1), S(1) + n], [S(2) + n], -d * (a + b * x) / (b * c - a * d)
)
/ (d * (b * c - a * d) * (S(1) + n)),
],
[
x ** m * (S(3) - S(2) * a * x) ** (S(2) + n) * (S(6) + S(4) * a * x) ** n,
x,
S(8),
S(2) ** n
* S(9) ** (S(1) + n)
* x ** (S(1) + m)
* hypergeom(
[S(1) / S(2) * (S(1) + m), -n],
[S(1) / S(2) * (S(3) + m)],
S(4) / S(9) * a ** S(2) * x ** S(2),
)
/ (S(1) + m)
- S(2) ** (S(2) + n)
* S(3) ** (S(1) + S(2) * n)
* a
* x ** (S(2) + m)
* hypergeom(
[S(1) / S(2) * (S(2) + m), -n],
[S(1) / S(2) * (S(4) + m)],
S(4) / S(9) * a ** S(2) * x ** S(2),
)
/ (S(2) + m)
+ S(2) ** (S(2) + n)
* S(9) ** n
* a ** S(2)
* x ** (S(3) + m)
* hypergeom(
[S(1) / S(2) * (S(3) + m), -n],
[S(1) / S(2) * (S(5) + m)],
S(4) / S(9) * a ** S(2) * x ** S(2),
)
/ (S(3) + m),
],
[
x ** m * (S(3) - S(2) * a * x) ** (S(1) + n) * (S(6) + S(4) * a * x) ** n,
x,
S(5),
S(2) ** n
* S(3) ** (S(1) + S(2) * n)
* x ** (S(1) + m)
* hypergeom(
[S(1) / S(2) * (S(1) + m), -n],
[S(1) / S(2) * (S(3) + m)],
S(4) / S(9) * a ** S(2) * x ** S(2),
)
/ (S(1) + m)
- S(2) ** (S(1) + n)
* S(9) ** n
* a
* x ** (S(2) + m)
* hypergeom(
[S(1) / S(2) * (S(2) + m), -n],
[S(1) / S(2) * (S(4) + m)],
S(4) / S(9) * a ** S(2) * x ** S(2),
)
/ (S(2) + m),
],
[
(a + b * x) * (A + B * x) * (d + e * x) ** m,
x,
S(2),
(b * d - a * e)
* (B * d - A * e)
* (d + e * x) ** (S(1) + m)
/ (e ** S(3) * (S(1) + m))
- (S(2) * b * B * d - A * b * e - a * B * e)
* (d + e * x) ** (S(2) + m)
/ (e ** S(3) * (S(2) + m))
+ b * B * (d + e * x) ** (S(3) + m) / (e ** S(3) * (S(3) + m)),
],
[
(A + B * x) * (d + e * x) ** S(5) / (a + b * x),
x,
S(2),
(A * b - a * B) * e * (b * d - a * e) ** S(4) * x / b ** S(6)
+ S(1)
/ S(2)
* (A * b - a * B)
* (b * d - a * e) ** S(3)
* (d + e * x) ** S(2)
/ b ** S(5)
+ S(1)
/ S(3)
* (A * b - a * B)
* (b * d - a * e) ** S(2)
* (d + e * x) ** S(3)
/ b ** S(4)
+ S(1)
/ S(4)
* (A * b - a * B)
* (b * d - a * e)
* (d + e * x) ** S(4)
/ b ** S(3)
+ S(1) / S(5) * (A * b - a * B) * (d + e * x) ** S(5) / b ** S(2)
+ S(1) / S(6) * B * (d + e * x) ** S(6) / (b * e)
+ (A * b - a * B) * (b * d - a * e) ** S(5) * log(a + b * x) / b ** S(7),
],
[
(S(1) - S(2) * x) * (S(2) + S(3) * x) ** m * (S(3) + S(5) * x),
x,
S(2),
-S(7) / S(27) * (S(2) + S(3) * x) ** (S(1) + m) / (S(1) + m)
+ S(37) / S(27) * (S(2) + S(3) * x) ** (S(2) + m) / (S(2) + m)
- S(10) / S(27) * (S(2) + S(3) * x) ** (S(3) + m) / (S(3) + m),
],
[
(S(1) - S(2) * x) * (S(2) + S(3) * x) ** S(8) * (S(3) + S(5) * x),
x,
S(2),
-S(7) / S(243) * (S(2) + S(3) * x) ** S(9)
+ S(37) / S(270) * (S(2) + S(3) * x) ** S(10)
- S(10) / S(297) * (S(2) + S(3) * x) ** S(11),
],
[
(S(1) - S(2) * x) * (S(2) + S(3) * x) ** m / (S(3) + S(5) * x),
x,
S(2),
-S(2) / S(15) * (S(2) + S(3) * x) ** (S(1) + m) / (S(1) + m)
- S(11)
/ S(5)
* (S(2) + S(3) * x) ** (S(1) + m)
* hypergeom([S(1), S(1) + m], [S(2) + m], S(5) * (S(2) + S(3) * x))
/ (S(1) + m),
],
[
(S(1) - S(2) * x) * (S(2) + S(3) * x) ** S(6) / (S(3) + S(5) * x),
x,
S(2),
S(1666663) / S(78125) * x
+ S(1777779) / S(31250) * x ** S(2)
+ S(152469) / S(3125) * x ** S(3)
- S(152469) / S(2500) * x ** S(4)
- S(106677) / S(625) * x ** S(5)
- S(7047) / S(50) * x ** S(6)
- S(1458) / S(35) * x ** S(7)
+ S(11) / S(390625) * log(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** S(2) * (S(2) + S(3) * x) ** S(8) * (S(3) + S(5) * x),
x,
S(2),
-S(49) / S(729) * (S(2) + S(3) * x) ** S(9)
+ S(91) / S(270) * (S(2) + S(3) * x) ** S(10)
- S(16) / S(99) * (S(2) + S(3) * x) ** S(11)
+ S(5) / S(243) * (S(2) + S(3) * x) ** S(12),
],
[
(S(1) - S(2) * x) ** S(2) * (S(2) + S(3) * x) ** S(7) * (S(3) + S(5) * x),
x,
S(2),
-S(49) / S(648) * (S(2) + S(3) * x) ** S(8)
+ S(91) / S(243) * (S(2) + S(3) * x) ** S(9)
- S(8) / S(45) * (S(2) + S(3) * x) ** S(10)
+ S(20) / S(891) * (S(2) + S(3) * x) ** S(11),
],
[
(S(1) - S(2) * x) ** S(2) * (S(2) + S(3) * x) ** S(7) / (S(3) + S(5) * x),
x,
S(2),
S(83333293) / S(1953125) * x
+ S(80555569) / S(781250) * x ** S(2)
+ S(1327159) / S(78125) * x ** S(3)
- S(20577159) / S(62500) * x ** S(4)
- S(7315947) / S(15625) * x ** S(5)
+ S(130383) / S(1250) * x ** S(6)
+ S(672867) / S(875) * x ** S(7)
+ S(16767) / S(25) * x ** S(8)
+ S(972) / S(5) * x ** S(9)
+ S(121) / S(9765625) * log(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** S(2) * (S(2) + S(3) * x) ** S(6) / (S(3) + S(5) * x),
x,
S(2),
S(8333293) / S(390625) * x
+ S(5555569) / S(156250) * x ** S(2)
- S(422841) / S(15625) * x ** S(3)
- S(1677159) / S(12500) * x ** S(4)
- S(228447) / S(3125) * x ** S(5)
+ S(35883) / S(250) * x ** S(6)
+ S(34992) / S(175) * x ** S(7)
+ S(729) / S(10) * x ** S(8)
+ S(121) / S(1953125) * log(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** S(3) * (S(2) + S(3) * x) ** S(8) * (S(3) + S(5) * x),
x,
S(2),
-S(343) / S(2187) * (S(2) + S(3) * x) ** S(9)
+ S(2009) / S(2430) * (S(2) + S(3) * x) ** S(10)
- S(518) / S(891) * (S(2) + S(3) * x) ** S(11)
+ S(107) / S(729) * (S(2) + S(3) * x) ** S(12)
- S(40) / S(3159) * (S(2) + S(3) * x) ** S(13),
],
[
(S(1) - S(2) * x) ** S(3) * (S(2) + S(3) * x) ** S(7) * (S(3) + S(5) * x),
x,
S(2),
S(384) * x
+ S(1184) * x ** S(2)
+ S(480) * x ** S(3)
- S(5148) * x ** S(4)
- S(48968) / S(5) * x ** S(5)
+ S(3514) * x ** S(6)
+ S(29106) * x ** S(7)
+ S(208035) / S(8) * x ** S(8)
- S(15507) * x ** S(9)
- S(217971) / S(5) * x ** S(10)
- S(329508) / S(11) * x ** S(11)
- S(7290) * x ** S(12),
],
[
(S(1) - S(2) * x) ** S(3) * (S(2) + S(3) * x) ** S(6) / (S(3) + S(5) * x),
x,
S(2),
S(41666223) / S(1953125) * x
+ S(11111259) / S(781250) * x ** S(2)
- S(17453753) / S(234375) * x ** S(3)
- S(5848749) / S(62500) * x ** S(4)
+ S(2212083) / S(15625) * x ** S(5)
+ S(331713) / S(1250) * x ** S(6)
- S(40338) / S(875) * x ** S(7)
- S(13851) / S(50) * x ** S(8)
- S(648) / S(5) * x ** S(9)
+ S(1331) / S(9765625) * log(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** S(3) * (S(2) + S(3) * x) ** S(5) / (S(3) + S(5) * x),
x,
S(2),
S(4166223) / S(390625) * x
- S(138741) / S(156250) * x ** S(2)
- S(1703753) / S(46875) * x ** S(3)
- S(73749) / S(12500) * x ** S(4)
+ S(243333) / S(3125) * x ** S(5)
+ S(4419) / S(125) * x ** S(6)
- S(11988) / S(175) * x ** S(7)
- S(243) / S(5) * x ** S(8)
+ S(1331) / S(1953125) * log(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** m * (S(3) + S(5) * x) / (S(1) - S(2) * x),
x,
S(2),
-S(5) / S(6) * (S(2) + S(3) * x) ** (S(1) + m) / (S(1) + m)
+ S(11)
/ S(14)
* (S(2) + S(3) * x) ** (S(1) + m)
* hypergeom([S(1), S(1) + m], [S(2) + m], S(2) / S(7) * (S(2) + S(3) * x))
/ (S(1) + m),
],
[
(S(2) + S(3) * x) ** S(8) * (S(3) + S(5) * x) / (S(1) - S(2) * x),
x,
S(2),
-S(63019595) / S(512) * x
- S(60332619) / S(512) * x ** S(2)
- S(17391129) / S(128) * x ** S(3)
- S(37722699) / S(256) * x ** S(4)
- S(21272139) / S(160) * x ** S(5)
- S(2929689) / S(32) * x ** S(6)
- S(353565) / S(8) * x ** S(7)
- S(422091) / S(32) * x ** S(8)
- S(3645) / S(2) * x ** S(9)
- S(63412811) / S(1024) * log(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** m / ((S(1) - S(2) * x) * (S(3) + S(5) * x)),
x,
S(3),
S(2)
/ S(77)
* (S(2) + S(3) * x) ** (S(1) + m)
* hypergeom([S(1), S(1) + m], [S(2) + m], S(2) / S(7) * (S(2) + S(3) * x))
/ (S(1) + m)
- S(5)
/ S(11)
* (S(2) + S(3) * x) ** (S(1) + m)
* hypergeom([S(1), S(1) + m], [S(2) + m], S(5) * (S(2) + S(3) * x))
/ (S(1) + m),
],
[
(S(2) + S(3) * x) ** S(8) * (S(3) + S(5) * x) / (S(1) - S(2) * x) ** S(2),
x,
S(2),
S(63412811) / S(1024) / (S(1) - S(2) * x)
+ S(91609881) / S(256) * x
+ S(122887143) / S(512) * x ** S(2)
+ S(5892813) / S(32) * x ** S(3)
+ S(32991057) / S(256) * x ** S(4)
+ S(5859459) / S(80) * x ** S(5)
+ S(976617) / S(32) * x ** S(6)
+ S(56862) / S(7) * x ** S(7)
+ S(32805) / S(32) * x ** S(8)
+ S(246239357) / S(1024) * log(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(7) * (S(3) + S(5) * x) / (S(1) - S(2) * x) ** S(2),
x,
S(2),
S(9058973) / S(512) / (S(1) - S(2) * x)
+ S(22333965) / S(256) * x
+ S(873207) / S(16) * x ** S(2)
+ S(2399985) / S(64) * x ** S(3)
+ S(1423899) / S(64) * x ** S(4)
+ S(793881) / S(80) * x ** S(5)
+ S(11421) / S(4) * x ** S(6)
+ S(10935) / S(28) * x ** S(7)
+ S(15647317) / S(256) * log(S(1) - S(2) * x),
],
[
(a + b * x) ** m / (e + f * x) ** S(2),
x,
S(1),
b
* (a + b * x) ** (S(1) + m)
* hypergeom(
[S(2), S(1) + m], [S(2) + m], -f * (a + b * x) / (b * e - a * f)
)
/ ((b * e - a * f) ** S(2) * (S(1) + m)),
],
[
(a + b * x) ** m / ((c + d * x) * (e + f * x) ** S(2)),
x,
S(4),
-f
* (a + b * x) ** (S(1) + m)
/ ((b * e - a * f) * (d * e - c * f) * (e + f * x))
+ d ** S(2)
* (a + b * x) ** (S(1) + m)
* hypergeom(
[S(1), S(1) + m], [S(2) + m], -d * (a + b * x) / (b * c - a * d)
)
/ ((b * c - a * d) * (d * e - c * f) ** S(2) * (S(1) + m))
+ f
* (a * d * f - b * (d * e * (S(1) - m) + c * f * m))
* (a + b * x) ** (S(1) + m)
* hypergeom(
[S(1), S(1) + m], [S(2) + m], -f * (a + b * x) / (b * e - a * f)
)
/ ((b * e - a * f) ** S(2) * (d * e - c * f) ** S(2) * (S(1) + m)),
],
[
(S(2) + S(3) * x) ** S(7) * (S(3) + S(5) * x) / (S(1) - S(2) * x) ** S(3),
x,
S(2),
S(9058973) / S(1024) / (S(1) - S(2) * x) ** S(2)
+ (-S(15647317) / S(256)) / (S(1) - S(2) * x)
- S(24960933) / S(256) * x
- S(10989621) / S(256) * x ** S(2)
- S(631611) / S(32) * x ** S(3)
- S(235467) / S(32) * x ** S(4)
- S(147987) / S(80) * x ** S(5)
- S(3645) / S(16) * x ** S(6)
- S(23647449) / S(256) * log(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(8) / ((S(1) - S(2) * x) ** S(3) * (S(3) + S(5) * x)),
x,
S(2),
S(5764801) / S(5632) / (S(1) - S(2) * x) ** S(2)
+ (-S(188591347) / S(30976)) / (S(1) - S(2) * x)
- S(2941619571) / S(400000) * x
- S(110180817) / S(40000) * x ** S(2)
- S(124416) / S(125) * x ** S(3)
- S(408969) / S(1600) * x ** S(4)
- S(6561) / S(200) * x ** S(5)
- S(2644396573) / S(340736) * log(S(1) - S(2) * x)
+ S(1) / S(20796875) * log(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(7) / ((S(1) - S(2) * x) ** S(3) * (S(3) + S(5) * x)),
x,
S(2),
S(823543) / S(2816) / (S(1) - S(2) * x) ** S(2)
+ (-S(5764801) / S(3872)) / (S(1) - S(2) * x)
- S(26161299) / S(20000) * x
- S(792423) / S(2000) * x ** S(2)
- S(40581) / S(400) * x ** S(3)
- S(2187) / S(160) * x ** S(4)
- S(269063263) / S(170368) * log(S(1) - S(2) * x)
+ S(1) / S(4159375) * log(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(6) * (S(3) + S(5) * x) * sqrt(S(1) - S(2) * x),
x,
S(2),
-S(1294139) / S(384) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(3916031) / S(640) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(725445) / S(128) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(406455) / S(128) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(1580985) / S(1408) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(409941) / S(1664) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(19683) / S(640) * (S(1) - S(2) * x) ** (S(15) / S(2))
+ S(3645) / S(2176) * (S(1) - S(2) * x) ** (S(17) / S(2)),
],
[
(S(2) + S(3) * x) ** S(5) * (S(3) + S(5) * x) * sqrt(S(1) - S(2) * x),
x,
S(2),
-S(184877) / S(192) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(12005) / S(8) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(74235) / S(64) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(4165) / S(8) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(97335) / S(704) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(81) / S(4) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(81) / S(64) * (S(1) - S(2) * x) ** (S(15) / S(2)),
],
[
(S(2) + S(3) * x) ** S(4) * sqrt(S(1) - S(2) * x) / (S(3) + S(5) * x),
x,
S(5),
-S(45473) / S(5000) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(34371) / S(5000) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(2889) / S(1400) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(9) / S(40) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(2)
/ S(3125)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
* sqrt(S(11) / S(5))
+ S(2) / S(3125) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(3) * sqrt(S(1) - S(2) * x) / (S(3) + S(5) * x),
x,
S(5),
-S(1299) / S(500) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(162) / S(125) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(27) / S(140) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(2)
/ S(625)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
* sqrt(S(11) / S(5))
+ S(2) / S(625) * sqrt(S(1) - S(2) * x),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(6)
* (S(3) + S(5) * x),
x,
S(2),
-S(1294139) / S(640) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(559433) / S(128) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(564235) / S(128) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ S(3658095) / S(1408) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(1580985) / S(1664) * (S(1) - S(2) * x) ** (S(13) / S(2))
+ S(136647) / S(640) * (S(1) - S(2) * x) ** (S(15) / S(2))
- S(59049) / S(2176) * (S(1) - S(2) * x) ** (S(17) / S(2))
+ S(3645) / S(2432) * (S(1) - S(2) * x) ** (S(19) / S(2)),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(5)
* (S(3) + S(5) * x),
x,
S(2),
-S(184877) / S(320) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(8575) / S(8) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(173215) / S(192) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ S(37485) / S(88) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(97335) / S(832) * (S(1) - S(2) * x) ** (S(13) / S(2))
+ S(351) / S(20) * (S(1) - S(2) * x) ** (S(15) / S(2))
- S(1215) / S(1088) * (S(1) - S(2) * x) ** (S(17) / S(2)),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(6)
/ (S(3) + S(5) * x),
x,
S(6),
S(2) / S(234375) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(167115051) / S(2500000) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(70752609) / S(700000) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(665817) / S(10000) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ S(507627) / S(22000) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(43011) / S(10400) * (S(1) - S(2) * x) ** (S(13) / S(2))
+ S(243) / S(800) * (S(1) - S(2) * x) ** (S(15) / S(2))
- S(22)
/ S(390625)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
* sqrt(S(11) / S(5))
+ S(22) / S(390625) * sqrt(S(1) - S(2) * x),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(5)
/ (S(3) + S(5) * x),
x,
S(6),
S(2) / S(46875) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(4774713) / S(250000) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(806121) / S(35000) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(5673) / S(500) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ S(5751) / S(2200) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(243) / S(1040) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(22)
/ S(78125)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
* sqrt(S(11) / S(5))
+ S(22) / S(78125) * sqrt(S(1) - S(2) * x),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(6)
* (S(3) + S(5) * x),
x,
S(2),
-S(184877) / S(128) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(3916031) / S(1152) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(5078115) / S(1408) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(3658095) / S(1664) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(105399) / S(128) * (S(1) - S(2) * x) ** (S(15) / S(2))
+ S(409941) / S(2176) * (S(1) - S(2) * x) ** (S(17) / S(2))
- S(59049) / S(2432) * (S(1) - S(2) * x) ** (S(19) / S(2))
+ S(1215) / S(896) * (S(1) - S(2) * x) ** (S(21) / S(2)),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(5)
* (S(3) + S(5) * x),
x,
S(2),
-S(26411) / S(64) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(60025) / S(72) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(519645) / S(704) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(37485) / S(104) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(6489) / S(64) * (S(1) - S(2) * x) ** (S(15) / S(2))
+ S(1053) / S(68) * (S(1) - S(2) * x) ** (S(17) / S(2))
- S(1215) / S(1216) * (S(1) - S(2) * x) ** (S(19) / S(2)),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(4)
/ (S(3) + S(5) * x),
x,
S(7),
S(22) / S(46875) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(2) / S(15625) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(136419) / S(35000) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(3819) / S(1000) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(2889) / S(2200) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(81) / S(520) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(242)
/ S(78125)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
* sqrt(S(11) / S(5))
+ S(242) / S(78125) * sqrt(S(1) - S(2) * x),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(3)
/ (S(3) + S(5) * x),
x,
S(7),
S(22) / S(9375) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(2) / S(3125) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(3897) / S(3500) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(18) / S(25) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(27) / S(220) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(242)
/ S(15625)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
* sqrt(S(11) / S(5))
+ S(242) / S(15625) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(5) * (S(3) + S(5) * x) / sqrt(S(1) - S(2) * x),
x,
S(2),
S(60025) / S(24) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(103929) / S(64) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(5355) / S(8) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(10815) / S(64) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ S(1053) / S(44) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(1215) / S(832) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(184877) / S(64) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(4) * (S(3) + S(5) * x) / sqrt(S(1) - S(2) * x),
x,
S(2),
S(57281) / S(96) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(24843) / S(80) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(1539) / S(16) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(519) / S(32) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ S(405) / S(352) * (S(1) - S(2) * x) ** (S(11) / S(2))
- S(26411) / S(32) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(5) / ((S(3) + S(5) * x) * sqrt(S(1) - S(2) * x)),
x,
S(4),
S(268707) / S(5000) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(51057) / S(2500) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(5751) / S(1400) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(27) / S(80) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(2)
/ S(3125)
* arctanh(sqrt(S(5) / S(11)) * sqrt(S(1) - S(2) * x))
/ sqrt(S(55))
- S(4774713) / S(50000) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(7)
* (S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2)),
x,
S(2),
-S(7882483) / S(128) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(4084101) / S(128) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(787185) / S(64) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(422919) / S(128) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(821583) / S(1408) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(101331) / S(1664) * (S(1) - S(2) * x) ** (S(13) / S(2))
- S(729) / S(256) * (S(1) - S(2) * x) ** (S(15) / S(2))
+ S(9058973) / S(256) / sqrt(S(1) - S(2) * x)
+ S(15647317) / S(128) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(6)
* (S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2)),
x,
S(2),
-S(1692705) / S(128) * (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(731619) / S(128) * (S(1) - S(2) * x) ** (S(5) / S(2))
- S(225855) / S(128) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ S(45549) / S(128) * (S(1) - S(2) * x) ** (S(9) / S(2))
- S(59049) / S(1408) * (S(1) - S(2) * x) ** (S(11) / S(2))
+ S(3645) / S(1664) * (S(1) - S(2) * x) ** (S(13) / S(2))
+ S(1294139) / S(128) / sqrt(S(1) - S(2) * x)
+ S(3916031) / S(128) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(5)
* (S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(5) / S(2)),
x,
S(2),
S(184877) / S(192) / (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(12495) / S(8) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(19467) / S(64) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(1053) / S(28) * (S(1) - S(2) * x) ** (S(7) / S(2))
- S(135) / S(64) * (S(1) - S(2) * x) ** (S(9) / S(2))
+ (-S(60025) / S(8)) / sqrt(S(1) - S(2) * x)
- S(519645) / S(64) * sqrt(S(1) - S(2) * x),
],
[
(S(2) + S(3) * x) ** S(4)
* (S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(5) / S(2)),
x,
S(2),
S(26411) / S(96) / (S(1) - S(2) * x) ** (S(3) / S(2))
+ S(3591) / S(16) * (S(1) - S(2) * x) ** (S(3) / S(2))
- S(4671) / S(160) * (S(1) - S(2) * x) ** (S(5) / S(2))
+ S(405) / S(224) * (S(1) - S(2) * x) ** (S(7) / S(2))
+ (-S(57281) / S(32)) / sqrt(S(1) - S(2) * x)
- S(24843) / S(16) * sqrt(S(1) - S(2) * x),
],
[
(A + B * x) * (d + e * x) ** (S(5) / S(2)) * sqrt(a + b * x),
x,
S(7),
-S(1)
/ S(48)
* (b * d - a * e)
* (S(3) * b * B * d - S(10) * A * b * e + S(7) * a * B * e)
* (a + b * x) ** (S(3) / S(2))
* (d + e * x) ** (S(3) / S(2))
/ (b ** S(3) * e)
- S(1)
/ S(40)
* (S(3) * b * B * d - S(10) * A * b * e + S(7) * a * B * e)
* (a + b * x) ** (S(3) / S(2))
* (d + e * x) ** (S(5) / S(2))
/ (b ** S(2) * e)
+ S(1)
/ S(5)
* B
* (a + b * x) ** (S(3) / S(2))
* (d + e * x) ** (S(7) / S(2))
/ (b * e)
+ S(1)
/ S(128)
* (b * d - a * e) ** S(4)
* (S(3) * b * B * d - S(10) * A * b * e + S(7) * a * B * e)
* arctanh(sqrt(e) * sqrt(a + b * x) / (sqrt(b) * sqrt(d + e * x)))
/ (b ** (S(9) / S(2)) * e ** (S(5) / S(2)))
- S(1)
/ S(64)
* (b * d - a * e) ** S(2)
* (S(3) * b * B * d - S(10) * A * b * e + S(7) * a * B * e)
* (a + b * x) ** (S(3) / S(2))
* sqrt(d + e * x)
/ (b ** S(4) * e)
- S(1)
/ S(128)
* (b * d - a * e) ** S(3)
* (S(3) * b * B * d - S(10) * A * b * e + S(7) * a * B * e)
* sqrt(a + b * x)
* sqrt(d + e * x)
/ (b ** S(4) * e ** S(2)),
],
[
(A + B * x) * (d + e * x) ** (S(3) / S(2)) * sqrt(a + b * x),
x,
S(6),
-S(1)
/ S(24)
* (S(3) * b * B * d - S(8) * A * b * e + S(5) * a * B * e)
* (a + b * x) ** (S(3) / S(2))
* (d + e * x) ** (S(3) / S(2))
/ (b ** S(2) * e)
+ S(1)
/ S(4)
* B
* (a + b * x) ** (S(3) / S(2))
* (d + e * x) ** (S(5) / S(2))
/ (b * e)
+ S(1)
/ S(64)
* (b * d - a * e) ** S(3)
* (S(3) * b * B * d - S(8) * A * b * e + S(5) * a * B * e)
* arctanh(sqrt(e) * sqrt(a + b * x) / (sqrt(b) * sqrt(d + e * x)))
/ (b ** (S(7) / S(2)) * e ** (S(5) / S(2)))
- S(1)
/ S(32)
* (b * d - a * e)
* (S(3) * b * B * d - S(8) * A * b * e + S(5) * a * B * e)
* (a + b * x) ** (S(3) / S(2))
* sqrt(d + e * x)
/ (b ** S(3) * e)
- S(1)
/ S(64)
* (b * d - a * e) ** S(2)
* (S(3) * b * B * d - S(8) * A * b * e + S(5) * a * B * e)
* sqrt(a + b * x)
* sqrt(d + e * x)
/ (b ** S(3) * e ** S(2)),
],
[
(A + B * x) * (d + e * x) ** (S(5) / S(2)) / sqrt(a + b * x),
x,
S(6),
-S(5)
/ S(64)
* (b * d - a * e) ** S(3)
* (b * B * d - S(8) * A * b * e + S(7) * a * B * e)
* arctanh(sqrt(e) * sqrt(a + b * x) / (sqrt(b) * sqrt(d + e * x)))
/ (b ** (S(9) / S(2)) * e ** (S(3) / S(2)))
- S(5)
/ S(96)
* (b * d - a * e)
* (b * B * d - S(8) * A * b * e + S(7) * a * B * e)
* (d + e * x) ** (S(3) / S(2))
* sqrt(a + b * x)
/ (b ** S(3) * e)
- S(1)
/ S(24)
* (b * B * d - S(8) * A * b * e + S(7) * a * B * e)
* (d + e * x) ** (S(5) / S(2))
* sqrt(a + b * x)
/ (b ** S(2) * e)
+ S(1) / S(4) * B * (d + e * x) ** (S(7) / S(2)) * sqrt(a + b * x) / (b * e)
- S(5)
/ S(64)
* (b * d - a * e) ** S(2)
* (b * B * d - S(8) * A * b * e + S(7) * a * B * e)
* sqrt(a + b * x)
* sqrt(d + e * x)
/ (b ** S(4) * e),
],
[
(A + B * x) * (d + e * x) ** (S(3) / S(2)) / sqrt(a + b * x),
x,
S(5),
-S(1)
/ S(8)
* (b * d - a * e) ** S(2)
* (b * B * d - S(6) * A * b * e + S(5) * a * B * e)
* arctanh(sqrt(e) * sqrt(a + b * x) / (sqrt(b) * sqrt(d + e * x)))
/ (b ** (S(7) / S(2)) * e ** (S(3) / S(2)))
- S(1)
/ S(12)
* (b * B * d - S(6) * A * b * e + S(5) * a * B * e)
* (d + e * x) ** (S(3) / S(2))
* sqrt(a + b * x)
/ (b ** S(2) * e)
+ S(1) / S(3) * B * (d + e * x) ** (S(5) / S(2)) * sqrt(a + b * x) / (b * e)
- S(1)
/ S(8)
* (b * d - a * e)
* (b * B * d - S(6) * A * b * e + S(5) * a * B * e)
* sqrt(a + b * x)
* sqrt(d + e * x)
/ (b ** S(3) * e),
],
[
(S(2) + S(3) * x) ** S(4) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
x,
S(7),
-S(333)
/ S(2000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(2)
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(1)
/ S(20)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(3)
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(7)
/ S(640000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(3) + S(5) * x) ** (S(3) / S(2))
* (S(231223) + S(140652) * x)
+ S(4122385421)
/ S(51200000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(34069301)
/ S(5120000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(374762311)
/ S(51200000)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(3) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
x,
S(6),
-S(3)
/ S(50)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(2)
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(21)
/ S(16000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(3) + S(5) * x) ** (S(3) / S(2))
* (S(731) + S(444) * x)
+ S(39142411)
/ S(1280000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(323491)
/ S(128000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(3558401) / S(1280000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(3) * sqrt(S(1) - S(2) * x) / sqrt(S(3) + S(5) * x),
x,
S(5),
S(525371)
/ S(64000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(3)
/ S(40)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x)
- S(21)
/ S(6400)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(335) + S(216) * x)
* sqrt(S(3) + S(5) * x)
+ S(47761) / S(64000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(2) * sqrt(S(1) - S(2) * x) / sqrt(S(3) + S(5) * x),
x,
S(5),
S(3047)
/ S(800)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(23) / S(80) * (S(1) - S(2) * x) ** (S(3) / S(2)) * sqrt(S(3) + S(5) * x)
- S(1)
/ S(10)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x)
* sqrt(S(3) + S(5) * x)
+ S(277) / S(800) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x),
x,
S(7),
-S(1)
/ S(20)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(2)
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(1)
/ S(160000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(3) + S(5) * x) ** (S(3) / S(2))
* (S(88987) + S(63120) * x)
+ S(452517373)
/ S(25600000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(3739813)
/ S(7680000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
- S(339983)
/ S(384000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(41137943) / S(25600000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x),
x,
S(7),
-S(567)
/ S(4000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(3)
/ S(50)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x)
* (S(3) + S(5) * x) ** (S(3) / S(2))
+ S(5487713)
/ S(640000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(45353)
/ S(192000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
- S(4123)
/ S(9600)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(498883) / S(640000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(3)
/ sqrt(S(3) + S(5) * x),
x,
S(6),
S(18648399)
/ S(3200000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(51373)
/ S(320000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
- S(3)
/ S(50)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x)
- S(3)
/ S(80000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(14629) + S(11580) * x)
* sqrt(S(3) + S(5) * x)
+ S(1695309) / S(3200000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(3) / S(2))
* (S(2) + S(3) * x) ** S(2)
/ sqrt(S(3) + S(5) * x),
x,
S(6),
S(109263)
/ S(32000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(301)
/ S(3200)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
- S(119)
/ S(800)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
- S(3)
/ S(40)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x)
* sqrt(S(3) + S(5) * x)
+ S(9933) / S(32000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x),
x,
S(8),
-S(3)
/ S(70)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(2) + S(3) * x) ** S(2)
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(3)
/ S(280000)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(3) + S(5) * x) ** (S(3) / S(2))
* (S(33857) + S(26700) * x)
+ S(3735929329)
/ S(256000000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(30875449)
/ S(76800000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(2806859)
/ S(19200000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
- S(255169)
/ S(640000)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(339629939)
/ S(256000000)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x),
x,
S(8),
-S(193)
/ S(2000)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(3) + S(5) * x) ** (S(3) / S(2))
- S(1)
/ S(20)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(2) + S(3) * x)
* (S(3) + S(5) * x) ** (S(3) / S(2))
+ S(105254149)
/ S(12800000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(869869)
/ S(3840000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(79079)
/ S(960000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
- S(7189)
/ S(32000)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(9568559) / S(12800000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(4)
/ sqrt(S(3) + S(5) * x),
x,
S(8),
S(12679836719)
/ S(1280000000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(104792039)
/ S(384000000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(9526549)
/ S(96000000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
- S(271)
/ S(2800)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x)
- S(3)
/ S(70)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
- S(1)
/ S(22400000)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(12923401) + S(11603280) * x)
* sqrt(S(3) + S(5) * x)
+ S(1152712429)
/ S(1280000000)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(1) - S(2) * x) ** (S(5) / S(2))
* (S(2) + S(3) * x) ** S(3)
/ sqrt(S(3) + S(5) * x),
x,
S(7),
S(368012183)
/ S(64000000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(3041423)
/ S(19200000)
* (S(1) - S(2) * x) ** (S(3) / S(2))
* sqrt(S(3) + S(5) * x)
+ S(276493)
/ S(4800000)
* (S(1) - S(2) * x) ** (S(5) / S(2))
* sqrt(S(3) + S(5) * x)
- S(1)
/ S(20)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x)
- S(1)
/ S(160000)
* (S(1) - S(2) * x) ** (S(7) / S(2))
* (S(52951) + S(47280) * x)
* sqrt(S(3) + S(5) * x)
+ S(33455653) / S(64000000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(4) * sqrt(S(3) + S(5) * x) / sqrt(S(1) - S(2) * x),
x,
S(6),
S(1067352517)
/ S(2560000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(987)
/ S(4000)
* (S(2) + S(3) * x) ** S(2)
* (S(3) + S(5) * x) ** (S(3) / S(2))
* sqrt(S(1) - S(2) * x)
- S(3)
/ S(50)
* (S(2) + S(3) * x) ** S(3)
* (S(3) + S(5) * x) ** (S(3) / S(2))
* sqrt(S(1) - S(2) * x)
- S(21)
/ S(640000)
* (S(3) + S(5) * x) ** (S(3) / S(2))
* (S(194923) + S(92040) * x)
* sqrt(S(1) - S(2) * x)
- S(97032047) / S(2560000) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(3) * sqrt(S(3) + S(5) * x) / sqrt(S(1) - S(2) * x),
x,
S(5),
S(677017)
/ S(5120)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(3)
/ S(40)
* (S(2) + S(3) * x) ** S(2)
* (S(3) + S(5) * x) ** (S(3) / S(2))
* sqrt(S(1) - S(2) * x)
- S(3)
/ S(1280)
* (S(3) + S(5) * x) ** (S(3) / S(2))
* (S(865) + S(408) * x)
* sqrt(S(1) - S(2) * x)
- S(61547) / S(5120) * sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(4) / (sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x)),
x,
S(5),
S(10866247)
/ S(128000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(259)
/ S(800)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
- S(3)
/ S(40)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
- S(7)
/ S(128000)
* (S(187559) + S(77820) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(3) / (sqrt(S(1) - S(2) * x) * sqrt(S(3) + S(5) * x)),
x,
S(4),
S(44437)
/ S(1600)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
- S(1)
/ S(10)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
- S(1)
/ S(1600)
* (S(5363) + S(2220) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(5)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2)),
x,
S(7),
-S(35439958001)
/ S(5120000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ (S(2) + S(3) * x) ** S(5) * sqrt(S(3) + S(5) * x) / sqrt(S(1) - S(2) * x)
+ S(847637)
/ S(32000)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(10389)
/ S(1600)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(33)
/ S(20)
* (S(2) + S(3) * x) ** S(4)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(49)
/ S(5120000)
* (S(87394471) + S(36265980) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(4)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2)),
x,
S(6),
-S(92108287)
/ S(51200)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ (S(2) + S(3) * x) ** S(4) * sqrt(S(3) + S(5) * x) / sqrt(S(1) - S(2) * x)
+ S(2203)
/ S(320)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(27)
/ S(16)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(1)
/ S(51200)
* (S(11129753) + S(4618500) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(5)
/ ((S(1) - S(2) * x) ** (S(3) / S(2)) * sqrt(S(3) + S(5) * x)),
x,
S(6),
-S(291096141)
/ S(256000)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(7)
/ S(11)
* (S(2) + S(3) * x) ** S(4)
* sqrt(S(3) + S(5) * x)
/ sqrt(S(1) - S(2) * x)
+ S(76587)
/ S(17600)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(939)
/ S(880)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(21)
/ S(2816000)
* (S(18424549) + S(7645620) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(4)
/ ((S(1) - S(2) * x) ** (S(3) / S(2)) * sqrt(S(3) + S(5) * x)),
x,
S(5),
-S(184641)
/ S(640)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(7)
/ S(11)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
/ sqrt(S(1) - S(2) * x)
+ S(243)
/ S(220)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
+ S(9)
/ S(7040)
* (S(27269) + S(11316) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(4)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(5) / S(2)),
x,
S(6),
S(13246251)
/ S(6400)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(1)
/ S(3)
* (S(2) + S(3) * x) ** S(4)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2))
- S(299)
/ S(66)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
/ sqrt(S(1) - S(2) * x)
- S(697)
/ S(88)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
- S(1)
/ S(70400)
* (S(17606479) + S(7306140) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(5) / S(2)),
x,
S(5),
S(126513)
/ S(320)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(1)
/ S(3)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2))
- S(233)
/ S(66)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x)
/ sqrt(S(1) - S(2) * x)
- S(1)
/ S(3520)
* (S(168157) + S(69780) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(5)
/ ((S(1) - S(2) * x) ** (S(5) / S(2)) * sqrt(S(3) + S(5) * x)),
x,
S(6),
S(8261577)
/ S(6400)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(7)
/ S(33)
* (S(2) + S(3) * x) ** S(4)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2))
- S(2051)
/ S(726)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
/ sqrt(S(1) - S(2) * x)
- S(23909)
/ S(4840)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x)
- S(1)
/ S(774400)
* (S(120791143) + S(50124540) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(S(2) + S(3) * x) ** S(4)
/ ((S(1) - S(2) * x) ** (S(5) / S(2)) * sqrt(S(3) + S(5) * x)),
x,
S(5),
S(392283)
/ S(1600)
* arcsin(sqrt(S(2) / S(11)) * sqrt(S(3) + S(5) * x))
/ sqrt(S(10))
+ S(7)
/ S(33)
* (S(2) + S(3) * x) ** S(3)
* sqrt(S(3) + S(5) * x)
/ (S(1) - S(2) * x) ** (S(3) / S(2))
- S(1589)
/ S(726)
* (S(2) + S(3) * x) ** S(2)
* sqrt(S(3) + S(5) * x)
/ sqrt(S(1) - S(2) * x)
- S(1)
/ S(193600)
* (S(5735477) + S(2380020) * x)
* sqrt(S(1) - S(2) * x)
* sqrt(S(3) + S(5) * x),
],
[
(c + d * x) ** (S(1) / S(2)) / (x ** S(2) * (a + b * x) ** S(2)),
x,
S(7),
(S(4) * b * c - a * d)
* arctanh(sqrt(c + d * x) / sqrt(c))
/ (a ** S(3) * sqrt(c))
- (S(4) * b * c - S(3) * a * d)
* arctanh(sqrt(b) * sqrt(c + d * x) / sqrt(b * c - a * d))
* sqrt(b)
/ (a ** S(3) * sqrt(b * c - a * d))
- S(2) * b * sqrt(c + d * x) / (a ** S(2) * (a + b * x))
- sqrt(c + d * x) / (a * x * (a + b * x)),
],
[
S(1) / (x ** S(2) * (a + b * x) ** S(2) * (c + d * x) ** (S(1) / S(2))),
x,
S(7),
(S(4) * b * c + a * d)
* arctanh(sqrt(c + d * x) / sqrt(c))
/ (a ** S(3) * c ** (S(3) / S(2)))
- b ** (S(3) / S(2))
* (S(4) * b * c - S(5) * a * d)
* arctanh(sqrt(b) * sqrt(c + d * x) / sqrt(b * c - a * d))
/ (a ** S(3) * (b * c - a * d) ** (S(3) / S(2)))
- b
* (S(2) * b * c - a * d)
* sqrt(c + d * x)
/ (a ** S(2) * c * (b * c - a * d) * (a + b * x))
- sqrt(c + d * x) / (a * c * x * (a + b * x)),
],
[
S(1) / (x ** S(2) * (a + b * x) ** S(2) * (c + d * x) ** (S(3) / S(2))),
x,
S(8),
(S(4) * b * c + S(3) * a * d)
* arctanh(sqrt(c + d * x) / sqrt(c))
/ (a ** S(3) * c ** (S(5) / S(2)))
- b ** (S(5) / S(2))
* (S(4) * b * c - S(7) * a * d)
* arctanh(sqrt(b) * sqrt(c + d * x) / sqrt(b * c - a * d))
/ (a ** S(3) * (b * c - a * d) ** (S(5) / S(2)))
- d
* (
S(2) * b ** S(2) * c ** S(2)
- S(2) * a * b * c * d
+ S(3) * a ** S(2) * d ** S(2)
)
/ (a ** S(2) * c ** S(2) * (b * c - a * d) ** S(2) * sqrt(c + d * x))
- b
* (S(2) * b * c - a * d)
/ (a ** S(2) * c * (b * c - a * d) * (a + b * x) * sqrt(c + d * x))
+ (-S(1)) / (a * c * x * (a + b * x) * sqrt(c + d * x)),
],
[
x ** S(3) * (c + d * x) ** (S(3) / S(2)) / (a + b * x) ** (S(3) / S(2)),
x,
S(6),
S(3)
/ S(64)
* (b * c - a * d)
* (
b ** S(3) * c ** S(3)
+ S(5) * a * b ** S(2) * c ** S(2) * d
+ S(35) * a ** S(2) * b * c * d ** S(2)
- S(105) * a ** S(3) * d ** S(3)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(11) / S(2)) * d ** (S(5) / S(2)))
- S(2) * x ** S(3) * (c + d * x) ** (S(3) / S(2)) / (b * sqrt(a + b * x))
+ S(9)
/ S(4)
* x ** S(2)
* (c + d * x) ** (S(3) / S(2))
* sqrt(a + b * x)
/ b ** S(2)
- S(1)
/ S(32)
* (c + d * x) ** (S(3) / S(2))
* (
S(3) * b ** S(2) * c ** S(2)
+ S(14) * a * b * c * d
- S(105) * a ** S(2) * d ** S(2)
- S(4) * b * d * (b * c - S(21) * a * d) * x
)
* sqrt(a + b * x)
/ (b ** S(4) * d ** S(2))
+ S(3)
/ S(64)
* (
b ** S(3) * c ** S(3)
+ S(5) * a * b ** S(2) * c ** S(2) * d
+ S(35) * a ** S(2) * b * c * d ** S(2)
- S(105) * a ** S(3) * d ** S(3)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(5) * d ** S(2)),
],
[
x ** S(2) * (c + d * x) ** (S(3) / S(2)) / (a + b * x) ** (S(3) / S(2)),
x,
S(6),
-S(1)
/ S(8)
* (b * c - a * d)
* (
b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
- S(35) * a ** S(2) * d ** S(2)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(9) / S(2)) * d ** (S(3) / S(2)))
- S(2)
* a ** S(2)
* (c + d * x) ** (S(5) / S(2))
/ (b ** S(2) * (b * c - a * d) * sqrt(a + b * x))
- S(1)
/ S(12)
* (S(10) * a * c + b * c ** S(2) / d - S(35) * a ** S(2) * d / b)
* (c + d * x) ** (S(3) / S(2))
* sqrt(a + b * x)
/ (b ** S(2) * (b * c - a * d))
+ S(1)
/ S(3)
* (c + d * x) ** (S(5) / S(2))
* sqrt(a + b * x)
/ (b ** S(2) * d)
- S(1)
/ S(8)
* (
b ** S(2) * c ** S(2)
+ S(10) * a * b * c * d
- S(35) * a ** S(2) * d ** S(2)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(4) * d),
],
[
x ** S(3) * (c + d * x) ** (S(5) / S(2)) / (a + b * x) ** (S(5) / S(2)),
x,
S(7),
-S(2)
/ S(3)
* x ** S(3)
* (c + d * x) ** (S(5) / S(2))
/ (b * (a + b * x) ** (S(3) / S(2)))
- S(5)
/ S(64)
* (b * c - a * d)
* (
b ** S(3) * c ** S(3)
+ S(21) * a * b ** S(2) * c ** S(2) * d
- S(189) * a ** S(2) * b * c * d ** S(2)
+ S(231) * a ** S(3) * d ** S(3)
)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(13) / S(2)) * d ** (S(3) / S(2)))
- S(2)
/ S(3)
* (S(6) * b * c - S(11) * a * d)
* x ** S(2)
* (c + d * x) ** (S(5) / S(2))
/ (b ** S(2) * (b * c - a * d) * sqrt(a + b * x))
- S(5)
/ S(96)
* (
b ** S(3) * c ** S(3)
+ S(21) * a * b ** S(2) * c ** S(2) * d
- S(189) * a ** S(2) * b * c * d ** S(2)
+ S(231) * a ** S(3) * d ** S(3)
)
* (c + d * x) ** (S(3) / S(2))
* sqrt(a + b * x)
/ (b ** S(5) * d * (b * c - a * d))
+ S(1)
/ S(24)
* (c + d * x) ** (S(5) / S(2))
* (
S(5) * b ** S(2) * c ** S(2)
- S(156) * a * b * c * d
+ S(231) * a ** S(2) * d ** S(2)
+ S(2) * b * d * (S(59) * b * c - S(99) * a * d) * x
)
* sqrt(a + b * x)
/ (b ** S(4) * d * (b * c - a * d))
- S(5)
/ S(64)
* (
b ** S(3) * c ** S(3)
+ S(21) * a * b ** S(2) * c ** S(2) * d
- S(189) * a ** S(2) * b * c * d ** S(2)
+ S(231) * a ** S(3) * d ** S(3)
)
* sqrt(a + b * x)
* sqrt(c + d * x)
/ (b ** S(6) * d),
],
[
x ** S(2) / ((a + b * x) ** (S(5) / S(2)) * (c + d * x) ** (S(1) / S(2))),
x,
S(4),
S(2)
* arctanh(sqrt(d) * sqrt(a + b * x) / (sqrt(b) * sqrt(c + d * x)))
/ (b ** (S(5) / S(2)) * sqrt(d))
- S(2)
/ S(3)
* a ** S(2)
* sqrt(c + d * x)
/ (b ** S(2) * (b * c - a * d) * (a + b * x) ** (S(3) / S(2)))
+ S(4)
/ S(3)
* a
* (S(3) * b * c - S(2) * a * d)
* sqrt(c + d * x)
/ (b ** S(2) * (b * c - a * d) ** S(2) * sqrt(a + b * x)),
],
[
x * sqrt(a + b * x) / sqrt(-a - b * x),
x,
S(2),
S(1) / S(2) * x ** S(2) * sqrt(a + b * x) / sqrt(-a - b * x),
],
[
(c + d * x) ** (S(3) / S(2)) / (x * (a + b * x) ** S(2)),
x,
S(6),
-S(2) * c ** (S(3) / S(2)) * arctanh(sqrt(c + d * x) / sqrt(c)) / a ** S(2)
+ (S(2) * b * c + a * d)
* arctanh(sqrt(b) * sqrt(c + d * x) / sqrt(b * c - a * d))
* sqrt(b * c - a * d)
/ (a ** S(2) * b ** (S(3) / S(2)))
+ (b * c - a * d) * sqrt(c + d * x) / (a * b * (a + b * x)),
],
]
for i in test:
r = rubi_integrate(i[0], i[1])
if len(i) == 5:
assert rubi_test(
r, i[1], i[3], expand=True, _diff=True, _numerical=True
) or rubi_test(r, i[1], i[4], expand=True, _diff=True, _numerical=True)
else:
assert rubi_test(r, i[1], i[3], expand=True, _diff=True, _numerical=True)
def test_simplify():
    """Integration tests verified by symbolic simplification (``expand=True``).

    Each entry of ``test`` is ``[integrand, variable, step_count, expected]``
    (a fifth element, when present, is an alternate acceptable antiderivative).
    ``rubi_integrate`` computes the antiderivative and ``rubi_test`` checks it
    against the expected form after expansion.  Symbols (``x``, ``a``..``f``,
    ``A``, ``B``, ``m``, ``n``) and helpers (``S``, ``log``, ``hypergeom``,
    ``rubi_integrate``, ``rubi_test``) come from the module-level setup.
    """
    test = [
        [
            x ** S(3) * (a + b * x) ** S(2) * (c + d * x) ** S(16),
            x,
            S(2),
            -S(1)
            / S(17)
            * c ** S(3)
            * (b * c - a * d) ** S(2)
            * (c + d * x) ** S(17)
            / d ** S(6)
            + S(1)
            / S(18)
            * c ** S(2)
            * (S(5) * b * c - S(3) * a * d)
            * (b * c - a * d)
            * (c + d * x) ** S(18)
            / d ** S(6)
            - S(1)
            / S(19)
            * c
            * (
                S(10) * b ** S(2) * c ** S(2)
                - S(12) * a * b * c * d
                + S(3) * a ** S(2) * d ** S(2)
            )
            * (c + d * x) ** S(19)
            / d ** S(6)
            + S(1)
            / S(20)
            * (
                S(10) * b ** S(2) * c ** S(2)
                - S(8) * a * b * c * d
                + a ** S(2) * d ** S(2)
            )
            * (c + d * x) ** S(20)
            / d ** S(6)
            - S(1)
            / S(21)
            * b
            * (S(5) * b * c - S(2) * a * d)
            * (c + d * x) ** S(21)
            / d ** S(6)
            + S(1) / S(22) * b ** S(2) * (c + d * x) ** S(22) / d ** S(6),
        ],
        # NOTE: the original file listed the next entry twice, byte-identical;
        # the exact duplicate has been removed.
        [
            x ** S(5) / ((a + b * x) ** S(2) * (c + d * x) ** S(2)),
            x,
            S(2),
            -S(2) * (b * c + a * d) * x / (b ** S(3) * d ** S(3))
            + S(1) / S(2) * x ** S(2) / (b ** S(2) * d ** S(2))
            + a ** S(5) / (b ** S(4) * (b * c - a * d) ** S(2) * (a + b * x))
            + c ** S(5) / (d ** S(4) * (b * c - a * d) ** S(2) * (c + d * x))
            + a ** S(4)
            * (S(5) * b * c - S(3) * a * d)
            * log(a + b * x)
            / (b ** S(4) * (b * c - a * d) ** S(3))
            + c ** S(4)
            * (S(3) * b * c - S(5) * a * d)
            * log(c + d * x)
            / (d ** S(4) * (b * c - a * d) ** S(3)),
        ],
        [
            x ** S(4) / ((a + b * x) * (c + d * x)),
            x,
            S(2),
            (b ** S(2) * c ** S(2) + a * b * c * d + a ** S(2) * d ** S(2))
            * x
            / (b ** S(3) * d ** S(3))
            - S(1) / S(2) * (b * c + a * d) * x ** S(2) / (b ** S(2) * d ** S(2))
            + S(1) / S(3) * x ** S(3) / (b * d)
            + a ** S(4) * log(a + b * x) / (b ** S(4) * (b * c - a * d))
            - c ** S(4) * log(c + d * x) / (d ** S(4) * (b * c - a * d)),
        ],
        [
            (a + b * x) * (A + B * x) * (d + e * x) ** S(4),
            x,
            S(2),
            S(1)
            / S(5)
            * (b * d - a * e)
            * (B * d - A * e)
            * (d + e * x) ** S(5)
            / e ** S(3)
            - S(1)
            / S(6)
            * (S(2) * b * B * d - A * b * e - a * B * e)
            * (d + e * x) ** S(6)
            / e ** S(3)
            + S(1) / S(7) * b * B * (d + e * x) ** S(7) / e ** S(3),
        ],
        [
            (a + b * x) ** S(3) * (c + d * x) ** S(3) * (e + f * x) ** S(3),
            x,
            S(2),
            S(1)
            / S(4)
            * (b * c - a * d) ** S(3)
            * (b * e - a * f) ** S(3)
            * (a + b * x) ** S(4)
            / b ** S(7)
            + S(3)
            / S(5)
            * (b * c - a * d) ** S(2)
            * (b * e - a * f) ** S(2)
            * (b * d * e + b * c * f - S(2) * a * d * f)
            * (a + b * x) ** S(5)
            / b ** S(7)
            + S(1)
            / S(2)
            * (b * c - a * d)
            * (b * e - a * f)
            * (
                S(5) * a ** S(2) * d ** S(2) * f ** S(2)
                - S(5) * a * b * d * f * (d * e + c * f)
                + b ** S(2)
                * (d ** S(2) * e ** S(2) + S(3) * c * d * e * f + c ** S(2) * f ** S(2))
            )
            * (a + b * x) ** S(6)
            / b ** S(7)
            + S(1)
            / S(7)
            * (b * d * e + b * c * f - S(2) * a * d * f)
            * (
                S(10) * a ** S(2) * d ** S(2) * f ** S(2)
                - S(10) * a * b * d * f * (d * e + c * f)
                + b ** S(2)
                * (d ** S(2) * e ** S(2) + S(8) * c * d * e * f + c ** S(2) * f ** S(2))
            )
            * (a + b * x) ** S(7)
            / b ** S(7)
            + S(3)
            / S(8)
            * d
            * f
            * (
                S(5) * a ** S(2) * d ** S(2) * f ** S(2)
                - S(5) * a * b * d * f * (d * e + c * f)
                + b ** S(2)
                * (d ** S(2) * e ** S(2) + S(3) * c * d * e * f + c ** S(2) * f ** S(2))
            )
            * (a + b * x) ** S(8)
            / b ** S(7)
            + S(1)
            / S(3)
            * d ** S(2)
            * f ** S(2)
            * (b * d * e + b * c * f - S(2) * a * d * f)
            * (a + b * x) ** S(9)
            / b ** S(7)
            + S(1) / S(10) * d ** S(3) * f ** S(3) * (a + b * x) ** S(10) / b ** S(7),
        ],
        [
            (a + b * x) * (A + B * x) * (d + e * x) ** (S(5) / S(2)),
            x,
            S(2),
            S(2)
            / S(7)
            * (b * d - a * e)
            * (B * d - A * e)
            * (d + e * x) ** (S(7) / S(2))
            / e ** S(3)
            - S(2)
            / S(9)
            * (S(2) * b * B * d - A * b * e - a * B * e)
            * (d + e * x) ** (S(9) / S(2))
            / e ** S(3)
            + S(2) / S(11) * b * B * (d + e * x) ** (S(11) / S(2)) / e ** S(3),
        ],
        [
            (S(5) - S(4) * x) ** S(4) * (S(2) + S(3) * x) ** m / (S(1) + S(2) * x) ** m,
            x,
            S(4),
            -S(1)
            / S(45)
            * (S(88) - m)
            * (S(5) - S(4) * x) ** S(2)
            * (S(1) + S(2) * x) ** (S(1) - m)
            * (S(2) + S(3) * x) ** (S(1) + m)
            - S(2)
            / S(15)
            * (S(5) - S(4) * x) ** S(3)
            * (S(1) + S(2) * x) ** (S(1) - m)
            * (S(2) + S(3) * x) ** (S(1) + m)
            - S(1)
            / S(1215)
            * (S(1) + S(2) * x) ** (S(1) - m)
            * (S(2) + S(3) * x) ** (S(1) + m)
            * (
                S(386850)
                - S(25441) * m
                + S(426) * m ** S(2)
                - S(2) * m ** S(3)
                - S(24) * (S(4359) - S(154) * m + m ** S(2)) * x
            )
            + S(1)
            / S(1215)
            * S(2) ** (-S(1) - m)
            * (
                S(3528363)
                - S(639760) * m
                + S(29050) * m ** S(2)
                - S(440) * m ** S(3)
                + S(2) * m ** S(4)
            )
            * (S(1) + S(2) * x) ** (S(1) - m)
            * hypergeom([S(1) - m, -m], [S(2) - m], -S(3) * (S(1) + S(2) * x))
            / (S(1) - m),
        ],
        [
            (S(5) - S(4) * x) ** S(3)
            * (S(1) + S(2) * x) ** (-S(1) - m)
            * (S(2) + S(3) * x) ** m,
            x,
            S(3),
            -S(2)
            / S(9)
            * (S(5) - S(4) * x) ** S(2)
            * (S(2) + S(3) * x) ** (S(1) + m)
            / (S(1) + S(2) * x) ** m
            - S(1)
            / S(27)
            * (S(2) + S(3) * x) ** (S(1) + m)
            * (
                S(9261)
                - S(512) * m
                + S(4) * m ** S(2)
                - S(4) * (S(109) - S(2) * m) * m * x
            )
            / (m * (S(1) + S(2) * x) ** m)
            + S(1)
            / S(27)
            * S(2) ** (-S(1) - m)
            * (S(27783) - S(8324) * m + S(390) * m ** S(2) - S(4) * m ** S(3))
            * (S(1) + S(2) * x) ** (S(1) - m)
            * hypergeom([S(1) - m, -m], [S(2) - m], -S(3) * (S(1) + S(2) * x))
            / ((S(1) - m) * m),
        ],
        [
            (a + b * x) ** m
            * (c + d * x) ** n
            * (
                (b * c * f + a * d * f + a * d * f * m + b * c * f * n)
                / (b * d * (S(2) + m + n))
                + f * x
            )
            ** (-S(3) - m - n),
            x,
            S(1),
            b
            * d
            * (S(2) + m + n)
            * (a + b * x) ** (S(1) + m)
            * (c + d * x) ** (S(1) + n)
            * (
                f * (a * d * (S(1) + m) + b * c * (S(1) + n)) / (b * d * (S(2) + m + n))
                + f * x
            )
            ** (-S(2) - m - n)
            / ((b * c - a * d) ** S(2) * f * (S(1) + m) * (S(1) + n)),
        ],
        [
            x ** S(3) * (c + d * x) ** S(3) / (a + b * x) ** S(3),
            x,
            S(2),
            (b * c - a * d)
            * (
                b ** S(2) * c ** S(2)
                - S(8) * a * b * c * d
                + S(10) * a ** S(2) * d ** S(2)
            )
            * x
            / b ** S(6)
            + S(3)
            / S(2)
            * d
            * (b * c - S(2) * a * d)
            * (b * c - a * d)
            * x ** S(2)
            / b ** S(5)
            + d ** S(2) * (b * c - a * d) * x ** S(3) / b ** S(4)
            + S(1) / S(4) * d ** S(3) * x ** S(4) / b ** S(3)
            + S(1)
            / S(2)
            * a ** S(3)
            * (b * c - a * d) ** S(3)
            / (b ** S(7) * (a + b * x) ** S(2))
            - S(3)
            * a ** S(2)
            * (b * c - S(2) * a * d)
            * (b * c - a * d) ** S(2)
            / (b ** S(7) * (a + b * x))
            - S(3)
            * a
            * (b * c - a * d)
            * (
                b ** S(2) * c ** S(2)
                - S(5) * a * b * c * d
                + S(5) * a ** S(2) * d ** S(2)
            )
            * log(a + b * x)
            / b ** S(7),
        ],
        [
            (S(2) + S(3) * x) ** S(8) * (S(3) + S(5) * x) / (S(1) - S(2) * x) ** S(3),
            x,
            S(2),
            S(63412811) / S(2048) / (S(1) - S(2) * x) ** S(2)
            + (-S(246239357) / S(1024)) / (S(1) - S(2) * x)
            - S(120864213) / S(256) * x
            - S(118841283) / S(512) * x ** S(2)
            - S(16042509) / S(128) * x ** S(3)
            - S(7568235) / S(128) * x ** S(4)
            - S(213597) / S(10) * x ** S(5)
            - S(162567) / S(32) * x ** S(6)
            - S(32805) / S(56) * x ** S(7)
            - S(106237047) / S(256) * log(S(1) - S(2) * x),
        ],
    ]
    for i in test:
        r = rubi_integrate(i[0], i[1])
        if len(i) == 5:
            # An alternate acceptable antiderivative was supplied: pass if
            # either form matches.
            assert rubi_test(r, i[1], i[3], expand=True) or rubi_test(
                r, i[1], i[4], expand=True
            )
        else:
            assert rubi_test(r, i[1], i[3], expand=True)
def test_diff():
    """Verify a few (a + b*x)-family antiderivatives by differentiation.

    Each case is ``[integrand, variable, step-count, expected antiderivative]``;
    ``rubi_test(..., _diff=True)`` checks the result by differentiating it back.
    """
    cases = [
        [
            (a + b * x) * (e + f * x) ** (S(3) / S(2)) / (c + d * x),
            x,
            S(5),
            -S(2) / S(3) * (b * c - a * d) * (e + f * x) ** (S(3) / S(2)) / d ** S(2)
            + S(2) / S(5) * b * (e + f * x) ** (S(5) / S(2)) / (d * f)
            + S(2)
            * (b * c - a * d)
            * (d * e - c * f) ** (S(3) / S(2))
            * arctanh(sqrt(d) * sqrt(e + f * x) / sqrt(d * e - c * f))
            / d ** (S(7) / S(2))
            - S(2) * (b * c - a * d) * (d * e - c * f) * sqrt(e + f * x) / d ** S(3),
        ],
        [
            x ** (S(5) / S(2)) * (A + B * x) / (a + b * x),
            x,
            S(6),
            -S(2) / S(3) * a * (A * b - a * B) * x ** (S(3) / S(2)) / b ** S(3)
            + S(2) / S(5) * (A * b - a * B) * x ** (S(5) / S(2)) / b ** S(2)
            + S(2) / S(7) * B * x ** (S(7) / S(2)) / b
            - S(2)
            * a ** (S(5) / S(2))
            * (A * b - a * B)
            * arctan(sqrt(b) * sqrt(x) / sqrt(a))
            / b ** (S(9) / S(2))
            + S(2) * a ** S(2) * (A * b - a * B) * sqrt(x) / b ** S(4),
        ],
        [
            (a + b * x) ** S(2) / ((c + d * x) ** S(2) * sqrt(e + f * x)),
            x,
            S(4),
            (b * c - a * d)
            * (S(4) * b * d * e - S(3) * b * c * f - a * d * f)
            * arctanh(sqrt(d) * sqrt(e + f * x) / sqrt(d * e - c * f))
            / (d ** (S(5) / S(2)) * (d * e - c * f) ** (S(3) / S(2)))
            + S(2) * b ** S(2) * sqrt(e + f * x) / (d ** S(2) * f)
            - (b * c - a * d) ** S(2)
            * sqrt(e + f * x)
            / (d ** S(2) * (d * e - c * f) * (c + d * x)),
        ],
    ]
    for case in cases:
        antiderivative = rubi_integrate(case[0], case[1])
        if len(case) == 5:
            # A fifth entry supplies an alternate antiderivative; either may match.
            assert rubi_test(
                antiderivative, case[1], case[3], expand=True, _diff=True
            ) or rubi_test(antiderivative, case[1], case[4], expand=True, _diff=True)
        else:
            assert rubi_test(antiderivative, case[1], case[3], expand=True, _diff=True)
| 34.978691 | 88 | 0.211618 | 17,523 | 114,905 | 1.385779 | 0.034412 | 0.13771 | 0.079809 | 0.059465 | 0.835729 | 0.812997 | 0.764032 | 0.745213 | 0.706544 | 0.67957 | 0 | 0.162597 | 0.529202 | 114,905 | 3,284 | 89 | 34.989342 | 0.286282 | 0.000339 | 0 | 0.654023 | 0 | 0 | 0.000488 | 0 | 0 | 0 | 0 | 0 | 0.001835 | 1 | 0.000918 | false | 0 | 0.004589 | 0 | 0.005506 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
d931011565d1571aaac22caf83e4dcd401dc683b | 17,609 | py | Python | tests/core/pyspec/eth2spec/test/phase0/fork_choice/test_ex_ante.py | sifraitech/eth2.0-specs | 1bfefe301da592375e2e02f65849a96aadec1936 | [
"CC0-1.0"
] | 497 | 2021-08-19T01:22:07.000Z | 2022-03-30T21:40:40.000Z | tests/core/pyspec/eth2spec/test/phase0/fork_choice/test_ex_ante.py | sifraitech/eth2.0-specs | 1bfefe301da592375e2e02f65849a96aadec1936 | [
"CC0-1.0"
] | 133 | 2021-08-18T16:47:29.000Z | 2022-03-31T22:31:56.000Z | tests/core/pyspec/eth2spec/test/phase0/fork_choice/test_ex_ante.py | sifraitech/eth2.0-specs | 1bfefe301da592375e2e02f65849a96aadec1936 | [
"CC0-1.0"
] | 98 | 2021-08-31T09:19:27.000Z | 2022-03-27T05:07:04.000Z | from eth2spec.test.context import (
MAINNET,
spec_state_test,
with_all_phases,
with_presets,
)
from eth2spec.test.helpers.attestations import (
get_valid_attestation,
sign_attestation,
)
from eth2spec.test.helpers.block import (
build_empty_block,
)
from eth2spec.test.helpers.fork_choice import (
get_genesis_forkchoice_store_and_block,
on_tick_and_append_step,
add_attestation,
add_block,
tick_and_add_block,
)
from eth2spec.test.helpers.state import (
state_transition_and_sign_block,
)
def _apply_base_block_a(spec, state, store, test_steps):
# On receiving block A at slot `N`
block = build_empty_block(spec, state, slot=state.slot + 1)
signed_block_a = state_transition_and_sign_block(spec, state, block)
yield from tick_and_add_block(spec, store, signed_block_a, test_steps)
assert spec.get_head(store) == signed_block_a.message.hash_tree_root()
@with_all_phases
@spec_state_test
def test_ex_ante_vanilla(spec, state):
"""
With a single adversarial attestation
Objects:
Block A - slot N
Block B (parent A) - slot N+1
Block C (parent A) - slot N+2
Attestation_1 (Block B); size `1` - slot N+1
Steps:
Block A received at N — A is head
Block C received at N+2 — C is head
Block B received at N+2 — C is head
Attestation_1 received at N+2 — C is head
"""
test_steps = []
# Initialization
store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
yield 'anchor_state', state
yield 'anchor_block', anchor_block
current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, current_time, test_steps)
assert store.time == current_time
# On receiving block A at slot `N`
yield from _apply_base_block_a(spec, state, store, test_steps)
state_a = state.copy()
# Block B at slot `N + 1`, parent is A
state_b = state_a.copy()
block = build_empty_block(spec, state_a, slot=state_a.slot + 1)
signed_block_b = state_transition_and_sign_block(spec, state_b, block)
# Block C at slot `N + 2`, parent is A
state_c = state_a.copy()
block = build_empty_block(spec, state_c, slot=state_a.slot + 2)
signed_block_c = state_transition_and_sign_block(spec, state_c, block)
# Attestation_1 at slot `N + 1` voting for block B
def _filter_participant_set(participants):
return [next(iter(participants))]
attestation = get_valid_attestation(
spec, state_b, slot=state_b.slot, signed=False, filter_participant_set=_filter_participant_set
)
attestation.data.beacon_block_root = signed_block_b.message.hash_tree_root()
assert len([i for i in attestation.aggregation_bits if i == 1]) == 1
sign_attestation(spec, state_b, attestation)
# Block C received at N+2 — C is head
time = state_c.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_block(spec, store, signed_block_c, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block B received at N+2 — C is head due to proposer score boost
yield from add_block(spec, store, signed_block_b, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Attestation_1 received at N+2 — C is head
yield from add_attestation(spec, store, attestation, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
yield 'steps', test_steps
def _get_greater_than_proposer_boost_score(spec, store, state, proposer_boost_root, root):
"""
Return the minimum attestation participant count such that attestation_score > proposer_score
"""
# calculate proposer boost score
block = store.blocks[root]
proposer_score = 0
if spec.get_ancestor(store, root, block.slot) == proposer_boost_root:
num_validators = len(spec.get_active_validator_indices(state, spec.get_current_epoch(state)))
avg_balance = spec.get_total_active_balance(state) // num_validators
committee_size = num_validators // spec.SLOTS_PER_EPOCH
committee_weight = committee_size * avg_balance
proposer_score = (committee_weight * spec.config.PROPOSER_SCORE_BOOST) // 100
# calculate minimum participant count such that attestation_score > proposer_score
base_effective_balance = state.validators[0].effective_balance
return proposer_score // base_effective_balance + 1
@with_all_phases
@with_presets([MAINNET], reason="to create non-duplicate committee")
@spec_state_test
def test_ex_ante_attestations_is_greater_than_proposer_boost_with_boost(spec, state):
"""
Adversarial attestations > proposer boost
Objects:
Block A - slot N
Block B (parent A) - slot N+1
Block C (parent A) - slot N+2
Attestation_set_1 (Block B); size `proposer_boost + 1` - slot N+1
Steps:
Block A received at N — A is head
Block C received at N+2 — C is head
Block B received at N+2 — C is head
Attestation_1 received at N+2 — B is head
"""
test_steps = []
# Initialization
store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
yield 'anchor_state', state
yield 'anchor_block', anchor_block
current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, current_time, test_steps)
assert store.time == current_time
# On receiving block A at slot `N`
yield from _apply_base_block_a(spec, state, store, test_steps)
state_a = state.copy()
# Block B at slot `N + 1`, parent is A
state_b = state_a.copy()
block = build_empty_block(spec, state_a, slot=state_a.slot + 1)
signed_block_b = state_transition_and_sign_block(spec, state_b, block)
# Block C at slot `N + 2`, parent is A
state_c = state_a.copy()
block = build_empty_block(spec, state_c, slot=state_a.slot + 2)
signed_block_c = state_transition_and_sign_block(spec, state_c, block)
# Block C received at N+2 — C is head
time = state_c.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_block(spec, store, signed_block_c, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block B received at N+2 — C is head due to proposer score boost
yield from add_block(spec, store, signed_block_b, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Attestation_set_1 at slot `N + 1` voting for block B
proposer_boost_root = signed_block_b.message.hash_tree_root()
root = signed_block_b.message.hash_tree_root()
participant_num = _get_greater_than_proposer_boost_score(spec, store, state, proposer_boost_root, root)
def _filter_participant_set(participants):
return [index for i, index in enumerate(participants) if i < participant_num]
attestation = get_valid_attestation(
spec, state_b, slot=state_b.slot, signed=False, filter_participant_set=_filter_participant_set
)
attestation.data.beacon_block_root = signed_block_b.message.hash_tree_root()
assert len([i for i in attestation.aggregation_bits if i == 1]) == participant_num
sign_attestation(spec, state_b, attestation)
# Attestation_set_1 received at N+2 — B is head because B's attestation_score > C's proposer_score.
# (B's proposer_score = C's attestation_score = 0)
yield from add_attestation(spec, store, attestation, test_steps)
assert spec.get_head(store) == signed_block_b.message.hash_tree_root()
yield 'steps', test_steps
@with_all_phases
@spec_state_test
def test_ex_ante_sandwich_without_attestations(spec, state):
"""
Simple Sandwich test with boost and no attestations.
Obejcts:
Block A - slot N
Block B (parent A) - slot N+1
Block C (parent A) - slot N+2
Block D (parent B) - slot N+3
Steps:
Block A received at N — A is head
Block C received at N+2 — C is head
Block B received at N+2 — C is head (with boost)
Block D received at N+3 — D is head (with boost)
"""
test_steps = []
# Initialization
store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
yield 'anchor_state', state
yield 'anchor_block', anchor_block
current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, current_time, test_steps)
assert store.time == current_time
# On receiving block A at slot `N`
yield from _apply_base_block_a(spec, state, store, test_steps)
state_a = state.copy()
# Block B at slot `N + 1`, parent is A
state_b = state_a.copy()
block = build_empty_block(spec, state_a, slot=state_a.slot + 1)
signed_block_b = state_transition_and_sign_block(spec, state_b, block)
# Block C at slot `N + 2`, parent is A
state_c = state_a.copy()
block = build_empty_block(spec, state_c, slot=state_a.slot + 2)
signed_block_c = state_transition_and_sign_block(spec, state_c, block)
# Block D at slot `N + 3`, parent is B
state_d = state_b.copy()
block = build_empty_block(spec, state_d, slot=state_a.slot + 3)
signed_block_d = state_transition_and_sign_block(spec, state_d, block)
# Block C received at N+2 — C is head
time = state_c.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_block(spec, store, signed_block_c, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block B received at N+2 — C is head, it has proposer score boost
yield from add_block(spec, store, signed_block_b, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block D received at N+3 - D is head, it has proposer score boost
time = state_d.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_block(spec, store, signed_block_d, test_steps)
assert spec.get_head(store) == signed_block_d.message.hash_tree_root()
yield 'steps', test_steps
@with_all_phases
@spec_state_test
def test_ex_ante_sandwich_with_honest_attestation(spec, state):
"""
Boosting necessary to sandwich attack.
Objects:
Block A - slot N
Block B (parent A) - slot N+1
Block C (parent A) - slot N+2
Block D (parent B) - slot N+3
Attestation_1 (Block C); size 1 - slot N+2 (honest)
Steps:
Block A received at N — A is head
Block C received at N+2 — C is head
Block B received at N+2 — C is head
Attestation_1 received at N+3 — C is head
Block D received at N+3 — D is head
"""
test_steps = []
# Initialization
store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
yield 'anchor_state', state
yield 'anchor_block', anchor_block
current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, current_time, test_steps)
assert store.time == current_time
# On receiving block A at slot `N`
yield from _apply_base_block_a(spec, state, store, test_steps)
state_a = state.copy()
# Block B at slot `N + 1`, parent is A
state_b = state_a.copy()
block = build_empty_block(spec, state_a, slot=state_a.slot + 1)
signed_block_b = state_transition_and_sign_block(spec, state_b, block)
# Block C at slot `N + 2`, parent is A
state_c = state_a.copy()
block = build_empty_block(spec, state_c, slot=state_a.slot + 2)
signed_block_c = state_transition_and_sign_block(spec, state_c, block)
# Attestation_1 at N+2 voting for block C
def _filter_participant_set(participants):
return [next(iter(participants))]
attestation = get_valid_attestation(
spec, state_c, slot=state_c.slot, signed=False, filter_participant_set=_filter_participant_set
)
attestation.data.beacon_block_root = signed_block_c.message.hash_tree_root()
assert len([i for i in attestation.aggregation_bits if i == 1]) == 1
sign_attestation(spec, state_c, attestation)
# Block D at slot `N + 3`, parent is B
state_d = state_b.copy()
block = build_empty_block(spec, state_d, slot=state_a.slot + 3)
signed_block_d = state_transition_and_sign_block(spec, state_d, block)
# Block C received at N+2 — C is head
time = state_c.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_block(spec, store, signed_block_c, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block B received at N+2 — C is head, it has proposer score boost
yield from add_block(spec, store, signed_block_b, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Attestation_1 received at N+3 — C is head
time = state_d.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_attestation(spec, store, attestation, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block D received at N+3 - D is head, it has proposer score boost
yield from add_block(spec, store, signed_block_d, test_steps)
assert spec.get_head(store) == signed_block_d.message.hash_tree_root()
yield 'steps', test_steps
@with_all_phases
@with_presets([MAINNET], reason="to create non-duplicate committee")
@spec_state_test
def test_ex_ante_sandwich_with_boost_not_sufficient(spec, state):
"""
Boost not sufficient to sandwich attack.
Objects:
Block A - slot N
Block B (parent A) - slot N+1
Block C (parent A) - slot N+2
Block D (parent B) - slot N+3
Attestation_set_1 (Block C); size proposer_boost + 1 - slot N+2
Steps:
Block A received at N — A is head
Block C received at N+2 — C is head
Block B received at N+2 — C is head
Attestation_set_1 received — C is head
Block D received at N+3 — C is head
"""
test_steps = []
# Initialization
store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
yield 'anchor_state', state
yield 'anchor_block', anchor_block
current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, current_time, test_steps)
assert store.time == current_time
# On receiving block A at slot `N`
yield from _apply_base_block_a(spec, state, store, test_steps)
state_a = state.copy()
# Block B at slot `N + 1`, parent is A
state_b = state_a.copy()
block = build_empty_block(spec, state_a, slot=state_a.slot + 1)
signed_block_b = state_transition_and_sign_block(spec, state_b, block)
# Block C at slot `N + 2`, parent is A
state_c = state_a.copy()
block = build_empty_block(spec, state_c, slot=state_a.slot + 2)
signed_block_c = state_transition_and_sign_block(spec, state_c, block)
# Block D at slot `N + 3`, parent is B
state_d = state_b.copy()
block = build_empty_block(spec, state_d, slot=state_a.slot + 3)
signed_block_d = state_transition_and_sign_block(spec, state_d, block)
# Block C received at N+2 — C is head
time = state_c.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_block(spec, store, signed_block_c, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block B received at N+2 — C is head, it has proposer score boost
yield from add_block(spec, store, signed_block_b, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Attestation_set_1 at N+2 voting for block C
proposer_boost_root = signed_block_c.message.hash_tree_root()
root = signed_block_c.message.hash_tree_root()
participant_num = _get_greater_than_proposer_boost_score(spec, store, state, proposer_boost_root, root)
def _filter_participant_set(participants):
return [index for i, index in enumerate(participants) if i < participant_num]
attestation = get_valid_attestation(
spec, state_c, slot=state_c.slot, signed=False, filter_participant_set=_filter_participant_set
)
attestation.data.beacon_block_root = signed_block_c.message.hash_tree_root()
assert len([i for i in attestation.aggregation_bits if i == 1]) == participant_num
sign_attestation(spec, state_c, attestation)
# Attestation_1 received at N+3 — B is head because B's attestation_score > C's proposer_score.
# (B's proposer_score = C's attestation_score = 0)
time = state_d.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
on_tick_and_append_step(spec, store, time, test_steps)
yield from add_attestation(spec, store, attestation, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
# Block D received at N+3 - C is head, D's boost not sufficient!
yield from add_block(spec, store, signed_block_d, test_steps)
assert spec.get_head(store) == signed_block_c.message.hash_tree_root()
yield 'steps', test_steps
| 41.727488 | 107 | 0.717928 | 2,805 | 17,609 | 4.22246 | 0.051337 | 0.044073 | 0.035292 | 0.041709 | 0.898852 | 0.881543 | 0.866515 | 0.859338 | 0.838906 | 0.826072 | 0 | 0.008488 | 0.197172 | 17,609 | 421 | 108 | 41.826603 | 0.826767 | 0.239423 | 0 | 0.772532 | 0 | 0 | 0.016185 | 0 | 0 | 0 | 0 | 0 | 0.11588 | 1 | 0.04721 | false | 0 | 0.021459 | 0.017167 | 0.090129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d935633d889b16611a873c6f1a78d4da99d57c9f | 575 | py | Python | fabfile/testbeds/traffic_profile_sample.py | GaryGaryWU/contrail_fabric_util | 70b944afe801593cd2664ae46e87363534085bcc | [
"Apache-2.0"
] | null | null | null | fabfile/testbeds/traffic_profile_sample.py | GaryGaryWU/contrail_fabric_util | 70b944afe801593cd2664ae46e87363534085bcc | [
"Apache-2.0"
] | null | null | null | fabfile/testbeds/traffic_profile_sample.py | GaryGaryWU/contrail_fabric_util | 70b944afe801593cd2664ae46e87363534085bcc | [
"Apache-2.0"
] | null | null | null | traffic_servers = {
'192.168.2.253' : {'tcp': ['9100', '9101', '9102'], 'udp': ['9200', '9201']},
'192.168.2.252' : {'tcp': ['9100', '9101', '9102'], 'udp': ['9200', '9201']},
}
traffic_clients = {
'192.168.1.253' :
{
'192.168.2.253' : {'tcp': ['9100', '9101'], 'udp': ['9200']},
'192.168.2.252' : {'tcp': ['9100', '9101', '9102'], 'udp': ['9200', '9201']}
},
'192.168.1.252' :
{
'192.168.2.253' : {'tcp': ['9100', '9101'], 'udp': ['9200']},
'192.168.2.252' : {'tcp': ['9100', '9101', '9102'], 'udp': ['9200', '9201']}
},
}
| 28.75 | 84 | 0.434783 | 74 | 575 | 3.351351 | 0.216216 | 0.193548 | 0.169355 | 0.241935 | 0.830645 | 0.830645 | 0.830645 | 0.790323 | 0.790323 | 0.66129 | 0 | 0.401747 | 0.203478 | 575 | 19 | 85 | 30.263158 | 0.139738 | 0 | 0 | 0.25 | 0 | 0 | 0.425087 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
d9668bb3c26cf1160b9b6d5d1a7d7fb29a34a9e7 | 218 | py | Python | javascript/expressions/__init__.py | chrisdickinson/python-javascript | 6cc73bd1c71c40b1898c5927d896986d99c39d3d | [
"MIT"
] | 10 | 2015-08-11T06:48:19.000Z | 2019-03-26T22:17:13.000Z | javascript/expressions/__init__.py | chrisdickinson/python-javascript | 6cc73bd1c71c40b1898c5927d896986d99c39d3d | [
"MIT"
] | 1 | 2018-05-18T09:30:56.000Z | 2018-05-18T09:30:56.000Z | javascript/expressions/__init__.py | chrisdickinson/python-javascript | 6cc73bd1c71c40b1898c5927d896986d99c39d3d | [
"MIT"
] | null | null | null | from javascript.expressions.base import *
from javascript.expressions.unary import *
from javascript.expressions.binary import *
from javascript.expressions.ternary import *
from javascript.expressions.value import *
| 31.142857 | 44 | 0.834862 | 25 | 218 | 7.28 | 0.36 | 0.384615 | 0.686813 | 0.681319 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09633 | 218 | 6 | 45 | 36.333333 | 0.923858 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
d9873f47c3509a26a8447cacfd39173d1b1b1d9e | 37 | py | Python | py3plex/algorithms/statistics/__init__.py | awesome-archive/Py3plex | a099acb992441c1630208ba13694acb8e2a38895 | [
"BSD-3-Clause"
] | 1 | 2020-02-20T07:37:02.000Z | 2020-02-20T07:37:02.000Z | py3plex/algorithms/statistics/__init__.py | awesome-archive/Py3plex | a099acb992441c1630208ba13694acb8e2a38895 | [
"BSD-3-Clause"
] | null | null | null | py3plex/algorithms/statistics/__init__.py | awesome-archive/Py3plex | a099acb992441c1630208ba13694acb8e2a38895 | [
"BSD-3-Clause"
] | null | null | null | print ("Core statistics imported..")
| 18.5 | 36 | 0.72973 | 4 | 37 | 6.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 37 | 1 | 37 | 37 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0.702703 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 8 |
d98a7cf74947bdb21e894a3795e94a5fa2db8119 | 18,378 | py | Python | tests/unit/operators/airflow/test_table_ingestion_sensor.py | badal-io/gcp-airflow-foundations | 7b8cb2bb487c85a382b7381e4ff9824018e4cc1e | [
"Apache-2.0"
] | 3 | 2021-11-11T23:16:19.000Z | 2022-03-23T21:53:50.000Z | tests/unit/operators/airflow/test_table_ingestion_sensor.py | badal-io/gcp-airflow-foundations | 7b8cb2bb487c85a382b7381e4ff9824018e4cc1e | [
"Apache-2.0"
] | 9 | 2021-10-18T20:32:58.000Z | 2022-03-30T22:08:59.000Z | tests/unit/operators/airflow/test_table_ingestion_sensor.py | badal-io/gcp-airflow-foundations | 7b8cb2bb487c85a382b7381e4ff9824018e4cc1e | [
"Apache-2.0"
] | 1 | 2022-03-23T21:53:53.000Z | 2022-03-23T21:53:53.000Z | import logging
import unittest
import pytest
from airflow.exceptions import AirflowException
from airflow.models import DagBag, DagRun, DagTag, TaskInstance, DagModel
from airflow.models.dag import DAG
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from airflow.utils.session import create_session
from gcp_airflow_foundations.operators.airflow.external_task import TableIngestionSensor
from tests.unit.conftest import execute_task
# Fixed logical date used for every DagRun these tests create.
DEFAULT_DATE = datetime(2015, 1, 1)
# Id of the throwaway parent DAG the sensor task under test is attached to.
TEST_DAG_ID = "unit_test_dag"
# Dag folder handed to DagBag in setUp (no real DAG files live there).
DEV_NULL = "/dev/null"
# Default source name / table-id regex pair used by most tests below.
SOURCE = "TestSource1"
REGEX = r".*Table1$"
# Sensor input shape: source name -> list of table-name regexes to wait on.
# NOTE(review): some tests below mutate this module-level dict in place.
EXTERNAL_SOURCE_TABLES = {SOURCE: [REGEX]}
def clear_db_dags():
    """Wipe DAG-related metadata rows so each test starts from a clean DB.

    Deletes in the same order as the original per-model statements:
    DagTag, DagModel, DagRun, TaskInstance.
    """
    with create_session() as session:
        for model in (DagTag, DagModel, DagRun, TaskInstance):
            session.query(model).delete()
class TestTableIngestionSensor(unittest.TestCase):
def setUp(self):
    """Build the fixtures shared by every test: default args, a DagBag, and
    the parent DAG that hosts the sensor task."""
    self.args = {"owner": "airflow", "start_date": DEFAULT_DATE}
    self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=True)
    self.dag = DAG(TEST_DAG_ID, default_args=self.args, schedule_interval="@once")
def test_table_ingestion_sensor(self):
    """Sensor succeeds when the single ingestion DAG matching
    EXTERNAL_SOURCE_TABLES has a successful run."""
    clear_db_dags()

    # Register one ingestion DAG whose id matches the default source/regex
    # pair, and record a successful run for it.
    upstream = DAG(
        "TestSource1.TestTable1",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    DAG.bulk_write_to_db([upstream])
    upstream.create_dagrun(
        run_id="test",
        start_date=DEFAULT_DATE,
        execution_date=DEFAULT_DATE,
        state=State.SUCCESS,
    )

    sensor = TableIngestionSensor(
        task_id="test_external_dag_sensor_check",
        external_source_tables=EXTERNAL_SOURCE_TABLES,
        allowed_states=["success"],
        failed_states=["failed"],
        dag=self.dag,
    )
    sensor.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
    execute_task(task=sensor, execution_date=DEFAULT_DATE)
def test_table_ingestion_sensor_multiple_tables(self):
    """Sensor finds one matching ingestion DAG per source when two sources
    each declare a single table regex.

    Fix: the original mutated the module-level ``EXTERNAL_SOURCE_TABLES``
    dict in place (adding a ``TestSource2`` key), leaking state into every
    test that ran afterwards.  A per-test mapping is used instead.
    """
    clear_db_dags()
    external_source_tables = {SOURCE: [REGEX], "TestSource2": [r".*Table2$"]}
    ingestion_dag = DAG(
        "TestSource1.TestTable1",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    other_ingestion_dag = DAG(
        "TestSource2.TestTable2",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    DAG.bulk_write_to_db([ingestion_dag, other_ingestion_dag])
    # Record a successful run for each ingestion DAG.
    for dag in (ingestion_dag, other_ingestion_dag):
        dag.create_dagrun(
            run_id="test",
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS,
        )
    op = TableIngestionSensor(
        task_id="test_external_dag_sensor_check",
        external_source_tables=external_source_tables,
        allowed_states=["success"],
        failed_states=["failed"],
        dag=self.dag,
    )
    with self.assertLogs(op.log, level=logging.INFO) as cm:
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        execute_task(task=op, execution_date=DEFAULT_DATE)
    assert (
        "INFO:airflow.task.operators:1 dependent DAGs found for source TestSource1: ['TestSource1.TestTable1']."
        in cm.output
    )
    assert (
        "INFO:airflow.task.operators:1 dependent DAGs found for source TestSource2: ['TestSource2.TestTable2']."
        in cm.output
    )
def test_table_ingestion_sensor_multiple_tables_overlapping(self):
    """Two sources whose regexes overlap still resolve to one DAG per source
    (matching is scoped per source).

    Fix: the original mutated the module-level ``EXTERNAL_SOURCE_TABLES``
    dict in place, leaking a ``TestSource2`` entry into later tests.  A
    per-test mapping is used instead.
    """
    clear_db_dags()
    external_source_tables = {SOURCE: [REGEX], "TestSource2": [r"^Test.*"]}
    ingestion_dag = DAG(
        "TestSource1.TestTable1",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    other_ingestion_dag = DAG(
        "TestSource2.TestTable2",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    DAG.bulk_write_to_db([ingestion_dag, other_ingestion_dag])
    # Record a successful run for each ingestion DAG.
    for dag in (ingestion_dag, other_ingestion_dag):
        dag.create_dagrun(
            run_id="test",
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS,
        )
    op = TableIngestionSensor(
        task_id="test_external_dag_sensor_check",
        external_source_tables=external_source_tables,
        allowed_states=["success"],
        failed_states=["failed"],
        dag=self.dag,
    )
    with self.assertLogs(op.log, level=logging.INFO) as cm:
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        execute_task(task=op, execution_date=DEFAULT_DATE)
    assert (
        "INFO:airflow.task.operators:1 dependent DAGs found for source TestSource1: ['TestSource1.TestTable1']."
        in cm.output
    )
    assert (
        "INFO:airflow.task.operators:1 dependent DAGs found for source TestSource2: ['TestSource2.TestTable2']."
        in cm.output
    )
def test_table_ingestion_sensor_multiple_dags(self):
    """A single catch-all regex matches both ingestion DAGs of one source.

    Fix: the original assertion was
    ``assert ("...A..." or "...B..." in cm.output)``, which Python parses as
    ``"...A..." or ("...B..." in cm.output)`` — a non-empty string, so the
    assert always passed.  Each candidate log line is now tested against
    ``cm.output`` explicitly.
    """
    clear_db_dags()
    external_source_tables = {SOURCE: [r".*"]}
    ingestion_dag = DAG(
        "TestSource1.TestTable1",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    other_ingestion_dag = DAG(
        "TestSource1.TestTable2",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    DAG.bulk_write_to_db([ingestion_dag, other_ingestion_dag])
    # Record a successful run for each ingestion DAG.
    for dag in (ingestion_dag, other_ingestion_dag):
        dag.create_dagrun(
            run_id="test",
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS,
        )
    op = TableIngestionSensor(
        task_id="test_external_dag_sensor_check",
        external_source_tables=external_source_tables,
        allowed_states=["success"],
        failed_states=["failed"],
        dag=self.dag,
    )
    with self.assertLogs(op.log, level=logging.INFO) as cm:
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        execute_task(task=op, execution_date=DEFAULT_DATE)
    # The discovered DAG ids may be reported in either order.
    assert (
        "INFO:airflow.task.operators:2 dependent DAGs found for source TestSource1: ['TestSource1.TestTable1', 'TestSource1.TestTable2']."
        in cm.output
        or "INFO:airflow.task.operators:2 dependent DAGs found for source TestSource1: ['TestSource1.TestTable2', 'TestSource1.TestTable1']."
        in cm.output
    )
def test_table_ingestion_sensor_multiple_regex(self):
    """Two regexes for one source each match a distinct ingestion DAG.

    Fix: the original assertion was
    ``assert ("...A..." or "...B..." in cm.output)``, which Python parses as
    ``"...A..." or ("...B..." in cm.output)`` — a non-empty string, so the
    assert always passed.  Each candidate log line is now tested against
    ``cm.output`` explicitly.
    """
    clear_db_dags()
    external_source_tables = {SOURCE: [r"TestTable1", r"TestTable2"]}
    ingestion_dag = DAG(
        "TestSource1.TestTable1",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    other_ingestion_dag = DAG(
        "TestSource1.TestTable2",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    DAG.bulk_write_to_db([ingestion_dag, other_ingestion_dag])
    # Record a successful run for each ingestion DAG.
    for dag in (ingestion_dag, other_ingestion_dag):
        dag.create_dagrun(
            run_id="test",
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS,
        )
    op = TableIngestionSensor(
        task_id="test_external_dag_sensor_check",
        external_source_tables=external_source_tables,
        allowed_states=["success"],
        failed_states=["failed"],
        dag=self.dag,
    )
    with self.assertLogs(op.log, level=logging.INFO) as cm:
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        execute_task(task=op, execution_date=DEFAULT_DATE)
    # The discovered DAG ids may be reported in either order.
    assert (
        "INFO:airflow.task.operators:2 dependent DAGs found for source TestSource1: ['TestSource1.TestTable1', 'TestSource1.TestTable2']."
        in cm.output
        or "INFO:airflow.task.operators:2 dependent DAGs found for source TestSource1: ['TestSource1.TestTable2', 'TestSource1.TestTable1']."
        in cm.output
    )
def test_table_ingestion_sensor_exclusion_regex(self):
    """A regex whose trailing character class rejects 'TestTable2' matches
    only the TestTable1 ingestion DAG."""
    clear_db_dags()
    source_tables = {SOURCE: [r"(.*)([^Table2]$)"]}

    matched_dag = DAG(
        "TestSource1.TestTable1",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    excluded_dag = DAG(
        "TestSource1.TestTable2",
        default_args=self.args,
        end_date=DEFAULT_DATE,
        schedule_interval="@once",
    )
    DAG.bulk_write_to_db([matched_dag, excluded_dag])
    # Both DAGs get a successful run; only one should be matched by the regex.
    for dag in (matched_dag, excluded_dag):
        dag.create_dagrun(
            run_id="test",
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS,
        )

    sensor = TableIngestionSensor(
        task_id="test_external_dag_sensor_check",
        external_source_tables=source_tables,
        allowed_states=["success"],
        failed_states=["failed"],
        dag=self.dag,
    )
    with self.assertLogs(sensor.log, level=logging.INFO) as cm:
        sensor.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        execute_task(task=sensor, execution_date=DEFAULT_DATE)
    assert (
        "INFO:airflow.task.operators:1 dependent DAGs found for source TestSource1: ['TestSource1.TestTable1']."
        in cm.output
    )
def test_table_ingestion_sensor_multiple_regex_overlapping(self):
clear_db_dags()
EXTERNAL_SOURCE_TABLES = {SOURCE: [r"TestTable1", r".*"]}
ingestion_dag = DAG(
"TestSource1.TestTable1",
default_args=self.args,
end_date=DEFAULT_DATE,
schedule_interval="@once",
)
DAG.bulk_write_to_db([ingestion_dag])
ingestion_dag.create_dagrun(
run_id="test",
start_date=DEFAULT_DATE,
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
)
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with self.assertLogs(op.log, level=logging.INFO) as cm:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert (
"INFO:airflow.task.operators:1 dependent DAGs found for source TestSource1: ['TestSource1.TestTable1']."
in cm.output
)
def test_catch_invalid_regex_error(self):
clear_db_dags()
regex = r"*"
EXTERNAL_SOURCE_TABLES = {"TestSource1": [regex]}
ingestion_dag = DAG(
"TestSource1.TestTable1",
default_args=self.args,
end_date=DEFAULT_DATE,
schedule_interval="@once",
)
DAG.bulk_write_to_db([ingestion_dag])
ingestion_dag.create_dagrun(
run_id="test",
start_date=DEFAULT_DATE,
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
)
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with pytest.raises(AirflowException) as ctx:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert str(ctx.value) == f"The regex expression '{regex}' is invalid."
def test_catch_no_dags_error(self):
clear_db_dags()
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with pytest.raises(AirflowException) as ctx:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert str(ctx.value) == "No active dags found."
def test_catch_no_dags_for_source_error(self):
clear_db_dags()
ingestion_dag = DAG(
"TestSource2.TestTable2",
default_args=self.args,
end_date=DEFAULT_DATE,
schedule_interval="@once",
)
DAG.bulk_write_to_db([ingestion_dag])
ingestion_dag.create_dagrun(
run_id="test",
start_date=DEFAULT_DATE,
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
)
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with pytest.raises(AirflowException) as ctx:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert str(ctx.value) == f"No active dags found for source {SOURCE}."
def test_catch_no_matching_dags_error(self):
clear_db_dags()
ingestion_dag = DAG(
"TestSource1.TestTable2",
default_args=self.args,
end_date=DEFAULT_DATE,
schedule_interval="@once",
)
DAG.bulk_write_to_db([ingestion_dag])
ingestion_dag.create_dagrun(
run_id="test",
start_date=DEFAULT_DATE,
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
)
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with pytest.raises(AirflowException) as ctx:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert (
str(ctx.value)
== f'No active dags found for source {SOURCE} using regex: "{REGEX}".'
)
def test_catch_incompatible_schedules_error(self):
clear_db_dags()
schedule_interval = "@daily"
ingestion_dag = DAG(
"TestSource1.TestTable1",
default_args=self.args,
end_date=DEFAULT_DATE,
schedule_interval=schedule_interval,
)
DAG.bulk_write_to_db([ingestion_dag])
ingestion_dag.create_dagrun(
run_id="test",
start_date=DEFAULT_DATE,
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
)
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with pytest.raises(AirflowException) as ctx:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert (
str(ctx.value)
== "Incompatible schedule intervals with that of the main DAG: @once."
)
def test_catch_delimiter_error(self):
clear_db_dags()
ingestion_dag = DAG(
"TestSource1_TestTable1",
default_args=self.args,
end_date=DEFAULT_DATE,
schedule_interval="@once",
)
DAG.bulk_write_to_db([ingestion_dag])
ingestion_dag.create_dagrun(
run_id="test",
start_date=DEFAULT_DATE,
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
)
op = TableIngestionSensor(
task_id="test_external_dag_sensor_check",
external_source_tables=EXTERNAL_SOURCE_TABLES,
allowed_states=["success"],
failed_states=["failed"],
dag=self.dag,
)
with pytest.raises(AirflowException) as ctx:
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
execute_task(task=op, execution_date=DEFAULT_DATE)
assert (
str(ctx.value)
== "Unable to determine table ingestion DAGs. Make sure the period delimiter is used correctly."
)
| 33.353902 | 149 | 0.609315 | 1,970 | 18,378 | 5.348731 | 0.069036 | 0.096043 | 0.129544 | 0.05884 | 0.879377 | 0.869982 | 0.867325 | 0.862864 | 0.858878 | 0.843789 | 0 | 0.007625 | 0.300686 | 18,378 | 550 | 150 | 33.414545 | 0.812247 | 0 | 0 | 0.746171 | 0 | 0.021882 | 0.147132 | 0.077484 | 0 | 0 | 0 | 0 | 0.043764 | 1 | 0.032823 | false | 0 | 0.02407 | 0 | 0.059081 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
79717c71036b5b730cce8548bc27f6fef7222c21 | 111 | py | Python | Matting/utils/__init__.py | Amanda-Barbara/PaddleSeg | a7de36a5fae96011f5b188987670274101b8ede1 | [
"Apache-2.0"
] | 2 | 2021-11-26T09:02:58.000Z | 2021-12-10T08:35:37.000Z | Matting/utils/__init__.py | Amanda-Barbara/PaddleSeg | a7de36a5fae96011f5b188987670274101b8ede1 | [
"Apache-2.0"
] | null | null | null | Matting/utils/__init__.py | Amanda-Barbara/PaddleSeg | a7de36a5fae96011f5b188987670274101b8ede1 | [
"Apache-2.0"
] | null | null | null | from .estimate_foreground_ml import estimate_foreground_ml
from .utils import get_files, get_image_list, mkdir
| 37 | 58 | 0.873874 | 17 | 111 | 5.294118 | 0.647059 | 0.4 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09009 | 111 | 2 | 59 | 55.5 | 0.891089 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
798ddcbcb1866ccb0c8a6cd67fa3124d908e69ac | 122,935 | py | Python | thrift/compiler/test/fixtures/patch/gen-py/patch/ttypes.py | donsbot/fbthrift | 11e343118082583eb4326d51ff19c343c61ed3cb | [
"Apache-2.0"
] | null | null | null | thrift/compiler/test/fixtures/patch/gen-py/patch/ttypes.py | donsbot/fbthrift | 11e343118082583eb4326d51ff19c343c61ed3cb | [
"Apache-2.0"
] | null | null | null | thrift/compiler/test/fixtures/patch/gen-py/patch/ttypes.py | donsbot/fbthrift | 11e343118082583eb4326d51ff19c343c61ed3cb | [
"Apache-2.0"
] | null | null | null | #
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
# @generated
#
from __future__ import absolute_import
import sys
from thrift.util.Recursive import fix_spec
from thrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef
from thrift.protocol.TProtocol import TProtocolException
from json import loads
import sys
if sys.version_info[0] >= 3:
long = int
import thrift.annotation.thrift.ttypes
import thrift.annotation.scope.ttypes
import pprint
import warnings
from thrift import Thrift
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
from thrift.protocol import THeaderProtocol
fastproto = None
try:
from thrift.protocol import fastproto
except ImportError:
pass
all_structs = []
UTF8STRINGS = bool(0) or sys.version_info.major >= 3
__all__ = ['UTF8STRINGS', 'GeneratePatch', 'GenerateOptionalPatch', 'BoolPatch', 'BytePatch', 'I16Patch', 'I32Patch', 'I64Patch', 'FloatPatch', 'DoublePatch', 'StringPatch', 'BinaryPatch', 'OptionalBoolPatch', 'OptionalBytePatch', 'OptionalI16Patch', 'OptionalI32Patch', 'OptionalI64Patch', 'OptionalFloatPatch', 'OptionalDoublePatch', 'OptionalStringPatch', 'OptionalBinaryPatch']
class GeneratePatch:
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('GeneratePatch')
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
def __repr__(self):
L = []
padding = ' ' * 4
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class GenerateOptionalPatch:
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('GenerateOptionalPatch')
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
def __repr__(self):
L = []
padding = ' ' * 4
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class BoolPatch:
"""
Attributes:
- assign
- invert
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.BOOL:
self.assign = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
self.invert = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('BoolPatch')
if self.assign != None:
oprot.writeFieldBegin('assign', TType.BOOL, 1)
oprot.writeBool(self.assign)
oprot.writeFieldEnd()
if self.invert != None:
oprot.writeFieldBegin('invert', TType.BOOL, 2)
oprot.writeBool(self.invert)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
if 'assign' in json_obj and json_obj['assign'] is not None:
self.assign = json_obj['assign']
if 'invert' in json_obj and json_obj['invert'] is not None:
self.invert = json_obj['invert']
def __repr__(self):
L = []
padding = ' ' * 4
if self.assign is not None:
value = pprint.pformat(self.assign, indent=0)
value = padding.join(value.splitlines(True))
L.append(' assign=%s' % (value))
if self.invert is not None:
value = pprint.pformat(self.invert, indent=0)
value = padding.join(value.splitlines(True))
L.append(' invert=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class BytePatch:
"""
Attributes:
- assign
- add
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.BYTE:
self.assign = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BYTE:
self.add = iprot.readByte()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('BytePatch')
if self.assign != None:
oprot.writeFieldBegin('assign', TType.BYTE, 1)
oprot.writeByte(self.assign)
oprot.writeFieldEnd()
if self.add != None:
oprot.writeFieldBegin('add', TType.BYTE, 2)
oprot.writeByte(self.add)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
if 'assign' in json_obj and json_obj['assign'] is not None:
self.assign = json_obj['assign']
if self.assign > 0x7f or self.assign < -0x80:
raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
if 'add' in json_obj and json_obj['add'] is not None:
self.add = json_obj['add']
if self.add > 0x7f or self.add < -0x80:
raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
def __repr__(self):
L = []
padding = ' ' * 4
if self.assign is not None:
value = pprint.pformat(self.assign, indent=0)
value = padding.join(value.splitlines(True))
L.append(' assign=%s' % (value))
if self.add is not None:
value = pprint.pformat(self.add, indent=0)
value = padding.join(value.splitlines(True))
L.append(' add=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class I16Patch:
"""
Attributes:
- assign
- add
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I16:
self.assign = iprot.readI16()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I16:
self.add = iprot.readI16()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('I16Patch')
if self.assign != None:
oprot.writeFieldBegin('assign', TType.I16, 1)
oprot.writeI16(self.assign)
oprot.writeFieldEnd()
if self.add != None:
oprot.writeFieldBegin('add', TType.I16, 2)
oprot.writeI16(self.add)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
if 'assign' in json_obj and json_obj['assign'] is not None:
self.assign = json_obj['assign']
if self.assign > 0x7fff or self.assign < -0x8000:
raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
if 'add' in json_obj and json_obj['add'] is not None:
self.add = json_obj['add']
if self.add > 0x7fff or self.add < -0x8000:
raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
def __repr__(self):
L = []
padding = ' ' * 4
if self.assign is not None:
value = pprint.pformat(self.assign, indent=0)
value = padding.join(value.splitlines(True))
L.append(' assign=%s' % (value))
if self.add is not None:
value = pprint.pformat(self.add, indent=0)
value = padding.join(value.splitlines(True))
L.append(' add=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class I32Patch:
"""
Attributes:
- assign
- add
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.assign = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.add = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('I32Patch')
if self.assign != None:
oprot.writeFieldBegin('assign', TType.I32, 1)
oprot.writeI32(self.assign)
oprot.writeFieldEnd()
if self.add != None:
oprot.writeFieldBegin('add', TType.I32, 2)
oprot.writeI32(self.add)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
if 'assign' in json_obj and json_obj['assign'] is not None:
self.assign = json_obj['assign']
if self.assign > 0x7fffffff or self.assign < -0x80000000:
raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
if 'add' in json_obj and json_obj['add'] is not None:
self.add = json_obj['add']
if self.add > 0x7fffffff or self.add < -0x80000000:
raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
def __repr__(self):
L = []
padding = ' ' * 4
if self.assign is not None:
value = pprint.pformat(self.assign, indent=0)
value = padding.join(value.splitlines(True))
L.append(' assign=%s' % (value))
if self.add is not None:
value = pprint.pformat(self.add, indent=0)
value = padding.join(value.splitlines(True))
L.append(' add=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class I64Patch:
    """Thrift patch struct for an i64 value.

    Attributes:
     - assign: replacement value; when set, overrides the target outright
     - add: delta added to the target value
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.assign = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.add = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('I64Patch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.assign is not None:
            oprot.writeFieldBegin('assign', TType.I64, 1)
            oprot.writeI64(self.assign)
            oprot.writeFieldEnd()
        if self.add is not None:
            oprot.writeFieldBegin('add', TType.I64, 2)
            oprot.writeI64(self.add)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'assign' in json_obj and json_obj['assign'] is not None:
            # int() is value-equivalent on Py2 (auto-promotes to long) and
            # works on Py3, where the builtin `long` does not exist.
            self.assign = int(json_obj['assign'])
        if 'add' in json_obj and json_obj['add'] is not None:
            self.add = int(json_obj['add'])

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.assign is not None:
            value = pprint.pformat(self.assign, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' assign=%s' % (value))
        if self.add is not None:
            value = pprint.pformat(self.add, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' add=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class FloatPatch:
    """Thrift patch struct for a float value.

    Attributes:
     - assign: replacement value; when set, overrides the target outright
     - add: delta added to the target value
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.FLOAT:
                    self.assign = iprot.readFloat()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.FLOAT:
                    self.add = iprot.readFloat()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('FloatPatch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.assign is not None:
            oprot.writeFieldBegin('assign', TType.FLOAT, 1)
            oprot.writeFloat(self.assign)
            oprot.writeFieldEnd()
        if self.add is not None:
            oprot.writeFieldBegin('add', TType.FLOAT, 2)
            oprot.writeFloat(self.add)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'assign' in json_obj and json_obj['assign'] is not None:
            self.assign = float(json_obj['assign'])
        if 'add' in json_obj and json_obj['add'] is not None:
            self.add = float(json_obj['add'])

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.assign is not None:
            value = pprint.pformat(self.assign, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' assign=%s' % (value))
        if self.add is not None:
            value = pprint.pformat(self.add, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' add=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class DoublePatch:
    """Thrift patch struct for a double value.

    Attributes:
     - assign: replacement value; when set, overrides the target outright
     - add: delta added to the target value
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.DOUBLE:
                    self.assign = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.DOUBLE:
                    self.add = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('DoublePatch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.assign is not None:
            oprot.writeFieldBegin('assign', TType.DOUBLE, 1)
            oprot.writeDouble(self.assign)
            oprot.writeFieldEnd()
        if self.add is not None:
            oprot.writeFieldBegin('add', TType.DOUBLE, 2)
            oprot.writeDouble(self.add)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'assign' in json_obj and json_obj['assign'] is not None:
            self.assign = float(json_obj['assign'])
        if 'add' in json_obj and json_obj['add'] is not None:
            self.add = float(json_obj['add'])

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.assign is not None:
            value = pprint.pformat(self.assign, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' assign=%s' % (value))
        if self.add is not None:
            value = pprint.pformat(self.add, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' add=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class StringPatch:
    """Thrift patch struct for a string value.

    Attributes:
     - assign: replacement value; when set, overrides the target outright
     - clear: if true, resets the target to empty before other ops
     - append: text appended to the target value
     - prepend: text prepended to the target value
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.assign = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.append = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.prepend = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('StringPatch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.assign is not None:
            oprot.writeFieldBegin('assign', TType.STRING, 1)
            oprot.writeString(self.assign.encode('utf-8')) if UTF8STRINGS and not isinstance(self.assign, bytes) else oprot.writeString(self.assign)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.append is not None:
            oprot.writeFieldBegin('append', TType.STRING, 4)
            oprot.writeString(self.append.encode('utf-8')) if UTF8STRINGS and not isinstance(self.append, bytes) else oprot.writeString(self.append)
            oprot.writeFieldEnd()
        if self.prepend is not None:
            oprot.writeFieldBegin('prepend', TType.STRING, 5)
            oprot.writeString(self.prepend.encode('utf-8')) if UTF8STRINGS and not isinstance(self.prepend, bytes) else oprot.writeString(self.prepend)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'assign' in json_obj and json_obj['assign'] is not None:
            self.assign = json_obj['assign']
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'append' in json_obj and json_obj['append'] is not None:
            self.append = json_obj['append']
        if 'prepend' in json_obj and json_obj['prepend'] is not None:
            self.prepend = json_obj['prepend']

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.assign is not None:
            value = pprint.pformat(self.assign, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' assign=%s' % (value))
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.append is not None:
            value = pprint.pformat(self.append, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' append=%s' % (value))
        if self.prepend is not None:
            value = pprint.pformat(self.prepend, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' prepend=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class BinaryPatch:
    """Thrift patch struct for a binary value.

    Attributes:
     - assign: replacement value; when set, overrides the target outright
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Binary field: raw bytes, no utf-8 decode.
                    self.assign = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('BinaryPatch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.assign is not None:
            oprot.writeFieldBegin('assign', TType.STRING, 1)
            oprot.writeString(self.assign)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'assign' in json_obj and json_obj['assign'] is not None:
            self.assign = json_obj['assign']

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.assign is not None:
            value = pprint.pformat(self.assign, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' assign=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalBoolPatch:
    """Thrift patch struct for an optional bool value.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Note: dispatch order (2, 3, 1, 4) mirrors the generated code;
            # field ids, not order, determine the wire mapping.
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = BoolPatch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.BOOL:
                    self.ensure = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = BoolPatch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalBoolPatch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.BOOL, 1)
            oprot.writeBool(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Nested `patch`/`patchAfter` objects are deserialized recursively.
        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = BoolPatch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            self.ensure = json_obj['ensure']
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = BoolPatch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalBytePatch:
    """Thrift patch struct for an optional byte value.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the module's post-class fixup code (fastproto metadata).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    @staticmethod
    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize from iprot; prefers the fastproto C decoder when usable."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Note: dispatch order (2, 3, 1, 4) mirrors the generated code;
            # field ids, not order, determine the wire mapping.
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = BytePatch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.BYTE:
                    self.ensure = iprot.readByte()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = BytePatch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to oprot; prefers the fastproto C encoder when usable."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalBytePatch')
        # Idiom fix: compare to None with `is not None`, not `!=`.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.BYTE, 1)
            oprot.writeByte(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from a JSON string (is_text=True) or parsed object.

        Range-checks `ensure` against signed 8-bit bounds; nested
        `patch`/`patchAfter` objects are deserialized recursively.
        Raises ValueError on unrecognized keyword arguments.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = BytePatch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            self.ensure = json_obj['ensure']
            if self.ensure > 0x7f or self.ensure < -0x80:
                raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = BytePatch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing only the fields that are set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalI16Patch:
    """
    Patch for an optional i16 field.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the generated module's fix-up code after all classes exist;
    # None means "not yet initialized" and disables the fastproto fast path.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # Replaced with a real generated constructor during module initialization.
    __init__ = None

    @staticmethod
    def isUnion():
        """This generated type is a struct, not a union."""
        return False

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C-accelerated
        fastproto decoder when the protocol and transport support it."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        # Slow path: generic field-by-field decode; unknown/mistyped fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = I16Patch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.I16:
                    self.ensure = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = I16Patch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C-accelerated
        fastproto encoder when the protocol supports it."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalI16Patch')
        # PEP 8: use identity ('is not None'), not equality, when testing for None.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.I16, 1)
            oprot.writeI16(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate this struct from a JSON string (is_text=True) or an
        already-parsed dict (is_text=False).

        Raises ValueError on unexpected keyword arguments and
        TProtocolException when 'ensure' exceeds the i16 range.
        Note: the parameter name 'json' shadows the stdlib module; kept
        for backward compatibility with generated callers.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = I16Patch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            self.ensure = json_obj['ensure']
            # Enforce the signed 16-bit range of the thrift i16 type.
            if self.ensure > 0x7fff or self.ensure < -0x8000:
                raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = I16Patch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing each field that is currently set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        """Structural equality on class and attribute dict."""
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalI32Patch:
    """
    Patch for an optional i32 field.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the generated module's fix-up code after all classes exist;
    # None means "not yet initialized" and disables the fastproto fast path.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # Replaced with a real generated constructor during module initialization.
    __init__ = None

    @staticmethod
    def isUnion():
        """This generated type is a struct, not a union."""
        return False

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C-accelerated
        fastproto decoder when the protocol and transport support it."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        # Slow path: generic field-by-field decode; unknown/mistyped fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = I32Patch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.I32:
                    self.ensure = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = I32Patch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C-accelerated
        fastproto encoder when the protocol supports it."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalI32Patch')
        # PEP 8: use identity ('is not None'), not equality, when testing for None.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.I32, 1)
            oprot.writeI32(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate this struct from a JSON string (is_text=True) or an
        already-parsed dict (is_text=False).

        Raises ValueError on unexpected keyword arguments and
        TProtocolException when 'ensure' exceeds the i32 range.
        Note: the parameter name 'json' shadows the stdlib module; kept
        for backward compatibility with generated callers.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = I32Patch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            self.ensure = json_obj['ensure']
            # Enforce the signed 32-bit range of the thrift i32 type.
            if self.ensure > 0x7fffffff or self.ensure < -0x80000000:
                raise TProtocolException(TProtocolException.INVALID_DATA, 'number exceeds limit in field')
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = I32Patch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing each field that is currently set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        """Structural equality on class and attribute dict."""
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalI64Patch:
    """
    Patch for an optional i64 field.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the generated module's fix-up code after all classes exist;
    # None means "not yet initialized" and disables the fastproto fast path.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # Replaced with a real generated constructor during module initialization.
    __init__ = None

    @staticmethod
    def isUnion():
        """This generated type is a struct, not a union."""
        return False

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C-accelerated
        fastproto decoder when the protocol and transport support it."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        # Slow path: generic field-by-field decode; unknown/mistyped fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = I64Patch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.I64:
                    self.ensure = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = I64Patch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C-accelerated
        fastproto encoder when the protocol supports it."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalI64Patch')
        # PEP 8: use identity ('is not None'), not equality, when testing for None.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.I64, 1)
            oprot.writeI64(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate this struct from a JSON string (is_text=True) or an
        already-parsed dict (is_text=False).

        Raises ValueError on unexpected keyword arguments. No range check
        is needed for i64: Python ints are unbounded and the range check
        is performed at serialization time.
        Note: the parameter name 'json' shadows the stdlib module; kept
        for backward compatibility with generated callers.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = I64Patch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            # int() instead of the Py2-only long(): on Python 2 int()
            # auto-promotes to long, on Python 3 int is arbitrary precision,
            # so behavior is identical without relying on a 'long' shim.
            self.ensure = int(json_obj['ensure'])
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = I64Patch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing each field that is currently set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        """Structural equality on class and attribute dict."""
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalFloatPatch:
    """
    Patch for an optional float field.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the generated module's fix-up code after all classes exist;
    # None means "not yet initialized" and disables the fastproto fast path.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # Replaced with a real generated constructor during module initialization.
    __init__ = None

    @staticmethod
    def isUnion():
        """This generated type is a struct, not a union."""
        return False

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C-accelerated
        fastproto decoder when the protocol and transport support it."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        # Slow path: generic field-by-field decode; unknown/mistyped fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = FloatPatch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.FLOAT:
                    self.ensure = iprot.readFloat()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = FloatPatch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C-accelerated
        fastproto encoder when the protocol supports it."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalFloatPatch')
        # PEP 8: use identity ('is not None'), not equality, when testing for None.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.FLOAT, 1)
            oprot.writeFloat(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate this struct from a JSON string (is_text=True) or an
        already-parsed dict (is_text=False).

        Raises ValueError on unexpected keyword arguments.
        Note: the parameter name 'json' shadows the stdlib module; kept
        for backward compatibility with generated callers.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = FloatPatch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            # Coerce to float so JSON integers are accepted too.
            self.ensure = float(json_obj['ensure'])
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = FloatPatch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing each field that is currently set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        """Structural equality on class and attribute dict."""
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalDoublePatch:
    """
    Patch for an optional double field.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Filled in by the generated module's fix-up code after all classes exist;
    # None means "not yet initialized" and disables the fastproto fast path.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # Replaced with a real generated constructor during module initialization.
    __init__ = None

    @staticmethod
    def isUnion():
        """This generated type is a struct, not a union."""
        return False

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C-accelerated
        fastproto decoder when the protocol and transport support it."""
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        # Slow path: generic field-by-field decode; unknown/mistyped fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = DoublePatch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.DOUBLE:
                    self.ensure = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = DoublePatch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C-accelerated
        fastproto encoder when the protocol supports it."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalDoublePatch')
        # PEP 8: use identity ('is not None'), not equality, when testing for None.
        if self.ensure is not None:
            oprot.writeFieldBegin('ensure', TType.DOUBLE, 1)
            oprot.writeDouble(self.ensure)
            oprot.writeFieldEnd()
        if self.clear is not None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch is not None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter is not None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate this struct from a JSON string (is_text=True) or an
        already-parsed dict (is_text=False).

        Raises ValueError on unexpected keyword arguments.
        Note: the parameter name 'json' shadows the stdlib module; kept
        for backward compatibility with generated callers.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = DoublePatch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            # Coerce to float so JSON integers are accepted too.
            self.ensure = float(json_obj['ensure'])
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = DoublePatch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        """Multi-line repr listing each field that is currently set."""
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        """Structural equality on class and attribute dict."""
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
class OptionalStringPatch:
"""
Attributes:
- clear: Clears any set value. Applies first.
- patch: Patches any set value. Applies second.
- ensure: Initializes any unset value. Applies third.
- patchAfter: Patches any set value, including newly set values. Applies fourth.
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.BOOL:
self.clear = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.patch = StringPatch()
self.patch.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRING:
self.ensure = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.patchAfter = StringPatch()
self.patchAfter.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('OptionalStringPatch')
if self.ensure != None:
oprot.writeFieldBegin('ensure', TType.STRING, 1)
oprot.writeString(self.ensure.encode('utf-8')) if UTF8STRINGS and not isinstance(self.ensure, bytes) else oprot.writeString(self.ensure)
oprot.writeFieldEnd()
if self.clear != None:
oprot.writeFieldBegin('clear', TType.BOOL, 2)
oprot.writeBool(self.clear)
oprot.writeFieldEnd()
if self.patch != None:
oprot.writeFieldBegin('patch', TType.STRUCT, 3)
self.patch.write(oprot)
oprot.writeFieldEnd()
if self.patchAfter != None:
oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
self.patchAfter.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def readFromJson(self, json, is_text=True, **kwargs):
relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
set_cls = kwargs.pop('custom_set_cls', set)
dict_cls = kwargs.pop('custom_dict_cls', dict)
if kwargs:
extra_kwargs = ', '.join(kwargs.keys())
raise ValueError(
'Unexpected keyword arguments: ' + extra_kwargs
)
json_obj = json
if is_text:
json_obj = loads(json)
if 'clear' in json_obj and json_obj['clear'] is not None:
self.clear = json_obj['clear']
if 'patch' in json_obj and json_obj['patch'] is not None:
self.patch = StringPatch()
self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
if 'ensure' in json_obj and json_obj['ensure'] is not None:
self.ensure = json_obj['ensure']
if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
self.patchAfter = StringPatch()
self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
def __repr__(self):
L = []
padding = ' ' * 4
if self.clear is not None:
value = pprint.pformat(self.clear, indent=0)
value = padding.join(value.splitlines(True))
L.append(' clear=%s' % (value))
if self.patch is not None:
value = pprint.pformat(self.patch, indent=0)
value = padding.join(value.splitlines(True))
L.append(' patch=%s' % (value))
if self.ensure is not None:
value = pprint.pformat(self.ensure, indent=0)
value = padding.join(value.splitlines(True))
L.append(' ensure=%s' % (value))
if self.patchAfter is not None:
value = pprint.pformat(self.patchAfter, indent=0)
value = padding.join(value.splitlines(True))
L.append(' patchAfter=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
    """Value equality: same class and an identical field dict."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

def __ne__(self, other):
    """Defined as the inverse of __eq__ so the two stay consistent."""
    return not (self == other)

# Override the __hash__ function for Python3 - t10434117
__hash__ = object.__hash__
class OptionalBinaryPatch:
    """
    Thrift-generated patch struct for an optional binary value.

    Attributes:
     - clear: Clears any set value. Applies first.
     - patch: Patches any set value. Applies second.
     - ensure: Initializes any unset value. Applies third.
     - patchAfter: Patches any set value, including newly set values. Applies fourth.
    """

    # Populated at module bottom: the spec tuple is assigned there and then
    # resolved by fix_spec(all_structs).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # Real __init__ is attached later (OptionalBinaryPatch__init__ at module
    # level), after thrift_spec exists.
    __init__ = None

    @staticmethod
    def isUnion():
        # Plain struct, not a thrift union.
        return False

    def read(self, iprot):
        """Deserialize this struct from iprot, preferring the C fastproto codec."""
        # Accelerated binary-protocol fast path (protoid=0).
        if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            return
        # Accelerated compact-protocol fast path (protoid=2).
        if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            return
        # Slow path: field-by-field decode; unknown ids/types are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.BOOL:
                    self.clear = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.patch = BinaryPatch()
                    self.patch.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRING:
                    self.ensure = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.patchAfter = BinaryPatch()
                    self.patchAfter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot; only non-None fields are emitted."""
        if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('OptionalBinaryPatch')
        if self.ensure != None:
            oprot.writeFieldBegin('ensure', TType.STRING, 1)
            oprot.writeString(self.ensure)
            oprot.writeFieldEnd()
        if self.clear != None:
            oprot.writeFieldBegin('clear', TType.BOOL, 2)
            oprot.writeBool(self.clear)
            oprot.writeFieldEnd()
        if self.patch != None:
            oprot.writeFieldBegin('patch', TType.STRUCT, 3)
            self.patch.write(oprot)
            oprot.writeFieldEnd()
        if self.patchAfter != None:
            oprot.writeFieldBegin('patchAfter', TType.STRUCT, 4)
            self.patchAfter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def readFromJson(self, json, is_text=True, **kwargs):
        """Populate fields from JSON text (is_text=True) or a parsed dict.

        Recognized kwargs: 'relax_enum_validation', 'custom_set_cls',
        'custom_dict_cls'; anything else raises ValueError.
        """
        relax_enum_validation = bool(kwargs.pop('relax_enum_validation', False))
        set_cls = kwargs.pop('custom_set_cls', set)
        dict_cls = kwargs.pop('custom_dict_cls', dict)
        if kwargs:
            extra_kwargs = ', '.join(kwargs.keys())
            raise ValueError(
                'Unexpected keyword arguments: ' + extra_kwargs
            )
        json_obj = json
        if is_text:
            json_obj = loads(json)
        # Struct-valued fields are recursively decoded into BinaryPatch.
        if 'clear' in json_obj and json_obj['clear'] is not None:
            self.clear = json_obj['clear']
        if 'patch' in json_obj and json_obj['patch'] is not None:
            self.patch = BinaryPatch()
            self.patch.readFromJson(json_obj['patch'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)
        if 'ensure' in json_obj and json_obj['ensure'] is not None:
            self.ensure = json_obj['ensure']
        if 'patchAfter' in json_obj and json_obj['patchAfter'] is not None:
            self.patchAfter = BinaryPatch()
            self.patchAfter.readFromJson(json_obj['patchAfter'], is_text=False, relax_enum_validation=relax_enum_validation, custom_set_cls=set_cls, custom_dict_cls=dict_cls)

    def __repr__(self):
        # Verbose repr: one "name=value" entry per set field.
        L = []
        padding = ' ' * 4
        if self.clear is not None:
            value = pprint.pformat(self.clear, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' clear=%s' % (value))
        if self.patch is not None:
            value = pprint.pformat(self.patch, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patch=%s' % (value))
        if self.ensure is not None:
            value = pprint.pformat(self.ensure, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' ensure=%s' % (value))
        if self.patchAfter is not None:
            value = pprint.pformat(self.patchAfter, indent=0)
            value = padding.join(value.splitlines(True))
            L.append(' patchAfter=%s' % (value))
        return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

    def __eq__(self, other):
        # Value equality: same class and identical field dict.
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    # Override the __hash__ function for Python3 - t10434117
    __hash__ = object.__hash__
all_structs.append(GeneratePatch)
GeneratePatch.thrift_spec = (
)
GeneratePatch.thrift_struct_annotations = {
"thrift.uri": "facebook.com/thrift/op/GeneratePatch",
}
GeneratePatch.thrift_field_annotations = {
}
all_structs.append(GenerateOptionalPatch)
GenerateOptionalPatch.thrift_spec = (
)
GenerateOptionalPatch.thrift_struct_annotations = {
"thrift.uri": "facebook.com/thrift/op/GenerateOptionalPatch",
}
GenerateOptionalPatch.thrift_field_annotations = {
}
all_structs.append(BoolPatch)
BoolPatch.thrift_spec = (
None, # 0
(1, TType.BOOL, 'assign', None, None, 1, ), # 1
(2, TType.BOOL, 'invert', None, None, 2, ), # 2
)
BoolPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::BoolPatchAdapter",
"cpp.name": "BoolPatchStruct",
}
BoolPatch.thrift_field_annotations = {
}
def BoolPatch__init__(self, assign=None, invert=None,):
self.assign = assign
self.invert = invert
BoolPatch.__init__ = BoolPatch__init__
def BoolPatch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('invert', None)
self.__dict__ = state
BoolPatch.__getstate__ = lambda self: self.__dict__.copy()
BoolPatch.__setstate__ = BoolPatch__setstate__
all_structs.append(BytePatch)
BytePatch.thrift_spec = (
None, # 0
(1, TType.BYTE, 'assign', None, None, 1, ), # 1
(2, TType.BYTE, 'add', None, None, 2, ), # 2
)
BytePatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::NumberPatchAdapter",
"cpp.name": "BytePatchStruct",
}
BytePatch.thrift_field_annotations = {
}
def BytePatch__init__(self, assign=None, add=None,):
self.assign = assign
self.add = add
BytePatch.__init__ = BytePatch__init__
def BytePatch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('add', None)
self.__dict__ = state
BytePatch.__getstate__ = lambda self: self.__dict__.copy()
BytePatch.__setstate__ = BytePatch__setstate__
all_structs.append(I16Patch)
I16Patch.thrift_spec = (
None, # 0
(1, TType.I16, 'assign', None, None, 1, ), # 1
(2, TType.I16, 'add', None, None, 2, ), # 2
)
I16Patch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::NumberPatchAdapter",
"cpp.name": "I16PatchStruct",
}
I16Patch.thrift_field_annotations = {
}
def I16Patch__init__(self, assign=None, add=None,):
self.assign = assign
self.add = add
I16Patch.__init__ = I16Patch__init__
def I16Patch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('add', None)
self.__dict__ = state
I16Patch.__getstate__ = lambda self: self.__dict__.copy()
I16Patch.__setstate__ = I16Patch__setstate__
all_structs.append(I32Patch)
I32Patch.thrift_spec = (
None, # 0
(1, TType.I32, 'assign', None, None, 1, ), # 1
(2, TType.I32, 'add', None, None, 2, ), # 2
)
I32Patch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::NumberPatchAdapter",
"cpp.name": "I32PatchStruct",
}
I32Patch.thrift_field_annotations = {
}
def I32Patch__init__(self, assign=None, add=None,):
self.assign = assign
self.add = add
I32Patch.__init__ = I32Patch__init__
def I32Patch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('add', None)
self.__dict__ = state
I32Patch.__getstate__ = lambda self: self.__dict__.copy()
I32Patch.__setstate__ = I32Patch__setstate__
all_structs.append(I64Patch)
I64Patch.thrift_spec = (
None, # 0
(1, TType.I64, 'assign', None, None, 1, ), # 1
(2, TType.I64, 'add', None, None, 2, ), # 2
)
I64Patch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::NumberPatchAdapter",
"cpp.name": "I64PatchStruct",
}
I64Patch.thrift_field_annotations = {
}
def I64Patch__init__(self, assign=None, add=None,):
self.assign = assign
self.add = add
I64Patch.__init__ = I64Patch__init__
def I64Patch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('add', None)
self.__dict__ = state
I64Patch.__getstate__ = lambda self: self.__dict__.copy()
I64Patch.__setstate__ = I64Patch__setstate__
all_structs.append(FloatPatch)
FloatPatch.thrift_spec = (
None, # 0
(1, TType.FLOAT, 'assign', None, None, 1, ), # 1
(2, TType.FLOAT, 'add', None, None, 2, ), # 2
)
FloatPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::NumberPatchAdapter",
"cpp.name": "FloatPatchStruct",
}
FloatPatch.thrift_field_annotations = {
}
def FloatPatch__init__(self, assign=None, add=None,):
self.assign = assign
self.add = add
FloatPatch.__init__ = FloatPatch__init__
def FloatPatch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('add', None)
self.__dict__ = state
FloatPatch.__getstate__ = lambda self: self.__dict__.copy()
FloatPatch.__setstate__ = FloatPatch__setstate__
all_structs.append(DoublePatch)
DoublePatch.thrift_spec = (
None, # 0
(1, TType.DOUBLE, 'assign', None, None, 1, ), # 1
(2, TType.DOUBLE, 'add', None, None, 2, ), # 2
)
DoublePatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::NumberPatchAdapter",
"cpp.name": "DoublePatchStruct",
}
DoublePatch.thrift_field_annotations = {
}
def DoublePatch__init__(self, assign=None, add=None,):
self.assign = assign
self.add = add
DoublePatch.__init__ = DoublePatch__init__
def DoublePatch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('add', None)
self.__dict__ = state
DoublePatch.__getstate__ = lambda self: self.__dict__.copy()
DoublePatch.__setstate__ = DoublePatch__setstate__
all_structs.append(StringPatch)
StringPatch.thrift_spec = (
None, # 0
(1, TType.STRING, 'assign', True, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
None, # 3
(4, TType.STRING, 'append', True, None, 2, ), # 4
(5, TType.STRING, 'prepend', True, None, 2, ), # 5
)
StringPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::StringPatchAdapter",
"cpp.name": "StringPatchStruct",
}
StringPatch.thrift_field_annotations = {
}
def StringPatch__init__(self, assign=None, clear=None, append=None, prepend=None,):
self.assign = assign
self.clear = clear
self.append = append
self.prepend = prepend
StringPatch.__init__ = StringPatch__init__
def StringPatch__setstate__(self, state):
state.setdefault('assign', None)
state.setdefault('clear', None)
state.setdefault('append', None)
state.setdefault('prepend', None)
self.__dict__ = state
StringPatch.__getstate__ = lambda self: self.__dict__.copy()
StringPatch.__setstate__ = StringPatch__setstate__
all_structs.append(BinaryPatch)
BinaryPatch.thrift_spec = (
None, # 0
(1, TType.STRING, 'assign', False, None, 1, ), # 1
)
BinaryPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::AssignPatchAdapter",
"cpp.name": "BinaryPatchStruct",
}
BinaryPatch.thrift_field_annotations = {
}
def BinaryPatch__init__(self, assign=None,):
self.assign = assign
BinaryPatch.__init__ = BinaryPatch__init__
def BinaryPatch__setstate__(self, state):
state.setdefault('assign', None)
self.__dict__ = state
BinaryPatch.__getstate__ = lambda self: self.__dict__.copy()
BinaryPatch.__setstate__ = BinaryPatch__setstate__
all_structs.append(OptionalBoolPatch)
OptionalBoolPatch.thrift_spec = (
None, # 0
(1, TType.BOOL, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [BoolPatch, BoolPatch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [BoolPatch, BoolPatch.thrift_spec, False], None, 2, ), # 4
)
OptionalBoolPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalBoolPatchStruct",
}
OptionalBoolPatch.thrift_field_annotations = {
}
def OptionalBoolPatch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalBoolPatch.__init__ = OptionalBoolPatch__init__
def OptionalBoolPatch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalBoolPatch.__getstate__ = lambda self: self.__dict__.copy()
OptionalBoolPatch.__setstate__ = OptionalBoolPatch__setstate__
all_structs.append(OptionalBytePatch)
OptionalBytePatch.thrift_spec = (
None, # 0
(1, TType.BYTE, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [BytePatch, BytePatch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [BytePatch, BytePatch.thrift_spec, False], None, 2, ), # 4
)
OptionalBytePatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalBytePatchStruct",
}
OptionalBytePatch.thrift_field_annotations = {
}
def OptionalBytePatch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalBytePatch.__init__ = OptionalBytePatch__init__
def OptionalBytePatch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalBytePatch.__getstate__ = lambda self: self.__dict__.copy()
OptionalBytePatch.__setstate__ = OptionalBytePatch__setstate__
all_structs.append(OptionalI16Patch)
OptionalI16Patch.thrift_spec = (
None, # 0
(1, TType.I16, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [I16Patch, I16Patch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [I16Patch, I16Patch.thrift_spec, False], None, 2, ), # 4
)
OptionalI16Patch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalI16PatchStruct",
}
OptionalI16Patch.thrift_field_annotations = {
}
def OptionalI16Patch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalI16Patch.__init__ = OptionalI16Patch__init__
def OptionalI16Patch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalI16Patch.__getstate__ = lambda self: self.__dict__.copy()
OptionalI16Patch.__setstate__ = OptionalI16Patch__setstate__
all_structs.append(OptionalI32Patch)
OptionalI32Patch.thrift_spec = (
None, # 0
(1, TType.I32, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [I32Patch, I32Patch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [I32Patch, I32Patch.thrift_spec, False], None, 2, ), # 4
)
OptionalI32Patch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalI32PatchStruct",
}
OptionalI32Patch.thrift_field_annotations = {
}
def OptionalI32Patch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalI32Patch.__init__ = OptionalI32Patch__init__
def OptionalI32Patch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalI32Patch.__getstate__ = lambda self: self.__dict__.copy()
OptionalI32Patch.__setstate__ = OptionalI32Patch__setstate__
all_structs.append(OptionalI64Patch)
OptionalI64Patch.thrift_spec = (
None, # 0
(1, TType.I64, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [I64Patch, I64Patch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [I64Patch, I64Patch.thrift_spec, False], None, 2, ), # 4
)
OptionalI64Patch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalI64PatchStruct",
}
OptionalI64Patch.thrift_field_annotations = {
}
def OptionalI64Patch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalI64Patch.__init__ = OptionalI64Patch__init__
def OptionalI64Patch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalI64Patch.__getstate__ = lambda self: self.__dict__.copy()
OptionalI64Patch.__setstate__ = OptionalI64Patch__setstate__
all_structs.append(OptionalFloatPatch)
OptionalFloatPatch.thrift_spec = (
None, # 0
(1, TType.FLOAT, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [FloatPatch, FloatPatch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [FloatPatch, FloatPatch.thrift_spec, False], None, 2, ), # 4
)
OptionalFloatPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalFloatPatchStruct",
}
OptionalFloatPatch.thrift_field_annotations = {
}
def OptionalFloatPatch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalFloatPatch.__init__ = OptionalFloatPatch__init__
def OptionalFloatPatch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalFloatPatch.__getstate__ = lambda self: self.__dict__.copy()
OptionalFloatPatch.__setstate__ = OptionalFloatPatch__setstate__
all_structs.append(OptionalDoublePatch)
OptionalDoublePatch.thrift_spec = (
None, # 0
(1, TType.DOUBLE, 'ensure', None, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [DoublePatch, DoublePatch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [DoublePatch, DoublePatch.thrift_spec, False], None, 2, ), # 4
)
OptionalDoublePatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalDoublePatchStruct",
}
OptionalDoublePatch.thrift_field_annotations = {
}
def OptionalDoublePatch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalDoublePatch.__init__ = OptionalDoublePatch__init__
def OptionalDoublePatch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalDoublePatch.__getstate__ = lambda self: self.__dict__.copy()
OptionalDoublePatch.__setstate__ = OptionalDoublePatch__setstate__
all_structs.append(OptionalStringPatch)
OptionalStringPatch.thrift_spec = (
None, # 0
(1, TType.STRING, 'ensure', True, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [StringPatch, StringPatch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [StringPatch, StringPatch.thrift_spec, False], None, 2, ), # 4
)
OptionalStringPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalStringPatchStruct",
}
OptionalStringPatch.thrift_field_annotations = {
}
def OptionalStringPatch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalStringPatch.__init__ = OptionalStringPatch__init__
def OptionalStringPatch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalStringPatch.__getstate__ = lambda self: self.__dict__.copy()
OptionalStringPatch.__setstate__ = OptionalStringPatch__setstate__
all_structs.append(OptionalBinaryPatch)
OptionalBinaryPatch.thrift_spec = (
None, # 0
(1, TType.STRING, 'ensure', False, None, 1, ), # 1
(2, TType.BOOL, 'clear', None, None, 2, ), # 2
(3, TType.STRUCT, 'patch', [BinaryPatch, BinaryPatch.thrift_spec, False], None, 2, ), # 3
(4, TType.STRUCT, 'patchAfter', [BinaryPatch, BinaryPatch.thrift_spec, False], None, 2, ), # 4
)
OptionalBinaryPatch.thrift_struct_annotations = {
"cpp.adapter": "::apache::thrift::op::detail::OptionalPatchAdapter",
"cpp.name": "OptionalBinaryPatchStruct",
}
OptionalBinaryPatch.thrift_field_annotations = {
}
def OptionalBinaryPatch__init__(self, clear=None, patch=None, ensure=None, patchAfter=None,):
self.clear = clear
self.patch = patch
self.ensure = ensure
self.patchAfter = patchAfter
OptionalBinaryPatch.__init__ = OptionalBinaryPatch__init__
def OptionalBinaryPatch__setstate__(self, state):
state.setdefault('clear', None)
state.setdefault('patch', None)
state.setdefault('ensure', None)
state.setdefault('patchAfter', None)
self.__dict__ = state
OptionalBinaryPatch.__getstate__ = lambda self: self.__dict__.copy()
OptionalBinaryPatch.__setstate__ = OptionalBinaryPatch__setstate__
fix_spec(all_structs)
del all_structs
| 41.392256 | 381 | 0.702981 | 14,696 | 122,935 | 5.652899 | 0.019189 | 0.01625 | 0.029251 | 0.026964 | 0.909889 | 0.902401 | 0.888546 | 0.873199 | 0.861655 | 0.855071 | 0 | 0.011914 | 0.182047 | 122,935 | 2,969 | 382 | 41.406197 | 0.814251 | 0.031187 | 0 | 0.801988 | 1 | 0 | 0.059911 | 0.013758 | 0 | 0 | 0.001011 | 0 | 0 | 1 | 0.06998 | false | 0.000398 | 0.007157 | 0.015905 | 0.195626 | 0.022266 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8dbc1ee3bf8a4f921157bcb4c760fab49280b05b | 126 | py | Python | Solutions/Training/Lesson_10/__init__.py | dev-11/codility-solutions | 01b0ce4a43b1390fe15f2daabea95e90b834fbfc | [
"MIT"
] | null | null | null | Solutions/Training/Lesson_10/__init__.py | dev-11/codility-solutions | 01b0ce4a43b1390fe15f2daabea95e90b834fbfc | [
"MIT"
] | null | null | null | Solutions/Training/Lesson_10/__init__.py | dev-11/codility-solutions | 01b0ce4a43b1390fe15f2daabea95e90b834fbfc | [
"MIT"
] | null | null | null | from .count_factors import solution as count_factors
from .min_perimeter_rectangle import solution as min_perimeter_rectangle
| 42 | 72 | 0.888889 | 18 | 126 | 5.888889 | 0.5 | 0.226415 | 0.301887 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.095238 | 126 | 2 | 73 | 63 | 0.929825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
309bf83e59932a6b3d668dd66f9d6f26b81d0e9f | 193 | py | Python | oneRing/util/oneTimeCode.py | TylerRudie/narvi | a4d08b00b38cc1c1dad424c138577dd583b7aa19 | [
"MIT"
] | 1 | 2018-02-21T15:59:48.000Z | 2018-02-21T15:59:48.000Z | oneRing/util/oneTimeCode.py | TylerRudie/narvi | a4d08b00b38cc1c1dad424c138577dd583b7aa19 | [
"MIT"
] | null | null | null | oneRing/util/oneTimeCode.py | TylerRudie/narvi | a4d08b00b38cc1c1dad424c138577dd583b7aa19 | [
"MIT"
] | null | null | null | from django.utils.crypto import get_random_string
def genOneTimeCode():
return get_random_string(length=8,
allowed_chars='ABCDEFGHIJKLMNPQRSTUVXYZ01234567890') | 38.6 | 81 | 0.720207 | 19 | 193 | 7.052632 | 0.842105 | 0.134328 | 0.223881 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07947 | 0.217617 | 193 | 5 | 81 | 38.6 | 0.807947 | 0 | 0 | 0 | 0 | 0 | 0.180412 | 0.180412 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.25 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
30eefba387fdf11871db423b19f88ca8ae3a03a1 | 131 | py | Python | vkbottle/tools/__init__.py | ScriptHound/vkbottle | 7d88f56e5e9ec42ee6fe8daf03094d79c9b244ea | [
"MIT"
] | 1 | 2021-02-18T16:52:00.000Z | 2021-02-18T16:52:00.000Z | vkbottle/tools/__init__.py | ScriptHound/vkbottle | 7d88f56e5e9ec42ee6fe8daf03094d79c9b244ea | [
"MIT"
] | null | null | null | vkbottle/tools/__init__.py | ScriptHound/vkbottle | 7d88f56e5e9ec42ee6fe8daf03094d79c9b244ea | [
"MIT"
] | null | null | null | from .dev_tools import *
from .production_tools import *
from .validator import ABCValidator, EqualsValidator, IsInstanceValidator
| 32.75 | 73 | 0.839695 | 14 | 131 | 7.714286 | 0.642857 | 0.203704 | 0.277778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.10687 | 131 | 3 | 74 | 43.666667 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a51b3994a261580c37d63ae89a3424642878bd18 | 198 | py | Python | provider/__init__.py | tombh/deis | f98fd5e43acfa13c0780c25cfd40dd34d4d1bcc0 | [
"Apache-2.0"
] | 1 | 2016-05-28T08:44:13.000Z | 2016-05-28T08:44:13.000Z | provider/__init__.py | tombh/deis | f98fd5e43acfa13c0780c25cfd40dd34d4d1bcc0 | [
"Apache-2.0"
] | null | null | null | provider/__init__.py | tombh/deis | f98fd5e43acfa13c0780c25cfd40dd34d4d1bcc0 | [
"Apache-2.0"
] | null | null | null | import importlib
def import_provider_module(provider_type):
    """Return the module implementing the given provider.

    Dynamically imports and returns ``provider.<provider_type>``.
    """
    return importlib.import_module('provider.' + provider_type)
| 19.8 | 64 | 0.70202 | 23 | 198 | 5.826087 | 0.478261 | 0.208955 | 0.268657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.207071 | 198 | 9 | 65 | 22 | 0.853503 | 0.166667 | 0 | 0 | 0 | 0 | 0.060403 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.75 | 0 | 1.25 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
eb7e98e4440ae15232a49e90ece636cb932a005e | 2,121 | py | Python | tests/test_time_parameters.py | MeisterBob/zeiterfassung | f26a5d7bfcead69a6b3625ae91756d0134556993 | [
"MIT"
] | 1 | 2020-02-11T09:12:56.000Z | 2020-02-11T09:12:56.000Z | tests/test_time_parameters.py | MeisterBob/zeiterfassung | f26a5d7bfcead69a6b3625ae91756d0134556993 | [
"MIT"
] | null | null | null | tests/test_time_parameters.py | MeisterBob/zeiterfassung | f26a5d7bfcead69a6b3625ae91756d0134556993 | [
"MIT"
] | 1 | 2020-08-12T20:48:45.000Z | 2020-08-12T20:48:45.000Z | import sys
from pytest import raises
from zeiterfassung import main
def test_pause():
    """main() honors --pause: break minutes are subtracted from worked time."""
    # 8:00-18:00 with no pause against an 8h target: 10h worked, +2h saldo.
    sys.argv[1:] = [
        "--db_path", "/tmp/",
        "--date", "2018-07-18",
        "--user", "test_pause",
        "--work_time", "8:00",
        "--start", "8:00",
        "--end", "18:00",
        "--export", "",
        "--pause", "0"]
    db = main(db={})
    # db looks to be keyed [year][month][ISO week][day] (2018-07-18 falls in
    # ISO week 29) -- confirm against zeiterfassung.main.
    day = db[2018][7][29][18]
    assert day["start"] == "8:00"
    assert day["end"] == "18:00"
    assert day["pause"] == 0
    # "Arbeitszeit" = time worked, "Tagessaldo" = daily balance vs work_time.
    assert day["Arbeitszeit"] == "10:00"
    assert day["Tagessaldo"] == "2:00"

    # Same day re-run with a 30 min pause: 9:30 worked, +1:30 saldo.
    # (argv[-1] is the value following "--pause".)
    sys.argv[-1] = "30"
    db = main(db={})
    day = db[2018][7][29][18]
    assert day["start"] == "8:00"
    assert day["end"] == "18:00"
    assert day["pause"] == 30
    assert day["Arbeitszeit"] == "9:30"
    assert day["Tagessaldo"] == "1:30"
def test_work_time():
    """main() combines --pause and --work_time into Arbeitszeit/Tagessaldo."""
    # Fixed 8:00-18:00 day; only --pause / --work_time vary below.
    sys.argv[1:] = [
        "--db_path", "/tmp/",
        "--date", "2018-07-18",
        "--user", "test_work_time",
        "--start", "8:00",
        "--end", "18:00",
        "--export", "",
        "--pause", "0",
        "--work_time", "8:00"
    ]
    # pause at 0 min and working time at 8 h: 10h worked, +2h saldo.
    db = main(db={})
    # db appears keyed [year][month][ISO week][day] -- confirm in main().
    day = db[2018][7][29][18]
    assert day["pause"] == 0
    assert day["Arbeitszeit"] == "10:00"
    assert day["Tagessaldo"] == "2:00"

    # pause 30 min, working time 8 h: 9:30 worked, +1:30 saldo.
    # (argv[-4:] replaces the trailing "--pause X --work_time Y" pair.)
    sys.argv[-4:] = ["--pause", "30", "--work_time", "8:00"]
    db = main(db={})
    day = db[2018][7][29][18]
    assert day["pause"] == 30
    assert day["Arbeitszeit"] == "9:30"
    assert day["Tagessaldo"] == "1:30"

    # pause 0 min, working time 7:30 h: 10h worked, +2:30 saldo.
    sys.argv[-4:] = ["--pause", "0", "--work_time", "7:30"]
    db = main(db={})
    day = db[2018][7][29][18]
    assert day["pause"] == 0
    assert day["Arbeitszeit"] == "10:00"
    assert day["Tagessaldo"] == "2:30"

    # pause 30 min, working time 7:30 h: 9:30 worked, +2:00 saldo.
    sys.argv[-4:] = ["--pause", "30", "--work_time", "7:30"]
    db = main(db={})
    day = db[2018][7][29][18]
    assert day["pause"] == 30
    assert day["Arbeitszeit"] == "9:30"
    assert day["Tagessaldo"] == "2:00"
| 27.545455 | 60 | 0.491749 | 304 | 2,121 | 3.388158 | 0.154605 | 0.192233 | 0.074757 | 0.064078 | 0.823301 | 0.787379 | 0.787379 | 0.750485 | 0.750485 | 0.637864 | 0 | 0.121951 | 0.265441 | 2,121 | 76 | 61 | 27.907895 | 0.539153 | 0.082508 | 0 | 0.725806 | 0 | 0 | 0.264812 | 0 | 0 | 0 | 0 | 0 | 0.354839 | 1 | 0.032258 | false | 0 | 0.048387 | 0 | 0.080645 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
eb83be9ad0f56972a094053de5c7a810d9742298 | 17,911 | py | Python | xmeos/test/old_test_RTperturb.py | aswolf/xmeos | 0bb8adb7afdbe9b479e0620093b03579ad19d318 | [
"MIT"
] | 1 | 2020-08-19T17:09:11.000Z | 2020-08-19T17:09:11.000Z | xmeos/test/old_test_RTperturb.py | aswolf/xmeos | 0bb8adb7afdbe9b479e0620093b03579ad19d318 | [
"MIT"
] | 1 | 2017-08-04T21:46:04.000Z | 2017-08-04T21:46:04.000Z | xmeos/test/old_test_RTperturb.py | aswolf/xmeos | 0bb8adb7afdbe9b479e0620093b03579ad19d318 | [
"MIT"
] | 1 | 2018-11-20T20:48:10.000Z | 2018-11-20T20:48:10.000Z | import numpy as np
from models import compress
from models import thermal
from models import composite
from models import core
import pytest
import matplotlib.pyplot as plt
import matplotlib as mpl
from abc import ABCMeta, abstractmethod
import copy
#====================================================================
# Define "slow" tests
# - indicated by @slow decorator
# - slow tests are run only if using --runslow cmd line arg
#====================================================================
# NOTE(review): `pytest.config` was a global removed in pytest 5.0; this
# marker style presumably targets an older pytest -- confirm before upgrading
# (newer pytest uses a conftest.py pytest_addoption/pytest_collection_modifyitems hook).
slow = pytest.mark.skipif(
    not pytest.config.getoption("--runslow"),
    reason="need --runslow option to run"
)
#====================================================================
class BaseTestThermalPathMod(object):
    """Abstract base for tests of a ThermalPathMod equation-of-state model.

    Subclasses supply load_thermal_path_mod() and init_params() for a concrete
    model; test_heat_capacity() then verifies that the analytic heat capacity
    agrees with a numerical temperature-derivative of the energy.
    """

    @abstractmethod
    def load_thermal_path_mod(self, eos_d):
        """Install the ThermalPathMod under test into eos_d (subclass hook)."""
        # NOTE: without an ABCMeta metaclass, @abstractmethod alone does not
        # block instantiation; raising (rather than `assert False`, which is
        # stripped under `python -O`) keeps the contract enforced.
        raise NotImplementedError('must implement load_thermal_path_mod()')

    @abstractmethod
    def init_params(self, eos_d):
        """Return eos_d populated with 'param_d' and 'modtype_d' (subclass hook)."""
        raise NotImplementedError('must implement init_params()')

    def test_heat_capacity(self):
        """Check Cv(T) against a finite-difference derivative of E(T)."""
        Nsamp = 10001
        eos_d = self.init_params({})
        param_d = eos_d['param_d']

        # Uniform temperature grid spanning +/-30% around the reference T0.
        Tmod_a = np.linspace(.7, 1.3, Nsamp)*param_d['T0']
        dT = Tmod_a[1] - Tmod_a[0]

        thermal_path_mod = eos_d['modtype_d']['ThermalPathMod']

        heat_capacity_a = thermal_path_mod.heat_capacity(Tmod_a, eos_d)
        energy_a = thermal_path_mod.energy(Tmod_a, eos_d)

        # Numerical Cv = dE/dT on the uniform grid.
        heat_capacity_num_a = np.gradient(energy_a, dT)

        # Normalize the mismatch by a characteristic Cv scale
        # (energy range / temperature range) so the tolerance is dimensionless.
        E_range = np.max(energy_a) - np.min(energy_a)
        T_range = Tmod_a[-1] - Tmod_a[0]
        Cv_scl = E_range/T_range

        Cv_diff_a = heat_capacity_num_a - heat_capacity_a
        Cverr = np.max(np.abs(Cv_diff_a/Cv_scl))
        # Finite-difference error shrinks with grid density, so tie the
        # tolerance to the sample count.
        CVTOL = 1.0/Nsamp

        # str() replaces the np.str alias, which was removed in NumPy 1.24.
        assert np.abs(Cverr) < CVTOL, '(Cv error)/Cv_scl, ' + str(Cverr) + \
            ', must be less than CVTOL, ' + str(CVTOL)
#====================================================================
class BaseTestThermalMod(object):
    """Shared test harness for ThermalMod implementations.

    Subclasses must override :meth:`load_thermal_mod` and
    :meth:`init_params`; the inherited test then verifies that the model's
    analytic heat capacity along an isochore agrees with a numerical
    temperature derivative of its energy.
    """

    @abstractmethod
    def load_thermal_mod(self, eos_d):
        # Must register a 'ThermalMod' entry in eos_d['modtype_d'].
        assert False, 'must implement load_thermal_mod()'

    @abstractmethod
    def init_params(self, eos_d):
        # Must populate eos_d with parameters and model objects, then return it.
        assert False, 'must implement init_params()'
        return eos_d

    def test_heat_capacity_isochore(self):
        """Check Cv(V=const, T) against the numerical derivative dE/dT."""
        Nsamp = 10001
        eos_d = self.init_params({})
        param_d = eos_d['param_d']
        # Fix the volume at 70% of the reference volume V0.
        Viso = 0.7*param_d['V0']
        # Sample temperatures +/-30% around the reference temperature T0.
        Tmod_a = np.linspace(.7, 1.3, Nsamp)*param_d['T0']
        dT = Tmod_a[1] - Tmod_a[0]
        thermal_mod = eos_d['modtype_d']['ThermalMod']
        heat_capacity_a = thermal_mod.heat_capacity(Viso, Tmod_a, eos_d)
        energy_a = np.squeeze( thermal_mod.energy(Viso, Tmod_a, eos_d) )
        heat_capacity_num_a = np.gradient(energy_a, dT)
        # Normalize the error by a characteristic Cv scale (mean dE/dT over
        # the sampled range) so the tolerance is dimensionless.
        E_range = np.max(energy_a) - np.min(energy_a)
        T_range = Tmod_a[-1] - Tmod_a[0]
        Cv_scl = E_range/T_range
        Cv_diff_a = heat_capacity_num_a - heat_capacity_a
        Cverr = np.max(np.abs(Cv_diff_a/Cv_scl))
        CVTOL = 1.0/Nsamp
        # np.str was merely a deprecated alias for the builtin str (removed
        # in NumPy 1.24); use str() directly.
        assert np.abs(Cverr) < CVTOL, '(Cv error)/Cv_scl, ' + str(Cverr) + \
            ', must be less than CVTOL, ' + str(CVTOL)
#====================================================================
#====================================================================
class TestRosenfeldTaranzonaPerturb(BaseTestThermalMod):
    """Tests of the Rosenfeld-Taranzona perturbation thermal EOS model.

    Assembles a full thermal-pressure EOS (Vinet compression path along an
    isentrope, power-law gamma, RT perturbation thermal model) with
    MgSiO3-melt-like parameters and checks thermodynamic self-consistency
    of the kinetic, potential, and total contributions.

    NOTE(review): the load_* methods reference a ``models`` module that is
    never imported at the top of this file (only compress, thermal,
    composite, and core are) -- presumably a stale name left over from a
    refactor; confirm before running. The plotting tests also use Python 2
    ``print`` statements and ``raw_input``, so this file predates Python 3.
    """
    def load_thermal_mod(self, eos_d):
        # Register the RT perturbation model under the 'ThermalMod' key.
        thermal_mod = models.RosenfeldTaranzonaPerturb()
        core.set_modtypes( ['ThermalMod'], [thermal_mod], eos_d )
        pass
    def load_gamma_mod(self, eos_d):
        # Register a power-law Grueneisen-parameter model.
        gamma_mod = models.GammaPowLaw()
        core.set_modtypes( ['GammaMod'], [gamma_mod], eos_d )
        pass
    def load_compress_path_mod(self, eos_d):
        # Vinet compression path evaluated along the S0 isentrope.
        S0, = core.get_params(['S0'],eos_d)
        compress_path_mod = models.Vinet(path_const='S',level_const=S0,
                                         supress_energy=False,
                                         supress_press=False)
        core.set_modtypes( ['CompressPathMod'], [compress_path_mod], eos_d )
        pass
    def load_eos_mod(self, eos_d):
        # Compose the sub-models into a single full thermal-pressure EOS.
        self.load_compress_path_mod(eos_d)
        self.load_gamma_mod(eos_d)
        self.load_thermal_mod(eos_d)
        full_mod = models.ThermalPressMod()
        core.set_modtypes( ['FullMod'], [full_mod], eos_d )
        pass
    def init_params(self,eos_d):
        """Populate eos_d with MgSiO3-melt parameters and load the models."""
        core.set_consts( [], [], eos_d )
        # EOS Parameter values initially set by Mosenfelder2009
        # Set model parameter values
        mass_avg = (24.31+28.09+3*16.0)/5.0 # g/(mol atom)
        T0 = 1673.0
        S0 = 0.0 # must adjust
        param_key_a = ['T0','S0','mass_avg']
        param_val_a = np.array([T0,S0,mass_avg])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Reference volume converted to ang^3 per atom.
        V0 = (38.575*1e-5)*mass_avg/eos_d['const_d']['Nmol']/1e3*1e30 # ang^3/atom
        K0 = 20.8
        KP0= 10.2
        # KP20 = -2.86 # Not actually used!
        E0 = 0.0
        param_key_a = ['V0','K0','KP0','E0']
        param_val_a = np.array([V0,K0,KP0,E0])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Grueneisen power-law parameters.
        VR = V0
        gammaR = 0.46
        qR = -1.35
        param_key_a = ['VR','gammaR','qR']
        param_val_a = np.array([VR,gammaR,qR])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Thermal perturbation coefficients for the RT model.
        dE0th = +1.0
        dV0th = -0.02
        dK0th = +0.1
        dKP0th = -0.00
        # dE0th = +0.4
        # dV0th = -0.0
        # dK0th = -0.01
        # dKP0th = -0.03
        lognfac = 0.0
        mexp = 3.0/5
        param_key_a = ['dE0th','dV0th','dK0th','dKP0th','lognfac','mexp']
        param_val_a = np.array([dE0th,dV0th,dK0th,dKP0th,lognfac,mexp])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Must convert energy units from kJ/g to eV/atom
        energy_conv_fac = mass_avg/eos_d['const_d']['kJ_molpereV']
        core.set_consts( ['energy_conv_fac'], [energy_conv_fac],
                        eos_d )
        self.load_eos_mod( eos_d )
        # from IPython import embed; embed(); import ipdb; ipdb.set_trace()
        return eos_d
    def test_energy_curves_Spera2011(self):
        """Visual check of E(P) isotherms against Spera2011 Fig 2b (interactive)."""
        Nsamp = 101
        eos_d = self.init_params({})
        param_d = eos_d['param_d']
        Vgrid_a = np.linspace(0.4,1.1,Nsamp)*param_d['V0']
        Tgrid_a = np.array([2500,3000,3500,4000,4500,5000])
        full_mod = eos_d['modtype_d']['FullMod']
        # energy_conv_fac, = core.get_consts(['energy_conv_fac'],eos_d)
        energy_mod_a = []
        press_mod_a = []
        for iT in Tgrid_a:
            ienergy_a = full_mod.energy(Vgrid_a,iT,eos_d)
            ipress_a = full_mod.press(Vgrid_a,iT,eos_d)
            energy_mod_a.append(ienergy_a)
            press_mod_a.append(ipress_a)
        # energy_mod_a = np.array( energy_mod_a )
        energy_mod_a = np.array( energy_mod_a )
        press_mod_a = np.array( press_mod_a )
        # from IPython import embed; embed(); import ipdb; ipdb.set_trace()
        # Color each isotherm by temperature.
        cmap=plt.get_cmap('coolwarm')
        col_a = cmap(1.0*(Tgrid_a-Tgrid_a[0])/np.ptp(Tgrid_a))[:,:3]
        plt.ion()
        plt.figure()
        [plt.plot(ipress_a, ienergy_a,'-',color=icol_a,label=iT) \
         for ipress_a,ienergy_a,icol_a,iT in zip(press_mod_a,energy_mod_a,col_a,Tgrid_a)]
        ax = plt.axes()
        handles, labels = ax.get_legend_handles_labels()
        ax.legend(handles[::-1],labels[::-1],loc='upper left')
        plt.xlim(-5,165)
        ybnd = [np.min(energy_mod_a[press_mod_a<165]), np.max(energy_mod_a[press_mod_a<165])]
        plt.ylim(ybnd[0],ybnd[1])
        # plt.ylim(-100.5,-92)
        # Interactive visual confirmation (Python 2 print/raw_input).
        print 'Compare this plot with Spera2011 Fig 2b (Oganov potential):'
        print 'Do the figures agree (y/n or k for keyboard)?'
        s = raw_input('--> ')
        if s=='k':
            from IPython import embed; embed(); import ipdb; ipdb.set_trace()
        assert s=='y', 'Figure must match published figure'
        pass
    def test_kinetic_contribution(self):
        """Verify kinetic Cv, S, and F are mutually consistent via numerical derivs."""
        Nsamp = 1001
        eos_d = self.init_params({})
        eos_d['param_d']['E0'] = -21.3
        eos_d['param_d']['dE0th'] = 0.5
        V0 = eos_d['param_d']['V0']
        Vgrid_a = V0*np.arange(0.4,1.11,0.1)
        Tgrid_a = np.linspace( 2500, 5000, Nsamp)
        dT = Tgrid_a[1]-Tgrid_a[0]
        kboltz = eos_d['const_d']['kboltz']
        # Test entropy
        TOL = 1e-4
        iV = Vgrid_a[0]
        genRT_mod = models.GenRosenfeldTaranzona()
        thermal_mod = eos_d['modtype_d']['ThermalMod']
        full_mod = eos_d['modtype_d']['FullMod']
        # Cv_kin should equal dE_kin/dT (interior points only, since
        # np.gradient is one-sided at the endpoints).
        Cvkin_a = genRT_mod.calc_heat_capacity_kin( Tgrid_a ,eos_d )
        Ekin_a = genRT_mod.calc_energy_kin( Tgrid_a ,eos_d )
        Cvkin_dE_err_a = ( Cvkin_a - np.gradient( Ekin_a, dT ) )/kboltz
        assert np.all( np.abs(Cvkin_dE_err_a[1:-1]) < TOL ), \
            'Cvkin must match numerical energy deriv'
        # Cv_kin should also equal T*dS_kin/dT.
        Skin_a = genRT_mod.calc_entropy_kin( Tgrid_a ,eos_d, Tref=eos_d['param_d']['T0'] )
        Cvkin_dS_err_a = ( Cvkin_a - Tgrid_a*np.gradient( Skin_a, dT ) )/kboltz
        assert np.all( np.abs(Cvkin_dS_err_a[1:-1]) < TOL ), \
            'Cvkin must match numerical entropy deriv'
        # And S_kin should equal -dF_kin/dT for F = E - T*S.
        Fkin_a = Ekin_a-Tgrid_a*Skin_a
        Skin_dF_err_a = ( Skin_a + np.gradient( Fkin_a, dT ) )/kboltz
        assert np.all( np.abs(Skin_dF_err_a[1:-1]) < TOL ), \
            'Skin must match numerical free energy deriv'
    def test_potential_contribution(self):
        """Verify potential Cv, S, and F are mutually consistent via numerical derivs."""
        Nsamp = 1001
        eos_d = self.init_params({})
        eos_d['param_d']['E0'] = -21.3
        eos_d['param_d']['dE0th'] = 0.5
        V0 = eos_d['param_d']['V0']
        Vgrid_a = V0*np.arange(0.4,1.11,0.1)
        Tgrid_a = np.linspace( 2500, 5000, Nsamp)
        dT = Tgrid_a[1]-Tgrid_a[0]
        kboltz = eos_d['const_d']['kboltz']
        # Test entropy
        TOL = 1e-4
        iV = Vgrid_a[0]
        genRT_mod = models.GenRosenfeldTaranzona()
        thermal_mod = eos_d['modtype_d']['ThermalMod']
        full_mod = eos_d['modtype_d']['FullMod']
        # verify potential heat capacity (energy deriv)
        acoef_a, bcoef_a = thermal_mod.calc_RT_coef( iV, eos_d )
        Cvpot_a = np.squeeze( genRT_mod.calc_heat_capacity_pot( Tgrid_a, eos_d,
                                                               bcoef_a=bcoef_a ) )
        Epot_a = np.squeeze( genRT_mod.calc_energy_pot( Tgrid_a, eos_d,
                                                       acoef_a=acoef_a,
                                                       bcoef_a=bcoef_a ) )
        Cvpot_dE_a = (Cvpot_a - np.gradient( Epot_a, dT ))/kboltz
        assert np.all( np.abs(Cvpot_dE_a[1:-1]) < TOL ), \
            'Cvpot must match numerical energy deriv'
        # Cv_pot should also equal T*dS_pot/dT.
        Spot_a = np.squeeze( genRT_mod.calc_entropy_pot( Tgrid_a, eos_d,
                                                        bcoef_a=bcoef_a ) )
        Cvpot_dS_a = ( Cvpot_a - Tgrid_a*np.gradient( Spot_a, dT ) )/kboltz
        assert np.all( np.abs(Cvpot_dS_a[1:-1]) < TOL ), \
            'Cvpot must match numerical entropy deriv'
        # And S_pot should equal -dF_pot/dT for F = E - T*S.
        Fpot_a = Epot_a-Tgrid_a*Spot_a
        Spot_dF_err_a = ( Spot_a + np.gradient( Fpot_a, dT ) )/kboltz
        assert np.all( np.abs(Spot_dF_err_a[1:-1]) < TOL ), \
            'Spot must match numerical free energy deriv'
    def test_total_entropy(self):
        """Verify total entropy equals -dF/dT from the full model."""
        Nsamp = 1001
        eos_d = self.init_params({})
        eos_d['param_d']['E0'] = -21.3
        eos_d['param_d']['dE0th'] = 0.5
        V0 = eos_d['param_d']['V0']
        Vgrid_a = V0*np.arange(0.4,1.11,0.1)
        Tgrid_a = np.linspace( 2500, 5000, Nsamp)
        dT = Tgrid_a[1]-Tgrid_a[0]
        kboltz = eos_d['const_d']['kboltz']
        # Test entropy
        TOL = 1e-4
        iV = Vgrid_a[0]
        genRT_mod = models.GenRosenfeldTaranzona()
        thermal_mod = eos_d['modtype_d']['ThermalMod']
        full_mod = eos_d['modtype_d']['FullMod']
        # verify total entropy
        iFtot = np.squeeze( full_mod.free_energy( Vgrid_a[0], Tgrid_a, eos_d ) )
        iStot = np.squeeze( full_mod.entropy( Vgrid_a[0], Tgrid_a, eos_d ) )
        iSnum = -np.gradient( iFtot, dT )
        Stot_dF_err_a = ( iStot - iSnum )/kboltz
        assert np.all( np.abs(Stot_dF_err_a[1:-1]) < TOL ), \
            'Spot must match numerical free energy deriv'
#====================================================================
class TestRosenfeldTaranzonaPerturbExpand(TestRosenfeldTaranzonaPerturb):
    """Variant of TestRosenfeldTaranzonaPerturb with an expansion adjustment.

    Overrides the compression path to attach a Tait model for the expanded
    (V > V0) regime, and extends the parameter set with KP20/dKP20th.

    NOTE(review): inherits the stale ``models`` reference noted on the
    parent class -- confirm before running.
    """
    def load_compress_path_mod(self, eos_d):
        # Same Vinet isentropic path as the parent, plus a Tait model used
        # to adjust behavior on expansion.
        S0, = core.get_params(['S0'],eos_d)
        expand_adj_mod=models.Tait()
        compress_path_mod = models.Vinet(path_const='S',level_const=S0,
                                         supress_energy=False,
                                         supress_press=False,
                                         expand_adj_mod=expand_adj_mod)
        core.set_modtypes( ['CompressPathMod'], [compress_path_mod], eos_d )
        pass
    def init_params(self,eos_d):
        """Like the parent's init_params, but includes KP20 and dKP20th."""
        core.set_consts( [], [], eos_d )
        # EOS Parameter values initially set by Mosenfelder2009
        # Set model parameter values
        mass_avg = (24.31+28.09+3*16.0)/5.0 # g/(mol atom)
        T0 = 1673.0
        S0 = 0.0 # must adjust
        param_key_a = ['T0','S0','mass_avg']
        param_val_a = np.array([T0,S0,mass_avg])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Reference volume converted to ang^3 per atom.
        V0 = (38.575*1e-5)*mass_avg/eos_d['const_d']['Nmol']/1e3*1e30 # ang^3/atom
        K0 = 20.8
        KP0= 10.2
        KP20 = -2.86 # Not actually used!
        E0 = 0.0
        param_key_a = ['V0','K0','KP0','KP20','E0']
        param_val_a = np.array([V0,K0,KP0,KP20,E0])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Grueneisen power-law parameters.
        VR = V0
        gammaR = 0.46
        qR = -1.35
        param_key_a = ['VR','gammaR','qR']
        param_val_a = np.array([VR,gammaR,qR])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Thermal perturbation coefficients, extended with dKP20th.
        dE0th = +1.0
        dV0th = -0.02
        dK0th = +0.1
        dKP0th = -0.00
        dKP20th = +1.0
        # dE0th = +0.4
        # dV0th = -0.0
        # dK0th = -0.01
        # dKP0th = -0.03
        lognfac = 0.0
        mexp = 3.0/5
        param_key_a = ['dE0th','dV0th','dK0th','dKP0th','dKP20th','lognfac','mexp']
        param_val_a = np.array([dE0th,dV0th,dK0th,dKP0th,dKP20th,lognfac,mexp])
        core.set_params( param_key_a, param_val_a, eos_d )
        # Must convert energy units from kJ/g to eV/atom
        energy_conv_fac = mass_avg/eos_d['const_d']['kJ_molpereV']
        core.set_consts( ['energy_conv_fac'], [energy_conv_fac],
                        eos_d )
        self.load_eos_mod( eos_d )
        # from IPython import embed; embed(); import ipdb; ipdb.set_trace()
        return eos_d
    def test_energy_curves_Spera2011_exp(self):
        """Visual check of E(P) isotherms for the expansion-adjusted model (interactive)."""
        Nsamp = 101
        eos_d = self.init_params({})
        param_d = eos_d['param_d']
        Vgrid_a = np.linspace(0.4,1.1,Nsamp)*param_d['V0']
        Tgrid_a = np.array([2500,3000,3500,4000,4500,5000])
        full_mod = eos_d['modtype_d']['FullMod']
        compress_path_mod = eos_d['modtype_d']['CompressPathMod']
        thermal_mod = eos_d['modtype_d']['ThermalMod']
        # energy_conv_fac, = core.get_consts(['energy_conv_fac'],eos_d)
        energy_mod_a = []
        press_mod_a = []
        for iT in Tgrid_a:
            ienergy_a = full_mod.energy(Vgrid_a,iT,eos_d)
            ipress_a = full_mod.press(Vgrid_a,iT,eos_d)
            energy_mod_a.append(ienergy_a)
            press_mod_a.append(ipress_a)
        # energy_mod_a = np.array( energy_mod_a )
        energy_mod_a = np.array( energy_mod_a )
        press_mod_a = np.array( press_mod_a )
        # from IPython import embed; embed(); import ipdb; ipdb.set_trace()
        # Color each isotherm by temperature.
        cmap=plt.get_cmap('coolwarm')
        col_a = cmap(1.0*(Tgrid_a-Tgrid_a[0])/np.ptp(Tgrid_a))[:,:3]
        plt.ion()
        plt.figure()
        [plt.plot(ipress_a, ienergy_a,'-',color=icol_a,label=iT) \
         for ipress_a,ienergy_a,icol_a,iT in zip(press_mod_a,energy_mod_a,col_a,Tgrid_a)]
        ax = plt.axes()
        handles, labels = ax.get_legend_handles_labels()
        ax.legend(handles[::-1],labels[::-1],loc='upper left')
        plt.xlim(-5,165)
        ybnd = [np.min(energy_mod_a[press_mod_a<165]), np.max(energy_mod_a[press_mod_a<165])]
        plt.ylim(ybnd[0],ybnd[1])
        # plt.ylim(-100.5,-92)
        # Interactive visual confirmation (Python 2 print/raw_input).
        print 'Compare this plot with Spera2011 Fig 2b (Oganov potential):'
        print 'Do the figures agree (y/n or k for keyboard)?'
        s = raw_input('--> ')
        if s=='k':
            from IPython import embed; embed(); import ipdb; ipdb.set_trace()
        assert s=='y', 'Figure must match published figure'
        pass
#====================================================================
| 35.397233 | 93 | 0.567752 | 2,577 | 17,911 | 3.656189 | 0.118743 | 0.042454 | 0.016345 | 0.014859 | 0.836871 | 0.803545 | 0.782106 | 0.766822 | 0.750371 | 0.720229 | 0 | 0.041197 | 0.276311 | 17,911 | 505 | 94 | 35.467327 | 0.685697 | 0.147228 | 0 | 0.717791 | 0 | 0 | 0.102164 | 0.001513 | 0 | 0 | 0 | 0 | 0.046012 | 0 | null | null | 0.021472 | 0.03681 | null | null | 0.01227 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
eb99fc53c07b067bc8fc325d7c51f06960d94bb2 | 5,489 | py | Python | chemics/gas_thermal_conductivity.py | jAniceto/chemics | d46e55896e03b32c7096679a1ee177a6ae99dc82 | [
"MIT"
] | 93 | 2019-05-13T01:17:55.000Z | 2022-03-30T17:43:06.000Z | chemics/gas_thermal_conductivity.py | RickeyEstes/chemics | a24a2379412a39c6cfa5cb63b01fdcba9e31ebea | [
"MIT"
] | 18 | 2018-10-31T19:33:14.000Z | 2021-05-09T23:42:30.000Z | chemics/gas_thermal_conductivity.py | RickeyEstes/chemics | a24a2379412a39c6cfa5cb63b01fdcba9e31ebea | [
"MIT"
] | 14 | 2018-09-29T06:16:57.000Z | 2021-10-08T07:26:31.000Z | import pandas as pd
import os
def k_gas_inorganic(formula, temp, full=False):
    """Thermal conductivity of a gas of inorganic compounds.

    Evaluates k = A + B*T + C*T^2 + D*T^3 using regression coefficients
    from Yaws' Critical Property Data for Chemical Engineers and Chemists,
    Table 84 (Knovel, 2014).

    Parameters
    ----------
    formula : string
        Molecular formula of the gas.
    temp : float
        Temperature of the gas [K]
    full : bool, optional
        When ``False`` (default) return only the thermal conductivity;
        when ``True`` also return the CAS number, the applicable
        temperature range, and the regression coefficients.

    Returns
    -------
    k_gas : float
        Thermal conductivity of gas [W/(m K)]
    k_gas, cas, tmin, tmax, a, b, c, d : tuple
        Returned when ``full=True``.

    Raises
    ------
    ValueError
        If the formula is not in the CSV data file, or if ``temp`` lies
        outside the range [tmin, tmax] for that species.

    Examples
    --------
    >>> k_gas_inorganic('N2', 773)
    0.0535

    >>> k_gas_inorganic('N2', 773, full=True)
    (0.0535, '7727-37-9', 63.15, 1500.0, -0.0002267, 0.0001027, -6.0151e-08,
    2.2331e-11)
    """
    # Coefficient table ships alongside this module.
    here = os.path.dirname(os.path.abspath(__file__))
    df = pd.read_csv(here + '/data/k-gas-inorganic.csv', index_col=0)

    if formula not in df.index:
        raise ValueError(f'Gas species {formula} is not available.')

    row = df.loc[formula]
    cas = row['CAS No.']
    a, b, c, d = row['A'], row['B'], row['C'], row['D']
    tmin = row['temperature, Tmin (K)']
    tmax = row['temperature, Tmax (K)']

    if temp < tmin or temp > tmax:
        raise ValueError('Temperature out of range. Applicable values are '
                         + f'{tmin} - {tmax} K for {formula} gas.')

    # Cubic polynomial regression in temperature.
    k_gas = a + b * temp + c * (temp**2) + d * (temp**3)

    if full:
        return k_gas, cas, tmin, tmax, a, b, c, d
    return k_gas
def k_gas_organic(formula, temp, cas=None, full=False):
    """Thermal conductivity of a gas of organic compounds.

    Evaluates k = A + B*T + C*T^2 + D*T^3 using regression coefficients
    from Yaws' Critical Property Data for Chemical Engineers and Chemists,
    Table 85 (Knovel, 2014).

    Parameters
    ----------
    formula : string
        Molecular formula of the gas.
    temp : float
        Temperature of the gas [K]
    cas : string
        CAS number of the gas, required for some species [-]
    full : bool, optional
        When ``False`` (default) return only the thermal conductivity;
        when ``True`` also return the CAS number, the applicable
        temperature range, and the regression coefficients.

    Returns
    -------
    k_gas : float
        Thermal conductivity of gas [W/(m K)]
    k_gas, cas, tmin, tmax, a, b, c, d : tuple
        Returned when ``full=True``.

    Raises
    ------
    ValueError
        If the formula is not in the CSV data file, if several substances
        share the formula but no CAS number was given, or if ``temp`` lies
        outside the range [tmin, tmax] for that species.

    Examples
    --------
    >>> k_gas_organic('CO', 801)
    0.05722

    >>> k_gas_organic('C18H38O', 920, cas='593-32-8')
    0.04174
    """
    # Coefficient table ships alongside this module.
    here = os.path.dirname(os.path.abspath(__file__))
    df = pd.read_csv(here + '/data/k-gas-organic.csv', index_col=0)

    if formula not in df.index:
        raise ValueError(f'Gas species {formula} is not available.')

    # A DataFrame (rather than a Series) at this index means the formula is
    # ambiguous; a CAS number is required to disambiguate.
    if isinstance(df.loc[formula], pd.DataFrame) and cas is None:
        raise ValueError(f'Multiple substances available for {formula}.'
                         + ' Include CAS number as a function parameter.')
    if cas:
        df = df[df['CAS No.'] == str(cas)]

    row = df.loc[formula]
    cas = row['CAS No.']
    tmin = row['temperature, Tmin (K)']
    tmax = row['temperature, Tmax (K)']
    a, b, c, d = row['A'], row['B'], row['C'], row['D']

    if temp < tmin or temp > tmax:
        raise ValueError('Temperature out of range. Applicable values are '
                         + f'{tmin} - {tmax} K for {formula} gas.')

    # Cubic polynomial regression in temperature.
    k_gas = a + b * temp + c * (temp**2) + d * (temp**3)

    if full:
        return k_gas, cas, tmin, tmax, a, b, c, d
    return k_gas
| 31.545977 | 79 | 0.606668 | 765 | 5,489 | 4.295425 | 0.205229 | 0.024346 | 0.054778 | 0.05843 | 0.824711 | 0.799757 | 0.776019 | 0.776019 | 0.776019 | 0.776019 | 0 | 0.028571 | 0.273092 | 5,489 | 173 | 80 | 31.728324 | 0.794486 | 0.547276 | 0 | 0.77551 | 0 | 0 | 0.233271 | 0.02262 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040816 | false | 0 | 0.040816 | 0 | 0.163265 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
690e0d62df215637ce98de48fecfff7c606b0ef5 | 11,825 | py | Python | tests/test_crypto.py | tnoff/backup-tool | 114d066b0aeaa9dab9e2594f42a520839587df20 | [
"BSD-2-Clause"
] | null | null | null | tests/test_crypto.py | tnoff/backup-tool | 114d066b0aeaa9dab9e2594f42a520839587df20 | [
"BSD-2-Clause"
] | null | null | null | tests/test_crypto.py | tnoff/backup-tool | 114d066b0aeaa9dab9e2594f42a520839587df20 | [
"BSD-2-Clause"
] | null | null | null | import os
from tempfile import TemporaryDirectory
from backup_tool import crypto
from backup_tool import utils
def test_encyrpt_file_md5():
    """Encrypt then decrypt a small random text file and verify every md5."""
    passphrase = utils.random_string(length=16)
    payload = utils.random_string(length=20)
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'w') as handle:
            handle.write(payload)
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_file_md5_binary():
    """Encrypt then decrypt a 16-byte binary file and verify every md5."""
    passphrase = utils.random_string(length=16)
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'wb') as handle:
            handle.write(os.urandom(16))
        plain_md5 = utils.md5(plain_path)
        print(f'Original md5 {plain_md5}')
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            print(f'Encrypted md5 {cipher_md5}')
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            print(f'Encrypted md5 should be {dec_cipher_md5}')
            print(f'Decrypted md5 should be {dec_plain_md5}')
            print(f'Decrypted md5 is {restored_md5}')
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_file_md5_small_file():
    """Round-trip a file smaller than one cipher block (< 16 bytes)."""
    passphrase = utils.random_string(length=16)
    payload = utils.random_string(length=5)
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'w') as handle:
            handle.write(payload)
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_file_md5_spaces():
    """Round-trip a file containing only whitespace (24 spaces)."""
    passphrase = utils.random_string(length=16)
    payload = " " * 24
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'w') as handle:
            handle.write(payload)
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_file_md5_binary_larger_file():
    """Round-trip a 100-byte binary file (spans multiple cipher blocks)."""
    passphrase = utils.random_string(length=16)
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'wb') as handle:
            handle.write(os.urandom(100))
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_empty_file():
    """Round-trip a zero-byte file and verify every md5."""
    passphrase = utils.random_string(length=16)
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        # Touch the file so it exists but stays empty.
        with open(plain_path, 'a'):
            os.utime(plain_path, None)
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_file_md5_large_text():
    """Round-trip a 100 KiB random text file and verify every md5."""
    passphrase = utils.random_string(length=16)
    payload = utils.random_string(length=102400)
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'w') as handle:
            handle.write(payload)
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
def test_encyrpt_file_md5_trail_spaces():
    """Round-trip a text file whose content ends in trailing spaces."""
    passphrase = utils.random_string(length=16)
    payload = utils.random_string(length=1024) + ' ' * 5
    with TemporaryDirectory() as workdir, utils.temp_file(workdir) as plain_path:
        with open(plain_path, 'w') as handle:
            handle.write(payload)
        plain_md5 = utils.md5(plain_path)
        with utils.temp_file(workdir) as cipher_path, utils.temp_file(workdir) as restored_path:
            # Encrypt: both reported md5s must match independently computed ones.
            reported_plain_md5, reported_cipher_md5 = crypto.encrypt_file(plain_path, cipher_path, passphrase)
            assert reported_plain_md5 == plain_md5, 'Encryption returns wrong md5 value for original file'
            cipher_md5 = utils.md5(cipher_path)
            assert reported_cipher_md5 == cipher_md5, 'Encryption return wrong md5 value for encrypted file'
            # Decrypt: the round-tripped file must hash back to the original.
            dec_cipher_md5, dec_plain_md5 = crypto.decrypt_file(cipher_path, restored_path, passphrase)
            restored_md5 = utils.md5(restored_path)
            assert restored_md5 == plain_md5, 'MD5 of decrypted file does not match original'
            assert dec_cipher_md5 == cipher_md5, 'Decryption returns wrong md5 value for original file'
            assert dec_plain_md5 == plain_md5, 'Decryption returns wrong md5 value for decrypted file'
6957ed28ac537a78acbd96a5d39ddefc8fd8bf9e | 640 | py | Python | djangocms_frontend/contrib/frontend_forms/cms_plugins/__init__.py | fsbraun/djangocms-bootstrap5 | 368f736fa1ce264086493153a256eb90dd8c4df0 | [
"BSD-3-Clause"
] | 7 | 2022-01-28T14:21:14.000Z | 2022-03-29T20:07:11.000Z | djangocms_frontend/contrib/frontend_forms/cms_plugins/__init__.py | marksweb/djangocms-frontend | c74130cbd15cfacb588933bb4adaf1a7d780daaf | [
"BSD-3-Clause"
] | 13 | 2022-02-23T21:10:07.000Z | 2022-03-30T08:33:20.000Z | djangocms_frontend/contrib/frontend_forms/cms_plugins/__init__.py | marksweb/djangocms-frontend | c74130cbd15cfacb588933bb4adaf1a7d780daaf | [
"BSD-3-Clause"
] | 2 | 2022-02-07T14:48:11.000Z | 2022-02-23T23:55:18.000Z | from .ajax_plugins import FormPlugin
from .form_plugins import (
BooleanFieldPlugin,
CharFieldPlugin,
ChoicePlugin,
DateFieldPlugin,
DateTimeFieldPlugin,
DecimalFieldPlugin,
EmailFieldPlugin,
IntegerFieldPlugin,
SelectPlugin,
TextareaPlugin,
TimeFieldPlugin,
URLFieldPlugin,
)
__all__ = [
"FormPlugin",
"BooleanFieldPlugin",
"CharFieldPlugin",
"ChoicePlugin",
"DateFieldPlugin",
"DateTimeFieldPlugin",
"DecimalFieldPlugin",
"EmailFieldPlugin",
"IntegerFieldPlugin",
"SelectPlugin",
"TextareaPlugin",
"TimeFieldPlugin",
"URLFieldPlugin",
]
| 20 | 36 | 0.692188 | 35 | 640 | 12.485714 | 0.542857 | 0.059497 | 0.20595 | 0.2746 | 0.851259 | 0.851259 | 0.851259 | 0.851259 | 0.851259 | 0.851259 | 0 | 0 | 0.217188 | 640 | 31 | 37 | 20.645161 | 0.872255 | 0 | 0 | 0 | 0 | 0 | 0.30625 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.066667 | 0 | 0.066667 | 0 | 0 | 0 | 1 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
6960f1b9c5ed063d6a2351c8e9e7fd0f50b8ede3 | 22,863 | py | Python | dataset.py | dungnb1333/global-wheat-dection-2020 | f4f1a9614d897d76cc2b28bf7a601004a74df15e | [
"MIT"
] | 37 | 2021-08-10T03:42:04.000Z | 2022-02-18T03:16:59.000Z | dataset.py | dungnb1333/global-wheat-dection-2020 | f4f1a9614d897d76cc2b28bf7a601004a74df15e | [
"MIT"
] | 2 | 2021-08-21T20:27:38.000Z | 2021-12-10T03:09:57.000Z | dataset.py | dungnb1333/global-wheat-dection-2020 | f4f1a9614d897d76cc2b28bf7a601004a74df15e | [
"MIT"
] | 19 | 2021-08-12T09:46:47.000Z | 2022-03-15T06:07:18.000Z | import os
import numpy as np
import cv2
import random
from PIL import Image
import pandas as pd
import torch
from torch.utils.data import Dataset
import torch.nn.functional as F
from albumentations import *
cv2.setNumThreads(0)
cv2.ocl.setUseOpenCL(False)
def get_aug(aug):
    """Wrap a list of albumentations transforms so that pascal_voc-format
    bounding boxes (with labels in the 'category_id' field) are transformed
    alongside the image."""
    bbox_params = BboxParams(
        format='pascal_voc',
        min_area=0,
        min_visibility=0,
        label_fields=['category_id'],
    )
    return Compose(aug, bbox_params=bbox_params)
def bb_overlap(boxA, boxB):
    """Return the fraction of boxA's area covered by its intersection with boxB.

    Boxes are (xmin, ymin, xmax, ymax).  The +1 terms follow the inclusive
    pixel-coordinate convention.  NOTE: this is not a symmetric IoU — the
    denominator is boxA's area only, so the result is how much of boxA
    survives inside boxB.
    """
    left = max(boxA[0], boxB[0])
    top = max(boxA[1], boxB[1])
    right = min(boxA[2], boxB[2])
    bottom = min(boxA[3], boxB[3])
    inter_w = max(0, right - left + 1)
    inter_h = max(0, bottom - top + 1)
    area_a = (boxA[2] - boxA[0] + 1) * (boxA[3] - boxA[1] + 1)
    return (inter_w * inter_h) / float(area_a)
class WheatDataset(Dataset):
    """Training/validation dataset for wheat-head detection.

    Expects ``df`` with (at least) columns: image_id, source, isbox,
    xmin, ymin, xmax, ymax — one row per box; rows with ``isbox == False``
    mark box-free images.  In 'train' mode each item is either a single
    (possibly cropped) image or a 4-image mosaic, followed by flip /
    noise / blur / color augmentation.  Targets are formatted for either
    torchvision FasterRCNN (xyxy) or EffDet (yxyx).
    """
    def __init__(self, df, img_size, mode='train', network='FasterRCNN', bbox_removal_threshold=0.25):
        # bbox_removal_threshold: minimum fraction of a box's area that must
        # survive a crop for the box to be kept (see crop_image).
        super(WheatDataset,self).__init__()
        self.df = df
        self.image_ids = list(np.unique(self.df.image_id.values))
        self.img_size = img_size
        # Image roots for the three annotation sources seen in df.source.
        self.root_dir = 'dataset/train'
        self.w2017_ext_dir = 'dataset/wheat2017'
        self.spike_ext_dir = 'dataset/spike-wheat'
        assert mode in ['train', 'valid']
        self.mode = mode
        assert network in ['FasterRCNN', 'EffDet']
        self.network = network
        self.bbox_removal_threshold = bbox_removal_threshold
        if self.mode == 'train':
            random.shuffle(self.image_ids)
        # Photometric / geometric augmentation pipeline, used in 'train' mode only.
        self.train_transforms = get_aug([
            HorizontalFlip(p=0.5),
            VerticalFlip(p=0.5),
            ToGray(p=0.01),
            OneOf([
                IAAAdditiveGaussianNoise(),
                GaussNoise(),
            ], p=0.2),
            OneOf([
                MotionBlur(p=0.2),
                MedianBlur(blur_limit=3, p=0.1),
                Blur(blur_limit=3, p=0.1),
            ], p=0.2),
            OneOf([
                CLAHE(),
                IAASharpen(),
                IAAEmboss(),
                RandomBrightnessContrast(),
            ], p=0.25),
            HueSaturationValue(p=0.25)
        ])
        # Box-aware resize to (img_size, img_size).
        self.resize_transforms = get_aug([
            Resize(height=self.img_size, width=self.img_size, interpolation=1, p=1)
        ])
    def __len__(self):
        return len(self.image_ids)
    def refine_boxes(self, boxes):
        """Drop degenerate boxes narrower or shorter than 10 pixels."""
        result_boxes = []
        for box in boxes:
            if box[2] - box[0] < 10 or box[3] - box[1] < 10:
                continue
            result_boxes.append(box)
        result_boxes = np.array(result_boxes)
        return result_boxes
    def resize_image(self, image, boxes):
        """Resize image and boxes to (img_size, img_size) via albumentations."""
        cats = np.ones(boxes.shape[0], dtype=int)
        annotations = {'image': image, 'bboxes': boxes, 'category_id': cats}
        augmented = self.resize_transforms(**annotations)
        image = augmented['image']
        boxes = np.array(augmented['bboxes'])
        return image, boxes
    def crop_image(self, image, boxes, xmin, ymin, xmax, ymax):
        """Crop the image to the given window and keep overlapping boxes.

        A box is kept when the window covers more than bbox_removal_threshold
        of the box's own area (bb_overlap); kept boxes are shifted into crop
        coordinates and clipped to the crop bounds.  Returns (image, boxes),
        with boxes shaped (0, 4) when none survive.
        """
        image = image[ymin:ymax,xmin:xmax,:]
        cutout_box = [xmin, ymin, xmax, ymax]
        result_boxes = []
        for box in boxes:
            iou = bb_overlap(box, cutout_box)
            if iou > self.bbox_removal_threshold:
                result_boxes.append(box)
        if len(result_boxes) > 0:
            result_boxes = np.array(result_boxes, dtype=float)
            result_boxes[:,[0,2]] -= xmin
            result_boxes[:,[1,3]] -= ymin
            result_boxes[:,[0,2]] = result_boxes[:,[0,2]].clip(0, xmax-xmin)
            result_boxes[:,[1,3]] = result_boxes[:,[1,3]].clip(0, ymax-ymin)
        else:
            result_boxes = np.array([], dtype=float).reshape(0,4)
        return image, result_boxes
    def random_crop_resize(self, image, boxes, img_size=1024, p=0.5):
        """With probability (1 - p): random square crop of 75–100% of img_size,
        then resize to self.img_size; otherwise resize only if needed."""
        if random.random() > p:
            new_img_size = random.randint(int(0.75*img_size), img_size)
            x = random.randint(0, img_size-new_img_size)
            y = random.randint(0, img_size-new_img_size)
            image, boxes = self.crop_image(image, boxes, x, y, x+new_img_size, y+new_img_size)
            return self.resize_image(image, boxes)
        else:
            if self.img_size != 1024:
                return self.resize_image(image, boxes)
            else:
                return image, boxes
    def load_image_and_boxes(self, image_id):
        """Load one RGB image (directory chosen by its source) and its boxes.

        Returns (img uint8 HWC, boxes float (N, 4) xyxy, source string).
        """
        tmp_df = self.df.loc[self.df['image_id']==image_id]
        source = np.unique(tmp_df.source.values)[0]
        if source == 'wheat2017':
            img_path = '{}/{}.jpg'.format(self.w2017_ext_dir, image_id)
        elif source == 'spike':
            img_path = '{}/{}.jpg'.format(self.spike_ext_dir, image_id)
        else:
            img_path = '{}/{}.jpg'.format(self.root_dir, image_id)
        img = Image.open(img_path)
        img = img.convert('RGB')
        img = np.array(img, dtype=np.uint8)
        boxes = []
        for _, row in tmp_df.iterrows():
            if row['isbox'] == False:
                continue
            boxes.append([float(row['xmin']),float(row['ymin']),float(row['xmax']),float(row['ymax'])])
        boxes = self.refine_boxes(boxes)
        if len(boxes) > 0:
            boxes = np.array(boxes, dtype=float)
        else:
            boxes = np.array([], dtype=float).reshape(0,4)
        return img, boxes, source
    def load_cutmix_image_and_boxes(self, image_id, imsize=1024): #custom mosaic data augmentation
        """Build a 4-image mosaic centered at a random point (xc, yc).

        The requested image occupies the top-left quadrant; three other
        random images fill the remaining quadrants.  'spike' images are
        first cropped to 1024x1024 (left or right edge depending on the
        quadrant) because they are wider than the mosaic tiles.
        """
        image_ids = self.image_ids.copy()
        image_ids.remove(image_id)
        cutmix_image_ids = [image_id] + random.sample(image_ids, 3)
        result_image = np.full((imsize, imsize, 3), 1, dtype=np.uint8)
        result_boxes = []
        # Random mosaic center, kept in the middle half of the canvas.
        xc, yc = [int(random.uniform(imsize * 0.25, imsize * 0.75)) for _ in range(2)]
        for i, img_id in enumerate(cutmix_image_ids):
            image, boxes, source = self.load_image_and_boxes(img_id)
            if source == 'spike':
                height, width = image.shape[0:2]
                if i == 0 or i == 3:
                    image, boxes = self.crop_image(image, boxes, xmin=width-1024, ymin=0, xmax=width, ymax=1024)
                else:
                    image, boxes = self.crop_image(image, boxes, xmin=0, ymin=0, xmax=1024, ymax=1024)
            if i == 0:
                # Top-left quadrant: take the bottom-right corner of the tile.
                image, boxes = self.crop_image(image, boxes, imsize-xc, imsize-yc, imsize, imsize)
                result_image[0:yc, 0:xc,:] = image
                result_boxes.extend(boxes)
            elif i == 1:
                # Top-right quadrant.
                image, boxes = self.crop_image(image, boxes, 0, imsize-yc, imsize-xc, imsize)
                result_image[0:yc, xc:imsize, :] = image
                if boxes.shape[0] > 0:
                    boxes[:,[0,2]] += xc
                result_boxes.extend(boxes)
            elif i == 2:
                # Bottom-right quadrant.
                image, boxes = self.crop_image(image, boxes, 0, 0, imsize-xc, imsize-yc)
                result_image[yc:imsize, xc:imsize, :] = image
                if boxes.shape[0] > 0:
                    boxes[:,[0,2]] += xc
                    boxes[:,[1,3]] += yc
                result_boxes.extend(boxes)
            else:
                # Bottom-left quadrant.
                image, boxes = self.crop_image(image, boxes, imsize-xc, 0, imsize, imsize-yc)
                result_image[yc:imsize, 0:xc, :] = image
                if boxes.shape[0] > 0:
                    boxes[:,[1,3]] += yc
                result_boxes.extend(boxes)
            del image
            del boxes
        del cutmix_image_ids
        del image_ids
        if len(result_boxes) == 0:
            result_boxes = np.array([], dtype=float).reshape(0,4)
        else:
            result_boxes = np.vstack(result_boxes)
            result_boxes[:,[0,2]] = result_boxes[:,[0,2]].clip(0, imsize)
            result_boxes[:,[1,3]] = result_boxes[:,[1,3]].clip(0, imsize)
        return result_image, result_boxes
    def __getitem__(self, index):
        """Return (image tensor CHW float in [0, 1], target dict).

        Train mode: 50% single image / 50% mosaic, then random crop-resize
        and photometric augmentation, resampled until at least one box
        survives.  EffDet targets use yxyx box order; FasterRCNN targets
        use xyxy plus 'area' and 'iscrowd'.
        """
        image_id = self.image_ids[index]
        if self.mode == 'train':
            # Resample augmentations until at least one box remains.
            while(True):
                if random.random() > 0.5:
                    image, boxes, source = self.load_image_and_boxes(image_id)
                    if source == 'spike':
                        # Spike images are wider than 1024: crop a random side.
                        height, width = image.shape[0:2]
                        if random.random() > 0.5:
                            image, boxes = self.crop_image(image, boxes, xmin=0, ymin=0, xmax=1024, ymax=1024)
                        else:
                            image, boxes = self.crop_image(image, boxes, xmin=width-1024, ymin=0, xmax=width, ymax=1024)
                else:
                    image, boxes = self.load_cutmix_image_and_boxes(image_id)
                image, boxes = self.random_crop_resize(image, boxes, p=0.5)
                if len(boxes) > 0:
                    cats = np.ones(boxes.shape[0], dtype=int)
                    annotations = {'image': image, 'bboxes': boxes, 'category_id': cats}
                    augmented = self.train_transforms(**annotations)
                    image = augmented['image']
                    boxes = np.array(augmented['bboxes'])
                    break
        else:
            image, boxes, _ = self.load_image_and_boxes(image_id)
            if self.img_size != 1024:
                image, boxes = self.resize_image(image, boxes)
        if self.network == 'EffDet':
            if boxes.shape[0] == 0:
                target = {
                    "boxes": torch.zeros((0, 4), dtype=torch.float32),
                    "labels": torch.zeros(0, dtype=torch.int64)
                }
            else:
                # Convert xyxy -> yxyx as expected by EffDet.
                boxes[:,[0,1,2,3]] = boxes[:,[1,0,3,2]]
                target = {
                    'boxes': torch.as_tensor(boxes, dtype=torch.float32),
                    'labels': torch.ones((boxes.shape[0],), dtype=torch.int64)
                }
        else:
            if boxes.shape[0] == 0:
                target = {
                    "boxes": torch.zeros((0, 4), dtype=torch.float32),
                    "labels": torch.zeros(0, dtype=torch.int64),
                    "area": torch.zeros(0, dtype=torch.float32),
                    "iscrowd": torch.zeros((0,), dtype=torch.int64)
                }
            else:
                target = {}
                area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
                target['boxes'] = torch.as_tensor(boxes, dtype=torch.float32)
                target['labels'] = torch.ones((boxes.shape[0],), dtype=torch.int64)
                target['area'] = torch.as_tensor(area, dtype=torch.float32)
                target['iscrowd'] = torch.zeros((boxes.shape[0],), dtype=torch.int64)
        # Scale to [0, 1] and move channels first (HWC -> CHW).
        image = image.astype(np.float32)
        image /= 255.0
        image = torch.from_numpy(image).permute(2,0,1)
        return image, target
class WheatTestset(Dataset):
    """Inference-time dataset: yields (image tensor CHW in [0, 1], image_id),
    with no box targets."""
    def __init__(self, df, img_size, root_dir='dataset/train', shuffle=True):
        super(WheatTestset,self).__init__()
        self.df = df
        self.image_ids = list(np.unique(self.df.image_id.values))
        if shuffle:
            random.shuffle(self.image_ids)
        self.img_size = img_size
        self.root_dir = root_dir
        # Resize transform, applied only when the image is not already img_size.
        self.transforms = Resize(height=self.img_size, width=self.img_size, interpolation=1, p=1)
    def __len__(self):
        return len(self.image_ids)
    def __getitem__(self, index):
        image_id = self.image_ids[index]
        img_path = '{}/{}.jpg'.format(self.root_dir, image_id)
        pixels = np.array(Image.open(img_path).convert('RGB'))
        height, width = pixels.shape[0], pixels.shape[1]
        if height != self.img_size or width != self.img_size:
            pixels = self.transforms(image=pixels)['image']
        # Scale to [0, 1] and move channels first (HWC -> CHW).
        scaled = pixels.astype(np.float32) / 255.0
        tensor = torch.from_numpy(scaled).permute(2,0,1)
        return tensor, image_id
class WheatPseudoTestset(Dataset):
    """Pseudo-label training dataset keyed by absolute image paths.

    Mirrors WheatDataset's augmentation (crop / mosaic / photometric) but
    reads ``df.image_path`` directly instead of resolving image_id against
    a source directory, and always emits EffDet-style targets (yxyx box
    order).  Expected df columns: image_path, isbox, xmin, ymin, xmax, ymax.
    """
    def __init__(self, df, img_size, mode='train', bbox_removal_threshold=0.25):
        # bbox_removal_threshold: minimum fraction of a box's area that must
        # survive a crop for the box to be kept (see crop_image).
        super(WheatPseudoTestset,self).__init__()
        self.df = df
        self.image_paths = list(np.unique(self.df.image_path.values))
        self.img_size = img_size
        assert mode in ['train', 'valid']
        self.mode = mode
        self.bbox_removal_threshold = bbox_removal_threshold
        if self.mode == 'train':
            random.shuffle(self.image_paths)
        # Photometric / geometric augmentation pipeline, used in 'train' mode only.
        self.train_transforms = get_aug([
            HorizontalFlip(p=0.5),
            VerticalFlip(p=0.5),
            ToGray(p=0.01),
            OneOf([
                IAAAdditiveGaussianNoise(),
                GaussNoise(),
            ], p=0.2),
            OneOf([
                MotionBlur(p=0.2),
                MedianBlur(blur_limit=3, p=0.1),
                Blur(blur_limit=3, p=0.1),
            ], p=0.2),
            OneOf([
                CLAHE(),
                IAASharpen(),
                IAAEmboss(),
                RandomBrightnessContrast(),
            ], p=0.25),
            HueSaturationValue(p=0.25)
        ])
        # Box-aware resize to (img_size, img_size).
        self.resize_transforms = get_aug([
            Resize(height=self.img_size, width=self.img_size, interpolation=1, p=1)
        ])
    def __len__(self):
        return len(self.image_paths)
    def refine_boxes(self, boxes):
        """Drop degenerate boxes narrower or shorter than 10 pixels."""
        result_boxes = []
        for box in boxes:
            if box[2] - box[0] < 10 or box[3] - box[1] < 10:
                continue
            result_boxes.append(box)
        result_boxes = np.array(result_boxes)
        return result_boxes
    def resize_image(self, image, boxes):
        """Resize image and boxes to (img_size, img_size) via albumentations."""
        cats = np.ones(boxes.shape[0], dtype=int)
        annotations = {'image': image, 'bboxes': boxes, 'category_id': cats}
        augmented = self.resize_transforms(**annotations)
        image = augmented['image']
        boxes = np.array(augmented['bboxes'])
        return image, boxes
    def crop_image(self, image, boxes, xmin, ymin, xmax, ymax):
        """Crop the image to the given window and keep overlapping boxes.

        A box is kept when the window covers more than bbox_removal_threshold
        of the box's own area (bb_overlap); kept boxes are shifted into crop
        coordinates and clipped to the crop bounds.
        """
        image = image[ymin:ymax,xmin:xmax,:]
        cutout_box = [xmin, ymin, xmax, ymax]
        result_boxes = []
        for box in boxes:
            iou = bb_overlap(box, cutout_box)
            if iou > self.bbox_removal_threshold:
                result_boxes.append(box)
        if len(result_boxes) > 0:
            result_boxes = np.array(result_boxes, dtype=float)
            result_boxes[:,[0,2]] -= xmin
            result_boxes[:,[1,3]] -= ymin
            result_boxes[:,[0,2]] = result_boxes[:,[0,2]].clip(0, xmax-xmin)
            result_boxes[:,[1,3]] = result_boxes[:,[1,3]].clip(0, ymax-ymin)
        else:
            result_boxes = np.array([], dtype=float).reshape(0,4)
        return image, result_boxes
    def random_crop_resize(self, image, boxes, img_size=1024, p=0.5):
        """With probability (1 - p): random square crop of 75–100% of img_size,
        then resize to self.img_size; otherwise resize only if needed."""
        if random.random() > p:
            new_img_size = random.randint(int(0.75*img_size), img_size)
            x = random.randint(0, img_size-new_img_size)
            y = random.randint(0, img_size-new_img_size)
            image, boxes = self.crop_image(image, boxes, x, y, x+new_img_size, y+new_img_size)
            return self.resize_image(image, boxes)
        else:
            if self.img_size != 1024:
                return self.resize_image(image, boxes)
            else:
                return image, boxes
    def load_image_and_boxes(self, image_path):
        """Load one RGB image from an explicit path with its pseudo-label boxes.

        Images are normalized to 1024x1024 here so the mosaic code can
        assume a fixed size.  Returns (img uint8 HWC, boxes float (N, 4)).
        """
        tmp_df = self.df.loc[self.df['image_path']==image_path]
        img = Image.open(image_path)
        img = img.convert('RGB')
        img = np.array(img, dtype=np.uint8)
        boxes = []
        for _, row in tmp_df.iterrows():
            if row['isbox'] == False:
                continue
            boxes.append([float(row['xmin']),float(row['ymin']),float(row['xmax']),float(row['ymax'])])
        boxes = self.refine_boxes(boxes)
        if img.shape[0] != 1024 or img.shape[1] != 1024:
            augs = get_aug([
                Resize(height=1024, width=1024, interpolation=1, p=1)
            ])
            cats = np.ones(boxes.shape[0], dtype=int)
            annotations = {'image': img, 'bboxes': boxes, 'category_id': cats}
            augmented = augs(**annotations)
            img = augmented['image']
            boxes = np.array(augmented['bboxes'])
        if len(boxes) > 0:
            boxes = np.array(boxes, dtype=float)
        else:
            boxes = np.array([], dtype=float).reshape(0,4)
        return img, boxes
    def load_cutmix_image_and_boxes(self, image_path, imsize=1024): #custom mosaic data augmentation
        """Build a 4-image mosaic centered at a random point (xc, yc); the
        requested image fills the top-left quadrant."""
        image_paths = self.image_paths.copy()
        image_paths.remove(image_path)
        cutmix_image_paths = [image_path] + random.sample(image_paths, 3)
        # Random mosaic center, kept in the middle half of the canvas.
        xc, yc = [int(random.uniform(imsize * 0.25, imsize * 0.75)) for _ in range(2)]
        result_image = np.full((imsize, imsize, 3), 1, dtype=np.uint8)
        result_boxes = []
        for i, img_path in enumerate(cutmix_image_paths):
            image, boxes = self.load_image_and_boxes(img_path)
            if i == 0:
                # Top-left quadrant: take the bottom-right corner of the tile.
                image, boxes = self.crop_image(image, boxes, imsize-xc, imsize-yc, imsize, imsize)
                result_image[0:yc, 0:xc,:] = image
                result_boxes.extend(boxes)
            elif i == 1:
                # Top-right quadrant.
                image, boxes = self.crop_image(image, boxes, 0, imsize-yc, imsize-xc, imsize)
                result_image[0:yc, xc:imsize, :] = image
                if boxes.shape[0] > 0:
                    boxes[:,[0,2]] += xc
                result_boxes.extend(boxes)
            elif i == 2:
                # Bottom-right quadrant.
                image, boxes = self.crop_image(image, boxes, 0, 0, imsize-xc, imsize-yc)
                result_image[yc:imsize, xc:imsize, :] = image
                if boxes.shape[0] > 0:
                    boxes[:,[0,2]] += xc
                    boxes[:,[1,3]] += yc
                result_boxes.extend(boxes)
            else:
                # Bottom-left quadrant.
                image, boxes = self.crop_image(image, boxes, imsize-xc, 0, imsize, imsize-yc)
                result_image[yc:imsize, 0:xc, :] = image
                if boxes.shape[0] > 0:
                    boxes[:,[1,3]] += yc
                result_boxes.extend(boxes)
            del image
            del boxes
        del cutmix_image_paths
        del image_paths
        if len(result_boxes) == 0:
            result_boxes = np.array([], dtype=float).reshape(0,4)
        else:
            result_boxes = np.vstack(result_boxes)
            result_boxes[:,[0,2]] = result_boxes[:,[0,2]].clip(0, imsize)
            result_boxes[:,[1,3]] = result_boxes[:,[1,3]].clip(0, imsize)
        return result_image, result_boxes
    def __getitem__(self, index):
        """Return (image tensor CHW float in [0, 1], EffDet-style target).

        Train mode: 50% single image / 50% mosaic, then random crop-resize
        and photometric augmentation, resampled until at least one box
        survives.  Boxes are emitted in yxyx order.
        """
        image_path = self.image_paths[index]
        if self.mode == 'train':
            # Resample augmentations until at least one box remains.
            while(True):
                if random.random() > 0.5:
                    image, boxes = self.load_image_and_boxes(image_path)
                else:
                    image, boxes = self.load_cutmix_image_and_boxes(image_path)
                image, boxes = self.random_crop_resize(image, boxes, p=0.5)
                if len(boxes) > 0:
                    cats = np.ones(boxes.shape[0], dtype=int)
                    annotations = {'image': image, 'bboxes': boxes, 'category_id': cats}
                    augmented = self.train_transforms(**annotations)
                    image = augmented['image']
                    boxes = np.array(augmented['bboxes'])
                    break
        else:
            image, boxes = self.load_image_and_boxes(image_path)
            if self.img_size != 1024:
                image, boxes = self.resize_image(image, boxes)
        if boxes.shape[0] == 0:
            target = {
                "boxes": torch.zeros((0, 4), dtype=torch.float32),
                "labels": torch.zeros(0, dtype=torch.int64)
            }
        else:
            # Convert xyxy -> yxyx as expected by EffDet.
            boxes[:,[0,1,2,3]] = boxes[:,[1,0,3,2]]
            target = {
                'boxes': torch.as_tensor(boxes, dtype=torch.float32),
                'labels': torch.ones((boxes.shape[0],), dtype=torch.int64)
            }
        # Scale to [0, 1] and move channels first (HWC -> CHW).
        image = image.astype(np.float32)
        image /= 255.0
        image = torch.from_numpy(image).permute(2,0,1)
        return image, target
class BaseWheatTTA:
    """Base class for test-time-augmentation transforms."""
    def augment(self, images):
        """Apply the augmentation to a batch of images (subclass hook)."""
        raise NotImplementedError
    def prepare_boxes(self, boxes):
        """Return a copy of boxes with coordinates ordered so that
        column 0 <= column 2 and column 1 <= column 3."""
        ordered = boxes.copy()
        first_pair = boxes[:, [0,2]]
        second_pair = boxes[:, [1,3]]
        ordered[:,0] = np.min(first_pair, axis=1)
        ordered[:,2] = np.max(first_pair, axis=1)
        ordered[:,1] = np.min(second_pair, axis=1)
        ordered[:,3] = np.max(second_pair, axis=1)
        return ordered
    def deaugment_boxes(self, boxes):
        """Map boxes from augmented coordinates back to the original frame
        (subclass hook)."""
        raise NotImplementedError
class TTAHorizontalFlip(BaseWheatTTA):
    """TTA transform flipping along dim 1 of each CHW tensor (dim 2 batched).

    NOTE(review): on a CHW tensor, flip(1) mirrors the height axis — verify
    the box index convention ([1, 3]) matches the caller's box layout.
    """
    def __init__(self, image_size):
        # image_size: side length used to mirror box coordinates back.
        self.image_size = image_size
    def fasterrcnn_augment(self, images):
        # images: iterable of per-image CHW tensors.
        return [img.flip(1) for img in images]
    def effdet_augment(self, images):
        # images: batched NCHW tensor.
        return images.flip(2)
    def deaugment_boxes(self, boxes):
        # Mirror columns [1, 3] and re-order min/max.
        boxes[:, [1,3]] = self.image_size - boxes[:, [3,1]]
        return self.prepare_boxes(boxes)
class TTAVerticalFlip(BaseWheatTTA):
    """TTA transform flipping along dim 2 of each CHW tensor (dim 3 batched)."""
    def __init__(self, image_size):
        # image_size: side length used to mirror box coordinates back.
        self.image_size = image_size
    def fasterrcnn_augment(self, images):
        # images: iterable of per-image CHW tensors.
        return list(image.flip(2) for image in images)
    def effdet_augment(self, images):
        # images: batched NCHW tensor.
        return images.flip(3)
    def deaugment_boxes(self, boxes):
        """Mirror columns [0, 2] back to the original frame.

        Fix: also normalize via prepare_boxes(), as TTAHorizontalFlip and
        TTARotate90 do, so column 0 <= column 2 is guaranteed downstream.
        For this mirror the normalization never changes the values (the
        min/max order is preserved), so behavior is backward-compatible.
        """
        boxes[:, [0,2]] = self.image_size - boxes[:, [2,0]]
        return self.prepare_boxes(boxes)
class TTARotate90(BaseWheatTTA):
    """TTA transform rotating images 90 degrees over the spatial dims."""
    def __init__(self, image_size):
        # image_size: side length used to rotate box coordinates back.
        self.image_size = image_size
    def fasterrcnn_augment(self, images):
        # images: iterable of per-image CHW tensors.
        return [torch.rot90(img, 1, (1, 2)) for img in images]
    def effdet_augment(self, images):
        # images: batched NCHW tensor.
        return torch.rot90(images, 1, (2, 3))
    def deaugment_boxes(self, boxes):
        # Invert the rotation on box coordinates, then re-order min/max.
        rotated = boxes.copy()
        rotated[:, [0,2]] = self.image_size - boxes[:, [1,3]]
        rotated[:, [1,3]] = boxes[:, [2,0]]
        return self.prepare_boxes(rotated)
class TTACompose:
    """Chain several TTA transforms: augment in order, deaugment in reverse."""
    def __init__(self, transforms):
        # transforms: sequence of BaseWheatTTA-style transform objects.
        self.transforms = transforms
    def fasterrcnn_augment(self, images):
        # Apply each transform's per-image (list of tensors) augmentation.
        for transform in self.transforms:
            images = transform.fasterrcnn_augment(images)
        return images
    def effdet_augment(self, images):
        # Apply each transform's batched-tensor augmentation.
        for transform in self.transforms:
            images = transform.effdet_augment(images)
        return images
    def prepare_boxes(self, boxes):
        # Return a copy with column 0 <= column 2 and column 1 <= column 3.
        result_boxes = boxes.copy()
        result_boxes[:,0] = np.min(boxes[:, [0,2]], axis=1)
        result_boxes[:,2] = np.max(boxes[:, [0,2]], axis=1)
        result_boxes[:,1] = np.min(boxes[:, [1,3]], axis=1)
        result_boxes[:,3] = np.max(boxes[:, [1,3]], axis=1)
        return result_boxes
    def deaugment_boxes(self, boxes):
        # Undo each transform in reverse order of application.
        for transform in self.transforms[::-1]:
            boxes = transform.deaugment_boxes(boxes)
return self.prepare_boxes(boxes) | 40.04028 | 128 | 0.54691 | 2,850 | 22,863 | 4.221404 | 0.07614 | 0.069487 | 0.027928 | 0.020946 | 0.831435 | 0.812651 | 0.789128 | 0.765024 | 0.737511 | 0.72147 | 0 | 0.035581 | 0.320212 | 22,863 | 571 | 129 | 40.04028 | 0.738515 | 0.002712 | 0 | 0.746094 | 0 | 0 | 0.024211 | 0 | 0 | 0 | 0 | 0 | 0.005859 | 1 | 0.083984 | false | 0 | 0.019531 | 0.019531 | 0.193359 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
69680864c61fecd30d639c7dda81015a8c700bfb | 26,686 | py | Python | src/python/zquantum/optimizers/daemon_optimizer/proxy/rest_test.py | wugaxp/z-quantum-optimizers | 6af467a724afa20b41a24a12c4ed4688f7211c5a | [
"Apache-2.0"
] | null | null | null | src/python/zquantum/optimizers/daemon_optimizer/proxy/rest_test.py | wugaxp/z-quantum-optimizers | 6af467a724afa20b41a24a12c4ed4688f7211c5a | [
"Apache-2.0"
] | null | null | null | src/python/zquantum/optimizers/daemon_optimizer/proxy/rest_test.py | wugaxp/z-quantum-optimizers | 6af467a724afa20b41a24a12c4ed4688f7211c5a | [
"Apache-2.0"
] | null | null | null | from .rest import start_proxy
from zquantum.core.circuit import save_circuit_template_params, load_circuit_template_params, generate_random_ansatz_params
from zquantum.core.utils import load_value_estimate, save_value_estimate, ValueEstimate
from multiprocessing import Process
import socket
import json
import http.client
import time
import numpy as np
import subprocess
import unittest
class TestOptimizationServer(unittest.TestCase):
def setUp(self):
self.listening_port = 8080
self.proxy_process = Process(target=start_proxy, args=[self.listening_port])
self.proxy_process.start()
# Get the proxy IP address
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
self.ipaddress = str(s.getsockname()[0])
s.close()
self.max_tries = 60
counter = 0
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
while(s.connect_ex((self.ipaddress, self.listening_port)) != 0):
time.sleep(1)
counter += 1
if counter > self.max_tries:
raise SystemExit("Testing server took too long to start.")
def test_ping_204(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
connection.request('GET', '/')
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
def test_get_starting_status(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
connection.request('GET', '/status')
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# Assert that response body is STARTING
self.assertEqual(response.read().decode("utf-8"), "STARTING")
def test_post_status(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# Check that status is STARTING
connection.request('GET', '/status')
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# Assert that response body is STARTING
self.assertEqual(response.read().decode("utf-8"), "STARTING")
# set status to be OPTIMIZING
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# Check that status is OPTIMIZING
connection.request('GET', '/status')
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# Assert that response body is OPTIMIZING
self.assertEqual(response.read().decode("utf-8"), "OPTIMIZING")
def test_unsuccessful_post_invalid_status(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# Check that status is STARTING
connection.request('GET', '/status')
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# Assert that response body is STARTING
self.assertEqual(response.read().decode("utf-8"), "STARTING")
# attempt to set status to be INVALID STATUS
connection.request('POST', '/status', body="INVALID STATUS")
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('status') != -1)
    def test_post_current_argument_values(self):
        """Round-trip: POST argument values while OPTIMIZING, then GET them back
        and check the returned evaluation id and parameter payload match."""
        params = np.random.random((2,2))
        save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
        with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
            data = json.load(f)
        connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
        # set status to be OPTIMIZING (required before posting argument values)
        connection.request('POST', '/status', body="OPTIMIZING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 200)
        # decode id from response
        id_from_argument_value_post = response.read().decode("utf-8")
        connection.request('GET', '/cost-function-argument-values')
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 200)
        # remove id from response and verify it is correct
        response_json = json.loads(response.read().decode("utf-8"))
        response_id = response_json.pop("optimization-evaluation-id")
        self.assertEqual(id_from_argument_value_post, response_id)
        # assert argument values are same as above (via file round-trip through
        # the project's circuit-parameter loader)
        with open('proxy_test_current_argument_values_artifact_from_proxy.json', 'w') as f:
            f.write(json.dumps(response_json))
        new_data_loaded_from_file = load_circuit_template_params('proxy_test_current_argument_values_artifact_from_proxy.json')
        np.testing.assert_array_equal(params, new_data_loaded_from_file)
    def test_unsuccessful_post_current_argument_values_wrong_status(self):
        """Argument values POSTed while the proxy is EVALUATING are refused
        with 409 and an error message mentioning the status."""
        params = np.random.random((2,2))
        save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
        with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
            data = json.load(f)
        connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
        # set status to be EVALUATING - new argument values should not be able to
        # be posted while that is the status
        connection.request('POST', '/status', body="EVALUATING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 409)
        response_lower = response.read().decode("utf-8").lower()
        self.assertTrue(response_lower.find('error') != -1)
        self.assertTrue(response_lower.find('status') != -1)
def test_unsuccessful_post_argument_values_invalid_JSON(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# set status to be OPTIMIZING
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
connection.request('POST', '/cost-function-argument-values', body="invalid JSON")
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('format') != -1)
def test_unsuccessful_post_argument_values_no_keys_JSON(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# set status to be OPTIMIZING
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode("invalid JSON"))
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('dict') != -1)
def test_unsuccessful_post_argument_values_None(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# set status to be OPTIMIZING
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
connection.request('POST', '/cost-function-argument-values', body=None)
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('format') != -1)
    def test_post_result(self):
        """Full evaluation cycle: post argument values (OPTIMIZING), post a
        cost-function result carrying the returned id (EVALUATING), then GET
        the result back by id and check value and precision survive."""
        connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
        # POST argument values to allow proxy to verify that id that comes in with
        # result POST are correct
        params = np.random.random((2,2))
        save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
        with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
            arg_val_data = json.load(f)
        # set status to be OPTIMIZING in order to POST argument values
        connection.request('POST', '/status', body="OPTIMIZING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # POST argument values
        connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 200)
        # decode id from response
        id_from_argument_value_post = response.read().decode("utf-8")
        # set status to be EVALUATING
        connection.request('POST', '/status', body="EVALUATING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # make cost function result
        result = ValueEstimate(1.5,10.0)
        save_value_estimate(result, 'proxy_test_results_artifact.json')
        with open('proxy_test_results_artifact.json', 'r') as f:
            result_data = json.load(f)
        result_data["optimization-evaluation-id"] = id_from_argument_value_post
        # POST cost function result
        connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode(result_data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # GET cost function result (the id is passed in the request body)
        connection.request('GET', '/cost-function-results', body=id_from_argument_value_post)
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 200)
        # remove id from response and verify it is correct
        response_string = response.read().decode("utf-8")
        response_json = json.loads(response_string)
        response_id = response_json.pop("optimization-evaluation-id")
        self.assertEqual(id_from_argument_value_post, response_id)
        # assert result is same as above (round-trip through the project's
        # value-estimate loader)
        with open('proxy_test_results_artifact_from_proxy.json', 'w') as f:
            f.write(json.dumps(response_json))
        new_data_loaded_from_file = load_value_estimate('proxy_test_results_artifact_from_proxy.json')
        self.assertEqual(result.value, new_data_loaded_from_file.value)
        self.assertEqual(result.precision, new_data_loaded_from_file.precision)
    def test_unsuccessful_get_result_no_id(self):
        """After a complete post cycle, a GET of /cost-function-results with no
        id in the request body is rejected with 400."""
        connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
        # POST argument values to allow proxy to verify that argument values that come in with
        # Value POST are correct
        params = np.random.random((2,2))
        save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
        with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
            arg_val_data = json.load(f)
        # set status to be OPTIMIZING in order to POST argument values
        connection.request('POST', '/status', body="OPTIMIZING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # POST argument values
        connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 200)
        # decode id from response
        id_from_argument_value_post = response.read().decode("utf-8")
        # set status to be EVALUATING
        connection.request('POST', '/status', body="EVALUATING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # make cost function result
        result = ValueEstimate(1.5,10.0)
        save_value_estimate(result, 'proxy_test_results_artifact.json')
        with open('proxy_test_results_artifact.json', 'r') as f:
            result_data = json.load(f)
        result_data["optimization-evaluation-id"] = id_from_argument_value_post
        # POST cost function result
        connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode(result_data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # GET cost function result
        connection.request('GET', '/cost-function-results') # Will fail bc there's no ID in the body
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 400)
        response_lower = response.read().decode("utf-8").lower()
        self.assertTrue(response_lower.find('error') != -1)
        self.assertTrue(response_lower.find('id') != -1)
    def test_unsuccessful_get_result_wrong_id(self):
        """After a complete post cycle, a GET of /cost-function-results with an
        id that does not match the stored evaluation is rejected with 400."""
        connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
        # POST argument values to allow proxy to verify that argument values that come in with
        # Value POST are correct
        params = np.random.random((2,2))
        save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
        with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
            arg_val_data = json.load(f)
        # set status to be OPTIMIZING in order to POST argument values
        connection.request('POST', '/status', body="OPTIMIZING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # POST argument values
        connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 200)
        # decode id from response
        id_from_argument_value_post = response.read().decode("utf-8")
        # set status to be EVALUATING
        connection.request('POST', '/status', body="EVALUATING")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # make cost function result
        result = ValueEstimate(1.5,10.0)
        save_value_estimate(result, 'proxy_test_results_artifact.json')
        with open('proxy_test_results_artifact.json', 'r') as f:
            result_data = json.load(f)
        result_data["optimization-evaluation-id"] = id_from_argument_value_post
        # POST cost function result
        connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode(result_data))
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 204)
        # GET cost function result with a non-existent id
        connection.request('GET', '/cost-function-results', body="wrongid")
        response = connection.getresponse()
        self.assertEqual(response.getcode(), 400)
        response_lower = response.read().decode("utf-8").lower()
        self.assertTrue(response_lower.find('error') != -1)
        self.assertTrue(response_lower.find('id') != -1)
def test_unsuccessful_post_result_wrong_id(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# POST argument values to allow proxy to verify that argument values that come in with
# Value POST are correct
params = np.random.random((2,2))
save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
arg_val_data = json.load(f)
# set status to be OPTIMIZING in order to POST argument values
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST argument values
connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# set status to be EVALUATING
connection.request('POST', '/status', body="EVALUATING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# make cost function result
result = ValueEstimate(1.5,10.0)
save_value_estimate(result, 'proxy_test_results_artifact.json')
with open('proxy_test_results_artifact.json', 'r') as f:
result_data = json.load(f)
result_data["optimization-evaluation-id"] = "wrongID"
# POST cost function result
connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode(result_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 409)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('id') != -1)
def test_unsuccessful_post_result_no_id(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# POST argument values to allow proxy to verify that argument values that come in with
# Value POST are correct
params = np.random.random((2,2))
save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
arg_val_data = json.load(f)
# set status to be OPTIMIZING in order to POST argument values
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST argument values
connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# set status to be EVALUATING
connection.request('POST', '/status', body="EVALUATING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# make cost function result
result = ValueEstimate(1.5,10.0)
save_value_estimate(result, 'proxy_test_results_artifact.json')
with open('proxy_test_results_artifact.json', 'r') as f:
result_data = json.load(f)
# POST cost function result
connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode(result_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('id') != -1)
def test_unsuccessful_post_result_no_keys_JSON(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# set status to be EVALUATING
connection.request('POST', '/status', body="EVALUATING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST cost function result
connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode("invalidJSON"))
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('dict') != -1)
def test_unsuccessful_post_result_body_None(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# POST argument values to allow proxy to verify that argument values that come in with
# Value POST are correct
params = np.random.random((2,2))
save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
arg_val_data = json.load(f)
# set status to be OPTIMIZING in order to POST argument values
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST argument values
connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# set status to be EVALUATING
connection.request('POST', '/status', body="EVALUATING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST cost function result
connection.request('POST', '/cost-function-results', body=None)
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('format') != -1)
def test_unsuccessful_post_result_wrong_status(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# POST argument values to allow proxy to verify that argument values that come in with
# Value POST are correct
params = np.random.random((2,2))
save_circuit_template_params(params, 'proxy_test_current_argument_values_artifact.json')
with open('proxy_test_current_argument_values_artifact.json', 'r') as f:
arg_val_data = json.load(f)
# set status to be OPTIMIZING in order to POST argument values
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST argument values
connection.request('POST', '/cost-function-argument-values', body=json.JSONEncoder().encode(arg_val_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 200)
# decode id from response
id_from_argument_value_post = response.read().decode("utf-8")
# set status to be EVALUATING
connection.request('POST', '/status', body="EVALUATING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# make cost function result
result = ValueEstimate(1.5,10.0)
save_value_estimate(result, 'proxy_test_results_artifact.json')
with open('proxy_test_results_artifact.json', 'r') as f:
result_data = json.load(f)
result_data["optimization-evaluation-id"] = id_from_argument_value_post
# set status to be OPTIMIZING - new results should not be able to
# be posted while that is the status
connection.request('POST', '/status', body="OPTIMIZING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST cost function result
connection.request('POST', '/cost-function-results', body=json.JSONEncoder().encode(result_data))
response = connection.getresponse()
self.assertEqual(response.getcode(), 409)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('status') != -1)
self.assertTrue(response_lower.find('evaluating') != -1)
def test_unsuccessful_post_result_invalid_JSON(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
# set status to be EVALUATING
connection.request('POST', '/status', body="EVALUATING")
response = connection.getresponse()
self.assertEqual(response.getcode(), 204)
# POST cost function result
connection.request('POST', '/cost-function-results', body="invalid JSON")
response = connection.getresponse()
self.assertEqual(response.getcode(), 400)
response_lower = response.read().decode("utf-8").lower()
self.assertTrue(response_lower.find('error') != -1)
self.assertTrue(response_lower.find('format') != -1)
def tearDown(self):
connection = http.client.HTTPConnection(self.ipaddress+":"+str(self.listening_port), timeout=2)
connection.request('POST', '/shutdown')
response = connection.getresponse()
print(response.read().decode("utf-8"))
self.proxy_process.terminate()
counter = 0
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
while(s.connect_ex((self.ipaddress, self.listening_port)) == 0):
time.sleep(1)
counter += 1
if counter > self.max_tries:
raise SystemExit("Testing server took too long to start.")
@classmethod
def tearDownClass(cls):
subprocess.call(["rm", 'proxy_test_current_argument_values_artifact.json',
'proxy_test_current_argument_values_artifact_from_proxy.json',
'proxy_test_results_artifact.json',
'proxy_test_results_artifact_from_proxy.json',
'client_mock_evaluation_result.json']) | 47.910233 | 127 | 0.673649 | 3,104 | 26,686 | 5.624356 | 0.060567 | 0.055333 | 0.076412 | 0.102074 | 0.927082 | 0.91408 | 0.903826 | 0.886184 | 0.881831 | 0.87427 | 0 | 0.014435 | 0.208237 | 26,686 | 557 | 128 | 47.910233 | 0.811823 | 0.113318 | 0 | 0.795213 | 1 | 0 | 0.154332 | 0.106196 | 0 | 0 | 0 | 0 | 0.239362 | 1 | 0.055851 | false | 0 | 0.029255 | 0 | 0.087766 | 0.00266 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
15cb3f9f14c2b0cadb153658d5d95a5c2ae6bacf | 123 | py | Python | tests/transform/test_plain_token_to_conll.py | lanSeFangZhou/tokenizer_tools | edd931ae86a6e381b57e50f8b59ae19d3151d26b | [
"MIT"
] | null | null | null | tests/transform/test_plain_token_to_conll.py | lanSeFangZhou/tokenizer_tools | edd931ae86a6e381b57e50f8b59ae19d3151d26b | [
"MIT"
] | null | null | null | tests/transform/test_plain_token_to_conll.py | lanSeFangZhou/tokenizer_tools | edd931ae86a6e381b57e50f8b59ae19d3151d26b | [
"MIT"
] | null | null | null | from tokenizer_tools.transform.plain_token_to_conll import plain_token_to_conll
def test_plain_token_to_conll():
pass
| 24.6 | 79 | 0.861789 | 20 | 123 | 4.75 | 0.6 | 0.315789 | 0.378947 | 0.536842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 123 | 4 | 80 | 30.75 | 0.855856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 10 |
c603be3224278b2840458a3e9443c54c3d40197a | 92 | py | Python | nit/__init__.py | udasitharani/name-initials-tile-generator | 31fb8722cf1084e0827061f47baadf3ec034184d | [
"MIT"
] | null | null | null | nit/__init__.py | udasitharani/name-initials-tile-generator | 31fb8722cf1084e0827061f47baadf3ec034184d | [
"MIT"
] | null | null | null | nit/__init__.py | udasitharani/name-initials-tile-generator | 31fb8722cf1084e0827061f47baadf3ec034184d | [
"MIT"
] | null | null | null | from .code import generate_tile, generate_tile_from_initials, generate_initials_from_string
| 46 | 91 | 0.902174 | 13 | 92 | 5.846154 | 0.538462 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.065217 | 92 | 1 | 92 | 92 | 0.883721 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d698c22a8e67722ccb6b40e7d1e639df7d207557 | 4,590 | py | Python | test/test_configs.py | sn0wfree/ClickSQL | 392a6fa904c3ad6a2c6ebc50cfcfd794b73051eb | [
"MIT"
] | 6 | 2020-07-30T06:51:51.000Z | 2022-02-25T20:02:53.000Z | test/test_configs.py | sn0wfree/ClickSQL | 392a6fa904c3ad6a2c6ebc50cfcfd794b73051eb | [
"MIT"
] | 14 | 2021-02-10T03:22:55.000Z | 2021-06-08T09:33:03.000Z | test/test_configs.py | sn0wfree/ClickSQL | 392a6fa904c3ad6a2c6ebc50cfcfd794b73051eb | [
"MIT"
] | null | null | null | import unittest
from ClickSQL.conf import Config
class MyTestCaseConfigs_get_set(unittest.TestCase):
def test_Configs_int(self):
Config.set('test', 1)
print(Config.get('test'))
self.assertIsInstance(Config.get('test'), int)
def test_Configs_str(self):
Config.set('test', '1')
print(Config.get('test'))
self.assertIsInstance(Config.get('test'), str)
# def test_Configs_byte(self):
# Config.set('test', b'1')
# print(Config.get('test'))
# self.assertIsInstance(Config.get('test'), str)
def test_Configs_tuple(self):
Config.set('test', ('1', '2'))
print(Config.get('test'))
self.assertIsInstance(Config.get('test'), (list,))
def test_Configs_list(self):
Config.set('test', ['1', '2'])
print(Config.get('test'))
self.assertIsInstance(Config.get('test'), (list,))
def test_Configs_dict(self):
Config.set('test', {'1': 1, '2': 2})
print(Config.get('test'))
self.assertIsInstance(Config.get('test'), (dict,))
# def test_Configs_set(self):
# Config.set('test', {'1', '2'})
# print(Config.get('test'))
# self.assertIsInstance(Config.get('test'), (set,))
def test_Configs_bool(self):
Config.set('test', True)
print(Config.get('test'))
self.assertIsInstance(Config.get('test'), bool)
Config.set('test2', False)
print(Config.get('test2'))
self.assertIsInstance(Config.get('test2'), bool)
class MyTestCaseConfigs_getitem_setitem(unittest.TestCase):
def test_Configs_int(self):
Config['test'] = 1
print(Config['test'])
self.assertIsInstance(Config['test'], int)
def test_Configs_str(self):
Config['test'] = '1'
print(Config['test'])
self.assertIsInstance(Config['test'], str)
# def test_Configs_byte(self):
# Config.set('test', b'1')
# print(Config.get('test'))
# self.assertIsInstance(Config.get('test'), str)
def test_Configs_tuple(self):
Config['test'] = ('1', '2')
print(Config['test'])
self.assertIsInstance(Config['test'], (list,))
def test_Configs_list(self):
Config['test'] = ['1', '2']
print(Config['test'])
self.assertIsInstance(Config['test'], (list,))
def test_Configs_dict(self):
Config['test'] = {'1': 1, '2': 2}
# Config.set('test', {'1': 1, '2': 2})
print(Config['test'])
self.assertIsInstance(Config['test'], (dict,))
# def test_Configs_set(self):
# Config.set('test', {'1', '2'})
# print(Config.get('test'))
# self.assertIsInstance(Config.get('test'), (set,))
def test_Configs_bool(self):
Config['test'] = True
# Config.set('test', True)
print(Config['test'])
self.assertIsInstance(Config['test'], bool)
Config['test2'] = False
# Config.set('test2', False)
print(Config['test2'])
self.assertIsInstance(Config['test2'], bool)
class MyTestCaseConfigs_getattr_setattr(unittest.TestCase):
def test_Configs_int(self):
Config.test = 1
print(Config.test)
self.assertIsInstance(Config.test, int)
def test_Configs_str(self):
Config.test = '1'
print(Config.test)
self.assertIsInstance(Config.test, str)
# def test_Configs_byte(self):
# Config.set('test', b'1')
# print(Config.get('test'))
# self.assertIsInstance(Config.get('test'), str)
def test_Configs_tuple(self):
Config.test = ('1', '2')
print(Config.test)
self.assertIsInstance(Config.test, (list,))
def test_Configs_list(self):
Config.test = ['1', '2']
print(Config.test)
self.assertIsInstance(Config.test, (list,))
def test_Configs_dict(self):
Config.test = {'1': 1, '2': 2}
# Config.set('test', {'1': 1, '2': 2})
print(Config.test)
self.assertIsInstance(Config.test, (dict,))
# def test_Configs_set(self):
# Config.set('test', {'1', '2'})
# print(Config.get('test'))
# self.assertIsInstance(Config.get('test'), (set,))
def test_Configs_bool(self):
Config.test = True
# Config.set('test', True)
print(Config.test)
self.assertIsInstance(Config.test, bool)
Config.test2 = False
# Config.set('test2', False)
print(Config.test2)
self.assertIsInstance(Config.test2, bool)
if __name__ == '__main__':
unittest.main()
| 30.397351 | 59 | 0.583442 | 537 | 4,590 | 4.871508 | 0.068901 | 0.137615 | 0.268349 | 0.275229 | 0.918196 | 0.918196 | 0.902523 | 0.902523 | 0.877676 | 0.852064 | 0 | 0.016657 | 0.241394 | 4,590 | 150 | 60 | 30.6 | 0.734635 | 0.22549 | 0 | 0.477273 | 0 | 0 | 0.057605 | 0 | 0 | 0 | 0 | 0 | 0.238636 | 1 | 0.204545 | false | 0 | 0.022727 | 0 | 0.261364 | 0.238636 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ba4744632800bdd73145774b0ea03b8ac95a3dba | 2,695 | py | Python | tests/test_prop.py | kennknowles/python-doublecheck | 4e0b3ddc641bc7fc3ffdbb5475836e9d25b584f7 | [
"Apache-2.0"
] | 1 | 2017-06-07T08:27:07.000Z | 2017-06-07T08:27:07.000Z | tests/test_prop.py | kennknowles/python-doublecheck | 4e0b3ddc641bc7fc3ffdbb5475836e9d25b584f7 | [
"Apache-2.0"
] | null | null | null | tests/test_prop.py | kennknowles/python-doublecheck | 4e0b3ddc641bc7fc3ffdbb5475836e9d25b584f7 | [
"Apache-2.0"
] | null | null | null |
from doublecheck import gen, prop
class TestProp(object):
def test_Falsified(self):
assert prop.Falsified().quickcheck(100).status == prop.TestCase.Falsified
assert prop.Falsified().smallcheck(100).status == prop.TestCase.Falsified
def test_Certain(self):
assert prop.Certain().quickcheck(100).status == prop.TestCase.Certain
assert prop.Certain().smallcheck(100).status == prop.TestCase.Certain
def test_Undecided(self):
assert prop.Undecided().quickcheck(100).status == prop.TestCase.Undecided
assert prop.Undecided().smallcheck(100).status == prop.TestCase.Undecided
def test_Implies(self):
assert prop.Implies(True, True).quickcheck(100).status == prop.TestCase.Certain
assert prop.Implies(True, False).quickcheck(100).status == prop.TestCase.Falsified
assert prop.Implies(False, True).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(False, False).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Certain(), prop.Certain()).quickcheck(100).status == prop.TestCase.Certain
assert prop.Implies(prop.Certain(), prop.Falsified()).quickcheck(100).status == prop.TestCase.Falsified
assert prop.Implies(prop.Certain(), prop.Undecided()).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Falsified(), prop.Certain()).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Falsified(), prop.Falsified()).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Falsified(), prop.Undecided()).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Undecided(), prop.Certain()).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Undecided(), prop.Falsified()).quickcheck(100).status == prop.TestCase.Undecided
assert prop.Implies(prop.Undecided(), prop.Undecided()).quickcheck(100).status == prop.TestCase.Undecided
def test_ForAll(self):
assert prop.ForAll(lambda i: prop.Certain(), gen.Ints()).quickcheck(100).status == prop.TestCase.Certain
assert prop.ForAll(lambda i: i > 0, gen.Ints()).quickcheck(100).status == prop.TestCase.Falsified
# Probably going to miss this random int
assert prop.ForAll(lambda i: i != 42, gen.Ints()).quickcheck(100000).status == prop.TestCase.Certain
assert prop.ForAll(lambda i: i != 42, gen.Ints()).smallcheck(100000).status == prop.TestCase.Falsified
def test_Exists(self):
pass
def test_Iff(self):
pass
def test_Throws(self):
pass
| 50.849057 | 113 | 0.69833 | 325 | 2,695 | 5.766154 | 0.126154 | 0.122732 | 0.220918 | 0.235326 | 0.826041 | 0.781217 | 0.715048 | 0.667556 | 0.634472 | 0.514408 | 0 | 0.035414 | 0.161781 | 2,695 | 52 | 114 | 51.826923 | 0.794157 | 0.0141 | 0 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.638889 | 1 | 0.222222 | false | 0.083333 | 0.027778 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
bab3d1f6e284e9ce9adde66d942f2e22800ccccb | 60,139 | py | Python | ecotaxa_py_client/api/objects_api.py | ecotaxa/ecotaxa_py_client | 956e80a9eaa989679b8d5a108a466013c69ca08f | [
"RSA-MD"
] | 1 | 2022-03-01T01:56:42.000Z | 2022-03-01T01:56:42.000Z | ecotaxa_py_client/api/objects_api.py | ecotaxa/ecotaxa_py_client | 956e80a9eaa989679b8d5a108a466013c69ca08f | [
"RSA-MD"
] | 3 | 2021-12-23T04:48:50.000Z | 2022-01-18T14:01:50.000Z | ecotaxa_py_client/api/objects_api.py | ecotaxa/ecotaxa_py_client | 956e80a9eaa989679b8d5a108a466013c69ca08f | [
"RSA-MD"
] | null | null | null | """
EcoTaxa
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 0.0.28
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from ecotaxa_py_client.api_client import ApiClient, Endpoint as _Endpoint
from ecotaxa_py_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from ecotaxa_py_client.model.body_export_object_set_object_set_export_post import BodyExportObjectSetObjectSetExportPost
from ecotaxa_py_client.model.body_predict_object_set_object_set_predict_post import BodyPredictObjectSetObjectSetPredictPost
from ecotaxa_py_client.model.bulk_update_req import BulkUpdateReq
from ecotaxa_py_client.model.classify_auto_req import ClassifyAutoReq
from ecotaxa_py_client.model.classify_req import ClassifyReq
from ecotaxa_py_client.model.export_rsp import ExportRsp
from ecotaxa_py_client.model.http_validation_error import HTTPValidationError
from ecotaxa_py_client.model.object_set_query_rsp import ObjectSetQueryRsp
from ecotaxa_py_client.model.object_set_revert_to_history_rsp import ObjectSetRevertToHistoryRsp
from ecotaxa_py_client.model.object_set_summary_rsp import ObjectSetSummaryRsp
from ecotaxa_py_client.model.prediction_rsp import PredictionRsp
from ecotaxa_py_client.model.project_filters import ProjectFilters
class ObjectsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):
        """Build the ObjectsApi wrapper and declare one ``_Endpoint`` per operation.

        Each ``_Endpoint`` bundles the route, HTTP method, auth scheme,
        parameter metadata (names, required-ness, OpenAPI types, locations)
        and accepted content types for a single generated operation.

        Args:
            api_client (ApiClient, optional): transport used for every call;
                a default ``ApiClient()`` is created when none is given.
        """
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
        # POST /object_set/classify_auto -> int
        self.classify_auto_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (int,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/classify_auto',
                'operation_id': 'classify_auto_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'classify_auto_req',
                ],
                'required': [
                    'classify_auto_req',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'classify_auto_req':
                        (ClassifyAutoReq,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'classify_auto_req': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/classify -> int
        self.classify_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (int,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/classify',
                'operation_id': 'classify_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'classify_req',
                ],
                'required': [
                    'classify_req',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'classify_req':
                        (ClassifyReq,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'classify_req': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # DELETE /object_set/ -> free-form JSON (any primitive/container type)
        self.erase_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/',
                'operation_id': 'erase_object_set',
                'http_method': 'DELETE',
                'servers': None,
            },
            params_map={
                'all': [
                    'request_body',
                ],
                'required': [
                    'request_body',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'request_body':
                        ([int],),
                },
                'attribute_map': {
                },
                'location_map': {
                    'request_body': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/export -> ExportRsp
        self.export_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (ExportRsp,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/export',
                'operation_id': 'export_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'body_export_object_set_object_set_export_post',
                ],
                'required': [
                    'body_export_object_set_object_set_export_post',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'body_export_object_set_object_set_export_post':
                        (BodyExportObjectSetObjectSetExportPost,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'body_export_object_set_object_set_export_post': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/{project_id}/query -> ObjectSetQueryRsp
        self.get_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (ObjectSetQueryRsp,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/{project_id}/query',
                'operation_id': 'get_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'project_id',
                    'project_filters',
                    'fields',
                    'order_field',
                    'window_start',
                    'window_size',
                ],
                'required': [
                    'project_id',
                    'project_filters',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'project_id':
                        (int,),
                    'project_filters':
                        (ProjectFilters,),
                    'fields':
                        (str,),
                    'order_field':
                        (str,),
                    'window_start':
                        (int,),
                    'window_size':
                        (int,),
                },
                'attribute_map': {
                    'project_id': 'project_id',
                    'fields': 'fields',
                    'order_field': 'order_field',
                    'window_start': 'window_start',
                    'window_size': 'window_size',
                },
                'location_map': {
                    'project_id': 'path',
                    'project_filters': 'body',
                    'fields': 'query',
                    'order_field': 'query',
                    'window_start': 'query',
                    'window_size': 'query',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/{project_id}/summary -> ObjectSetSummaryRsp
        self.get_object_set_summary_endpoint = _Endpoint(
            settings={
                'response_type': (ObjectSetSummaryRsp,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/{project_id}/summary',
                'operation_id': 'get_object_set_summary',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'project_id',
                    'only_total',
                    'project_filters',
                ],
                'required': [
                    'project_id',
                    'only_total',
                    'project_filters',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'project_id':
                        (int,),
                    'only_total':
                        (bool,),
                    'project_filters':
                        (ProjectFilters,),
                },
                'attribute_map': {
                    'project_id': 'project_id',
                    'only_total': 'only_total',
                },
                'location_map': {
                    'project_id': 'path',
                    'only_total': 'query',
                    'project_filters': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/predict -> PredictionRsp
        self.predict_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (PredictionRsp,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/predict',
                'operation_id': 'predict_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'body_predict_object_set_object_set_predict_post',
                ],
                'required': [
                    'body_predict_object_set_object_set_predict_post',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'body_predict_object_set_object_set_predict_post':
                        (BodyPredictObjectSetObjectSetPredictPost,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'body_predict_object_set_object_set_predict_post': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/parents -> ObjectSetQueryRsp
        self.query_object_set_parents_endpoint = _Endpoint(
            settings={
                'response_type': (ObjectSetQueryRsp,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/parents',
                'operation_id': 'query_object_set_parents',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'request_body',
                ],
                'required': [
                    'request_body',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'request_body':
                        ([int],),
                },
                'attribute_map': {
                },
                'location_map': {
                    'request_body': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/{project_id}/reclassify -> int
        self.reclassify_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (int,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/{project_id}/reclassify',
                'operation_id': 'reclassify_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'project_id',
                    'forced_id',
                    'reason',
                    'project_filters',
                ],
                'required': [
                    'project_id',
                    'forced_id',
                    'reason',
                    'project_filters',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'project_id':
                        (int,),
                    'forced_id':
                        (int,),
                    'reason':
                        (str,),
                    'project_filters':
                        (ProjectFilters,),
                },
                'attribute_map': {
                    'project_id': 'project_id',
                    'forced_id': 'forced_id',
                    'reason': 'reason',
                },
                'location_map': {
                    'project_id': 'path',
                    'forced_id': 'query',
                    'reason': 'query',
                    'project_filters': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/{project_id}/reset_to_predicted -> free-form JSON
        self.reset_object_set_to_predicted_endpoint = _Endpoint(
            settings={
                'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/{project_id}/reset_to_predicted',
                'operation_id': 'reset_object_set_to_predicted',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'project_id',
                    'project_filters',
                ],
                'required': [
                    'project_id',
                    'project_filters',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'project_id':
                        (int,),
                    'project_filters':
                        (ProjectFilters,),
                },
                'attribute_map': {
                    'project_id': 'project_id',
                },
                'location_map': {
                    'project_id': 'path',
                    'project_filters': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/{project_id}/revert_to_history -> ObjectSetRevertToHistoryRsp
        self.revert_object_set_to_history_endpoint = _Endpoint(
            settings={
                'response_type': (ObjectSetRevertToHistoryRsp,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/{project_id}/revert_to_history',
                'operation_id': 'revert_object_set_to_history',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'project_id',
                    'dry_run',
                    'project_filters',
                    'target',
                ],
                'required': [
                    'project_id',
                    'dry_run',
                    'project_filters',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'project_id':
                        (int,),
                    'dry_run':
                        (bool,),
                    'project_filters':
                        (ProjectFilters,),
                    'target':
                        (int,),
                },
                'attribute_map': {
                    'project_id': 'project_id',
                    'dry_run': 'dry_run',
                    'target': 'target',
                },
                'location_map': {
                    'project_id': 'path',
                    'dry_run': 'query',
                    'project_filters': 'body',
                    'target': 'query',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /object_set/update -> int
        self.update_object_set_endpoint = _Endpoint(
            settings={
                'response_type': (int,),
                'auth': [
                    'BearerOrCookieAuth'
                ],
                'endpoint_path': '/object_set/update',
                'operation_id': 'update_object_set',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'bulk_update_req',
                ],
                'required': [
                    'bulk_update_req',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'bulk_update_req':
                        (BulkUpdateReq,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'bulk_update_req': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
def classify_auto_object_set(
    self,
    classify_auto_req,
    **kwargs
):
    """Classify Auto Object Set.

    **Set automatic classification** of a set of objects; returns the
    number of updated entities.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        classify_auto_req (ClassifyAutoReq): the classification request.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        int, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['classify_auto_req'] = classify_auto_req
    return self.classify_auto_object_set_endpoint.call_with_http_info(**kwargs)
def classify_object_set(
    self,
    classify_req,
    **kwargs
):
    """Classify Object Set.

    **Change classification and/or qualification for a set of objects.**
    Returns the number of updated entities. 🔒 Current user needs at least
    *Annotate* right on all projects of the specified objects.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        classify_req (ClassifyReq): the classification request.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        int, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['classify_req'] = classify_req
    return self.classify_object_set_endpoint.call_with_http_info(**kwargs)
def erase_object_set(
    self,
    request_body,
    **kwargs
):
    """Erase Object Set.

    **Delete the objects with given object ids.** Returns the number of
    deleted objects, 0, deleted image rows and deleted image files.
    🔒 Current user needs *Manage* right on all projects of the specified
    objects.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        request_body ([int]): ids of the objects to delete.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        bool, date, datetime, dict, float, int, list, str, none_type —
        or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['request_body'] = request_body
    return self.erase_object_set_endpoint.call_with_http_info(**kwargs)
def export_object_set(
    self,
    body_export_object_set_object_set_export_post,
    **kwargs
):
    """Export Object Set.

    **Start an export job for the given object set and options.**

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        body_export_object_set_object_set_export_post
            (BodyExportObjectSetObjectSetExportPost): export request body.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        ExportRsp, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['body_export_object_set_object_set_export_post'] = \
        body_export_object_set_object_set_export_post
    return self.export_object_set_endpoint.call_with_http_info(**kwargs)
def get_object_set(
    self,
    project_id,
    project_filters,
    **kwargs
):
    """Get Object Set.

    Returns **filtered object Ids** for the given project.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        project_id (int): internal, numeric id of the project.
        project_filters (ProjectFilters): filters to apply.

    Keyword Args:
        fields (str): needed object (and ancillary entity) fields, as
            comma-separated 'prefix.field' names — 'obj.' for the main
            object, 'fre.' for free fields, 'img.' for the visible image,
            'txo.' for taxonomy. ``obj.imgcount`` holds the total image
            count for the object. [optional]
        order_field (str): order the result by this field; prefix with
            "-" to reverse. [optional]
        window_start (int): skip this many rows before returning data.
            [optional]
        window_size (int): return at most this many rows. [optional]
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        ObjectSetQueryRsp, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides;
    # optional query kwargs (fields, order_field, ...) pass through as-is.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['project_id'] = project_id
    kwargs['project_filters'] = project_filters
    return self.get_object_set_endpoint.call_with_http_info(**kwargs)
def get_object_set_summary(
    self,
    project_id,
    only_total,
    project_filters,
    **kwargs
):
    """Get Object Set Summary.

    For the given project, with the given filters, **return the
    classification summary**: the total number of objects, and optionally
    the number of Validated, Dubious and Predicted ones.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        project_id (int): internal, numeric id of the project.
        only_total (bool): if True, return only the total number of
            objects; otherwise also return the Validated, Dubious and
            Predicted counts.
        project_filters (ProjectFilters): filters to apply.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        ObjectSetSummaryRsp, or the request thread when called
        asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['project_id'] = project_id
    kwargs['only_total'] = only_total
    kwargs['project_filters'] = project_filters
    return self.get_object_set_summary_endpoint.call_with_http_info(**kwargs)
def predict_object_set(
    self,
    body_predict_object_set_object_set_predict_post,
    **kwargs
):
    """Predict Object Set.

    **Start a prediction** (automatic classification) for the given
    object set and options.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        body_predict_object_set_object_set_predict_post
            (BodyPredictObjectSetObjectSetPredictPost): prediction request
            body.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        PredictionRsp, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['body_predict_object_set_object_set_predict_post'] = \
        body_predict_object_set_object_set_predict_post
    return self.predict_object_set_endpoint.call_with_http_info(**kwargs)
def query_object_set_parents(
    self,
    request_body,
    **kwargs
):
    """Query Object Set Parents.

    **Return object ids, with parent ones and projects** for the objects
    in the given list.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        request_body ([int]): ids of the objects to query.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        ObjectSetQueryRsp, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['request_body'] = request_body
    return self.query_object_set_parents_endpoint.call_with_http_info(**kwargs)
def reclassify_object_set(
    self,
    project_id,
    forced_id,
    reason,
    project_filters,
    **kwargs
):
    """Reclassify Object Set.

    Regardless of present classification or state, **set the new
    classification for this object set.** If the filter designates
    "all with given classification", add a TaxonomyChangeLog entry.
    Returns the number of affected objects.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        project_id (int): internal, numeric id of the project.
        forced_id (int): the new classification Id.
        reason (str): the reason of this new classification.
        project_filters (ProjectFilters): filters to apply.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        int, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['project_id'] = project_id
    kwargs['forced_id'] = forced_id
    kwargs['reason'] = reason
    kwargs['project_filters'] = project_filters
    return self.reclassify_object_set_endpoint.call_with_http_info(**kwargs)
def reset_object_set_to_predicted(
    self,
    project_id,
    project_filters,
    **kwargs
):
    """Reset Object Set To Predicted.

    **Reset to Predicted** all objects for the given project with the
    filters. Returns NULL upon success.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        project_id (int): internal, numeric id of the project.
        project_filters (ProjectFilters): filters to apply.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        bool, date, datetime, dict, float, int, list, str, none_type —
        or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['project_id'] = project_id
    kwargs['project_filters'] = project_filters
    return self.reset_object_set_to_predicted_endpoint.call_with_http_info(**kwargs)
def revert_object_set_to_history(
    self,
    project_id,
    dry_run,
    project_filters,
    **kwargs
):
    """Revert Object Set To History.

    **Revert all objects for the given project**, with the filters, to
    the target.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        project_id (int): internal, numeric id of the project.
        dry_run (bool): if set, no real write is done, but the
            consequences of the revert are replied.
        project_filters (ProjectFilters): filters to apply.

    Keyword Args:
        target (int): use null/None to revert using the last annotation
            from anyone, or a user id for the last annotation from that
            user. [optional]
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        ObjectSetRevertToHistoryRsp, or the request thread when called
        asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides;
    # the optional 'target' kwarg passes through as-is.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['project_id'] = project_id
    kwargs['dry_run'] = dry_run
    kwargs['project_filters'] = project_filters
    return self.revert_object_set_to_history_endpoint.call_with_http_info(**kwargs)
def update_object_set(
    self,
    bulk_update_req,
    **kwargs
):
    """Update Object Set.

    Do the required **update for each object in the set.** Returns the
    number of updated entities. 🔒 Current user needs *Manage* right on
    all projects of the specified objects.

    Synchronous by default. Pass ``async_req=True`` to get a request
    thread back instead; ``thread.get()`` then yields the result.

    Args:
        bulk_update_req (BulkUpdateReq): the bulk-update request.

    Keyword Args:
        async_req (bool): execute the request asynchronously (default False).
        _return_http_data_only (bool): return the body only, without status
            code and headers (default True).
        _preload_content (bool): read/decode the urllib3 response
            (default True).
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair (default None).
        _check_input_type (bool): type-check data sent to the server
            (default True).
        _check_return_type (bool): type-check data received from the server
            (default True).
        _host_index (int/None): index of the server to use (default: read
            from the configuration).

    Returns:
        int, or the request thread when called asynchronously.
    """
    # Fill in framework defaults without clobbering caller overrides.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['bulk_update_req'] = bulk_update_req
    return self.update_object_set_endpoint.call_with_http_info(**kwargs)
| 37.918663 | 1,600 | 0.509885 | 5,642 | 60,139 | 5.161468 | 0.065934 | 0.040177 | 0.021428 | 0.022252 | 0.822225 | 0.791559 | 0.771436 | 0.747364 | 0.72264 | 0.686721 | 0 | 0.002832 | 0.406907 | 60,139 | 1,585 | 1,601 | 37.942587 | 0.81339 | 0.369544 | 0 | 0.634349 | 1 | 0 | 0.238715 | 0.046096 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012004 | false | 0 | 0.014774 | 0 | 0.038781 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
badfebd5662ca705a34fd71288d735579c1863cf | 9,304 | py | Python | comments/admin.py | pinoylearnpython/dev | 3fd904c594a8c5cab7fd1fe2ad775fd519410a8a | [
"MIT"
] | 2 | 2019-10-29T07:41:38.000Z | 2020-01-31T16:46:15.000Z | comments/admin.py | pinoylearnpython/dev | 3fd904c594a8c5cab7fd1fe2ad775fd519410a8a | [
"MIT"
] | null | null | null | comments/admin.py | pinoylearnpython/dev | 3fd904c594a8c5cab7fd1fe2ad775fd519410a8a | [
"MIT"
] | 2 | 2019-04-23T04:40:07.000Z | 2020-02-17T09:11:48.000Z | from django.conf import settings
from import_export.admin import ImportExportModelAdmin
from django.contrib import admin
from bp.models import (Business, BusinessTag, BusinessComment, BusinessReview,
BusinessInquiry)
from bp.resources import (BusinessResource, BusinessTagResource,
BusinessCommentResource, BusinessReviewResource,
BusinessInquiryResource)
class BusinessAdmin(ImportExportModelAdmin):
    """Admin for Business rows, routed to the dedicated BP database."""

    # Alias of the database all reads/writes for this admin go through.
    using = settings.APP_LABEL_BP

    resource_class = BusinessResource

    list_display = ('id', 'company_name', 'tel_no', 'fax_no', 'email',
                    'is_active', 'is_deleted', 'created_by', 'created_date')
    search_fields = ['id', 'company_name', 'address', 'tel_no', 'fax_no',
                     'email', 'website', 'office_hours', 'short_desc', 'about',
                     'created_by']
    fieldsets = (
        (None, {
            'fields': ('company_name', 'address', 'tel_no', 'fax_no', 'email',
                       'website', 'is_website_no_follow', 'office_hours',
                       'short_desc', 'about', 'is_active', 'created_by',
                       'modified_by', 'modified_date', 'is_deleted',
                       'deleted_by', 'deleted_date')
        }),
    )

    def save_model(self, request, obj, form, change):
        # Persist to the BP database instead of 'default'.
        obj.save(using=self.using)

    def delete_model(self, request, obj):
        # Delete from the BP database instead of 'default'.
        obj.delete(using=self.using)

    def get_queryset(self, request):
        # Query the BP database instead of 'default'.
        return super().get_queryset(request).using(self.using)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Populate ForeignKey widgets from the BP database.
        return super().formfield_for_foreignkey(db_field, request, using=self.using, **kwargs)

    def formfield_for_manytomany(self, db_field, request, **kwargs):
        # Populate ManyToMany widgets from the BP database.
        return super().formfield_for_manytomany(db_field, request, using=self.using, **kwargs)


admin.site.register(Business, BusinessAdmin)
class BusinessTagAdmin(ImportExportModelAdmin):
    """Admin for BusinessTag rows, routed to the dedicated BP database."""

    # Alias of the database all reads/writes for this admin go through.
    using = settings.APP_LABEL_BP

    resource_class = BusinessTagResource

    list_display = ('id', 'business_id', 'tag_name', 'tag_name_slug',
                    'is_active', 'is_deleted', 'created_by', 'created_date')
    search_fields = ['id', 'business_id', 'tag_name', 'tag_name_slug',
                     'is_active', 'created_by']
    fieldsets = (
        (None, {
            'fields': ('business_id', 'tag_name', 'tag_name_slug', 'is_active',
                       'created_by', 'modified_by', 'modified_date',
                       'is_deleted', 'deleted_by', 'deleted_date')
        }),
    )

    def save_model(self, request, obj, form, change):
        # Persist to the BP database instead of 'default'.
        obj.save(using=self.using)

    def delete_model(self, request, obj):
        # Delete from the BP database instead of 'default'.
        obj.delete(using=self.using)

    def get_queryset(self, request):
        # Query the BP database instead of 'default'.
        return super().get_queryset(request).using(self.using)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Populate ForeignKey widgets from the BP database.
        return super().formfield_for_foreignkey(db_field, request, using=self.using, **kwargs)

    def formfield_for_manytomany(self, db_field, request, **kwargs):
        # Populate ManyToMany widgets from the BP database.
        return super().formfield_for_manytomany(db_field, request, using=self.using, **kwargs)


admin.site.register(BusinessTag, BusinessTagAdmin)
class BusinessCommentAdmin(ImportExportModelAdmin):
using = settings.APP_LABEL_BP
list_display = ('id', 'business_id', 'full_name', 'is_active',
'is_deleted', 'created_by', 'created_date')
search_fields = ['id', 'business_id', 'full_name', 'comment', 'created_by']
fieldsets = (
(None, {
'fields': ('business_id', 'full_name', 'comment', 'is_active',
'created_by', 'modified_by', 'modified_date',
'is_deleted', 'deleted_by', 'deleted_date')
}),
)
def save_model(self, request, obj, form, change):
# Tell Django to save objects to the 'other' database.
obj.save(using=self.using)
def delete_model(self, request, obj):
# Tell Django to delete objects from the 'other' database
obj.delete(using=self.using)
def get_queryset(self, request):
# Tell Django to look for objects on the 'other' database.
return super().get_queryset(request).using(self.using)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
# Tell Django to populate ForeignKey widgets using a query
# on the 'other' database.
return super().formfield_for_foreignkey(db_field, request, using=self.using, **kwargs)
def formfield_for_manytomany(self, db_field, request, **kwargs):
# Tell Django to populate ManyToMany widgets using a query
# on the 'other' database.
return super().formfield_for_manytomany(db_field, request, using=self.using, **kwargs)
resource_class = BusinessCommentResource
admin.site.register(BusinessComment, BusinessCommentAdmin)
class BusinessReviewAdmin(ImportExportModelAdmin):
using = settings.APP_LABEL_BP
list_display = ('id', 'business_id', 'full_name', 'email', 'rate',
'is_active', 'is_deleted', 'created_by',
'created_date')
search_fields = ['id', 'business_id', 'full_name', 'email', 'rate',
'review', 'created_by']
fieldsets = (
(None, {
'fields': ('business_id', 'full_name', 'email', 'rate', 'review',
'is_active', 'created_by', 'modified_by',
'modified_date', 'is_deleted', 'deleted_by',
'deleted_date')
}),
)
def save_model(self, request, obj, form, change):
# Tell Django to save objects to the 'other' database.
obj.save(using=self.using)
def delete_model(self, request, obj):
# Tell Django to delete objects from the 'other' database
obj.delete(using=self.using)
def get_queryset(self, request):
# Tell Django to look for objects on the 'other' database.
return super().get_queryset(request).using(self.using)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
# Tell Django to populate ForeignKey widgets using a query
# on the 'other' database.
return super().formfield_for_foreignkey(db_field, request, using=self.using, **kwargs)
def formfield_for_manytomany(self, db_field, request, **kwargs):
# Tell Django to populate ManyToMany widgets using a query
# on the 'other' database.
return super().formfield_for_manytomany(db_field, request, using=self.using, **kwargs)
resource_class = BusinessReviewResource
admin.site.register(BusinessReview, BusinessReviewAdmin)
class BusinessInquiryAdmin(ImportExportModelAdmin):
using = settings.APP_LABEL_BP
list_display = ('id', 'business_id', 'full_name', 'email', 'subject',
'is_active', 'is_deleted', 'created_by',
'created_date')
search_fields = ['id', 'business_id', 'full_name', 'email', 'subject',
'inquiry', 'created_by']
fieldsets = (
(None, {
'fields': ('business_id', 'full_name', 'email', 'subject',
'inquiry', 'is_active', 'created_by', 'modified_by',
'modified_date', 'is_deleted', 'deleted_by',
'deleted_date')
}),
)
def save_model(self, request, obj, form, change):
# Tell Django to save objects to the 'other' database.
obj.save(using=self.using)
def delete_model(self, request, obj):
# Tell Django to delete objects from the 'other' database
obj.delete(using=self.using)
def get_queryset(self, request):
# Tell Django to look for objects on the 'other' database.
return super().get_queryset(request).using(self.using)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
# Tell Django to populate ForeignKey widgets using a query
# on the 'other' database.
return super().formfield_for_foreignkey(db_field, request, using=self.using, **kwargs)
def formfield_for_manytomany(self, db_field, request, **kwargs):
# Tell Django to populate ManyToMany widgets using a query
# on the 'other' database.
return super().formfield_for_manytomany(db_field, request, using=self.using, **kwargs)
resource_class = BusinessInquiryResource
admin.site.register(BusinessInquiry, BusinessInquiryAdmin)
| 40.277056 | 94 | 0.638972 | 1,060 | 9,304 | 5.407547 | 0.096226 | 0.043615 | 0.052338 | 0.044487 | 0.862177 | 0.84351 | 0.84351 | 0.829902 | 0.829902 | 0.797802 | 0 | 0 | 0.247743 | 9,304 | 230 | 95 | 40.452174 | 0.818974 | 0.177236 | 0 | 0.604317 | 0 | 0 | 0.164982 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.179856 | false | 0 | 0.071942 | 0.107914 | 0.57554 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
bafbe0d7a2166a0cd6de650bd69a3c534ee08e74 | 7,050 | py | Python | donkeypart_dcmotor/ta7291p.py | coolerking/donkeypart_dcmotor | 3bc240696925225f5dca12c59370097f47cd9c7f | [
"MIT"
] | 1 | 2021-02-25T12:02:21.000Z | 2021-02-25T12:02:21.000Z | donkeypart_dcmotor/ta7291p.py | coolerking/donkeypart_dcmotor | 3bc240696925225f5dca12c59370097f47cd9c7f | [
"MIT"
] | null | null | null | donkeypart_dcmotor/ta7291p.py | coolerking/donkeypart_dcmotor | 3bc240696925225f5dca12c59370097f47cd9c7f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
TA7291Pモータドライバを表すクラスを提供する。
TA7291Pは、PWMピンによる制御をおこなうことで速度調整が可能である。
TA7291Pは、Vref ピンを10kΩ抵抗経由でVcc(Pi:5V)に接続し、IN1/IN2ピンをPWM化して
速度調整を行うことができる。
しかし本クラスでは、IN1、IN2はそれぞれ通常のGPIOピン、Vrefと接続したGPIOピンを
疑似PWM化して使用する前提とする。
"""
import pigpio
class MotorDriverWithoutVref:
'''
TA7219Pに接続されたモータを駆動させるためのドライバクラス。
VrefにVccより低い一定電圧をかけ、IN1/IN2をPWM化して速度調節する場合は
こちらのクラスを使用する。
消費するGPIOピン数を減らしたい場合に使用する(更に減らしたい場合はI2C化)。
指定するPWM値の範囲は0~100であるが、マイナス値を指定した場合は逆転動作を
実行する。
'''
def __init__(self, pi, gpio_in1, gpio_in2, pwm_range=255, pwm_freq=50):
'''
TA7219Pと接続されたGPIO番号をセットし、各ピンを使用可能な状態にする。
引数:
pi pigpioパッケージのpiインスタンス
gpio_in1 IN1と結線されているGPIOの番号
gpio_in2 IN2と結線されているGPIOの番号
pwm_range PWM1周期を構成するクロックの個数(デフォルト255)
pwm_freq PWM 周波数(単位:Hz, 0以上を指定、デフォルト50Hz)
戻り値:
なし
'''
self.gpio_in1 = gpio_in1
self.gpio_in2 = gpio_in2
self.pi = pi
self.pi.set_mode(self.gpio_in1, pigpio.OUTPUT)
self.pi.set_mode(self.gpio_in2, pigpio.OUTPUT)
# PWMの周波数(Hz:1秒に何回か)
self.pwm_freq = pwm_freq
self.pi.set_PWM_frequency(self.gpio_in1, self.pwm_freq)
self.pi.set_PWM_frequency(self.gpio_in2, self.pwm_freq)
# PWM1周期を構成するクロックの個数
self.pwm_range = pwm_range
self.pi.set_PWM_range(self.gpio_in1, self.pwm_range)
self.pi.set_PWM_range(self.gpio_in2, self.pwm_range)
# 初期状態、動力なし(free)にする
self.pi.set_PWM_dutycycle(self.gpio_in1, 0)
self.pi.set_PWM_dutycycle(self.gpio_in2, 0)
def free(self):
'''
動力なし状態にする。
引数:
なし
戻り値:なし
'''
self.pi.set_PWM_dutycycle(self.gpio_in1, 0)
self.pi.set_PWM_dutycycle(self.gpio_in2, 0)
def brake(self):
'''
制動停止状態にする。
引数:
なし
戻り値:
なし
'''
self.pi.set_PWM_dutycycle(self.gpio_in1, self.pwm_range)
self.pi.set_PWM_dutycycle(self.gpio_in2, self.pwm_range)
def move(self, input_value):
"""
Controller/AutoPilotから入力されたアナログ値(範囲[-1.0, 1.0])に
従ってモータを動かす。
引数:
input_value float([0.0, 1.0]) DCモータへの入力値
戻り値:
なし
"""
if input_value > 0:
self._forward(self._to_duty_cycle(input_value))
elif input_value < 0:
self._back(self._to_duty_cycle(abs(input_value)))
else:
self.free()
def _forward(self, pwm_duty_cycle):
'''
引数で指定された正値でモータを正転させる。
引数:
pwm_duty_cycle float([0.0, 1.0]) DCモータへの入力値
戻り値:
なし
'''
self.pi.set_PWM_dutycycle(self.gpio_in1, pwm_duty_cycle)
self.pi.set_PWM_dutycycle(self.gpio_in2, 0)
def _back(self, pwm_duty_cycle):
'''
引数で指定された正値でモータを逆転させる。
引数:
pwm_duty_cycle float([0.0, 1.0]) DCモータへの入力値
戻り値:
なし
'''
self.pi.set_PWM_dutycycle(self.gpio_in1, 0)
self.pi.set_PWM_dutycycle(self.gpio_in2, pwm_duty_cycle)
def _to_duty_cycle(self, input_value):
"""
input_value値をduty_cycle値に変換する。
引数:
input_value float([-1.0, 1.0]) DCモータへの入力値
戻り値:
duty_cycle int(0~self.range) PWM Duty Cycle値
"""
return int(float(self.pwm_range) * float(abs(input_value)))
class MotorDriverWithVref:
'''
TA7219Pに接続されたモータを駆動させるためのドライバクラス。
IN1,IN2はデジタルOUTPUTピンとし、VrefをPWM OUTPUTピンとして結線する場合
こちらのクラスを使用する。
PWM消費数を最小限にする場合などで使用する。
指定するPWM値の範囲は0~100であるが、マイナス値を指定した場合は逆転動作を
実行する。
'''
def __init__(self, pi, gpio_in1, gpio_in2, gpio_vref, pwm_range=255, pwm_freq=50):
'''
TA7219Pと接続されたGPIO番号をセットし、各ピンを使用可能な状態にする。
引数:
pi pigpioパッケージのpiインスタンス
gpio_in1 IN1と結線されているGPIOの番号(Digital OUTPUT)
gpio_in2 IN2と結線されているGPIOの番号(Digital OUTPUT)
gpio_vref Vrefと結線されているGPIOの番号(PWM OUTPUT)
pwm_range PWM1周期を構成するクロックの個数(デフォルト255)
pwm_freq PWM 周波数(単位:Hz, 0以上を指定、デフォルト50Hz)
戻り値:
なし
'''
self.gpio_in1 = gpio_in1
self.gpio_in2 = gpio_in2
self.gpio_vref = gpio_vref
self.pi = pi
self.pi.set_mode(self.gpio_in1, pigpio.OUTPUT)
self.pi.set_mode(self.gpio_in2, pigpio.OUTPUT)
self.pi.set_mode(self.gpio_vref, pigpio.OUTPUT)
# PWMの周波数(Hz:1秒に何回か)
self.pwm_freq = pwm_freq
self.pi.set_PWM_frequency(self.gpio_vref, self.pwm_freq)
# PWM1周期を構成するクロックの個数
self.pwm_range = pwm_range
self.pi.set_PWM_range(self.gpio_vref, self.pwm_range)
self.pi.set_PWM_range(self.gpio_in2, self.pwm_range)
# 初期状態、動力なし(free)にする
self.pi.write(self.gpio_in1, 0)
self.pi.write(self.gpio_in2, 0)
self.pi.set_PWM_dutycycle(self.gpio_vref, 0)
def free(self):
'''
動力なし状態にする。
引数:
なし
戻り値:なし
'''
self.pi.write(self.gpio_in1, 0)
self.pi.write(self.gpio_in2, 0)
self.pi.set_PWM_dutycycle(self.gpio_vref, 0)
def brake(self):
'''
制動停止状態にする。
引数:
なし
戻り値:
なし
'''
self.pi.write(self.gpio_in1, 1)
self.pi.write(self.gpio_in2, 1)
self.pi.set_PWM_dutycycle(self.gpio_vref, 0)
def move(self, input_value):
"""
Controller/AutoPilotから入力されたアナログ値(範囲[-1.0, 1.0])に
従ってモータを動かす。
引数:
input_value float([0.0, 1.0]) DCモータへの入力値
戻り値:
なし
"""
if input_value > 0:
self._forward(self._to_duty_cycle(input_value))
elif input_value < 0:
self._back(self._to_duty_cycle(abs(input_value)))
else:
self.free()
def _forward(self, pwm_duty_cycle):
'''
引数で指定された正値でモータを正転させる。
引数:
pwm_duty_cycle float([0.0, 1.0]) DCモータへの入力値
戻り値:
なし
'''
self.pi.write(self.gpio_in1, 1)
self.pi.write(self.gpio_in2, 0)
self.pi.set_PWM_dutycycle(self.gpio_vref, pwm_duty_cycle)
def _back(self, pwm_duty_cycle):
'''
引数で指定された正値でモータを逆転させる。
引数:
pwm_duty_cycle float([0.0, 1.0]) DCモータへの入力値
戻り値:
なし
'''
self.pi.write(self.gpio_in1, 0)
self.pi.write(self.gpio_in2, 1)
self.pi.set_PWM_dutycycle(self.gpio_vref, pwm_duty_cycle)
def _to_duty_cycle(self, input_value):
"""
input_value値をduty_cycle値に変換する。
引数:
input_value float([-1.0, 1.0]) DCモータへの入力値
戻り値:
duty_cycle int(0~self.range) PWM Duty Cycle値
"""
return int(float(self.pwm_range) * float(abs(input_value)))
| 30.128205 | 86 | 0.58 | 835 | 7,050 | 4.639521 | 0.128144 | 0.086732 | 0.062726 | 0.068147 | 0.833247 | 0.828601 | 0.825503 | 0.825503 | 0.81492 | 0.805627 | 0 | 0.0399 | 0.320993 | 7,050 | 233 | 87 | 30.257511 | 0.769375 | 0.337589 | 0 | 0.797468 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.177215 | false | 0 | 0.012658 | 0 | 0.240506 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
24062be986b0256e3a20d3d470cce782108ba550 | 5,321 | py | Python | codenerix_storages/migrations/0031_alter_inventoryinline_box_and_more.py | codenerix/django-codenerix-storages | bd77bde0cc26a72b892fb5d8e98f20587bb93415 | [
"Apache-2.0"
] | 1 | 2017-11-23T13:28:47.000Z | 2017-11-23T13:28:47.000Z | codenerix_storages/migrations/0031_alter_inventoryinline_box_and_more.py | codenerix/django-codenerix-storages | bd77bde0cc26a72b892fb5d8e98f20587bb93415 | [
"Apache-2.0"
] | null | null | null | codenerix_storages/migrations/0031_alter_inventoryinline_box_and_more.py | codenerix/django-codenerix-storages | bd77bde0cc26a72b892fb5d8e98f20587bb93415 | [
"Apache-2.0"
] | 2 | 2018-05-15T10:15:26.000Z | 2018-05-22T10:01:40.000Z | # Generated by Django 4.0.4 on 2022-05-13 12:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('codenerix_products', '0013_alter_attribute_public_alter_brand_outstanding_and_more'),
('codenerix_geodata', '0003_auto_20180118_1209'),
('codenerix_storages', '0030_auto_20180708_0711'),
]
operations = [
migrations.AlterField(
model_name='inventoryinline',
name='box',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_storages.storagebox', verbose_name='Box'),
),
migrations.AlterField(
model_name='inventoryinline',
name='operator',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_storages.storageoperator', verbose_name='Storage Operator'),
),
migrations.AlterField(
model_name='inventoryinline',
name='product_final',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_products.productfinal', verbose_name='Product Final'),
),
migrations.AlterField(
model_name='inventoryinline',
name='product_unique',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_products.productunique', verbose_name='Product Unique'),
),
migrations.AlterField(
model_name='inventoryline',
name='box',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_storages.storagebox', verbose_name='Box'),
),
migrations.AlterField(
model_name='inventoryline',
name='operator',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_storages.storageoperator', verbose_name='Storage Operator'),
),
migrations.AlterField(
model_name='inventoryline',
name='product_final',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_products.productfinal', verbose_name='Product Final'),
),
migrations.AlterField(
model_name='inventoryline',
name='product_unique',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_products.productunique', verbose_name='Product Unique'),
),
migrations.AlterField(
model_name='inventoryoutline',
name='box',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_storages.storagebox', verbose_name='Box'),
),
migrations.AlterField(
model_name='inventoryoutline',
name='operator',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_storages.storageoperator', verbose_name='Storage Operator'),
),
migrations.AlterField(
model_name='inventoryoutline',
name='product_final',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_products.productfinal', verbose_name='Product Final'),
),
migrations.AlterField(
model_name='inventoryoutline',
name='product_unique',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='storage_%(class)s', to='codenerix_products.productunique', verbose_name='Product Unique'),
),
migrations.AlterField(
model_name='storage',
name='city',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_geo_addresses', to='codenerix_geodata.city', verbose_name='City'),
),
migrations.AlterField(
model_name='storage',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_geo_addresses', to='codenerix_geodata.country', verbose_name='Country'),
),
migrations.AlterField(
model_name='storage',
name='province',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_geo_addresses', to='codenerix_geodata.province', verbose_name='Province'),
),
migrations.AlterField(
model_name='storage',
name='region',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_geo_addresses', to='codenerix_geodata.region', verbose_name='Region'),
),
]
| 54.85567 | 216 | 0.671302 | 573 | 5,321 | 6.005236 | 0.129145 | 0.060738 | 0.069166 | 0.108689 | 0.874164 | 0.874164 | 0.802092 | 0.776518 | 0.776518 | 0.776518 | 0 | 0.011963 | 0.198835 | 5,321 | 96 | 217 | 55.427083 | 0.795215 | 0.008457 | 0 | 0.8 | 1 | 0 | 0.282897 | 0.138225 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022222 | 0 | 0.055556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
24597b3209a704e47ea169b2d00a19574b6579ad | 33,703 | py | Python | com/vmware/nsx_vmc_app/infra_client.py | vishal-12/vsphere-automation-sdk-python | 9cf363971db77ea5a12928eecd5cf5170a7fcd8a | [
"MIT"
] | null | null | null | com/vmware/nsx_vmc_app/infra_client.py | vishal-12/vsphere-automation-sdk-python | 9cf363971db77ea5a12928eecd5cf5170a7fcd8a | [
"MIT"
] | null | null | null | com/vmware/nsx_vmc_app/infra_client.py | vishal-12/vsphere-automation-sdk-python | 9cf363971db77ea5a12928eecd5cf5170a7fcd8a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2019 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx_vmc_app.infra.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Accounts(VapiInterface):
    """
    Service wrapper for the VMC accounts endpoint under the infra tree.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_vmc_app.infra.accounts'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        Build the service facade on top of the generated REST stub.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _AccountsStub)

    def get(self):
        """
        Retrieve the shadow account and linked VPC account information from
        the VMC provider. This is a live query against the provider.

        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.VMCAccounts`
        :return: com.vmware.nsx_vmc_app.model.VMCAccounts
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # The 'get' operation takes no input payload.
        return self._invoke('get', None)
class LinkedVpcs(VapiInterface):
    """
    Service wrapper for linked VPC queries under the infra tree.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_vmc_app.infra.linked_vpcs'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        Build the service facade on top of the generated REST stub.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _LinkedVpcsStub)

    def get(self, linked_vpc_id):
        """
        Get linked VPC information for a single VPC.

        :type  linked_vpc_id: :class:`str`
        :param linked_vpc_id: (required)
        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.LinkedVpcInfo`
        :return: com.vmware.nsx_vmc_app.model.LinkedVpcInfo
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        operation_args = {'linked_vpc_id': linked_vpc_id}
        return self._invoke('get', operation_args)

    def list(self):
        """
        List linked VPC information for all linked VPCs.

        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.LinkedVpcsListResult`
        :return: com.vmware.nsx_vmc_app.model.LinkedVpcsListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # The 'list' operation takes no input payload.
        return self._invoke('list', None)
class MgmtVms(VapiInterface):
    """
    Service wrapper for management VM queries under the infra tree.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_vmc_app.infra.mgmt_vms'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        Build the service facade on top of the generated REST stub.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _MgmtVmsStub)

    def get(self, vm_id):
        """
        Get access information for a single management VM.

        :type  vm_id: :class:`str`
        :param vm_id: (required)
        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.MgmtVmInfo`
        :return: com.vmware.nsx_vmc_app.model.MgmtVmInfo
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        operation_args = {'vm_id': vm_id}
        return self._invoke('get', operation_args)

    def list(self):
        """
        List information for all management VMs.

        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.MgmtVmsListResult`
        :return: com.vmware.nsx_vmc_app.model.MgmtVmsListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # The 'list' operation takes no input payload.
        return self._invoke('list', None)
class PublicIps(VapiInterface):
    """
    Service wrapper for allocating, inspecting and releasing SDDC public IPs.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_vmc_app.infra.public_ips'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        Build the service facade on top of the generated REST stub.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _PublicIpsStub)

    def delete(self, public_ip_id, force=None):
        """
        Delete a public IP. The IP will be released in the VMC provider.

        :type  public_ip_id: :class:`str`
        :param public_ip_id: (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        operation_args = {
            'public_ip_id': public_ip_id,
            'force': force,
        }
        return self._invoke('delete', operation_args)

    def get(self, public_ip_id):
        """
        Get the public IP information for a single allocation.

        :type  public_ip_id: :class:`str`
        :param public_ip_id: (required)
        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.PublicIp`
        :return: com.vmware.nsx_vmc_app.model.PublicIp
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        operation_args = {'public_ip_id': public_ip_id}
        return self._invoke('get', operation_args)

    def list(self):
        """
        List all public IPs obtained in the SDDC.

        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.PublicIpsListResult`
        :return: com.vmware.nsx_vmc_app.model.PublicIpsListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # The 'list' operation takes no input payload.
        return self._invoke('list', None)

    def update(self, public_ip_id, public_ip):
        """
        Create or update a public IP. On create, a new public IP is allocated
        from the VMC provider. On update, only the display name can be
        modified; the IP itself is read-only.

        :type  public_ip_id: :class:`str`
        :param public_ip_id: (required)
        :type  public_ip: :class:`com.vmware.nsx_vmc_app.model_client.PublicIp`
        :param public_ip: (required)
        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.PublicIp`
        :return: com.vmware.nsx_vmc_app.model.PublicIp
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        operation_args = {
            'public_ip_id': public_ip_id,
            'public_ip': public_ip,
        }
        return self._invoke('update', operation_args)
class SddcUserConfig(VapiInterface):
    """
    Service wrapper for reading user-level SDDC configuration.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_vmc_app.infra.sddc_user_config'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        Build the service facade on top of the generated REST stub.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _SddcUserConfigStub)

    def get(self):
        """
        Get the user-level SDDC configuration parameters.

        :rtype: :class:`com.vmware.nsx_vmc_app.model_client.SddcUserConfiguration`
        :return: com.vmware.nsx_vmc_app.model.SddcUserConfiguration
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # The 'get' operation takes no input payload.
        return self._invoke('get', None)
class _AccountsStub(ApiInterfaceStub):
    # REST wiring for the 'accounts' service: one read-only 'get' operation.
    def __init__(self, config):
        # Map wire-level fault identifiers onto the standard binding error
        # types raised to callers.
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        # 'get' carries no input fields.
        get_input_type = type.StructType('operation-input', {})
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/cloud-service/api/v1/infra/accounts',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'VMCAccounts'),
                'errors': get_error_dict,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_vmc_app.infra.accounts',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _LinkedVpcsStub(ApiInterfaceStub):
    # REST wiring for the 'linked_vpcs' service: 'get' and 'list' operations.
    def __init__(self, config):
        # Every operation on this service maps the same wire-level fault
        # identifiers onto the standard binding error types; build a fresh
        # copy of that mapping per operation.
        def _make_error_dict():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # -- 'get' operation: one path parameter, linked_vpc_id ------------
        get_input_type = type.StructType('operation-input', {
            'linked_vpc_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/cloud-service/api/v1/infra/linked-vpcs/{linked-vpc-id}',
            path_variables={
                'linked_vpc_id': 'linked-vpc-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # -- 'list' operation: no input fields ------------------------------
        list_input_type = type.StructType('operation-input', {})
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/cloud-service/api/v1/infra/linked-vpcs',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'LinkedVpcInfo'),
                'errors': _make_error_dict(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'LinkedVpcsListResult'),
                'errors': _make_error_dict(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_vmc_app.infra.linked_vpcs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _MgmtVmsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'vm_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/cloud-service/api/v1/infra/mgmt-vms/{vm-id}',
path_variables={
'vm_id': 'vm-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/cloud-service/api/v1/infra/mgmt-vms',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'MgmtVmInfo'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'MgmtVmsListResult'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'list': list_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx_vmc_app.infra.mgmt_vms',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _PublicIpsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'public_ip_id': type.StringType(),
'force': type.OptionalType(type.BooleanType()),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/cloud-service/api/v1/infra/public-ips/{public-ip-id}',
path_variables={
'public_ip_id': 'public-ip-id',
},
query_parameters={
'force': 'force',
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'public_ip_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/cloud-service/api/v1/infra/public-ips/{public-ip-id}',
path_variables={
'public_ip_id': 'public-ip-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/cloud-service/api/v1/infra/public-ips',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'public_ip_id': type.StringType(),
'public_ip': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'PublicIp'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/cloud-service/api/v1/infra/public-ips/{public-ip-id}',
request_body_parameter='public_ip',
path_variables={
'public_ip_id': 'public-ip-id',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'PublicIp'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'PublicIpsListResult'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'PublicIp'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'list': list_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx_vmc_app.infra.public_ips',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SddcUserConfigStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/cloud-service/api/v1/infra/sddc-user-config',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx_vmc_app.model_client', 'SddcUserConfiguration'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx_vmc_app.infra.sddc_user_config',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class StubFactory(StubFactoryBase):
_attrs = {
'Accounts': Accounts,
'LinkedVpcs': LinkedVpcs,
'MgmtVms': MgmtVms,
'PublicIps': PublicIps,
'SddcUserConfig': SddcUserConfig,
'direct_connect': 'com.vmware.nsx_vmc_app.infra.direct_connect_client.StubFactory',
'linked_vpcs': 'com.vmware.nsx_vmc_app.infra.linked_vpcs_client.StubFactory',
'realized_state': 'com.vmware.nsx_vmc_app.infra.realized_state_client.StubFactory',
}
| 41.051157 | 114 | 0.604041 | 3,450 | 33,703 | 5.642029 | 0.061739 | 0.089237 | 0.10018 | 0.123298 | 0.878859 | 0.867249 | 0.849319 | 0.828872 | 0.809658 | 0.797431 | 0 | 0.00062 | 0.282082 | 33,703 | 820 | 115 | 41.10122 | 0.803852 | 0.241136 | 0 | 0.620623 | 1 | 0.007782 | 0.323093 | 0.237635 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038911 | false | 0 | 0.023346 | 0 | 0.114786 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
245a1037bbcf2ac337ad453e88a6d018b0194ebe | 160 | py | Python | src/view/__init__.py | Matimed/Barbarism | 4892092f24f314bc6cfacc1c780436dc59fc90ac | [
"MIT"
] | 2 | 2021-09-09T14:03:40.000Z | 2021-11-03T03:35:55.000Z | src/view/__init__.py | Matimed/Barbarism | 4892092f24f314bc6cfacc1c780436dc59fc90ac | [
"MIT"
] | null | null | null | src/view/__init__.py | Matimed/Barbarism | 4892092f24f314bc6cfacc1c780436dc59fc90ac | [
"MIT"
] | null | null | null | from src.view.window import Window
from src.view.scene_manager import SceneManager
from src.view.world_view import WorldView
from src.view.camera import Camera
| 32 | 47 | 0.85 | 26 | 160 | 5.153846 | 0.423077 | 0.208955 | 0.328358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 160 | 4 | 48 | 40 | 0.930556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
cee1158fcb1d6c10a68f60c9f3677cd578f858b8 | 96 | py | Python | test/test_torch_geometric.py | yjchoi1/gns-1 | 31c712dbb721be81b5fd193a23baaf56adf9c336 | [
"MIT"
] | 14 | 2021-11-17T16:50:22.000Z | 2022-03-11T14:14:36.000Z | test/test_torch_geometric.py | yjchoi1/gns-1 | 31c712dbb721be81b5fd193a23baaf56adf9c336 | [
"MIT"
] | 9 | 2021-11-17T14:00:13.000Z | 2022-03-15T00:53:13.000Z | test/test_torch_geometric.py | yjchoi1/gns-1 | 31c712dbb721be81b5fd193a23baaf56adf9c336 | [
"MIT"
] | 2 | 2022-02-28T17:47:11.000Z | 2022-03-07T09:58:53.000Z | import torch
import torch_geometric
from torch_geometric.nn import MessagePassing, radius_graph
| 24 | 59 | 0.885417 | 13 | 96 | 6.307692 | 0.615385 | 0.268293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 96 | 3 | 60 | 32 | 0.942529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
305c147b25ed743fb1e8eb9637eadb37ea73956d | 37,554 | py | Python | mysql_config/WebMonitoring/generators/website/tests/website_expected_time.py | raresraf/rafMetrics | 21eb5e8210364bf70eee746d71c45f3e353dcb10 | [
"MIT"
] | 15 | 2019-11-03T18:01:27.000Z | 2021-05-05T20:54:57.000Z | mysql_config/WebMonitoring/generators/website/tests/website_expected_time.py | raresraf/rafMetrics | 21eb5e8210364bf70eee746d71c45f3e353dcb10 | [
"MIT"
] | 392 | 2019-11-09T21:28:01.000Z | 2022-03-31T13:04:45.000Z | mysql_config/WebMonitoring/generators/website/tests/website_expected_time.py | raresraf/rafMetrics | 21eb5e8210364bf70eee746d71c45f3e353dcb10 | [
"MIT"
] | 1 | 2021-03-11T18:35:16.000Z | 2021-03-11T18:35:16.000Z | EXPECTED_DAILY_WEBSITE_GENERATE_SAMPLES_QUERIES = """delimiter //
DROP PROCEDURE IF EXISTS get_daily_samples_websites;
CREATE PROCEDURE get_daily_samples_websites (
IN id INT,
OUT entry0 FLOAT,
OUT entry1 FLOAT,
OUT entry2 FLOAT,
OUT entry3 FLOAT,
OUT entry4 FLOAT,
OUT entry5 FLOAT,
OUT entry6 FLOAT,
OUT entry7 FLOAT,
OUT entry8 FLOAT,
OUT entry9 FLOAT,
OUT entry10 FLOAT,
OUT entry11 FLOAT,
OUT entry12 FLOAT,
OUT entry13 FLOAT,
OUT entry14 FLOAT,
OUT entry15 FLOAT,
OUT entry16 FLOAT,
OUT entry17 FLOAT,
OUT entry18 FLOAT,
OUT entry19 FLOAT,
OUT entry20 FLOAT,
OUT entry21 FLOAT,
OUT entry22 FLOAT,
OUT entry23 FLOAT,
OUT start_hour FLOAT
)
BEGIN
select HOUR(now()) INTO start_hour;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 24 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 23 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry0 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 24 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 23 HOUR) AND Websiteid = id limit 1;
else SET entry0 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 23 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 22 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry1 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 23 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 22 HOUR) AND Websiteid = id limit 1;
else SET entry1 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 22 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 21 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry2 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 22 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 21 HOUR) AND Websiteid = id limit 1;
else SET entry2 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 21 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 20 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry3 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 21 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 20 HOUR) AND Websiteid = id limit 1;
else SET entry3 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 20 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 19 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry4 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 20 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 19 HOUR) AND Websiteid = id limit 1;
else SET entry4 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 19 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry5 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 19 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND Websiteid = id limit 1;
else SET entry5 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 17 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry6 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 17 HOUR) AND Websiteid = id limit 1;
else SET entry6 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 17 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 16 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry7 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 17 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 16 HOUR) AND Websiteid = id limit 1;
else SET entry7 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 16 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 15 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry8 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 16 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 15 HOUR) AND Websiteid = id limit 1;
else SET entry8 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 15 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 14 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry9 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 15 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 14 HOUR) AND Websiteid = id limit 1;
else SET entry9 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 14 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 13 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry10 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 14 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 13 HOUR) AND Websiteid = id limit 1;
else SET entry10 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 13 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry11 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 13 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND Websiteid = id limit 1;
else SET entry11 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 11 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry12 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 11 HOUR) AND Websiteid = id limit 1;
else SET entry12 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 11 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 10 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry13 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 11 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 10 HOUR) AND Websiteid = id limit 1;
else SET entry13 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 10 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 9 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry14 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 10 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 9 HOUR) AND Websiteid = id limit 1;
else SET entry14 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 9 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 8 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry15 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 9 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 8 HOUR) AND Websiteid = id limit 1;
else SET entry15 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 8 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 7 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry16 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 8 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 7 HOUR) AND Websiteid = id limit 1;
else SET entry16 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 7 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry17 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 7 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND Websiteid = id limit 1;
else SET entry17 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 5 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry18 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 5 HOUR) AND Websiteid = id limit 1;
else SET entry18 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 5 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 4 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry19 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 5 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 4 HOUR) AND Websiteid = id limit 1;
else SET entry19 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 4 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 3 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry20 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 4 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 3 HOUR) AND Websiteid = id limit 1;
else SET entry20 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 3 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 2 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry21 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 3 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 2 HOUR) AND Websiteid = id limit 1;
else SET entry21 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 2 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 1 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry22 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 2 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 1 HOUR) AND Websiteid = id limit 1;
else SET entry22 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 1 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 0 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry23 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 1 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 0 HOUR) AND Websiteid = id limit 1;
else SET entry23 := 0;
end if;
END//
delimiter ;
"""
EXPECTED_WEEKLY_WEBSITE_GENERATE_SAMPLES_QUERIES = """delimiter //
DROP PROCEDURE IF EXISTS get_weekly_samples_websites;
CREATE PROCEDURE get_weekly_samples_websites (
IN id INT,
OUT entry0 FLOAT,
OUT entry1 FLOAT,
OUT entry2 FLOAT,
OUT entry3 FLOAT,
OUT entry4 FLOAT,
OUT entry5 FLOAT,
OUT entry6 FLOAT,
OUT entry7 FLOAT,
OUT entry8 FLOAT,
OUT entry9 FLOAT,
OUT entry10 FLOAT,
OUT entry11 FLOAT,
OUT entry12 FLOAT,
OUT entry13 FLOAT,
OUT entry14 FLOAT,
OUT entry15 FLOAT,
OUT entry16 FLOAT,
OUT entry17 FLOAT,
OUT entry18 FLOAT,
OUT entry19 FLOAT,
OUT entry20 FLOAT,
OUT entry21 FLOAT,
OUT entry22 FLOAT,
OUT entry23 FLOAT,
OUT entry24 FLOAT,
OUT entry25 FLOAT,
OUT entry26 FLOAT,
OUT entry27 FLOAT,
OUT start_hour FLOAT
)
BEGIN
select HOUR(now()) INTO start_hour;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 168 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 162 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry0 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 168 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 162 HOUR) AND Websiteid = id limit 1;
else SET entry0 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 162 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 156 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry1 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 162 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 156 HOUR) AND Websiteid = id limit 1;
else SET entry1 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 156 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 150 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry2 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 156 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 150 HOUR) AND Websiteid = id limit 1;
else SET entry2 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 150 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 144 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry3 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 150 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 144 HOUR) AND Websiteid = id limit 1;
else SET entry3 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 144 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 138 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry4 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 144 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 138 HOUR) AND Websiteid = id limit 1;
else SET entry4 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 138 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 132 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry5 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 138 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 132 HOUR) AND Websiteid = id limit 1;
else SET entry5 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 132 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 126 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry6 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 132 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 126 HOUR) AND Websiteid = id limit 1;
else SET entry6 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 126 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 120 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry7 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 126 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 120 HOUR) AND Websiteid = id limit 1;
else SET entry7 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 120 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 114 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry8 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 120 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 114 HOUR) AND Websiteid = id limit 1;
else SET entry8 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 114 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 108 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry9 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 114 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 108 HOUR) AND Websiteid = id limit 1;
else SET entry9 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 108 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 102 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry10 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 108 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 102 HOUR) AND Websiteid = id limit 1;
else SET entry10 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 102 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 96 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry11 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 102 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 96 HOUR) AND Websiteid = id limit 1;
else SET entry11 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 96 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 90 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry12 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 96 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 90 HOUR) AND Websiteid = id limit 1;
else SET entry12 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 90 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 84 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry13 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 90 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 84 HOUR) AND Websiteid = id limit 1;
else SET entry13 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 84 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 78 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry14 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 84 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 78 HOUR) AND Websiteid = id limit 1;
else SET entry14 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 78 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 72 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry15 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 78 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 72 HOUR) AND Websiteid = id limit 1;
else SET entry15 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 72 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 66 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry16 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 72 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 66 HOUR) AND Websiteid = id limit 1;
else SET entry16 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 66 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 60 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry17 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 66 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 60 HOUR) AND Websiteid = id limit 1;
else SET entry17 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 60 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 54 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry18 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 60 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 54 HOUR) AND Websiteid = id limit 1;
else SET entry18 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 54 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 48 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry19 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 54 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 48 HOUR) AND Websiteid = id limit 1;
else SET entry19 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 48 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 42 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry20 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 48 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 42 HOUR) AND Websiteid = id limit 1;
else SET entry20 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 42 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 36 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry21 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 42 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 36 HOUR) AND Websiteid = id limit 1;
else SET entry21 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 36 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 30 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry22 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 36 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 30 HOUR) AND Websiteid = id limit 1;
else SET entry22 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 30 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 24 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry23 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 30 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 24 HOUR) AND Websiteid = id limit 1;
else SET entry23 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 24 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry24 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 24 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND Websiteid = id limit 1;
else SET entry24 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry25 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 18 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND Websiteid = id limit 1;
else SET entry25 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry26 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 12 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND Websiteid = id limit 1;
else SET entry26 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 0 HOUR) AND Websiteid = id)
then SELECT TotalTime INTO entry27 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 6 HOUR) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 0 HOUR) AND Websiteid = id limit 1;
else SET entry27 := 0;
end if;
END//
delimiter ;
"""
EXPECTED_MONTHLY_WEBSITE_GENERATE_SAMPLES_QUERIES = """delimiter //
DROP PROCEDURE IF EXISTS get_monthly_samples_websites;
CREATE PROCEDURE get_monthly_samples_websites (
IN id INT,
OUT entry0 FLOAT,
OUT entry1 FLOAT,
OUT entry2 FLOAT,
OUT entry3 FLOAT,
OUT entry4 FLOAT,
OUT entry5 FLOAT,
OUT entry6 FLOAT,
OUT entry7 FLOAT,
OUT entry8 FLOAT,
OUT entry9 FLOAT,
OUT entry10 FLOAT,
OUT entry11 FLOAT,
OUT entry12 FLOAT,
OUT entry13 FLOAT,
OUT entry14 FLOAT,
OUT entry15 FLOAT,
OUT entry16 FLOAT,
OUT entry17 FLOAT,
OUT entry18 FLOAT,
OUT entry19 FLOAT,
OUT entry20 FLOAT,
OUT entry21 FLOAT,
OUT entry22 FLOAT,
OUT entry23 FLOAT,
OUT entry24 FLOAT,
OUT entry25 FLOAT,
OUT entry26 FLOAT,
OUT entry27 FLOAT,
OUT entry28 FLOAT,
OUT entry29 FLOAT,
OUT entry30 FLOAT,
OUT start_hour FLOAT
)
BEGIN
select DAY(now()) INTO start_hour;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 31 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 30 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry0 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 31 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 30 DAY) AND Websiteid = id limit 1;
else SET entry0 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 30 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 29 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry1 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 30 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 29 DAY) AND Websiteid = id limit 1;
else SET entry1 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 29 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 28 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry2 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 29 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 28 DAY) AND Websiteid = id limit 1;
else SET entry2 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 28 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 27 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry3 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 28 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 27 DAY) AND Websiteid = id limit 1;
else SET entry3 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 27 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 26 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry4 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 27 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 26 DAY) AND Websiteid = id limit 1;
else SET entry4 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 26 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 25 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry5 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 26 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 25 DAY) AND Websiteid = id limit 1;
else SET entry5 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 25 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 24 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry6 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 25 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 24 DAY) AND Websiteid = id limit 1;
else SET entry6 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 24 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 23 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry7 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 24 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 23 DAY) AND Websiteid = id limit 1;
else SET entry7 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 23 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 22 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry8 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 23 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 22 DAY) AND Websiteid = id limit 1;
else SET entry8 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 22 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 21 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry9 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 22 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 21 DAY) AND Websiteid = id limit 1;
else SET entry9 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 21 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 20 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry10 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 21 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 20 DAY) AND Websiteid = id limit 1;
else SET entry10 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 20 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 19 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry11 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 20 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 19 DAY) AND Websiteid = id limit 1;
else SET entry11 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 19 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 18 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry12 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 19 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 18 DAY) AND Websiteid = id limit 1;
else SET entry12 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 18 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 17 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry13 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 18 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 17 DAY) AND Websiteid = id limit 1;
else SET entry13 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 17 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 16 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry14 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 17 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 16 DAY) AND Websiteid = id limit 1;
else SET entry14 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 16 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 15 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry15 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 16 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 15 DAY) AND Websiteid = id limit 1;
else SET entry15 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 15 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 14 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry16 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 15 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 14 DAY) AND Websiteid = id limit 1;
else SET entry16 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 14 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 13 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry17 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 14 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 13 DAY) AND Websiteid = id limit 1;
else SET entry17 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 13 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 12 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry18 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 13 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 12 DAY) AND Websiteid = id limit 1;
else SET entry18 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 12 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 11 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry19 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 12 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 11 DAY) AND Websiteid = id limit 1;
else SET entry19 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 11 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 10 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry20 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 11 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 10 DAY) AND Websiteid = id limit 1;
else SET entry20 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 10 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 9 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry21 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 10 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 9 DAY) AND Websiteid = id limit 1;
else SET entry21 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 9 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 8 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry22 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 9 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 8 DAY) AND Websiteid = id limit 1;
else SET entry22 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 8 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 7 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry23 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 8 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 7 DAY) AND Websiteid = id limit 1;
else SET entry23 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 7 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 6 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry24 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 7 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 6 DAY) AND Websiteid = id limit 1;
else SET entry24 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 6 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 5 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry25 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 6 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 5 DAY) AND Websiteid = id limit 1;
else SET entry25 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 5 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 4 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry26 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 5 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 4 DAY) AND Websiteid = id limit 1;
else SET entry26 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 4 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 3 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry27 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 4 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 3 DAY) AND Websiteid = id limit 1;
else SET entry27 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 3 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 2 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry28 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 3 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 2 DAY) AND Websiteid = id limit 1;
else SET entry28 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 2 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 1 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry29 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 2 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 1 DAY) AND Websiteid = id limit 1;
else SET entry29 := 0;
end if;
if EXISTS(SELECT TotalTime FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 1 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 0 DAY) AND Websiteid = id)
then SELECT TotalTime INTO entry30 FROM WEBSITES_METRICS WHERE TIMESTAMP >= DATE_SUB(NOW(), INTERVAL 1 DAY) AND TIMESTAMP <= DATE_SUB(NOW(), INTERVAL 0 DAY) AND Websiteid = id limit 1;
else SET entry30 := 0;
end if;
END//
delimiter ;
"""
| 83.639198 | 199 | 0.704559 | 5,423 | 37,554 | 4.780011 | 0.023788 | 0.1665 | 0.204922 | 0.243345 | 0.994445 | 0.989931 | 0.988388 | 0.987115 | 0.982949 | 0.982949 | 0 | 0.040504 | 0.209778 | 37,554 | 448 | 200 | 83.825893 | 0.832996 | 0 | 0 | 0.587054 | 1 | 0.370536 | 0.995311 | 0.004447 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
307b2c9e02a36c940c08048cac13223292c131df | 5,331 | py | Python | tests/test_pyhpeimc_plat_vrm.py | HPNetworking/HP-Intelligent-Management-Center | 4fba31827573587e03a6233c7db60f188038c8e5 | [
"Apache-2.0"
] | 13 | 2016-03-14T17:55:03.000Z | 2021-03-26T07:18:26.000Z | tests/test_pyhpeimc_plat_vrm.py | HPNetworking/HP-Intelligent-Management-Center | 4fba31827573587e03a6233c7db60f188038c8e5 | [
"Apache-2.0"
] | 7 | 2016-08-04T17:39:11.000Z | 2017-09-19T13:42:35.000Z | tests/test_pyhpeimc_plat_vrm.py | HPNetworking/HP-Intelligent-Management-Center | 4fba31827573587e03a6233c7db60f188038c8e5 | [
"Apache-2.0"
] | 17 | 2016-03-03T05:24:20.000Z | 2022-03-10T08:16:31.000Z | # -*- coding: utf-8 -*-
"""
This module is used for testing the functions within the pyhpeimc.plat.vrm module.
"""
from unittest import TestCase
from nose.plugins.skip import SkipTest
from pyhpeimc.plat.vrm import *
from test_machine import *
# Test get_vm_host_info for Multiple Vendor Devices
# ESX
class TestGetDevVlanESX(TestCase):
def test_get_dev_vlans_type(self):
if ESX is None:
raise SkipTest
host = get_vm_host_info(ESX, auth.creds, auth.url)
self.assertIs(type(host), dict)
def test_get_dev_vlans_content(self):
if ESX is None:
raise SkipTest
host = get_vm_host_info(ESX, auth.creds, auth.url)
self.assertIs(len(host), 10)
self.assertIn('cpuNum', host)
self.assertIn('porductFlag', host)
self.assertIn('memory', host)
self.assertIn('devIp', host)
self.assertIn('devId', host)
self.assertIn('vendor', host)
self.assertIn('cpuFeg', host)
self.assertIn('parentDevId', host)
self.assertIn('serverName', host)
self.assertIn('diskSize', host)
# HyperV
class TestGetDevVLANsHyperV(TestCase):
def test_get_dev_vlans_type(self):
if HyperV is None:
raise SkipTest
host = get_vm_host_info(HyperV, auth.creds, auth.url)
self.assertIs(type(host), dict)
def test_get_dev_vlans_content(self):
if HyperV is None:
raise SkipTest
host = get_vm_host_info(HyperV, auth.creds, auth.url)
self.assertIs(len(host), 10)
self.assertIn('cpuNum', host)
self.assertIn('porductFlag', host)
self.assertIn('memory', host)
self.assertIn('devIp', host)
self.assertIn('devId', host)
self.assertIn('vendor', host)
self.assertIn('cpuFeg', host)
self.assertIn('parentDevId', host)
self.assertIn('serverName', host)
self.assertIn('diskSize', host)
# Test get_vm_host_vnic for Multiple Vendor Devices
# ESX
class TestGetVMHostVnicESX(TestCase):
def test_get_vm_host_vnic_type(self):
if ESX is None:
raise SkipTest
host = get_vm_host_vnic(ESX, auth.creds, auth.url)
self.assertIs(type(host), list)
def test_get_vm_host_vnic_content(self):
if ESX is None:
raise SkipTest
host = get_vm_host_vnic(ESX, auth.creds, auth.url)
self.assertIs(len(host), 8)
self.assertIn('vSwtichKey', host[0])
self.assertIn('mask', host[0])
self.assertIn('vSwitchName', host[0])
self.assertIn('serverDevId', host[0])
self.assertIn('ip', host[0])
self.assertIn('nicName', host[0])
# HyperV
class TestGetVMHostVnicHyperV(TestCase):
def test_get_vm_host_vnic_type(self):
if HyperV is None:
raise SkipTest
host = get_vm_host_vnic(HyperV, auth.creds, auth.url)
self.assertIs(type(host), dict)
def test_get_vm_host_vnic_content(self):
if HyperV is None:
raise SkipTest
host = get_vm_host_vnic(HyperV, auth.creds, auth.url)
self.assertIs(len(host), 8)
self.assertIn('vSwtichKey', host[0])
self.assertIn('mask', host[0])
self.assertIn('vSwitchName', host[0])
self.assertIn('serverDevId', host[0])
self.assertIn('ip', host[0])
self.assertIn('nicName', host[0])
# Test get_host_vms for Multiple Vendor Devices
# TODO Removed len test need to investigate
# ESX
class TestGetHostVMsESX(TestCase):
def test_get_vm_host_vnic_type(self):
if ESX is None:
raise SkipTest
vms = get_host_vms(ESX, auth.creds, auth.url)
self.assertIs(type(vms), list)
def test_get_host_vms_type(self):
if ESX is None:
raise SkipTest
vms = get_host_vms(ESX, auth.creds, auth.url)
# self.assertIs(len(vms), (16 or 17))
self.assertIn('vmIP', vms[0])
self.assertIn('memory', vms[0])
self.assertIn('osDesc', vms[0])
self.assertIn('powerState', vms[0])
self.assertIn('vmTools', vms[0])
self.assertIn('parentServerId', vms[0])
self.assertIn('porductFlag', vms[0])
self.assertIn('vmName', vms[0])
self.assertIn('cpu', vms[0])
self.assertIn('vmDevId', vms[0])
self.assertIn('vmMask', vms[0])
self.assertIn('coresPerCpu', vms[0])
# HyperV
class TestGetHostVMsHyperV(TestCase):
def test_get_vm_host_vnic_type(self):
if HyperV is None:
raise SkipTest
vms = get_host_vms(HyperV, auth.creds, auth.url)
self.assertIs(type(vms), list)
def test_get_host_vms_type(self):
if HyperV is None:
raise SkipTest
vms = get_host_vms(HyperV, auth.creds, auth.url)
self.assertIs(len(vms), 16)
self.assertIn('vmIP', vms[0])
self.assertIn('memory', vms[0])
self.assertIn('osDesc', vms[0])
self.assertIn('powerState', vms[0])
self.assertIn('vmTools', vms[0])
self.assertIn('parentServerId', vms[0])
self.assertIn('porductFlag', vms[0])
self.assertIn('vmName', vms[0])
self.assertIn('cpu', vms[0])
self.assertIn('vmDevId', vms[0])
self.assertIn('vmMask', vms[0])
self.assertIn('coresPerCpu', vms[0])
| 31.732143 | 82 | 0.623898 | 693 | 5,331 | 4.662338 | 0.137085 | 0.207985 | 0.128753 | 0.108945 | 0.863819 | 0.858558 | 0.83875 | 0.83875 | 0.83875 | 0.805942 | 0 | 0.012271 | 0.250985 | 5,331 | 167 | 83 | 31.922156 | 0.796895 | 0.067905 | 0 | 0.904 | 0 | 0 | 0.084866 | 0 | 0 | 0 | 0 | 0.005988 | 0.536 | 1 | 0.096 | false | 0 | 0.032 | 0 | 0.176 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
064b3fe1f3a3c3f4466f8475627def2488af4f5d | 12,615 | py | Python | Scripts/sod_sim_funcs.py | ehbussell/RedwoodCreekAnalysis | 086085165bb48bc9f2c559f505cad82afdc13666 | [
"MIT"
] | null | null | null | Scripts/sod_sim_funcs.py | ehbussell/RedwoodCreekAnalysis | 086085165bb48bc9f2c559f505cad82afdc13666 | [
"MIT"
] | null | null | null | Scripts/sod_sim_funcs.py | ehbussell/RedwoodCreekAnalysis | 086085165bb48bc9f2c559f505cad82afdc13666 | [
"MIT"
] | null | null | null | """Functions for running SOD simulations - including full weather data or averaged"""
import pdb
from IPython import embed
import inspect
import numpy as np
import os
import IndividualSimulator
import raster_tools
import matplotlib.pyplot as plt
import time
def run_avg_sim(sim_stub, num_years, infect_cell=None, iteration=0):
"""Run simulation for num_years, using constant averaged forest mask and weather data."""
analysis_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
params = IndividualSimulator.code.config.read_config_file(
filename=os.path.join(analysis_path, "InputData", "REDW_config.ini"))
params['FinalTime'] = 28 * num_years
params['OutputFileStub'] = os.path.join(sim_stub, 'output')
params['RasterFileStub'] = os.path.join(sim_stub, 'rasters_{}/raster'.format(iteration))
params['HostPosFile'] = 'GeneratedData/EROI/HostNumbers.txt'
params['InitCondFile'] = 'GeneratedData/EROI/InitialConditions_Numbers'
os.makedirs(os.path.join(sim_stub, 'rasters_{}'.format(iteration)), exist_ok=True)
params['SusceptibilityFile'] = 'GeneratedData/EROI/RMSMask.txt'
params['InfectiousnessFile'] = 'GeneratedData/EROI/RMSMask.txt'
params['RasterStatesOutput'] = 'SI'
params['RasterOutputFreq'] = 2
params['OutputEventData'] = False
params['OutputHostData'] = False
params['OutputFiles'] = False
IndividualSimulator.run_epidemics(params, iteration_start=iteration)
def run_full_sim(sim_stub, num_years, landscape_name="ROI_Landscape", infect_cell=None, iteration=0):
"""Run full simulation for num_years, using variable forest mask and weather data.
Note weather data will start from 1990.
"""
analysis_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
params = IndividualSimulator.code.config.read_config_file(
filename=os.path.join(analysis_path, "InputData", "REDW_config.ini"))
forest_mask = raster_tools.RasterData.from_file(os.path.join(
analysis_path, "GeneratedData", landscape_name, "ForestMask.txt"))
forest_mask.array[forest_mask.array == -9999] = np.nan
header = forest_mask.header_vals
susceptibility_raster = raster_tools.RasterData(
shape=(header['nrows'], header['ncols']),
llcorner=(header['xllcorner'], header['yllcorner']),
cellsize=header['cellsize'],
NODATA_value=header['NODATA_value'],
array=np.ones((header['nrows'], header['ncols']))
)
params['FinalTime'] = 1
params['OutputFileStub'] = os.path.join(sim_stub, 'output')
params['RasterFileStub'] = os.path.join(sim_stub, 'rasters_{}/raster'.format(iteration))
os.makedirs(
os.path.join(sim_stub, 'rasters_{}'.format(iteration)),
exist_ok=True)
weather_stub = os.path.join(analysis_path, "InputData", "weather", "gis_m_c_")
llcorner = (header['xllcorner'], header['yllcorner'])
urcorner = (header['xllcorner'] + (header['ncols'] - 0.5) * header['cellsize'],
header['yllcorner'] + (header['nrows'] - 0.5) * header['cellsize'])
run_sim = IndividualSimulator.Simulator(params)
run_sim.setup()
run_sim.initialise()
IndividualSimulator.code.outputdata.output_raster_data(
run_sim, time=0, iteration=iteration, states=['S', 'I'])
overall_week = 1
# Choose random cell to infect
if infect_cell is not None:
run_sim.params['init_cells'][infect_cell].states['I'] = 1
run_sim.params['init_cells'][infect_cell].states['S'] -= 1
print('Infected cell {}'.format(infect_cell))
for year in range(1990, 1990+num_years):
for week in range(1, 29):
# Extract correct region of weather file
weather_raster = raster_tools.extract_raster(
weather_stub + str(year) + "_" + str(week) + ".txt", llcorner, urcorner)
susceptibility_raster.array = weather_raster.array
# Set mixed evergreen forest type inactive for first 6 weeks
if week < 7:
susceptibility_raster.array[forest_mask.array == 2] = 0
IndividualSimulator.code.hosts.read_sus_inf_files(
run_sim.params['init_cells'], run_sim.params['header'],
susceptibility_raster, susceptibility_raster,
sim_type=run_sim.params['SimulationType'])
# Calculate intial rates
init_inf_rates = np.zeros(run_sim.params['ncells'])
if run_sim.params['VirtualSporulationStart'] is not None:
init_spore_rates = np.zeros(run_sim.params['ncells'])
init_adv_rates = np.zeros(run_sim.params['nhosts'])
for cell in run_sim.params['init_cells']:
for host in cell.hosts:
current_state = host.state
if current_state in "ECDI":
init_adv_rates[host.host_id] = run_sim.params[current_state + 'AdvRate']
if (cell.states["C"] + cell.states["I"]) > 0:
for cell2_rel_pos in run_sim.params['coupled_positions']:
cell2_pos = tuple(item1 + item2 for item1, item2
in zip(cell.cell_position, cell2_rel_pos))
cell2_id = run_sim.params['cell_map'].get(cell2_pos, None)
if cell2_id is None:
continue
cell2 = run_sim.params['init_cells'][cell2_id]
init_inf_rates[cell2_id] += (
cell2.susceptibility * cell2.states["S"] *
(cell.states["C"] + cell.states["I"]) * cell.infectiousness *
run_sim.event_handler.kernel(cell2_rel_pos) /
run_sim.params['MaxHosts'])
if run_sim.params['VirtualSporulationStart'] is not None:
init_spore_rates[cell.cell_id] = (
cell.states["C"] + cell.states["I"]) * cell.infectiousness
run_sim.params['init_inf_rates'] = init_inf_rates
if run_sim.params['VirtualSporulationStart'] is not None:
run_sim.params['init_spore_rates'] = init_spore_rates
run_sim.params['init_adv_rates'] = init_adv_rates
run_sim.initialise(silent=True)
hosts, cells, _ = run_sim.run_epidemic(silent=True)
# Set final state as new initial conditions
run_sim.params['init_hosts'] = hosts
run_sim.params['init_cells'] = cells
# Output rasters for this week
IndividualSimulator.code.outputdata.output_raster_data(
run_sim, time=overall_week, iteration=iteration, states=['S', 'I'])
overall_week += 1
cells_infected = np.sum([1 for x in cells if x.states["I"] > 0])
hosts_infected = np.sum([(x.states["C"] + x.states["I"]) for x in cells])
print("Year {} done. {} cells infected, {} hosts infected".format(
year, cells_infected, hosts_infected))
return cells_infected, hosts_infected
def run_weather_avg_sim(sim_stub, num_years, landscape_name="ROI_Landscape", infect_cell=None, iteration=0):
"""Run full simulation for num_years, using variable forest mask and averaged weather data.
Note weather data will start from 1990.
"""
analysis_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
params = IndividualSimulator.code.config.read_config_file(
filename=os.path.join(analysis_path, "InputData", "REDW_config.ini"))
forest_mask = raster_tools.RasterData.from_file(os.path.join(
analysis_path, "GeneratedData", landscape_name, "ForestMask.txt"))
forest_mask.array[forest_mask.array == -9999] = np.nan
weather_raster = raster_tools.RasterData.from_file(os.path.join(
analysis_path, "GeneratedData", landscape_name, "RMSWeather.txt"))
weather_raster.array[weather_raster.array == -9999] = np.nan
header = forest_mask.header_vals
susceptibility_raster = raster_tools.RasterData(
shape=(header['nrows'], header['ncols']),
llcorner=(header['xllcorner'], header['yllcorner']),
cellsize=header['cellsize'],
NODATA_value=header['NODATA_value'],
array=np.ones((header['nrows'], header['ncols']))
)
params['FinalTime'] = 1
params['OutputFileStub'] = os.path.join(sim_stub, 'output')
params['RasterFileStub'] = os.path.join(sim_stub, 'rasters_{}/raster'.format(iteration))
os.makedirs(
os.path.join(sim_stub, 'rasters_{}/raster'.format(iteration)),
exist_ok=True)
params['RasterStatesOutput'] = 'SI'
params['RasterOutputFreq'] = 0
run_sim = IndividualSimulator.Simulator(params)
run_sim.setup()
run_sim.initialise()
IndividualSimulator.code.outputdata.output_raster_data(
run_sim, time=0, iteration=iteration, states=['S', 'I'])
overall_week = 1
# Choose cell to infect
if infect_cell is not None:
run_sim.params['init_cells'][infect_cell].states['I'] = 1
run_sim.params['init_cells'][infect_cell].states['S'] -= 1
print('Infected cell {}'.format(infect_cell))
for year in range(1990, 1990+num_years):
for week in range(1, 29):
susceptibility_raster.array = weather_raster.array
# Set mixed evergreen forest type inactive for first 6 weeks
if week < 7:
susceptibility_raster.array[forest_mask.array == 2] = 0
IndividualSimulator.code.hosts.read_sus_inf_files(
run_sim.params['init_cells'], run_sim.params['header'],
susceptibility_raster, susceptibility_raster,
sim_type=run_sim.params['SimulationType'])
# Calculate intial rates
init_inf_rates = np.zeros(run_sim.params['ncells'])
if run_sim.params['VirtualSporulationStart'] is not None:
init_spore_rates = np.zeros(run_sim.params['ncells'])
init_adv_rates = np.zeros(run_sim.params['nhosts'])
for cell in run_sim.params['init_cells']:
for host in cell.hosts:
current_state = host.state
if current_state in "ECDI":
init_adv_rates[host.host_id] = run_sim.params[current_state + 'AdvRate']
if (cell.states["C"] + cell.states["I"]) > 0:
for cell2_rel_pos in run_sim.params['coupled_positions']:
cell2_pos = tuple(item1 + item2 for item1, item2
in zip(cell.cell_position, cell2_rel_pos))
cell2_id = run_sim.params['cell_map'].get(cell2_pos, None)
if cell2_id is None:
continue
cell2 = run_sim.params['init_cells'][cell2_id]
init_inf_rates[cell2_id] += (
cell2.susceptibility * cell2.states["S"] *
(cell.states["C"] + cell.states["I"]) * cell.infectiousness *
run_sim.event_handler.kernel(cell2_rel_pos) /
run_sim.params['MaxHosts'])
if run_sim.params['VirtualSporulationStart'] is not None:
init_spore_rates[cell.cell_id] = (
cell.states["C"] + cell.states["I"]) * cell.infectiousness
run_sim.params['init_inf_rates'] = init_inf_rates
if run_sim.params['VirtualSporulationStart'] is not None:
run_sim.params['init_spore_rates'] = init_spore_rates
run_sim.params['init_adv_rates'] = init_adv_rates
run_sim.initialise(silent=True)
hosts, cells, _ = run_sim.run_epidemic(silent=True)
# Set final state as new initial conditions
run_sim.params['init_hosts'] = hosts
run_sim.params['init_cells'] = cells
# Output rasters for this week
IndividualSimulator.code.outputdata.output_raster_data(
run_sim, time=overall_week, iteration=iteration, states=['S', 'I'])
overall_week += 1
cells_infected = np.sum([1 for x in cells if x.states["I"] > 0])
hosts_infected = np.sum([(x.states["C"] + x.states["I"]) for x in cells])
print("Year {} done. {} cells infected, {} hosts infected".format(
year, cells_infected, hosts_infected))
return cells_infected, hosts_infected
| 45.215054 | 108 | 0.623543 | 1,494 | 12,615 | 5.037483 | 0.140562 | 0.047834 | 0.070157 | 0.042519 | 0.888786 | 0.853973 | 0.840021 | 0.840021 | 0.840021 | 0.837231 | 0 | 0.011848 | 0.257313 | 12,615 | 278 | 109 | 45.377698 | 0.79144 | 0.064289 | 0 | 0.815 | 0 | 0 | 0.137133 | 0.023479 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015 | false | 0 | 0.045 | 0 | 0.07 | 0.02 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
06779039844cfb20c036ba874b6a6154461a3e24 | 1,866 | py | Python | networkit/test/test_graph.py | tsapko3628/networkit | 2953c9f30b676f930e301953f00f014c47edcf69 | [
"MIT"
] | 1 | 2019-08-15T10:35:07.000Z | 2019-08-15T10:35:07.000Z | networkit/test/test_graph.py | tsapko3628/networkit | 2953c9f30b676f930e301953f00f014c47edcf69 | [
"MIT"
] | null | null | null | networkit/test/test_graph.py | tsapko3628/networkit | 2953c9f30b676f930e301953f00f014c47edcf69 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import unittest
import networkit as nk
class TestGraph(unittest.TestCase):
    """Tests for Graph.subgraphFromNodes on directed and undirected graphs."""

    def _checkCounts(self, subgraph, expectedNodes, expectedEdges):
        # Helper: verify node and edge counts of an extracted subgraph.
        self.assertEqual(subgraph.numberOfNodes(), expectedNodes)
        self.assertEqual(subgraph.numberOfEdges(), expectedEdges)

    def testSubgraphFromNodes(self):
        # Directed, weighted graph on 4 nodes.
        G = nk.Graph(4, True, True)
        for u, v, w in [(0, 1, 1.0), (0, 2, 2.0), (3, 1, 4.0),
                        (3, 2, 5.0), (1, 2, 3.0)]:
            G.addEdge(u, v, w)

        res = G.subgraphFromNodes([0])
        self.assertTrue(res.isWeighted())
        self.assertTrue(res.isDirected())
        self._checkCounts(res, 1, 0)

        self._checkCounts(G.subgraphFromNodes([0], True), 3, 2)
        self._checkCounts(G.subgraphFromNodes([0, 1]), 2, 1)
        self._checkCounts(G.subgraphFromNodes([0, 1], True), 3, 3)
        self._checkCounts(G.subgraphFromNodes([0, 1], True, True), 4, 4)

        # Undirected version of the same graph.
        G = G.toUndirected()
        res = G.subgraphFromNodes([0])
        self.assertTrue(res.isWeighted())
        self.assertFalse(res.isDirected())
        self._checkCounts(res, 1, 0)

        self._checkCounts(G.subgraphFromNodes([0], True), 3, 2)
        # NOTE(review): these check weights on G, not on the subgraph `res`;
        # presumably `res.weight(...)` was intended -- confirm against upstream.
        self.assertEqual(G.weight(0, 1), 1.0)
        self.assertEqual(G.weight(0, 2), 2.0)

        self._checkCounts(G.subgraphFromNodes([0, 1]), 2, 1)
        self._checkCounts(G.subgraphFromNodes([0, 1], True), 4, 4)
        self._checkCounts(G.subgraphFromNodes(set([0, 1]), True), 4, 4)
# Allow running this test module directly, e.g. `python test_graph.py`.
if __name__ == "__main__":
    unittest.main()
| 27.850746 | 47 | 0.698821 | 249 | 1,866 | 5.204819 | 0.168675 | 0.25463 | 0.277778 | 0.239198 | 0.777778 | 0.740741 | 0.719907 | 0.719907 | 0.719907 | 0.638117 | 0 | 0.040665 | 0.130225 | 1,866 | 66 | 48 | 28.272727 | 0.757856 | 0.021972 | 0 | 0.591837 | 0 | 0 | 0.004391 | 0 | 0 | 0 | 0 | 0 | 0.530612 | 1 | 0.020408 | false | 0 | 0.040816 | 0 | 0.081633 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ebfa223e376b6bf483aa3255cc44c1e8cd07a35c | 184 | py | Python | simulation/environment/__init__.py | BillMakwae/Simulation | 8d0ec274643f23bc0e78c96e50508b60791c11d2 | [
"MIT"
] | 8 | 2020-03-29T01:44:16.000Z | 2022-03-26T23:15:34.000Z | simulation/environment/__init__.py | BillMakwae/Simulation | 8d0ec274643f23bc0e78c96e50508b60791c11d2 | [
"MIT"
] | 60 | 2020-02-08T22:07:16.000Z | 2022-03-26T23:51:55.000Z | simulation/environment/__init__.py | BillMakwae/Simulation | 8d0ec274643f23bc0e78c96e50508b60791c11d2 | [
"MIT"
] | 1 | 2021-10-20T20:07:06.000Z | 2021-10-20T20:07:06.000Z |
from simulation.environment.GIS import GIS
from simulation.environment.SolarCalculations import SolarCalculations
from simulation.environment.WeatherForecasts import WeatherForecasts
| 36.8 | 70 | 0.896739 | 18 | 184 | 9.166667 | 0.388889 | 0.254545 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070652 | 184 | 4 | 71 | 46 | 0.964912 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
88ec77c33f3ef5cbdf8666521708ac7598517e59 | 69 | py | Python | python/github_com/TheThingsNetwork/api/networkserver/__init__.py | LukasHabring/api | 9e3da3462f14dab4c45fa38b03335e85e1970833 | [
"MIT"
] | 14 | 2017-07-14T16:11:54.000Z | 2021-11-16T12:35:37.000Z | python/github_com/TheThingsNetwork/api/networkserver/__init__.py | LukasHabring/api | 9e3da3462f14dab4c45fa38b03335e85e1970833 | [
"MIT"
] | 34 | 2017-07-14T15:15:13.000Z | 2021-08-18T10:08:10.000Z | python/github_com/TheThingsNetwork/api/networkserver/__init__.py | LukasHabring/api | 9e3da3462f14dab4c45fa38b03335e85e1970833 | [
"MIT"
] | 12 | 2017-07-25T16:13:16.000Z | 2021-05-08T07:21:50.000Z | from networkserver_pb2_grpc import *
from networkserver_pb2 import *
| 23 | 36 | 0.855072 | 9 | 69 | 6.222222 | 0.555556 | 0.607143 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032787 | 0.115942 | 69 | 2 | 37 | 34.5 | 0.885246 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
88ed9a69864e16ef8f74ea36261e95d57067bab5 | 128 | py | Python | python/ql/test/experimental/dataflow/import-helper/test4.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | [
"MIT"
] | 2 | 2021-06-13T07:04:31.000Z | 2021-06-13T07:04:34.000Z | python/ql/test/experimental/dataflow/import-helper/test4.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | [
"MIT"
] | 1 | 2021-02-23T13:27:53.000Z | 2021-02-23T13:27:53.000Z | python/ql/test/experimental/dataflow/import-helper/test4.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | [
"MIT"
] | null | null | null | import mypkg.foo as _foo
import mypkg.bar as _bar
# Expected-output annotations for this dataflow/import-helper test case.
# NOTE(review): `_foo` is bound to `mypkg.foo`, yet the comment below expects
# 'mypkg.bar'; either mypkg/__init__.py rebinds the submodule or this is a
# copy-paste slip -- confirm against the surrounding test suite.
print(_foo) # <module 'mypkg.bar' ...
print(_bar) # <module 'mypkg.bar' ...
| 25.6 | 38 | 0.671875 | 20 | 128 | 4.1 | 0.35 | 0.292683 | 0.341463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15625 | 128 | 4 | 39 | 32 | 0.759259 | 0.367188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
cc536c6597b29c195da475e419f41bdd395cc697 | 32,642 | py | Python | attgconv/layers.py | dwromero/att_gconvs | 872259cad49763fdcfa3e96e80b6b5c331adf084 | [
"MIT"
] | 53 | 2020-07-07T11:06:30.000Z | 2022-03-26T02:42:49.000Z | attgconv/layers.py | dwromero/att_gconvs | 872259cad49763fdcfa3e96e80b6b5c331adf084 | [
"MIT"
] | null | null | null | attgconv/layers.py | dwromero/att_gconvs | 872259cad49763fdcfa3e96e80b6b5c331adf084 | [
"MIT"
] | 2 | 2020-09-19T12:10:33.000Z | 2020-10-29T19:37:08.000Z | # Based on implementation from Bekkers (2020) - B-Spline CNNs on Lie Groups.
import torch
import numpy as np
import math
# Start of (Parent Class)
class layers(torch.nn.Module):
    """Factory for group (attentive) convolution and pooling layers.

    Wraps a ``group`` object exposing the full group ``G``, its spatial part
    ``Rn`` and the subgroup ``H``, and provides constructors for the concrete
    layer classes defined in this module.
    """

    # TODO include dilation everywhere

    def __init__(self, group):
        super(layers, self).__init__()
        self.group = group
        self.G = group.G
        self.Rn = group.Rn
        self.H = group.H

    def ConvRnRn(self, N_in, N_out, kernel_size, stride=1, padding=1,
                 dilation=1, conv_groups=1, wscale=1.0):
        """Plain spatial (Rn -> Rn) convolution layer."""
        return ConvRnRnLayer(self.group, N_in, N_out, kernel_size,
                             stride, padding, dilation, conv_groups, wscale)

    def ConvRnG(self, N_in, N_out, kernel_size, h_grid, stride=1, padding=1,
                dilation=1, conv_groups=1, wscale=1.0):
        """Lifting (Rn -> G) convolution; the output gains an H axis sampled
        on ``h_grid`` (see H.grid for details)."""
        return ConvRnGLayer(self.group, N_in, N_out, kernel_size, h_grid,
                            stride, padding, dilation, conv_groups, wscale)

    def ConvGG(self, N_in, N_out, kernel_size, h_grid, input_h_grid=None,
               stride=1, padding=1, dilation=1, conv_groups=1, wscale=1.0):
        """Group (G -> G) convolution. ``input_h_grid`` covers the case where
        the input H sampling differs from the intended output grid."""
        return ConvGGLayer(self.group, N_in, N_out, kernel_size, h_grid,
                           input_h_grid, stride, padding, dilation,
                           conv_groups, wscale)

    def AttConvRnG(self, N_in, N_out, kernel_size, h_grid,
                   channel_attention=None, spatial_attention=None,
                   stride=1, padding=1, dilation=1, wscale=1.0):
        """Attentive lifting convolution; optional channel-/spatial-wise
        attention modules act on the per-input-channel responses."""
        return AttConvRnGLayer(self.group, N_in, N_out, kernel_size, h_grid,
                               channel_attention, spatial_attention,
                               stride, padding, dilation, wscale)

    def AttConvGG(self, N_in, N_out, kernel_size, h_grid,
                  channel_attention=None, spatial_attention=None,
                  input_h_grid=None, stride=1, padding=1, dilation=1,
                  wscale=1.0):
        """Attentive group convolution (see AttConvRnG for the attention
        arguments and ConvGG for the grid arguments)."""
        return AttConvGGLayer(self.group, N_in, N_out, kernel_size, h_grid,
                              channel_attention, spatial_attention,
                              input_h_grid, stride, padding, dilation, wscale)

    def fAttConvRnG(self, N_in, N_out, kernel_size, h_grid,
                    channel_attention=None, spatial_attention=None,
                    stride=1, padding=1, wscale=1.0):
        """Lifting convolution with attention applied to the input feature map
        (rather than to the unfolded per-channel responses)."""
        return fAttConvRnGLayer(self.group, N_in, N_out, kernel_size, h_grid,
                                channel_attention, spatial_attention,
                                stride, padding, wscale)

    def fAttConvGG(self, N_in, N_out, kernel_size, h_grid,
                   channel_attention=None, spatial_attention=None,
                   input_h_grid=None, stride=1, padding=1, wscale=1.0):
        """Group convolution with attention applied to the input feature map."""
        return fAttConvGGLayer(self.group, N_in, N_out, kernel_size, h_grid,
                               channel_attention, spatial_attention,
                               input_h_grid, stride, padding, wscale)

    def max_pooling_Rn(self, input, kernel_size, stride, padding=1):
        """Spatial max pooling over the last two axes of a [B, C, N_h, X, Y] map.

        The C and N_h axes are folded together so a single 2D pooling call
        handles every H-channel; the result is unfolded back afterwards.
        """
        b, c, nh, x, y = input.size()
        flat = input.view(b, c * nh, x, y)
        pooled = torch.max_pool2d(flat, kernel_size=kernel_size,
                                  stride=stride, padding=padding)
        return pooled.view(b, c, nh, pooled.size()[2], pooled.size()[3])

    def average_pooling_Rn(self, input, kernel_size, stride, padding=1):
        """Spatial average pooling over the last two axes of a
        [B, C, N_h, X, Y] map (same folding trick as max_pooling_Rn)."""
        b, c, nh, x, y = input.size()
        flat = input.view(b, c * nh, x, y)
        pooled = torch.nn.functional.avg_pool2d(flat, kernel_size=kernel_size,
                                                stride=stride, padding=padding)
        return pooled.view(b, c, nh, pooled.size()[2], pooled.size()[3])
##########################################################################
############################## ConvRnRnLayer #############################
##########################################################################
class ConvRnRnLayer(torch.nn.Module):
    """Plain spatial convolution layer (Rn -> Rn) with a group-aware kernel.

    The trainable kernel lives on Rn ([N_out, N_in, k, k]); ``kernel(h)``
    returns the copy transformed by a subgroup element h, normalized by the
    absolute determinant of h.
    """

    def __init__(self, group, N_in, N_out, kernel_size, stride, padding,
                 dilation, conv_groups, wscale):
        super(ConvRnRnLayer, self).__init__()
        # Validate and record every construction argument.
        self.kernel_type = 'Rn'
        self._assert_and_set_inputs(group, N_in, N_out, kernel_size,
                                    stride, padding, dilation, conv_groups)
        # Trainable kernel of shape [N_out, N_in, k, k], uniformly initialized.
        self.weight = torch.nn.Parameter(
            torch.Tensor(self.N_out, self.N_in, kernel_size, kernel_size))
        self._reset_parameters(wscale=wscale)

    ########################### Assert and set inputs ########################
    def _assert_and_set_inputs(self, group, N_in, N_out, kernel_size, stride,
                               padding, dilation, conv_groups):
        self._assert_and_set_inputs_RnRn(group, N_in, N_out, kernel_size,
                                         stride, padding, dilation, conv_groups)

    def _assert_and_set_inputs_RnRn(self, group, N_in, N_out, kernel_size,
                                    stride, padding, dilation, conv_groups):
        # Group structure shared with the parent factory class.
        self.group = group
        self.G = group.G
        self.H = group.H
        self.Rn = group.Rn
        # Mandatory arguments (validated).
        self.N_in = self._assert_N_in(N_in)
        self.N_out = self._assert_N_out(N_out)
        self.kernel_size = self._assert_kernel_size(kernel_size)
        # Optional arguments.
        self.conv_groups = self._assert_conv_groups(conv_groups)
        self.stride = stride
        self.padding = padding
        self.dilation = dilation

    def _assert_N_in(self, N_in):
        assert isinstance(N_in, int), "The specified argument \"N_in\" should be an integer."
        return N_in

    def _assert_N_out(self, N_out):
        assert isinstance(N_out, int), "The specified argument \"N_out\" should be an integer."
        return N_out

    def _assert_kernel_size(self, kernel_size):
        assert isinstance(kernel_size, int), "The specified argument \"kernel_size\" should be an integer."
        return kernel_size

    def _assert_conv_groups(self, conv_groups):
        assert isinstance(conv_groups, int), "The specified argument \"conv_groups\" should be an integer."
        return conv_groups

    ############################ Compute the output ##########################
    def kernel(self, h=None):
        """Sample the kernel transformed by group element h (identity if None),
        scaled by 1/|det h| to preserve the integral under the transformation."""
        h = self.H.e if h is None else h
        transformed = self.H.left_representation_on_Rn(h, self.weight)
        return (1 / self.H.absdet(h)) * transformed

    def forward(self, input):
        return self.conv_Rn_Rn(input)

    def conv_Rn_Rn(self, input):
        # Ordinary 2D convolution with the identity-transformed kernel.
        return torch.conv2d(input=input,
                            weight=self.kernel(self.H.e),
                            bias=None,
                            stride=self.stride,
                            padding=self.padding,
                            dilation=self.dilation,
                            groups=self.conv_groups)

    def _reset_parameters(self, wscale):
        # Uniform init scaled by wscale: stdv = wscale / sqrt(fan_in),
        # with fan_in = N_in * k^2.
        fan_in = self.N_in * self.kernel_size ** 2
        stdv = wscale * (1. / math.sqrt(fan_in))
        self.stdv = stdv
        self.weight.data.uniform_(-stdv, stdv)
        # TODO: add an optional bias and initialize it uniformly like the weight.
##########################################################################
############################## ConvRnGLayer ##############################
##########################################################################
# Start of lifting_layer class
class ConvRnGLayer(ConvRnRnLayer, torch.nn.Module):
    """Lifting convolution layer (Rn -> G).

    Convolves the Rn input with every h-transformed copy of the Rn kernel
    (one copy per element of ``h_grid``), producing an output with an extra
    H axis: [B, N_out, N_h, X', Y'].
    """

    def __init__(self,
                 group,
                 N_in,
                 N_out,
                 kernel_size,
                 h_grid,
                 stride,
                 padding,
                 dilation,
                 conv_groups,
                 wscale):
        torch.nn.Module.__init__(self)
        ## Assert and set inputs
        self.kernel_type = 'Rn'
        self._assert_and_set_inputs(group, N_in, N_out, kernel_size, h_grid, stride, padding, dilation, conv_groups)
        ## Construct the trainable weights (an Rn kernel) and initialize them.
        self.weight = torch.nn.Parameter(torch.Tensor(self.N_out, self.N_in, kernel_size, kernel_size))
        self._reset_parameters(wscale=wscale)

    ########################### Assert and set inputs ########################
    # Method overriding:
    def _assert_and_set_inputs(self, group, N_in, N_out, kernel_size, h_grid, stride, padding, dilation, conv_groups):
        # Default Rn assertions (inherited from ConvRnRnLayer).
        self._assert_and_set_inputs_RnRn(group, N_in, N_out, kernel_size, stride, padding, dilation, conv_groups)
        # Lifting-specific initialization/assertion.
        self.h_grid = self._assert_h_grid(h_grid)
        # Number of sampled H elements = size of the new output axis.
        self.N_h = int(self.h_grid.grid.shape[0])

    def _assert_h_grid(self, h_grid ):
        # h_grid.grid must be a 2D tensor: a list of H elements, each a vector
        # of length H.n.
        assert (len(h_grid.grid.shape) == 2), "The \"h_grid\" option value should be a grid object with h_grid.grid a tensor of dimension 2 (a list of group elements)."
        assert (h_grid.grid.shape[-1] == self.H.n), "The group element specification in \"h_grid\" is not correct. For the current group \"{}\" each group element should be a vector of length {}.".format(self.H.name,self.H.n)
        return h_grid

    ############################ Compute the output ##########################
    # Method overriding:
    def forward(self, input):
        return self.conv_Rn_G(input)

    def conv_Rn_G(self, input):
        """Apply all N_h transformed kernels in one conv2d call and unfold the
        result into a [B, N_out, N_h, X', Y'] feature map."""
        # Generate the full stack of convolution kernels (all transformed copies).
        kernel_stack = torch.cat([self.kernel(self.h_grid.grid[i]) for i in range(self.N_h)], dim=0) # [N_out x N_h, N_in, X, Y]
        # And apply them all at once.
        output = torch.conv2d(
            input=input,
            weight=kernel_stack,
            bias=None,
            stride=self.stride,
            padding=self.padding,
            dilation=self.dilation,
            groups=self.conv_groups)
        # Reshape the last channel to create a vector-valued RnxH feature map:
        # split the [B, N_h*N_out, X', Y'] output into N_h chunks of N_out
        # channels and stack them on a new axis 2.
        output = torch.stack(torch.split(output, self.N_out, 1), 2)
        # NOTE(review): the commented block below is an equivalent variant of
        # the kernel-stack construction, kept for reference.
        #kernel_stack = torch.stack([self.kernel(self.h_grid.grid[i]) for i in range(self.N_h)], dim=1)
        # ks = kernel_stack.shape
        # kernel_stack = torch.reshape(kernel_stack, [ks[0] * ks[1], ks[2], ks[-2], ks[-1]])
        # output_2 = torch.conv2d(
        #     input=input,
        #     weight=kernel_stack,
        #     bias=None,
        #     stride=self.stride,
        #     padding=self.padding,
        #     dilation=self.dilation,
        #     groups=self.conv_groups)
        # output_2=output_2.reshape(output_2.shape[0], self.N_out, self.N_h, output_2.shape[-2], output_2.shape[-1])
        # Return the output.
        return output
##########################################################################
############################### ConvGGLayer ##############################
##########################################################################
# Start of group_conv class
class ConvGGLayer(ConvRnGLayer, torch.nn.Module):
    """Group convolution layer (G -> G).

    Both input and output are functions on G = Rn x H; the trainable kernel
    carries an extra input H axis: [N_out, N_in, N_h_in, k, k].
    """

    def __init__(
            self,
            group,
            N_in,
            N_out,
            kernel_size,
            h_grid,
            input_h_grid,
            stride,
            padding,
            dilation,
            conv_groups,
            wscale
    ):
        torch.nn.Module.__init__(self)
        ## Assert and set inputs
        self.kernel_type = 'G'
        self._assert_and_set_inputs(group, N_in, N_out, kernel_size, h_grid, input_h_grid, stride, padding, dilation, conv_groups)
        ## Construct the trainable weights and initialize them.
        # BUGFIX: use self.N_h_in (set by _assert_and_set_inputs, which
        # substitutes h_grid when input_h_grid is None) instead of the raw
        # input_h_grid argument, which crashed with AttributeError whenever
        # the documented default input_h_grid=None was used.
        self.weight = torch.nn.Parameter(torch.Tensor(self.N_out, self.N_in, self.N_h_in, self.kernel_size, self.kernel_size))
        self._reset_parameters(wscale=wscale)

    ########################### Assert and set inputs ########################
    # Method overriding:
    def _assert_and_set_inputs(self, group, N_in, N_out, kernel_size, h_grid, input_h_grid, stride, padding, dilation, conv_groups):
        # Default assertions (shared with the Rn layers).
        self._assert_and_set_inputs_GG(group, N_in, N_out, kernel_size, stride, padding, dilation, conv_groups)
        # G-specific initialization/assertion.
        self.h_grid = self._assert_h_grid(h_grid)
        self.input_h_grid = self._assert_input_h_grid(input_h_grid)
        self.N_h = int(self.h_grid.grid.shape[0])  # Target (output) H sampling
        self.N_h_in = int(self.input_h_grid.grid.shape[0])  # Input H sampling

    def _assert_and_set_inputs_GG(self, group, N_in, N_out, kernel_size, stride, padding, dilation, conv_groups):
        # Identical checks to the Rn case; delegate to avoid duplication.
        self._assert_and_set_inputs_RnRn(group, N_in, N_out, kernel_size, stride, padding, dilation, conv_groups)

    def _assert_input_h_grid( self, input_h_grid ):
        # The input grid defaults to the output grid when not given.
        if (input_h_grid is None):
            return self.h_grid
        else:
            assert (len(input_h_grid.grid.shape) == 2), "The \"input_h_grid\" option value should be a grid object with input_h_grid.grid a tensorflow tensor of dimension 2 (a list of group elements)."
            assert (input_h_grid.grid.shape[-1] == self.H.n), "The group element specification in \"input_h_grid\" is not correct. For the current group \"{}\" each group element should be a vector of length {}.".format(self.H.name,self.H.n)
            return input_h_grid

    ############################ Compute the output ##########################
    # Method overriding:
    def forward(self, input):
        return self.conv_G_G(input)

    # Method overriding:
    def kernel(self, h=None):
        """Sample the G kernel transformed by group element h (identity if None)."""
        if h is None:
            h = self.H.e
        # Shortcut: at the identity no transformation (or normalization) is needed.
        # (`h == self.H.e` is an element-wise comparison; the containment test
        # checks that every component matches.)
        if not False in (h == self.H.e):
            return self.weight
        h_weight = self.H.left_representation_on_G(h, self.weight)
        return (1 / self.H.absdet(h)) * h_weight

    def conv_G_G(self, input):
        """Apply the group convolution by flattening the H axes into channels
        and using a single conv2d call."""
        # Generate the full stack of convolution kernels (all transformed copies).
        kernel_stack = torch.cat([self.kernel(self.h_grid.grid[i]) for i in range(self.N_h)], dim=0) # [N_out x N_h, N_in, N_h_in, Nxy_x, Nxy_y]
        # Reshape input tensor and kernel as if they were Rn tensors.
        kernel_stack_as_if_Rn = torch.reshape(kernel_stack, [self.N_h * self.N_out, self.N_in * self.N_h_in, self.kernel_size, self.kernel_size])
        input_tensor_as_if_Rn = torch.reshape(input, [input.shape[0], self.N_in * self.N_h_in, input.shape[-2], input.shape[-1]])
        # And apply them all at once.
        output = torch.conv2d(
            input=input_tensor_as_if_Rn,
            weight=kernel_stack_as_if_Rn,
            stride=self.stride,
            padding=self.padding,
            dilation=self.dilation,
            groups=self.conv_groups)
        # Reshape the last channel to create a vector-valued RnxH feature map.
        output = torch.stack(torch.split(output, self.N_out, 1), 2)
        # The above includes integration over the subgroup; scale by the Haar
        # measure to account for the discretization of H.
        output = self.group.H.haar_meas * output
        # Return the output.
        return output
###############################################################################
########################### AttentiveConvRnGLayer #############################
###############################################################################
# Start of lifting_layer class
class AttConvRnGLayer(ConvRnGLayer):
    """Attentive lifting convolution layer (Rn -> G).

    Identical to ConvRnGLayer except that the per-input-channel responses are
    kept separate (via conv groups) so channel- and spatial-attention modules
    can re-weight them before the sum over input channels.
    """

    # The initialization of both layers is equal up to the additional parameters.
    def __init__(self,
                 group,
                 N_in,
                 N_out,
                 kernel_size,
                 h_grid,
                 channel_attention,
                 spatial_attention,
                 stride,
                 padding,
                 dilation,
                 wscale):
        # conv_groups=N_in makes the convolution depthwise over input channels,
        # so each input channel's contribution stays separate for attention.
        super(AttConvRnGLayer, self).__init__(group, N_in, N_out, kernel_size, h_grid, stride, padding, dilation, conv_groups=N_in, wscale=wscale)
        # Optional attention modules; either may be None.
        self.channel_attention = channel_attention
        self.spatial_attention = spatial_attention

    ############################ Compute the output ##########################
    # Method overriding:
    def forward(self, input):
        return self.att_conv_Rn_G(input)

    def att_conv_Rn_G(self, input):
        """Depthwise lifting convolution, attention re-weighting, then sum over
        the input-channel axis. Output: [B, N_out, N_h, X', Y']."""
        # Generate the full stack of convolution kernels (all transformed copies).
        kernel_stack = torch.stack([self.kernel(self.h_grid.grid[i]) for i in range(self.N_h)], dim=1)
        kernel_stack = torch.reshape(kernel_stack, [self.N_out * self.N_h, self.N_in, kernel_stack.shape[-2], kernel_stack.shape[-1]])
        kernel_stack = torch.transpose(kernel_stack, 0, 1) # Required for convolutions with parameter group to work accurately.
        # One single-input-channel filter per (N_out, N_in, N_h) combination.
        kernel_stack_for_conv_groups = torch.reshape(kernel_stack, [self.N_out * self.N_in * self.N_h, 1, self.weight.shape[-2], self.weight.shape[-1]])
        # And apply them all at once.
        output = torch.conv2d(
            input=input,
            weight=kernel_stack_for_conv_groups,
            bias=None,
            stride=self.stride,
            padding=self.padding,
            dilation=self.dilation,
            groups=self.conv_groups)
        # Reshape the last channel to create a vector-valued RnxH feature map
        # with the per-input-channel axis exposed: [B, N_out, N_h, N_in, X', Y'].
        output = torch.reshape(output, [output.shape[0], self.N_in, self.N_out * self.N_h, output.shape[-2], output.shape[-1]])
        output = torch.transpose(output, 1, 2) # Required for convolutions with parameter group to work accurately.
        output = torch.reshape(output, [output.shape[0], self.N_out, self.N_h, self.N_in, output.shape[-2], output.shape[-1]])
        # Perform attention.
        # Check if there's an attention mechanism. Otherwise, omit.
        if (self.channel_attention is not None) or (self.spatial_attention is not None):
            # Channel-wise attention.
            if (self.channel_attention is not None):
                output_att = self._perform_channel_attention(output)
            else: output_att = output
            # Spatial-wise attention.
            if (self.spatial_attention is not None):
                output_att = self._perform_spatial_attention(output_att)
            # Check out with responses -- to ensure good gradient propagation we
            # model output * (1 - opposite_attention) as a subtraction.
            output = output - output_att
        # Sum over the attended dimension N_in.
        output = output.sum(dim=-3)
        # Return the output.
        return output

    def _perform_channel_attention(self, input):
        # Multiply the responses by their channel attention map.
        output = self.channel_attention(input)
        output = output * input
        return output

    def _perform_spatial_attention(self, input):
        # Multiply the responses by their spatial attention map.
        output = self.spatial_attention(input)
        # Visualize TODO -- NOTE(review): debug snippet below hard-codes a
        # batch of 128 and 26x26 maps; do not enable as-is.
        #import matplotlib.pyplot as plt
        #plt.figure()
        #plt.imshow(output.detach().cpu().reshape([128, -1, 26, 26]).mean(-3).numpy()[1, :, :])
        #plt.show()
        output = output * input
        return output
##########################################################################
########################## AttentiveConvGGLayer ##########################
##########################################################################
# Start of group_conv class
class AttConvGGLayer(ConvGGLayer, torch.nn.Module):
    """Attentive group convolution layer (G -> G).

    Like ConvGGLayer, but the per-(input-channel, input-H) responses are kept
    separate (via conv groups) so attention modules can re-weight them before
    the sum over those axes.
    """

    # The initialization of both layers is equal up to the additional parameters.
    def __init__(
            self,
            group,
            N_in,
            N_out,
            kernel_size,
            h_grid,
            channel_attention,
            spatial_attention,
            input_h_grid,
            stride,
            padding,
            dilation,
            wscale):
        # conv_groups=N_in keeps input channels separate for attention.
        super(AttConvGGLayer, self).__init__(group, N_in, N_out, kernel_size, h_grid, input_h_grid, stride, padding, dilation, conv_groups=N_in, wscale=wscale)
        # Optional attention modules; either may be None.
        self.channel_attention = channel_attention
        self.spatial_attention = spatial_attention

    ############################ Compute the output ##########################
    # Method overriding:
    def forward(self, input):
        return self.att_conv_G_G(input)

    def att_conv_G_G(self, input):
        """Depthwise group convolution, attention re-weighting, then sum over
        (N_in, N_h_in). Output: [B, N_out, N_h, X', Y']."""
        # Generate the full stack of convolution kernels (all transformed copies).
        kernel_stack = torch.stack([self.kernel(self.h_grid.grid[i]) for i in range(self.N_h)], dim=1)
        kernel_stack = torch.reshape(kernel_stack, [self.N_h * self.N_out, self.N_in * self.N_h_in, self.kernel_size, self.kernel_size])
        kernel_stack = kernel_stack.transpose(0, 1) # Required for convolutions with parameter group to work accurately.
        # Reshape input tensor and kernel as if they were Rn tensors, with one
        # single-input-channel filter per (N_h, N_out, N_in, N_h_in) combination.
        kernel_stack_as_if_Rn = torch.reshape(kernel_stack, [self.N_h * self.N_out * self.N_in * self.N_h_in, 1, self.kernel_size, self.kernel_size])
        input_tensor_as_if_Rn = torch.reshape(input, [input.shape[0], self.N_in * self.N_h_in, input.shape[-2], input.shape[-1]])
        # And apply them all at once.
        output = torch.conv2d(
            input=input_tensor_as_if_Rn,
            weight=kernel_stack_as_if_Rn,
            stride=self.stride,
            padding=self.padding,
            dilation=self.dilation,
            groups=self.N_in * self.N_h_in)
        # Reshape the last channel to create a vector-valued RnxH feature map
        # with the per-input axes exposed: [B, N_out, N_h, N_in, N_h_in, X', Y'].
        output = output.reshape([output.shape[0], self.N_in * self.N_h_in, self.N_out * self.N_h, output.shape[-2], output.shape[-1]])
        output = output.transpose(1, 2) # Required for convolutions with parameter group to work accurately.
        output = output.reshape([output.shape[0], self.N_out, self.N_h, self.N_in, self.N_h_in, output.shape[-2], output.shape[-1]])
        # Perform attention.
        # Check if there's an attention mechanism. Otherwise, omit.
        if (self.channel_attention is not None) or (self.spatial_attention is not None):
            # Channel-wise attention.
            if (self.channel_attention is not None):
                output_att = self._perform_channel_attention(output)
            else: output_att = output
            # Spatial-wise attention.
            if (self.spatial_attention is not None):
                output_att = self._perform_spatial_attention(output_att)
            # Check out with responses -- to ensure good gradient propagation we
            # model output * (1 - opposite_attention) as a subtraction.
            output = output - output_att
        # Sum over the attended dimensions N_in, N_h_in.
        output = output.sum(dim=[-3, -4])
        # The above includes integration over S1; take discretization into account.
        # NOTE(review): ConvGGLayer scales by self.group.H.haar_meas instead;
        # 2*pi/N_h assumes H is the rotation group S1 -- confirm they agree.
        output = (2 * np.pi / self.N_h) * output
        # Return the output.
        return output

    def _perform_channel_attention(self, input):
        # Multiply the responses by their channel attention map.
        output = self.channel_attention(input)
        output = output * input
        return output

    def _perform_spatial_attention(self, input):
        # Multiply the responses by their spatial attention map.
        output = self.spatial_attention(input)
        output = output * input
        return output
###############################################################################
######################### FeatAttentiveConvRnGLayer ###########################
###############################################################################
# Start of lifting_layer class
class fAttConvRnGLayer(ConvRnGLayer):
    """Lifting convolution with attention applied to the *input* feature map.

    Attention is used subtractively: the layer convolves
    ``input - attended(input)``, i.e. input * (1 - opposite_attention),
    which keeps gradients flowing through unattended responses.
    """

    def __init__(self, group, N_in, N_out, kernel_size, h_grid,
                 channel_attention, spatial_attention, stride, padding, wscale):
        super(fAttConvRnGLayer, self).__init__(
            group, N_in, N_out, kernel_size, h_grid, stride, padding,
            dilation=1, conv_groups=1, wscale=wscale)
        # Optional attention modules; either may be None.
        self.channel_attention = channel_attention
        self.spatial_attention = spatial_attention

    def forward(self, input):
        return self.feat_att_conv_Rn_G(input)

    def feat_att_conv_Rn_G(self, input):
        """Attend over the input feature map, then apply the lifting convolution."""
        has_channel = self.channel_attention is not None
        has_spatial = self.spatial_attention is not None
        if has_channel or has_spatial:
            attended = self._perform_channel_attention(input) if has_channel else input
            if has_spatial:
                attended = self._perform_spatial_attention(attended)
            # Subtract the attended response (see class docstring).
            input = input - attended
        return self.conv_Rn_G(input)

    def _perform_channel_attention(self, input):
        # Scale the input by its channel attention map.
        return self.channel_attention(input) * input

    def _perform_spatial_attention(self, input):
        # Scale the input by its spatial attention map.
        return self.spatial_attention(input) * input
##########################################################################
######################## FeatAttentiveConvGGLayer ########################
##########################################################################
# Start of group_conv class
class fAttConvGGLayer(ConvGGLayer):
    """Group convolution with attention applied to the *input* feature map.

    Attention is used subtractively: the layer convolves
    ``input - attended(input)``, i.e. input * (1 - opposite_attention),
    which keeps gradients flowing through unattended responses.
    """

    def __init__(self, group, N_in, N_out, kernel_size, h_grid,
                 channel_attention, spatial_attention, input_h_grid,
                 stride, padding, wscale):
        super(fAttConvGGLayer, self).__init__(
            group, N_in, N_out, kernel_size, h_grid, input_h_grid,
            stride, padding, dilation=1, conv_groups=1, wscale=wscale)
        # Optional attention modules; either may be None.
        self.channel_attention = channel_attention
        self.spatial_attention = spatial_attention

    ############################ Compute the output ##########################
    def forward(self, input):
        return self.feat_att_conv_G_G(input)

    def feat_att_conv_G_G(self, input):
        """Attend over the input feature map, then apply the group convolution."""
        has_channel = self.channel_attention is not None
        has_spatial = self.spatial_attention is not None
        if has_channel or has_spatial:
            attended = self._perform_channel_attention(input) if has_channel else input
            if has_spatial:
                attended = self._perform_spatial_attention(attended)
            # Subtract the attended response (see class docstring).
            input = input - attended
        return self.conv_G_G(input)

    def _perform_channel_attention(self, input):
        # Scale the input by its channel attention map.
        return self.channel_attention(input) * input

    def _perform_spatial_attention(self, input):
        # Scale the input by its spatial attention map.
        return self.spatial_attention(input) * input
| 47.034582 | 241 | 0.579591 | 3,941 | 32,642 | 4.587414 | 0.071302 | 0.023231 | 0.007301 | 0.014437 | 0.887438 | 0.862658 | 0.820344 | 0.815642 | 0.808618 | 0.800819 | 0 | 0.006993 | 0.28592 | 32,642 | 693 | 242 | 47.102453 | 0.76863 | 0.241284 | 0 | 0.728665 | 0 | 0.004376 | 0.030229 | 0 | 0 | 0 | 0 | 0.001443 | 0.078775 | 1 | 0.115974 | false | 0 | 0.006565 | 0.030635 | 0.229759 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
aea45cfdb0051cba8b186948457582f65c9ac4d7 | 143 | py | Python | batchout/outputs/__init__.py | ilia-khaustov/batchout | e916a1b0bfac771e6c96d0ff2478dc3f44804a94 | [
"MIT"
] | 8 | 2019-11-05T06:54:30.000Z | 2021-12-14T14:52:24.000Z | batchout/outputs/__init__.py | ilia-khaustov/batchout | e916a1b0bfac771e6c96d0ff2478dc3f44804a94 | [
"MIT"
] | null | null | null | batchout/outputs/__init__.py | ilia-khaustov/batchout | e916a1b0bfac771e6c96d0ff2478dc3f44804a94 | [
"MIT"
] | 1 | 2020-05-05T09:31:14.000Z | 2020-05-05T09:31:14.000Z | from batchout.outputs.base import Output
from batchout.outputs.logger import LoggerOutput
from batchout.outputs.postgres import PostgresOutput
| 35.75 | 52 | 0.874126 | 18 | 143 | 6.944444 | 0.555556 | 0.288 | 0.456 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083916 | 143 | 3 | 53 | 47.666667 | 0.954198 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
aeb27f2e2af637ec90348370541e12763705e47b | 5,746 | py | Python | dynetx/test/test_read_write.py | GiulioRossetti/dynet | 0db7cb040d599aeb71c0551fea9a5003748a25b1 | [
"BSD-2-Clause"
] | 76 | 2017-12-16T22:59:28.000Z | 2022-03-29T08:37:29.000Z | dynetx/test/test_read_write.py | GiulioRossetti/dynet | 0db7cb040d599aeb71c0551fea9a5003748a25b1 | [
"BSD-2-Clause"
] | 127 | 2017-08-24T08:41:54.000Z | 2022-03-24T01:43:58.000Z | dynetx/test/test_read_write.py | GiulioRossetti/dynet | 0db7cb040d599aeb71c0551fea9a5003748a25b1 | [
"BSD-2-Clause"
] | 22 | 2017-09-17T10:27:02.000Z | 2021-11-24T07:56:30.000Z | import unittest
import dynetx as dn
from dynetx.readwrite import json_graph
import os
class ReadWriteTestCase(unittest.TestCase):
def test_snapshots_interactions(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_snapshots(g, "test.txt", delimiter=" ")
h = dn.read_snapshots("test.txt", nodetype=int, timestamptype=int)
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
dn.write_interactions(h, "test.txt", delimiter=" ")
h = dn.read_interactions("test.txt", nodetype=int, timestamptype=int, keys=True)
dn.write_snapshots(h, "test.txt", delimiter=" ")
os.remove("test.txt")
def test_snapshots(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_snapshots(g, "test.txt", delimiter=" ")
h = dn.read_snapshots("test.txt", nodetype=int, timestamptype=int)
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
os.remove("test.txt")
def test_snapshots_directed(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_snapshots(g, "test.txt", delimiter=" ")
h = dn.read_snapshots("test.txt", directed=True, nodetype=int, timestamptype=int)
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
os.remove("test.txt")
def test_interaction_graph(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_interactions(g, "test2.txt", delimiter=" ")
h = dn.read_interactions("test2.txt", nodetype=int, timestamptype=int)
self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
os.remove("test2.txt")
def test_interaction_graph_directed(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_interactions(g, "test2.txt", delimiter=" ")
h = dn.read_interactions("test2.txt", directed=True, nodetype=int, timestamptype=int)
self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
os.remove("test2.txt")
def test_interaction_graph_flag(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_interactions(g, "test3.txt", delimiter=" ")
h = dn.read_interactions("test3.txt", nodetype=int, timestamptype=int, keys=True)
# self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
os.remove("test3.txt")
def test_snapshot_graph_flag(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
dn.write_snapshots(g, "test4.txt", delimiter=" ")
h = dn.read_snapshots("test4.txt", nodetype=int, timestamptype=int, keys=True)
# self.assertEqual(list(g.stream_interactions()), list(h.stream_interactions()))
self.assertEqual(g.number_of_interactions(), h.number_of_interactions())
os.remove("test4.txt")
def test_json_directed(self):
g = dn.DynDiGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(2, 1, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
data = json_graph.node_link_data(g)
h = json_graph.node_link_graph(data)
self.assertIsInstance(h, dn.DynDiGraph)
def test_json_undirected(self):
g = dn.DynGraph()
g.add_interaction(1, 2, 2)
g.add_interaction(1, 2, 2, e=6)
g.add_interaction(1, 2, 7, e=11)
g.add_interaction(1, 2, 8, e=15)
g.add_interaction(1, 2, 18)
g.add_interaction(1, 2, 19)
data = json_graph.node_link_data(g)
h = json_graph.node_link_graph(data)
self.assertIsInstance(h, dn.DynGraph)
if __name__ == '__main__':
unittest.main()
| 41.637681 | 93 | 0.626349 | 840 | 5,746 | 4.1 | 0.075 | 0.063879 | 0.239547 | 0.250871 | 0.900987 | 0.896341 | 0.867886 | 0.845238 | 0.845238 | 0.828688 | 0 | 0.054066 | 0.227463 | 5,746 | 137 | 94 | 41.941606 | 0.721784 | 0.027323 | 0 | 0.737705 | 0 | 0 | 0.039563 | 0 | 0 | 0 | 0 | 0 | 0.114754 | 1 | 0.07377 | false | 0 | 0.032787 | 0 | 0.114754 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
aeb7041daae7a3f395528799eea0500942c7fa4e | 74 | py | Python | ssockets/__init__.py | Maickii/python_sockets_and_encryption_testing | 7163dde52129e54c719e6764c54497d3e61eb71e | [
"MIT"
] | null | null | null | ssockets/__init__.py | Maickii/python_sockets_and_encryption_testing | 7163dde52129e54c719e6764c54497d3e61eb71e | [
"MIT"
] | null | null | null | ssockets/__init__.py | Maickii/python_sockets_and_encryption_testing | 7163dde52129e54c719e6764c54497d3e61eb71e | [
"MIT"
] | null | null | null | from ssockets.ssockets import server
from ssockets.ssockets import client
| 24.666667 | 36 | 0.864865 | 10 | 74 | 6.4 | 0.5 | 0.375 | 0.625 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 74 | 2 | 37 | 37 | 0.969697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
aee9711fc3fe612e3259c5021d5b57bacf054c6e | 252 | py | Python | src/spaceone/repository/connector/__init__.py | choonho/repository | 9522ecad06dd4e36c718e203864d1d58574cdbfc | [
"Apache-2.0"
] | null | null | null | src/spaceone/repository/connector/__init__.py | choonho/repository | 9522ecad06dd4e36c718e203864d1d58574cdbfc | [
"Apache-2.0"
] | null | null | null | src/spaceone/repository/connector/__init__.py | choonho/repository | 9522ecad06dd4e36c718e203864d1d58574cdbfc | [
"Apache-2.0"
] | null | null | null | from spaceone.repository.connector.identity_connector import *
from spaceone.repository.connector.registry_connector import *
from spaceone.repository.connector.repository_connector import *
from spaceone.repository.connector.secret_connector import *
| 50.4 | 64 | 0.873016 | 28 | 252 | 7.714286 | 0.285714 | 0.439815 | 0.407407 | 0.574074 | 0.638889 | 0.638889 | 0 | 0 | 0 | 0 | 0 | 0 | 0.063492 | 252 | 4 | 65 | 63 | 0.915254 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
aefaffd28dfbed8527c402e87113f489672cb597 | 683 | py | Python | modules/metrics/base_metric.py | kaylode/tern | a85b7568c574515031a2a41e8c21df1002c05c64 | [
"MIT"
] | 3 | 2021-12-22T14:42:40.000Z | 2022-01-07T03:19:56.000Z | modules/metrics/base_metric.py | kaylode/tern | a85b7568c574515031a2a41e8c21df1002c05c64 | [
"MIT"
] | null | null | null | modules/metrics/base_metric.py | kaylode/tern | a85b7568c574515031a2a41e8c21df1002c05c64 | [
"MIT"
] | null | null | null |
class TemplateMetric():
"""
Abstract template for metric
"""
def __init__(self):
pass
def compute(self, output, target):
raise NotImplementedError("This is an abtract method")
def update(self, output, target):
raise NotImplementedError("This is an abtract method")
def reset(self):
raise NotImplementedError("This is an abtract method")
def value(self):
raise NotImplementedError("This is an abtract method")
def __str__(self):
raise NotImplementedError("This is an abtract method")
def __len__(self):
raise NotImplementedError("This is an abtract method")
| 20.088235 | 62 | 0.63836 | 74 | 683 | 5.72973 | 0.337838 | 0.339623 | 0.396226 | 0.424528 | 0.785377 | 0.785377 | 0.785377 | 0.785377 | 0.669811 | 0.301887 | 0 | 0 | 0.27672 | 683 | 33 | 63 | 20.69697 | 0.8583 | 0.040996 | 0 | 0.4 | 0 | 0 | 0.235479 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.466667 | false | 0.066667 | 0 | 0 | 0.533333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 9 |
4e3abdc0c1320d4153b21f648e43d2533e8ec3be | 4,953 | py | Python | raw_voltage_dev/gen_lower_sampling.py | bbrzycki/setigen_development | 37e2c83e70ec8b693be08ecc3957a9a735b2ce5a | [
"MIT"
] | null | null | null | raw_voltage_dev/gen_lower_sampling.py | bbrzycki/setigen_development | 37e2c83e70ec8b693be08ecc3957a9a735b2ce5a | [
"MIT"
] | null | null | null | raw_voltage_dev/gen_lower_sampling.py | bbrzycki/setigen_development | 37e2c83e70ec8b693be08ecc3957a9a735b2ce5a | [
"MIT"
] | null | null | null | import numpy as np
import blimpy as bl
import pandas as pd
from astropy import units as u
try:
import cupy as xp
except ImportError:
import numpy as xp
import sys, os, glob, errno
import csv
import json
import h5py
import time
from astropy.stats import sigma_clip
from scipy.signal import butter, lfilter, filtfilt
import scipy.signal
sys.path.insert(0, "/home/bryanb/setigen/")
import setigen as stg
def db(x):
""" Convert linear value to dB value """
return 10*np.log10(x)
start = time.time()
sample_rate = 3e9
num_taps = 8
num_branches = 1024
fftlength = 1024
chan_bw = sample_rate/num_branches
digitizer = stg.voltage.RealQuantizer(target_fwhm=32,
num_bits=8)
filterbank = stg.voltage.PolyphaseFilterbank(num_taps=num_taps,
num_branches=num_branches)
requantizer = stg.voltage.ComplexQuantizer(target_fwhm=32,
num_bits=8)
num_pols = 2
antenna = stg.voltage.Antenna(sample_rate=sample_rate,
fch1=0,
ascending=True,
num_pols=num_pols)
rvb = stg.voltage.RawVoltageBackend(antenna,
digitizer=digitizer,
filterbank=filterbank,
requantizer=requantizer,
start_chan=0,
num_chans=64,
block_size=134217728,
blocks_per_file=128,
num_subblocks=16)
signal_level = stg.get_intensity(10,
rvb,
num_blocks=1,
length_mode='num_blocks',
fftlength=fftlength,
int_factor=1)
for stream in antenna.streams:
stream.add_noise(v_mean=0,
v_std=1)
stream.add_constant_signal(f_start=chan_bw / fftlength * int((2.4-0.5)*fftlength),
drift_rate=0*u.Hz/u.s,
level=signal_level * stream.noise_std**0.5)
print(f'frequency is {chan_bw / fftlength * int(2.2*fftlength)}')
# Record to file
rvb.record(raw_file_stem='/datax/scratch/bbrzycki/data/raw_files/test_lower_sampling_0',
num_blocks=1,
length_mode='num_blocks',
header_dict={'HELLO': 'test_value',
'TELESCOP': 'GBT'})
print(rvb.sample_stage, rvb.digitizer_stage, rvb.filterbank_stage, rvb.requantizer_stage)
############################
sample_rate = int(3e9 // 8)
num_taps = 8
num_branches = int(1024 // 8)
fftlength = 1024
chan_bw = sample_rate/num_branches
digitizer = stg.voltage.RealQuantizer(target_fwhm=32,
num_bits=8)
filterbank = stg.voltage.PolyphaseFilterbank(num_taps=num_taps,
num_branches=num_branches)
requantizer = stg.voltage.ComplexQuantizer(target_fwhm=32,
num_bits=8)
num_pols = 2
antenna = stg.voltage.Antenna(sample_rate=sample_rate,
fch1=0,
ascending=True,
num_pols=num_pols)
rvb = stg.voltage.RawVoltageBackend(antenna,
digitizer=digitizer,
filterbank=filterbank,
requantizer=requantizer,
start_chan=0,
num_chans=64,
block_size=134217728,
blocks_per_file=128,
num_subblocks=16)
signal_level = stg.get_intensity(10,
rvb,
num_blocks=1,
length_mode='num_blocks',
fftlength=fftlength,
int_factor=1)
for stream in antenna.streams:
stream.add_noise(v_mean=0,
v_std=1)
stream.add_constant_signal(f_start=chan_bw / fftlength * int((2.4-0.5)*fftlength),
drift_rate=0*u.Hz/u.s,
level=signal_level * stream.noise_std**0.5)
print(f'frequency is {chan_bw / fftlength * int(2.2*fftlength)}')
# Record to file
rvb.record(raw_file_stem='/datax/scratch/bbrzycki/data/raw_files/test_lower_sampling_1',
num_blocks=1,
length_mode='num_blocks',
header_dict={'HELLO': 'test_value',
'TELESCOP': 'GBT'})
print(rvb.sample_stage, rvb.digitizer_stage, rvb.filterbank_stage, rvb.requantizer_stage) | 30.574074 | 89 | 0.513224 | 518 | 4,953 | 4.685328 | 0.262548 | 0.041203 | 0.019778 | 0.024722 | 0.824887 | 0.80923 | 0.80923 | 0.80923 | 0.80923 | 0.80923 | 0 | 0.039269 | 0.403594 | 4,953 | 162 | 90 | 30.574074 | 0.782329 | 0.012921 | 0 | 0.752294 | 0 | 0 | 0.070663 | 0.029048 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009174 | false | 0 | 0.146789 | 0 | 0.165138 | 0.036697 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9dc3de2aeb211b51d5a2e295ac32e1af464a71ad | 58 | py | Python | InvolutionGAN/FID/__main__.py | XiaozhuFang/InvolutionGAN | a11f7d246b0d722bf75c0f963099d9ab196c4b3b | [
"MIT"
] | 3 | 2021-05-18T05:46:10.000Z | 2021-05-26T07:37:23.000Z | InvolutionGAN/FID/__main__.py | XiaozhuFang/InvolutionGAN | a11f7d246b0d722bf75c0f963099d9ab196c4b3b | [
"MIT"
] | null | null | null | InvolutionGAN/FID/__main__.py | XiaozhuFang/InvolutionGAN | a11f7d246b0d722bf75c0f963099d9ab196c4b3b | [
"MIT"
] | 1 | 2021-05-17T14:31:03.000Z | 2021-05-17T14:31:03.000Z | import pytorch_fid.fid_score
pytorch_fid.fid_score.main() | 19.333333 | 28 | 0.862069 | 10 | 58 | 4.6 | 0.5 | 0.434783 | 0.565217 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.051724 | 58 | 3 | 29 | 19.333333 | 0.836364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
d1b8b287938eef0ebeb8278cd7cc6ee6c00fde62 | 1,104 | py | Python | o/soft_robot/derivation_of_dynamics/sumi_maclaurin_2/R_2_0.py | YoshimitsuMatsutaIe/ctrlab2021_soudan | 7841c981e6804cc92d34715a00e7c3efce41d1d0 | [
"MIT"
] | null | null | null | o/soft_robot/derivation_of_dynamics/sumi_maclaurin_2/R_2_0.py | YoshimitsuMatsutaIe/ctrlab2021_soudan | 7841c981e6804cc92d34715a00e7c3efce41d1d0 | [
"MIT"
] | null | null | null | o/soft_robot/derivation_of_dynamics/sumi_maclaurin_2/R_2_0.py | YoshimitsuMatsutaIe/ctrlab2021_soudan | 7841c981e6804cc92d34715a00e7c3efce41d1d0 | [
"MIT"
] | null | null | null | import numpy
def f(q, xi):
l1, l2, l3 = q[0,0], q[1,0], q[2,0]
return 0.5*(l1 - 0.0001)**2*(-6.07837243596557e-5*xi**2*1 - 1.4723987711447*xi*0 - 121438.163869571*1 + 121438.163869571) + 0.5*(l1 - 0.0001)*(l2 - 0.002)*(8.8412689977681e-5*xi**2*1 - 4.09059149510746*xi*0 - 387780.362371481*1 + 387780.362371481) + 0.5*(l1 - 0.0001)*(l3 - 0.0015)*(3.31547587416304e-5*xi**2*1 + 7.03538903739686*xi*0 + 630656.690110624*1 - 630656.690110624) + (l1 - 0.0001)*(0.00754103754629469*xi*0 - 73.0683388245297*1 + 73.0683388245297) + 0.5*(l2 - 0.002)**2*(-3.21500690827931e-5*xi**2*1 + 0.0899501794735671*xi*0 - 133709.809902707*1 + 133709.809902707) + 0.5*(l2 - 0.002)*(l3 - 0.0015)*(-2.41125518120948e-5*xi**2*1 + 3.91069113616032*xi*0 + 655199.982176895*1 - 655199.982176895) + (l2 - 0.002)*(-0.00548439094275978*xi*0 - 204.591348708683*1 + 204.591348708683) + 0.5*(l3 - 0.0015)**2*(-4.52110346476778e-6*xi**2*1 - 5.47304008677859*xi*0 - 642928.33614376*1 + 642928.336143759) + (l3 - 0.0015)*(-0.00205664660353492*xi*0 + 277.659687533213*1 - 277.659687533213) + 0.935567010309278*1 + 0.0644329896907217 | 220.8 | 1,036 | 0.673007 | 192 | 1,104 | 3.869792 | 0.302083 | 0.036339 | 0.032301 | 0.033647 | 0.057873 | 0 | 0 | 0 | 0 | 0 | 0 | 0.671717 | 0.103261 | 1,104 | 5 | 1,036 | 220.8 | 0.078788 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
ae4df76cc333c1bdd6073a54457679ed8a5231c1 | 15,909 | py | Python | data/source/HTMLAtoms.py | bayotop/firefox-names | fbfad20e4b571bfdb591f3d0918da83524db58ca | [
"MIT"
] | 3 | 2019-05-20T10:43:34.000Z | 2019-10-01T14:09:01.000Z | data/source/HTMLAtoms.py | bayotop/firefox-names | fbfad20e4b571bfdb591f3d0918da83524db58ca | [
"MIT"
] | null | null | null | data/source/HTMLAtoms.py | bayotop/firefox-names | fbfad20e4b571bfdb591f3d0918da83524db58ca | [
"MIT"
] | null | null | null | # THIS FILE IS GENERATED BY THE HTML PARSER TRANSLATOR AND WILL BE OVERWRITTEN!
from Atom import Atom
HTML_PARSER_ATOMS = [
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink", "xlink"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xml_space", "xml:space"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xml_lang", "xml:lang"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("aria_grab", "aria-grab"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("aria_channel", "aria-channel"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("aria_secret", "aria-secret"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("aria_templateid", "aria-templateid"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("aria_datatype", "aria-datatype"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("local", "local"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xchannelselector", "xchannelselector"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("ychannelselector", "ychannelselector"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("enable_background", "enable-background"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("calcmode", "calcmode"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("specularexponent", "specularexponent"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("specularconstant", "specularconstant"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("gradienttransform", "gradienttransform"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("gradientunits", "gradientunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("rendering_intent", "rendering-intent"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("stddeviation", "stddeviation"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("basefrequency", "basefrequency"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("baseprofile", "baseprofile"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("baseProfile", "baseProfile"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("edgemode", "edgemode"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("repeatcount", "repeatcount"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("repeatdur", "repeatdur"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("spreadmethod", "spreadmethod"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("diffuseconstant", "diffuseconstant"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("surfacescale", "surfacescale"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("lengthadjust", "lengthadjust"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("origin", "origin"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("targetx", "targetx"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("targety", "targety"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("pathlength", "pathlength"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("definitionurl", "definitionurl"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("limitingconeangle", "limitingconeangle"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("markerheight", "markerheight"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("markerwidth", "markerwidth"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("maskunits", "maskunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("markerunits", "markerunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("maskcontentunits", "maskcontentunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("tablevalues", "tablevalues"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("primitiveunits", "primitiveunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("zoomandpan", "zoomandpan"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("kernelmatrix", "kernelmatrix"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("kerning", "kerning"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("kernelunitlength", "kernelunitlength"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("pointsatx", "pointsatx"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("pointsaty", "pointsaty"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("pointsatz", "pointsatz"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_href", "xlink:href"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_title", "xlink:title"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_role", "xlink:role"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_arcrole", "xlink:arcrole"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("arcrole", "arcrole"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xmlns_xlink", "xmlns:xlink"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_type", "xlink:type"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_show", "xlink:show"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("xlink_actuate", "xlink:actuate"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("color_rendering", "color-rendering"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("numoctaves", "numoctaves"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("onmousewheel", "onmousewheel"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("clippathunits", "clippathunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("glyph_orientation_vertical", "glyph-orientation-vertical"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("glyph_orientation_horizontal", "glyph-orientation-horizontal"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("glyphref", "glyphref"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("keypoints", "keypoints"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("attributename", "attributename"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("attributetype", "attributetype"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("startoffset", "startoffset"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("keysplines", "keysplines"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("preservealpha", "preservealpha"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("preserveaspectratio", "preserveaspectratio"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("alttext", "alttext"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("filterunits", "filterunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("keytimes", "keytimes"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("patterntransform", "patterntransform"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("patternunits", "patternunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("patterncontentunits", "patterncontentunits"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("stitchtiles", "stitchtiles"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("systemlanguage", "systemlanguage"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("textlength", "textlength"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("requiredfeatures", "requiredfeatures"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("requiredextensions", "requiredextensions"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("viewtarget", "viewtarget"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("viewbox", "viewbox"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("refx", "refx"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("refy", "refy"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fefunca", "fefunca"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fefuncb", "fefuncb"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("feblend", "feblend"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("feflood", "feflood"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("feturbulence", "feturbulence"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("femergenode", "femergenode"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("feimage", "feimage"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("femerge", "femerge"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fetile", "fetile"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fecomposite", "fecomposite"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("altglyphdef", "altglyphdef"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("altGlyphDef", "altGlyphDef"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fefuncg", "fefuncg"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fediffuselighting", "fediffuselighting"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fespecularlighting", "fespecularlighting"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("altglyph", "altglyph"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("altGlyph", "altGlyph"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("clippath", "clippath"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("textpath", "textpath"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("altglyphitem", "altglyphitem"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("altGlyphItem", "altGlyphItem"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("animatetransform", "animatetransform"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("animatemotion", "animatemotion"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fedisplacementmap", "fedisplacementmap"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("animatecolor", "animatecolor"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fefuncr", "fefuncr"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fecomponenttransfer", "fecomponenttransfer"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fegaussianblur", "fegaussianblur"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("foreignobject", "foreignobject"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("feoffset", "feoffset"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fespotlight", "fespotlight"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fepointlight", "fepointlight"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fedistantlight", "fedistantlight"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("lineargradient", "lineargradient"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("radialgradient", "radialgradient"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fedropshadow", "fedropshadow"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("fecolormatrix", "fecolormatrix"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("feconvolvematrix", "feconvolvematrix"),
# ATOM GENERATED BY HTML PARSER TRANSLATOR (WILL BE AUTOMATICALLY OVERWRITTEN):
Atom("femorphology", "femorphology"),
]
| 61.662791 | 83 | 0.735496 | 1,702 | 15,909 | 6.860752 | 0.085194 | 0.109617 | 0.217522 | 0.205018 | 0.769376 | 0.769376 | 0.769376 | 0.769376 | 0.769376 | 0.769376 | 0 | 0 | 0.167767 | 15,909 | 257 | 84 | 61.902724 | 0.881949 | 0.622604 | 0 | 0 | 1 | 0 | 0.498639 | 0.01838 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.007752 | 0 | 0.007752 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ae8d7e716556fbc8eec549ef2fe9f871386902ce | 179 | py | Python | btrdbextras/eventproc/__init__.py | PingThingsIO/btrdbextras | 95299923011e57150f8987dbc88bffc0283d9f0b | [
"BSD-3-Clause"
] | null | null | null | btrdbextras/eventproc/__init__.py | PingThingsIO/btrdbextras | 95299923011e57150f8987dbc88bffc0283d9f0b | [
"BSD-3-Clause"
] | 3 | 2020-10-23T22:12:47.000Z | 2021-08-05T17:18:05.000Z | btrdbextras/eventproc/__init__.py | PingThingsIO/btrdbextras | 95299923011e57150f8987dbc88bffc0283d9f0b | [
"BSD-3-Clause"
] | null | null | null | from .eventproc import hooks, list_handlers, register, deregister, upload_file, _uploads
__all__ = ['hooks', 'list_handlers', 'register', 'deregister', 'upload_file', '_uploads'] | 59.666667 | 89 | 0.759777 | 20 | 179 | 6.3 | 0.6 | 0.142857 | 0.269841 | 0.396825 | 0.825397 | 0.825397 | 0.825397 | 0.825397 | 0 | 0 | 0 | 0 | 0.094972 | 179 | 3 | 89 | 59.666667 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0.305556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
8815841d1b5528e5472e5a32aa304dcfdf93dc7d | 108 | py | Python | utils/time_helpers.py | PingLu8/django-twitter | ef5c47a9b2cd6796d67028946b2ed1c7b0af51be | [
"Apache-2.0"
] | null | null | null | utils/time_helpers.py | PingLu8/django-twitter | ef5c47a9b2cd6796d67028946b2ed1c7b0af51be | [
"Apache-2.0"
] | null | null | null | utils/time_helpers.py | PingLu8/django-twitter | ef5c47a9b2cd6796d67028946b2ed1c7b0af51be | [
"Apache-2.0"
] | null | null | null | from datetime import datetime
import pytz
def utc_now():
return datetime.now().replace(tzinfo=pytz.utc) | 21.6 | 50 | 0.768519 | 16 | 108 | 5.125 | 0.625 | 0.341463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12963 | 108 | 5 | 50 | 21.6 | 0.87234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 7 |
884a69514c106ece420558f1d3c36bc48967a3f0 | 2,217 | py | Python | tests/parser/bug.82_working.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/bug.82_working.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/bug.82_working.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
% Sent by Michael Fink 2005-06-20, current release computes wrong answer sets
% having weight 0 (also wrong).
%#maxint=200.
%%
% Guess depots for highway (at restaurant locations)
%%
mcDepot(H,K) | -mcDepot(H,K) :- mcBurger(H,K).
%%
% Check required number of depots
%%
:- nDepots(H,N), not #count{K : mcDepot(H,K)} = N.
%%
% Optimize local distance per highway
%%
%:- loc_dist(H,I,D). [D:H]
%%
% Auxiliary predicates
%%
% For each highway, compute all distances between restaurants and depots
dist(H,K,K,0) :- mcBurger(H,K), mcDepot(H,K).
dist(H,I,J,D) :- mcBurger(H,I), mcDepot(H,J), I>J, D=I-J.
dist(H,I,J,D) :- mcBurger(H,I), mcDepot(H,J), J>I, D=J-I.
% For each highway and any restaurant, determine local distance (to next depot)
loc_dist(H,I,D) :- dist(H,I,J,D), #min{S : dist(H,I,K,S)} = D.
%%
% Input for Highway A1
%%
highway(1).
km(1,25).
nDepots(1,3).
%%
% Restaurants along A1
%%
mcBurger(1, 4).
mcBurger(1, 9).
mcBurger(1,12).
mcBurger(1,21).
mcBurger(1,24).
%%
% Just for Testing
%%
mcDepot(1,12).
"""
output = """
% Sent by Michael Fink 2005-06-20, current release computes wrong answer sets
% having weight 0 (also wrong).
%#maxint=200.
%%
% Guess depots for highway (at restaurant locations)
%%
mcDepot(H,K) | -mcDepot(H,K) :- mcBurger(H,K).
%%
% Check required number of depots
%%
:- nDepots(H,N), not #count{K : mcDepot(H,K)} = N.
%%
% Optimize local distance per highway
%%
%:- loc_dist(H,I,D). [D:H]
%%
% Auxiliary predicates
%%
% For each highway, compute all distances between restaurants and depots
dist(H,K,K,0) :- mcBurger(H,K), mcDepot(H,K).
dist(H,I,J,D) :- mcBurger(H,I), mcDepot(H,J), I>J, D=I-J.
dist(H,I,J,D) :- mcBurger(H,I), mcDepot(H,J), J>I, D=J-I.
% For each highway and any restaurant, determine local distance (to next depot)
loc_dist(H,I,D) :- dist(H,I,J,D), #min{S : dist(H,I,K,S)} = D.
%%
% Input for Highway A1
%%
highway(1).
km(1,25).
nDepots(1,3).
%%
% Restaurants along A1
%%
mcBurger(1, 4).
mcBurger(1, 9).
mcBurger(1,12).
mcBurger(1,21).
mcBurger(1,24).
%%
% Just for Testing
%%
mcDepot(1,12).
"""
| 17.736 | 80 | 0.601263 | 370 | 2,217 | 3.591892 | 0.210811 | 0.024078 | 0.054176 | 0.045147 | 0.991723 | 0.991723 | 0.991723 | 0.991723 | 0.991723 | 0.991723 | 0 | 0.041974 | 0.204781 | 2,217 | 124 | 81 | 17.879032 | 0.711855 | 0 | 0 | 0.977273 | 0 | 0.136364 | 0.985217 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
886bd9ecc5195a2517fc412453adc7f5369458fc | 62,616 | py | Python | syncope/__init__.py | dj-wasabi/python-syncope | 56d7b17e942af4f2d0c4be3783f6f81bba3ad20c | [
"Apache-2.0"
] | 2 | 2015-10-11T21:05:41.000Z | 2019-12-18T03:21:01.000Z | syncope/__init__.py | dj-wasabi/python-syncope | 56d7b17e942af4f2d0c4be3783f6f81bba3ad20c | [
"Apache-2.0"
] | null | null | null | syncope/__init__.py | dj-wasabi/python-syncope | 56d7b17e942af4f2d0c4be3783f6f81bba3ad20c | [
"Apache-2.0"
] | 3 | 2015-10-12T15:42:40.000Z | 2020-02-11T02:15:13.000Z | """python-syncope is a Python wrapper for the Syncope REST API."""
__author__ = 'Werner Dijkerman'
__version__ = '0.0.6'
__license__ = "Apache License 2.0"
__email__ = "ikben@werner-dijkerman.nl"
import requests
import json
class Syncope(object):
"""Syncope Rest Interface."""
def __init__(self, syncope_url='', username=None, password=None, timeout=10):
"""
Will initialize the syncope module.
:param syncope_url: the URL to the Syncope server.
:param username: The username to login.
:param password: The password for the user configured in username.
:param timeout: HTTP requests timeout in seconds.
"""
if not syncope_url:
raise ValueError('This interface needs an Syncope URL to work!')
if not username:
raise ValueError('This interface needs an username to work!')
if not password:
raise ValueError('This interface needs an password to work!')
self.syncope_url = syncope_url
self.headers = {'Content-Type': 'application/json'}
self.username = username
self.password = password
self.timeout = int(timeout)
self.rest_configurations = 'syncope/cxf/configurations'
self.cxf_account_policies = 'syncope/cxf/policies/account'
self.cxf_sync_policies = 'syncope/cxf/policies/sync'
self.cxf_password_policies = 'syncope/cxf/policies/password'
self.rest_account_policies = 'syncope/rest/policy/account'
self.rest_sync_policies = 'syncope/rest/policy/sync'
self.rest_password_policies = 'syncope/rest/policy/password'
self.rest_entitlements = 'syncope/cxf/entitlements'
self.rest_logging = 'syncope/cxf/logger/normal'
self.rest_log_audit = 'syncope/cxf/logger/audit'
self.rest_notifications = 'syncope/cxf/notifications'
self.rest_audit = 'syncope/cxf/audit'
self.rest_connectors = 'syncope/cxf/connectors'
self.rest_resources = 'syncope/cxf/resources'
self.rest_roles = 'syncope/cxf/roles'
self.rest_users = 'syncope/cxf/users'
def _get(self, rest_path, arguments=None):
"""Will GET the information from the syncope server. This function will be called from the actual actions.
:param rest_path: uri of the rest action.
:param arguments: Optional arguments.
:return: Returns the data in json from the GET request.
"""
if arguments is not None:
syncope_path = "{0}/{1}.json{2}".format(self.syncope_url, rest_path, arguments)
else:
syncope_path = "{0}/{1}.json".format(self.syncope_url, rest_path)
return requests.get(syncope_path, auth=(self.username, self.password), headers=self.headers, timeout=self.timeout)
def _get_xml(self, rest_path, arguments=None):
"""Will GET the information from the syncope server with XML. This function will be called from the actual actions.
:param rest_path: uri of the rest action.
:param arguments: Optional arguments.
:return: Returns the data in XML from the GET request.
"""
headers = {'Content-Type': 'application/xml'}
if arguments is not None:
syncope_path = "{0}/{1}{2}".format(self.syncope_url, rest_path, arguments)
else:
syncope_path = "{0}/{1}".format(self.syncope_url, rest_path)
return requests.get(syncope_path, auth=(self.username, self.password), headers=headers, timeout=self.timeout)
def _delete(self, rest_path, arguments=None):
"""Will DELETE the information from the syncope server. This function will be called from the actual actions.
:param rest_path: uri of the rest action.
:param arguments: Optional arguments.
:return: Returns the data in json (if any) from the DELETE request.
"""
syncope_path = "{0}/{1}.json".format(self.syncope_url, rest_path)
return requests.delete(syncope_path, auth=(self.username, self.password), headers=self.headers, data=arguments, timeout=self.timeout)
def _post(self, rest_path, arguments=None, params=None):
"""Will do an POST action for creating or to update the information from the syncope server. This function will be called from the actual actions.
:param rest_path: uri of the rest action.
:param arguments: Optional arguments in JSON format.
:param params: Optional parameters to the uri, like: ?username=something.
:return: Returns the data in json (if any)from the POST request.
"""
if arguments is None:
raise ValueError('No arguments are given to POST.')
if params is not None:
syncope_path = "{0}/{1}.json{2}".format(self.syncope_url, rest_path, params)
else:
syncope_path = "{0}/{1}.json".format(self.syncope_url, rest_path)
try:
data = requests.post(syncope_path, auth=(self.username, self.password), headers=self.headers, data=arguments, timeout=self.timeout)
except requests.exceptions.RequestException as e:
print e
return data
def _put(self, rest_path, arguments=None, params=None):
"""Will do an PUT action for creating or to update the information from the syncope server. This function will be called from the actual actions.
:param rest_path: uri of the rest action.
:param arguments: Optional arguments in JSON format.
:param params: Optional parameters to the uri, like: ?username=something.
:return: Returns the data in json (if any)from the POST request.
"""
if arguments is None:
raise ValueError('No arguments are given to PUT.')
if params is not None:
syncope_path = "{0}/{1}.json{2}".format(self.syncope_url, rest_path, params)
else:
syncope_path = "{0}/{1}.json".format(self.syncope_url, rest_path)
return requests.put(syncope_path, auth=(self.username, self.password), headers=self.headers, data=arguments, timeout=self.timeout)
def create_user(self, arguments):
"""Will create an user.
:param arguments: An JSON structure for creating the user. An example can be found in the 'examples' folder.
:type arguments: JSON
:return: False when something went wrong, or json data with all information from the just created user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> create_user = '{"attributes": [{"schema": "aLong","values": [],"readonly": false},{"schema": "activationDate","values": [""],"readonly": false},{"schema": "cool","values": ["false"],"readonly": false},{"schema": "email","values": ["werner@dj-wasabi.nl"],"readonly": false},{"schema": "firstname","values": ["Werner"],"readonly": false},{"schema": "fullname","values": ["Werner Dijkerman"],"readonly": false},{"schema": "gender","values": ["M"],"readonly": false},{"schema": "loginDate","values": [""],"readonly": false},{"schema": "makeItDouble","values": [],"readonly": false},{"schema": "surname","values": ["Dijkerman"],"readonly": false},{"schema": "type","values": ["account"],"readonly": false},{"schema": "uselessReadonly","values": [""],"readonly": true},{"schema": "userId","values": ["werner@dj-wasabi.nl"],"readonly": false}],"id": 0,"derivedAttributes": [{"schema": "cn","values": [],"readonly": false}],"virtualAttributes": [],"password": "password1234","status": null,"token": null,"tokenExpireTime": null,"username": "wedijkerman","lastLoginDate": null,"creationDate": null,"changePwdDate": null,"failedLogins": null}'
>>> print syn.create_users(create_user)
{u'status': u'active', u'username': u'wedijkerman', u'creationDate': 1444152747171, <cut>}
"""
data = self._post(self.rest_users, arguments)
if data.status_code == 201:
return data.json()
else:
return False
def update_user(self, arguments):
"""Will update an user.
:param arguments: An JSON structure for updating the user. An example can be found in the 'examples' folder.
:type arguments: JSON
:return: False when something went wrong, or json data with all information from the just updated user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> update_user = '{"id":137,"attributesToBeUpdated":[{"schema":"uselessReadonly","valuesToBeAdded":[],"valuesToBeRemoved":[]},{"schema":"loginDate","valuesToBeAdded":[],"valuesToBeRemoved":[]},{"schema":"activationDate","valuesToBeAdded":[],"valuesToBeRemoved":[]}],"attributesToBeRemoved":["aLong","makeItDouble"],"derivedAttributesToBeAdded":[],"derivedAttributesToBeRemoved":[],"virtualAttributesToBeUpdated":[],"virtualAttributesToBeRemoved":[],"resourcesToBeAdded":[],"resourcesToBeRemoved":[],"password":null,"username":"wdijkerman","membershipsToBeAdded":[],"membershipsToBeRemoved":[],"pwdPropRequest":{"resources":[],"onSyncope":false}}'
>>> print syn.update_user(update_user)
{u'status': u'active', u'username': u'wdijkerman', u'creationDate': 1444676322330, <cut>}
"""
data = self._post("/syncope/rest/user/update", arguments)
if data.status_code == 200:
return data.json()
else:
return False
def get_users(self):
"""Get information from all users in JSON.
:return: False when something went wrong, or json data with all information from all users.
"""
data = self._get(self.rest_users)
if data.status_code == 200:
return data.json()
else:
return False
def get_user_by_id(self, id=None):
"""Will get all data from specific user, specified via id.
:param id: The id of the user to get information.
:type id: int
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.get_user_by_id(5)
{u'status': u'active', u'username': u'puccini', <cut>}
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._get(self.rest_users + "/" + str(id))
if data.status_code == 200:
return data.json()
else:
return False
def get_users_by_query(self, arguments=None):
"""Will search an user. It will require an python dict to be used for the searching.
:param arguments: An JSON structure. See example for more information.
:type arguments: JSON
:return: False when something went wrong, or json data with all information from the search request.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> search_req = '{"type":"LEAF","attributableCond":{"type":"EQ","schema":"username","expression":"vivaldi"}}'
>>> print syn.get_users_by_query(search_req)
{u'status': u'active', u'username': u'vivaldi', <cut>}
>>> search_req = '{"type":"LEAF","resourceCond":{"resourceName":"ws-target-resource-1"}}'
>>> print syn.get_users_by_query(search_req)
{u'status': u'active', u'username': u'vivaldi', <cut>}
"""
if arguments is None:
raise ValueError('This search needs an dict to work!')
data = self._post(self.rest_users +"/search", arguments)
if data.status_code == 200:
return data.json()
else:
return False
def get_user_count_by_query(self, arguments=None):
"""Will count the users matching the search request.
:param arguments: An JSON structure. See example for more information.
:type arguments: JSON
:return: False when something went wrong, or the amount of users matching the request.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> search_req = '{"type":"LEAF","attributableCond":{"type":"EQ","schema":"username","expression":"vivaldi"}}'
>>> print syn.get_user_count_by_query(search_req)
5
>>> search_req = '{"type":"LEAF","resourceCond":{"resourceName":"ws-target-resource-1"}}'
>>> print syn.get_user_count_by_query(search_req)
1
"""
if arguments is None:
raise ValueError('This search needs an dict to work!')
data = self._post(self.rest_users +"/search/count", arguments)
if data.status_code == 200:
return data.json()
else:
return False
def get_paged_users_by_query(self, arguments=None, page=None, size=None):
"""Will search an user and will return the data by pages.
:param arguments: An JSON structure. See example for more information.
:type arguments: JSON
:param page: The page it should return.
:type page: int
:param size: The amount of results per page.
:type size: int
:return: False when something went wrong, or json data with all information from the search request.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> search_req = '{"type":"LEAF","attributableCond":{"type":"EQ","schema":"status","expression":"active"}}'
>>> print syn.get_paged_users_by_query(search_user, 1, 1)
{u'status': u'active', u'username': u'rossini', <cut>}
>>> print syn.get_paged_users_by_query(search_user, 3, 1)
{u'status': u'active', u'username': u'vivaldi', <cut>}
"""
if arguments is None:
raise ValueError('This search needs an JSON to work!')
if page is None:
raise ValueError('This search needs an page to work!')
if size is None:
raise ValueError('This search needs an size to work!')
data = self._post(self.rest_users +"/search", arguments, "?page=" + str(page) + "&size=" + str(size))
if data.status_code == 200:
return data.json()
else:
return False
def get_user_by_name(self, username=None):
"""Will get all data from specific user, specified via username.
:param username: The username of the user to get information.
:type username: string
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.get_user_by_name("puccini")
{u'status': u'active', u'username': u'puccini', <cut>}
"""
if username is None:
raise ValueError('This search needs an username to work!')
data = self._get(self.rest_users, "?username=" + str(username))
if data.status_code == 200:
return data.json()
else:
return False
def get_users_count(self):
"""Will count all users found in Syncope and return an number.
:return: False when something went wrong, or the amount of users.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.get_users_count()
5
"""
data = self._get(self.rest_users + "/count")
if data.status_code == 200:
return data.json()
else:
return False
def enable_user_by_id(self, id=None):
"""Will activate an user.
:param id: The id of the user to activate.
:type id: int
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.enable_user_by_id(1)
{u'status': u'active', u'username': u'rossini', <cut>}
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._post(self.rest_users + "/" + str(id) + "/status/activate", '{}')
if data.status_code == 200:
return data.json()
else:
return False
def enable_user_by_name(self, username=None):
"""Will activate an user.
:param username: The username of the user to activate.
:type username: string
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.enable_user_by_name("rossini")
{u'status': u'active', u'username': u'rossini', <cut>}
"""
if username is None:
raise ValueError('This search needs an username to work!')
data = self._post(self.rest_users + "/activateByUsername/" + username, '{}')
if data.status_code == 200:
return data.json()
else:
return False
def reactivate_user_by_id(self, id=None):
"""Will reactivate an user.
:param id: The id of the user to reactivate.
:type id: int
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.reactivate_user_by_id(1)
{u'status': u'active', u'username': u'rossini', <cut>}
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._post(self.rest_users + "/" + str(id) + "/status/reactivate", '{}')
if data.status_code == 200:
return data.json()
else:
return False
def reactivate_user_by_name(self, username=None):
"""Will reactivate an user.
:param username: The username of the user to reactivate.
:type username: string
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.reactivate_user_by_name("rossini")
{u'status': u'active', u'username': u'rossini', <cut>}
"""
if username is None:
raise ValueError('This search needs an username to work!')
data = self._post(self.rest_users + "/reactivateByUsername/" + username, '{}')
if data.status_code == 200:
return data.json()
else:
return False
def suspend_user_by_id(self, id=None):
"""Will suspend an user.
:param id: The id of the user to suspend.
:type id: int
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.suspend_user_by_id(1)
{u'status': u'suspended', u'username': u'rossini', <cut>}
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._post(self.rest_users + "/" + str(id) + "/status/suspend", '{}')
if data.status_code == 200:
return data.json()
else:
return False
def suspend_user_by_name(self, username=None):
"""Will suspend an user.
:param username: The username of the user to suspend.
:type username: string
:return: False when something went wrong, or json data with all information from this specific user.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.suspend_user_by_("rossini")
{u'status': u'suspended', u'username': u'rossini', <cut>}
"""
if username is None:
raise ValueError('This search needs an username to work!')
data = self._post(self.rest_users + "/suspendByUsername/" + username, '{}')
if data.status_code == 200:
return data.json()
else:
return False
def delete_user_by_id(self, id=None):
"""Will delete an user.
:param id: The id of the user to delete.
:type id: int
:return: True when user is deleted, False when user don't exists or something failed.
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._get("/syncope/rest/user/delete/" + str(id))
if data.status_code == 200:
return True
else:
return False
def get_roles(self):
"""Get information from all roles in JSON.
:return: False when something went wrong, or json data with all information from all roles.
"""
data = self._get(self.rest_roles)
if data.status_code == 200:
return data.json()
else:
return False
def get_role_by_id(self, id=None):
"""Will get all data from specific role, specified via id.
:param id: The id of the role to get information.
:type id: int
:return: False when something went wrong, or json data with all information from this specific role.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.get_role_by_id(2)
{u'inheritVirtualAttributes': False, u'inheritDerivedAttributes': False, u'roleOwner': None, u'name': u'child', u'parent': 1, <cut>}
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._get(self.rest_roles + "/" + str(id))
if data.status_code == 200:
return data.json()
else:
return False
def get_parent_role_by_id(self, id=None):
"""Will get all data for the parent of the provided role id.
:param id: The id of the role to get the parent information.
:type id: int
:return: False when something went wrong, or json data with all information from this specific role.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.get_parent_role_by_id(2)
{u'inheritVirtualAttributes': False, u'inheritDerivedAttributes': False, u'roleOwner': None, u'name': u'root', u'parent': 0, <cut>}
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._get(self.rest_roles + "/" + str(id) + "/parent")
if data.status_code == 200:
return data.json()
else:
return False
def get_children_role_by_id(self, id=None):
"""Will get all data for the parent of the provided role id.
:param id: The id of the role to get the parent information.
:type id: int
:return: False when something went wrong, or json data with all information from this specific role.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> print syn.get_children_role_by_id(4)
[{u'inheritVirtualAttributes': False, u'inheritDerivedAttributes': False, u'roleOwner': None, u'name': u'secretary', u'parent': 4, <cut>}]
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._get(self.rest_roles + "/" + str(id) + "/children")
if data.status_code == 200:
return data.json()
else:
return False
def create_role(self, arguments=None):
"""Will create an role.
:param arguments: An JSON structure for creating the role. An example can be found in the 'examples' folder.
:type arguments: JSON
:return: False when something went wrong, or json data with all information from the just created role.
:Example:
>>> import syncope
>>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
>>> create_role = '{"attributes":[{"schema":"icon","values":[],"readonly":false},{"schema":"rderived_dx","values":[],"readonly":false},{"schema":"rderived_sx","values":[],"readonly":false},{"schema":"show","values":["false"],"readonly":false},{"schema":"title","values":["My new attribute Title."],"readonly":false}],"id":0,"derivedAttributes":[],"virtualAttributes":[],"resources":["ws-target-resource-2","ws-target-resource-1"],"propagationStatusTOs":[],"name":"my_new_role","parent":1,"userOwner":null,"roleOwner":null,"inheritOwner":true,"inheritAttributes":false,"inheritDerivedAttributes":false,"inheritVirtualAttributes":false,"inheritPasswordPolicy":false,"inheritAccountPolicy":false,"entitlements":["CONFIGURATION_CREATE","CONFIGURATION_DELETE"],"passwordPolicy":4,"accountPolicy":6}'
>>> print syn.create_role(create_role)
{u'inheritVirtualAttributes': False, u'inheritDerivedAttributes': False, u'roleOwner': None, u'name': u'my_new_role', u'parent': 1, <cut>}
"""
if arguments is None:
raise ValueError('This search needs JSON data to work!')
data = self._post(self.rest_roles, arguments)
if data.status_code == 201:
return data.json()
else:
return False
def delete_role_by_id(self, id=None):
"""Will delete an role.
:param id: The id of the role to delete.
:type id: int
:return: True when role is deleted, False when role don't exists or something failed.
"""
if id is None:
raise ValueError('This search needs an id to work!')
data = self._get("/syncope/rest/role/delete/" + str(id))
if data.status_code == 200:
return True
else:
return False
def update_role(self, arguments=None):
    """Update an existing role.

    :param arguments: JSON structure describing the role update.
    :type arguments: JSON
    :return: JSON data for the just-updated role, or False on failure.
    """
    if arguments is None:
        raise ValueError('This search needs JSON data to work!')
    response = self._post("/syncope/rest/role/update", arguments)
    return response.json() if response.status_code == 200 else False
def get_log_levels(self):
    """Fetch every configured log level.

    :return: JSON data with all log levels, or False when the request failed.
    """
    response = self._get(self.rest_logging)
    return response.json() if response.status_code == 200 else False
def get_log_level_by_name(self, name=None):
    """Fetch a single log level by its name.

    :param name: The name of the log level (e.g. ``"ROOT"``).
    :type name: String
    :return: JSON data for the log level, or False when the request failed.
    """
    if name is None:
        raise ValueError('This search needs log level name to work!')
    response = self._get(self.rest_logging + "/" + name)
    return response.json() if response.status_code == 200 else False
def create_or_update_log_level(self, arguments=None):
    """Will create or update an log level.

    :param arguments: An JSON structure with the log level ``name`` and
        ``level`` keys.
    :type arguments: JSON
    :return: False when something went wrong, or json data with all
        information from the just updated log level.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> update_loglevel = '{"name": "ROOT", "level": "INFO"}'
    >>> print syn.create_or_update_log_level(update_loglevel)
    {u'name': u'ROOT', u'level': u'INFO'}
    """
    if arguments is None:
        raise ValueError('This search needs JSON data to work!')
    json_data = json.loads(arguments)
    # Guard against missing keys instead of letting a KeyError escape;
    # this mirrors the "key" check done in update_configuration().
    if "name" in json_data and "level" in json_data:
        log_name = json_data['name']
        log_level = json_data['level']
    else:
        return False
    # Name and level are encoded in the URL; the JSON body is sent as well.
    data = self._post("syncope/rest/logger/log/" + log_name + "/" + log_level, arguments)
    if data.status_code == 200:
        return data.json()
    else:
        return False
def delete_log_level_by_name(self, name=None):
    """Delete a log level by its name.

    :param name: The name of the log level.
    :type name: String
    :return: True when the log level is deleted successfully, False
        otherwise.
    """
    if name is None:
        raise ValueError('This search needs log level name to work!')
    response = self._delete(self.rest_logging + "/" + name)
    # 204 No Content signals a successful deletion.
    return response.status_code == 204
def get_audit(self):
    """Fetch every configured audit rule.

    :return: JSON data with all audit rules, or False when the request
        failed.
    """
    response = self._get(self.rest_log_audit)
    return response.json() if response.status_code == 200 else False
def create_audit(self, arguments=None):
    """Will create an audit rule.

    :param arguments: An JSON structure for creating the audit rule.
    :type arguments: JSON
    :return: False when something went wrong, or True when audit rule is created.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> add_audit_rule = '{"type":"REST","category":"LoggerController","subcategory":null,"event":"listAudits","result":"SUCCESS"}'
    >>> print syn.create_audit(add_audit_rule)
    True
    """
    if arguments is None:
        raise ValueError('This search needs JSON data to work!')
    # A PUT to the "enable" endpoint registers the audit rule.
    data = self._put("syncope/rest/logger/audit/enable", arguments)
    if data.status_code == 200:
        return True
    else:
        return False
def delete_audit(self, arguments=None):
    """Delete an audit rule.

    :param arguments: JSON structure describing the audit rule to remove.
    :type arguments: JSON
    :return: True when the delete of the audit rule succeeded, False
        otherwise.
    """
    if arguments is None:
        raise ValueError('This search needs JSON data to work!')
    # A PUT to the "disable" endpoint removes the audit rule.
    response = self._put("syncope/rest/logger/audit/disable", arguments)
    return response.status_code == 200
def get_configurations(self):
    """Fetch every configured configuration option.

    :return: JSON data with all configuration items, or False when the
        request failed.
    """
    response = self._get(self.rest_configurations)
    return response.json() if response.status_code == 200 else False
def get_configuration_by_key(self, key=None):
    """Fetch a single configuration item by its key.

    :param key: The "key" name of the configuration item.
    :type key: String
    :return: JSON data for the configuration item, or False when the
        request failed.
    """
    if key is None:
        raise ValueError('This search needs an configuration key to work!')
    response = self._get(self.rest_configurations + "/" + key)
    return response.json() if response.status_code == 200 else False
def create_configuration(self, arguments=None):
    """Create a configuration item.

    :param arguments: JSON structure describing the configuration item.
    :type arguments: JSON
    :return: True when the configuration is created, False otherwise.
    """
    if arguments is None:
        raise ValueError('This search needs JSON data to work!')
    response = self._post(self.rest_configurations, arguments)
    # 201 Created is the only success status for this endpoint.
    return response.status_code == 201
def update_configuration(self, arguments=None):
    """Update a configuration item.

    :param arguments: JSON structure with the configuration ``key`` and its
        new value.
    :type arguments: JSON
    :return: True when the configuration is updated, False otherwise.
    """
    if arguments is None:
        raise ValueError('This search needs JSON data to work!')
    payload = json.loads(arguments)
    # The key is part of the URL, so bail out when it is absent.
    if "key" not in payload:
        return False
    response = self._put(self.rest_configurations + "/" + payload["key"], arguments)
    return response.status_code == 204
def delete_configuration_by_key(self, key=None):
    """Will delete an configuration item.

    :param key: The "key" name of the configuration item.
    :type key: String
    :return: False when something went wrong, or True when configuration is deleted.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.delete_configuration_by_key("we.all.love.pizza")
    True
    """
    if key is None:
        # The parameter is a configuration key, not JSON — say so in the
        # error, matching get_configuration_by_key().
        raise ValueError('This search needs an configuration key to work!')
    data = self._delete(self.rest_configurations + "/" + key)
    if data.status_code == 204:
        return True
    else:
        return False
def get_configuration_validators(self):
    """Fetch the configured validators.

    :return: JSON data with all validator information, or False when the
        request failed.
    """
    response = self._get(self.rest_configurations + "/validators")
    return response.json() if response.status_code == 200 else False
def get_configuration_mailtemplates(self):
    """Fetch the configured mail templates.

    :return: JSON data with all mail template information, or False when
        the request failed.
    """
    response = self._get(self.rest_configurations + "/mailTemplates")
    return response.json() if response.status_code == 200 else False
def get_configuration_stream(self):
    """Download the configuration as a content.xml database export.

    :return: The raw response object (XML payload) on success, or False
        when the request failed.

    >>> import xml.etree.ElementTree as ET
    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> response = syn.get_configuration_stream()
    >>> tree = ET.parse(response.text)
    """
    # Unlike the JSON endpoints, the whole response object is returned so
    # the caller can read the XML body.
    response = self._get_xml(self.rest_configurations + "/stream")
    return response if response.status_code == 200 else False
def get_entitlements(self):
    """Fetch the list of all known entitlements.

    :return: JSON data with every known entitlement, or False when the
        request failed.
    """
    response = self._get(self.rest_entitlements)
    return response.json() if response.status_code == 200 else False
def get_own_entitlements(self):
    """Will return a list of the entitlements owned by the authenticated user.

    :return: False when something went wrong, or json data with the user's own entitlements.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.get_own_entitlements()
    [{u'name': u'NOTIFICATION_UPDATE'}, {u'name': u'SCHEMA_CREATE'}, <cut>
    """
    # The "/own" suffix scopes the query to the calling user's entitlements.
    data = self._get(self.rest_entitlements + "/own")
    if data.status_code == 200:
        return data.json()
    else:
        return False
def get_notifications(self):
    """Fetch the list of all notifications.

    :return: JSON data with all notifications, or False when the request
        failed.
    """
    response = self._get(self.rest_notifications)
    return response.json() if response.status_code == 200 else False
def get_notification_by_id(self, id=None):
    """Fetch a single notification by its id.

    :param id: The notification ID.
    :type id: Int
    :return: JSON data for the notification, or False when the request
        failed.
    """
    if id is None:
        raise ValueError('This search needs an id to work!')
    response = self._get(self.rest_notifications + "/" + str(id))
    return response.json() if response.status_code == 200 else False
def create_notification(self, arguments=None):
    """Create a notification.

    :param arguments: JSON structure describing the notification.
    :type arguments: JSON
    :return: True when created successfully, False otherwise.
    """
    if arguments is None:
        raise ValueError('This search needs an JSON to work!')
    response = self._post(self.rest_notifications, arguments)
    # 201 Created is the only success status for this endpoint.
    return response.status_code == 201
def update_notification_by_id(self, arguments=None):
    """Will update a notification.

    :param arguments: An JSON structure (including the notification ``id``) for updating the notification.
    :type arguments: JSON
    :return: False when something went wrong, or True when updated successfully.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> update_notification = '{"id":104,"events":["[REST]:[LoggerController]:[]:[deleteLog]:[SUCCESS]"],"about":null,"recipients":null,"recipientAttrType":"Username","recipientAttrName":"Username","selfAsRecipient":true,"sender":"me@home.nl","subject":"this is something very important","template":"optin","traceLevel":"FAILURES"}'
    >>> print syn.update_notification_by_id(update_notification)
    True
    """
    if arguments is None:
        raise ValueError('This search needs an JSON to work!')
    # The target notification id travels inside the JSON body.
    data = self._post("syncope/rest/notification/update", arguments)
    if data.status_code == 200:
        return True
    else:
        return False
def delete_notification_by_id(self, id=None):
    """Will delete a notification by its id.

    :param id: The id for the notification.
    :type id: Int
    :return: False when something went wrong, or True when deleted successfully.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.delete_notification_by_id(104)
    True
    """
    if id is None:
        # The parameter is an id, not JSON — report that accurately,
        # matching delete_account_policy().
        raise ValueError('This delete needs an id to work!')
    data = self._delete(self.rest_notifications + "/" + str(id))
    if data.status_code == 204:
        return True
    else:
        return False
def get_account_policies(self):
    """Fetch the list of account policies.

    :return: JSON data with all account policies, or False when the
        request failed.
    """
    response = self._get(self.cxf_account_policies)
    return response.json() if response.status_code == 200 else False
def get_account_policy_by_id(self, id=None):
    """Fetch a single account policy by its id.

    :param id: The account policy id.
    :type id: Int
    :return: JSON data for the account policy, or False when the request
        failed.
    """
    if id is None:
        raise ValueError('This needs an ID to work!')
    response = self._get(self.cxf_account_policies + "/" + str(id))
    return response.json() if response.status_code == 200 else False
def create_account_policy(self, arguments=None):
    """Create an account policy.

    :param arguments: JSON structure describing the account policy.
    :type arguments: JSON
    :return: JSON data for the created policy, or False on failure.
    """
    if arguments is None:
        raise ValueError('This create needs an JSON to work!')
    response = self._post(self.rest_account_policies + "/create", arguments)
    return response.json() if response.status_code == 200 else False
def update_account_policy(self, arguments=None):
    """Update an account policy.

    :param arguments: JSON structure (including the policy ``id``) for the
        update.
    :type arguments: JSON
    :return: JSON data for the updated policy, or False on failure.
    """
    if arguments is None:
        raise ValueError('This update needs an JSON to work!')
    response = self._post(self.rest_account_policies + "/update", arguments)
    return response.json() if response.status_code == 200 else False
def delete_account_policy(self, id=None):
    """Delete an account policy.

    :param id: The account policy id.
    :type id: Int
    :return: True when deleted successfully, False otherwise.
    """
    if id is None:
        raise ValueError('This delete needs an id to work!')
    response = self._delete(self.cxf_account_policies + "/" + str(id))
    return response.status_code == 204
def get_sync_policies(self):
    """Fetch the list of sync policies.

    :return: JSON data with all sync policies, or False when the request
        failed.
    """
    response = self._get(self.cxf_sync_policies)
    return response.json() if response.status_code == 200 else False
def get_sync_policy_by_id(self, id=None):
    """Will return information with sync policy for id.

    :param id: The sync policy id.
    :type id: Int
    :return: False when something went wrong, or json data with information for sync policy.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.get_sync_policy_by_id(9)
    {u'usedByResources': [u'ws-target-resource-2'], u'description': u'sync policy for java rule', <cut>
    """
    if id is None:
        raise ValueError('This needs an ID to work!')
    # Bug fix: this previously queried cxf_account_policies, returning
    # account policies instead of sync policies.
    data = self._get(self.cxf_sync_policies + "/" + str(id))
    if data.status_code == 200:
        return data.json()
    else:
        return False
def create_sync_policy(self, arguments=None):
    """Create a sync policy.

    :param arguments: JSON structure describing the sync policy.
    :type arguments: JSON
    :return: JSON data for the created policy, or False on failure.
    """
    if arguments is None:
        raise ValueError('This create needs an JSON to work!')
    response = self._post(self.rest_sync_policies + "/create", arguments)
    return response.json() if response.status_code == 200 else False
def update_sync_policy(self, arguments=None):
    """Update a sync policy.

    :param arguments: JSON structure (including the policy ``id``) for the
        update.
    :type arguments: JSON
    :return: JSON data for the updated policy, or False on failure.
    """
    if arguments is None:
        raise ValueError('This update needs an JSON to work!')
    response = self._post(self.rest_sync_policies + "/update", arguments)
    return response.json() if response.status_code == 200 else False
def delete_sync_policy(self, id=None):
    """Will delete an sync policy.

    :param id: The sync policy id.
    :type id: Int
    :return: False when something went wrong, or True when deleted successfully.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.delete_sync_policy(1001)
    True
    """
    if id is None:
        raise ValueError('This delete needs an id to work!')
    data = self._delete(self.cxf_sync_policies + "/" + str(id))
    # 204 No Content signals a successful deletion.
    if data.status_code == 204:
        return True
    else:
        return False
def get_password_policies(self):
    """Fetch the list of password policies.

    :return: JSON data with all password policies, or False when the
        request failed.
    """
    response = self._get(self.cxf_password_policies)
    return response.json() if response.status_code == 200 else False
def get_password_policy_by_id(self, id=None):
    """Will return information with password policy for id.

    :param id: The password policy id.
    :type id: Int
    :return: False when something went wrong, or json data with information for password policy.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.get_password_policy_by_id(9)
    {u'usedByResources': [u'ws-target-resource-2'], u'description': u'password policy for java rule', <cut>
    """
    if id is None:
        raise ValueError('This needs an ID to work!')
    # Bug fix: this previously queried cxf_account_policies, returning
    # account policies instead of password policies.
    data = self._get(self.cxf_password_policies + "/" + str(id))
    if data.status_code == 200:
        return data.json()
    else:
        return False
def create_password_policy(self, arguments=None):
    """Create a password policy.

    :param arguments: JSON structure describing the password policy.
    :type arguments: JSON
    :return: JSON data for the created policy, or False on failure.
    """
    if arguments is None:
        raise ValueError('This create needs an JSON to work!')
    response = self._post(self.rest_password_policies + "/create", arguments)
    return response.json() if response.status_code == 200 else False
def update_password_policy(self, arguments=None):
    """Update a password policy.

    :param arguments: JSON structure (including the policy ``id``) for the
        update.
    :type arguments: JSON
    :return: JSON data for the updated policy, or False on failure.
    """
    if arguments is None:
        raise ValueError('This update needs an JSON to work!')
    response = self._post(self.rest_password_policies + "/update", arguments)
    return response.json() if response.status_code == 200 else False
def delete_password_policy(self, id=None):
    """Will delete an password policy.

    :param id: The password policy id.
    :type id: Int
    :return: False when something went wrong, or True when deleted successfully.
    :Example:

    >>> import syncope
    >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
    >>> print syn.delete_password_policy(1001)
    True
    """
    if id is None:
        raise ValueError('This delete needs an id to work!')
    data = self._delete(self.cxf_password_policies + "/" + str(id))
    # 204 No Content signals a successful deletion.
    if data.status_code == 204:
        return True
    else:
        return False
# def get_resources(self):
# """Will search an user and will return the data by pages.
#
# :return: False when something went wrong, or json data with all information from all resources.
# :Example:
#
# >>> import syncope
# >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
# >>> print syn.get_resources()
# [{u'rmapping': None, u'randomPwdIfNotProvided': False, u'propagationPrimary': True, u'enforceMandatoryCondition': False <cut>
# """
# data = self._get(self.rest_resources)
#
# if data.status_code == 200:
# return data.json()
# else:
# return False
#
# def create_resource(self, arguments=None):
# """Will search an user and will return the data by pages.
#
# :return: False when something went wrong, or json data with all information from all resources.
# :Example:
#
# >>> import syncope
# >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
# >>> print syn.get_resources()
# [{u'rmapping': None, u'randomPwdIfNotProvided': False, u'propagationPrimary': True, u'enforceMandatoryCondition': False <cut>
# """
# data = self._post(self.rest_resources, arguments)
#
# if data.status_code == 200:
# return data.json()
# else:
# return False
#
# def create_connector(self, arguments=None):
# """Will search an user and will return the data by pages.
#
# :return: False when something went wrong, or json data with all information from all resources.
# :Example:
#
# >>> import syncope
# >>> syn = syncope.Syncope(syncope_url="http://192.168.10.13:9080", username="admin", password="password")
# >>> print syn.get_resources()
# [{u'rmapping': None, u'randomPwdIfNotProvided': False, u'propagationPrimary': True, u'enforceMandatoryCondition': False <cut>
# """
# data = self._post(self.rest_connectors, arguments)
#
# if data.status_code == 200:
# return data.json()
# else:
# return False
| 42.480326 | 1,145 | 0.6208 | 7,498 | 62,616 | 5.086423 | 0.054014 | 0.036342 | 0.025565 | 0.02643 | 0.835282 | 0.810792 | 0.785621 | 0.758456 | 0.734491 | 0.716582 | 0 | 0.024112 | 0.250224 | 62,616 | 1,473 | 1,146 | 42.509165 | 0.788234 | 0.031541 | 0 | 0.659774 | 0 | 0 | 0.12728 | 0.026604 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.037594 | 0.003759 | null | null | 0.00188 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
31f082cf6a2710e573251d2496c0356543ffd89c | 1,239 | py | Python | simrd/tests/test_runtime.py | uwsampl/dtr-prototype | eff53cc4804cc7d6246a6e5086861ce2b846f62b | [
"Linux-OpenIB"
] | 90 | 2020-06-18T05:32:06.000Z | 2022-03-28T13:05:17.000Z | simrd/tests/test_runtime.py | merrymercy/dtr-prototype | bf40e182453a7d8d23581ea68f32a9d7d2037d62 | [
"Linux-OpenIB"
] | 5 | 2020-07-02T02:25:16.000Z | 2022-03-24T05:50:30.000Z | simrd/tests/test_runtime.py | uwsampl/dtr-prototype | eff53cc4804cc7d6246a6e5086861ce2b846f62b | [
"Linux-OpenIB"
] | 13 | 2020-06-27T07:01:54.000Z | 2022-01-18T07:31:01.000Z | from simrd.runtime import *
from simrd.heuristic import DTRUnopt
# Shared test fixtures: tiny operators used by the runtime smoke tests.
# NOTE(review): Operator's positional arguments appear to be
# (compute_cost, output_sizes, aliases) — confirm against simrd.runtime.
OP1 = Operator(2, (1,), (-1,), name='op1')   # single-output operator
OP2 = Operator(2, (1,1), (-1,-1), name='op2')  # two-output operator
OPA1 = Operator(1, (0,), (0,), name='opa1')  # aliasing operator
def test_simple_V1():
    """Smoke-test RuntimeV1: freshly computed tensors are material and
    defined, carry one external and zero internal references, and record
    parent links and storage root ids."""
    rt = RuntimeV1(math.inf, DTRUnopt)
    (t0,) = rt.compute([], OP1)
    assert t0.defined and t0.storage.material
    assert t0.storage.ref_ext == 1
    assert t0.storage.ref_int == 0
    assert t0.storage.root_id == t0.id

    (t1,) = rt.compute([t0], OP1)
    for t in (t0, t1):
        assert t.defined and t.storage.material
        assert t.storage.ref_ext == 1
        assert t.storage.ref_int == 0
    assert t0 in t1.parents
    assert t1.storage.root_id == t1.id
def test_simple_V2():
    """Smoke-test RuntimeV2 with the same invariants as test_simple_V1:
    material/defined state, reference counts, parent links, root ids."""
    rt = RuntimeV2(math.inf, DTRUnopt)
    (t0,) = rt.compute([], OP1)
    assert t0.defined and t0.storage.material
    assert t0.storage.ref_ext == 1
    assert t0.storage.ref_int == 0
    assert t0.storage.root_id == t0.id

    (t1,) = rt.compute([t0], OP1)
    for t in (t0, t1):
        assert t.defined and t.storage.material
        assert t.storage.ref_ext == 1
        assert t.storage.ref_int == 0
    assert t0 in t1.parents
    assert t1.storage.root_id == t1.id
| 28.813953 | 58 | 0.673931 | 220 | 1,239 | 3.704545 | 0.177273 | 0.137423 | 0.171779 | 0.103067 | 0.78773 | 0.78773 | 0.78773 | 0.78773 | 0.78773 | 0.78773 | 0 | 0.035922 | 0.168684 | 1,239 | 42 | 59 | 29.5 | 0.75534 | 0 | 0 | 0.709677 | 0 | 0 | 0.008071 | 0 | 0 | 0 | 0 | 0 | 0.580645 | 1 | 0.064516 | false | 0 | 0.064516 | 0 | 0.129032 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ee1647bdb95ac27c3c764da1818a0f554216db61 | 8,723 | py | Python | authors/tests/test_articles.py | andela/ah-the-unsullied | 7821c96f16880fb5c3cbd2d51fbd43669c2ee7fd | [
"BSD-3-Clause"
] | 7 | 2019-01-15T16:11:27.000Z | 2019-02-26T19:56:29.000Z | authors/tests/test_articles.py | andela/ah-the-unsullied | 7821c96f16880fb5c3cbd2d51fbd43669c2ee7fd | [
"BSD-3-Clause"
] | 14 | 2019-01-07T17:00:00.000Z | 2019-02-05T09:58:02.000Z | authors/tests/test_articles.py | andela/ah-the-unsullied | 7821c96f16880fb5c3cbd2d51fbd43669c2ee7fd | [
"BSD-3-Clause"
] | 2 | 2019-02-24T05:50:28.000Z | 2019-09-17T10:28:15.000Z | import json
from rest_framework.views import status
from django.urls import reverse
from authors.apps.articles.response_messages import error_messages, \
success_messages
# local imports
from .base_test import TestBase
class TestArticles(TestBase):
    """Integration tests for the article CRUD endpoints.

    Covers create (valid/invalid/unverified/unauthorized), list, detail,
    update and delete, including permission checks for non-authors.
    """

    def test_create_valid_article(self):
        """ Test create an article with valid data """
        token = self.authentication_token()
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        # Email verification is a precondition for posting articles.
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.post(
            self.article_url,
            data=json.dumps(self.valid_article_data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_create_article_unverified_user(self):
        """ Test create an article as a user whose email is not verified """
        self.create_article()
        # Second user authenticates but never hits the verification URL.
        token = self.authentication_token_2()
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        response = self.client.post(
            self.article_url,
            data=json.dumps(self.valid_article_data),
            content_type='application/json'
        )
        self.assertIn(response.data, error_messages['email_verification'])
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_create_invalid_article(self):
        """ Test create an article with invalid data """
        token = self.authentication_token()
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.post(
            self.article_url,
            data=json.dumps(self.invalid_article_data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_article_unauthorized(self):
        """ Test create an article as an unauthorized user"""
        # No Authorization credentials are set for this request.
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.post(
            self.article_url,
            data=json.dumps(self.invalid_article_data),
            content_type='application/json'
        )
        self.assertIn(error_messages['authentication'],
                      response.data['detail'])
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_get_all_articles(self):
        """Test to get all articles """
        self.create_article()
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.get(
            self.article_url,
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_unauthorized_get(self):
        """Test that listing articles works without authentication."""
        response = self.client.get(
            self.article_url,
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_single_article(self):
        """Test to get a single article """
        slug = self.create_article().data['slug']
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.get(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug}
            ),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_nonexistent_single_article(self):
        """Test to get a nonexistent article """
        self.create_article()
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.get(
            reverse(
                'articles:detail_article',
                kwargs={'slug': "hshshsh"},
            ),
            content_type='application/json'
        )
        self.assertEqual(error_messages['article_404'], response.data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_update_article(self):
        """Test updating an article """
        slug = self.create_article().data['slug']
        self.client.get(self.get_verify_url(self.user_data))
        # NOTE(review): the payload is self.invalid_article_data yet a 200 is
        # expected -- presumably PUT tolerates this payload; confirm intent.
        response = self.client.put(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug},
            ),
            data=json.dumps(self.invalid_article_data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_update_nonexistent_article(self):
        """Test updating a nonexistent article """
        self.create_article()
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.put(
            reverse(
                'articles:detail_article',
                kwargs={'slug': "hshshs"},
            ),
            data=json.dumps(self.invalid_article_data),
            content_type='application/json'
        )
        self.assertIn(error_messages['article_404'], response.data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_article(self):
        """Test deleting an article """
        slug = self.create_article().data['slug']
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.delete(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug},
            ),
            content_type='application/json'
        )
        self.assertIn(success_messages['deleted'], response.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_delete_nonexistent_article(self):
        """Test delete a non-existing article """
        slug = self.create_article().data['slug']
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.delete(
            reverse(
                'articles:detail_article',
                kwargs={'slug': "hshshs"},
            ),
            content_type='application/json'
        )
        self.assertIn(error_messages['article_404'], response.data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_article_none_author(self):
        """ Test delete if article is not owner """
        slug = self.create_article().data['slug']
        # Authenticate as a different user than the article's author.
        token = self.authentication_token_2()
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        response = self.client.delete(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug},
            ),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertIn(response.data, error_messages['unauthorised'])

    def test_update_article_none_author(self):
        """ Test update if article is not owner """
        slug = self.create_article().data['slug']
        token = self.authentication_token_2()
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        response = self.client.put(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug},
            ),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertIn(response.data, error_messages['unauthorised'])

    def test_update_article_null_data(self):
        """Test updating an article with null inputs"""
        slug = self.create_article().data['slug']
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.put(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug},
            ),
            data=json.dumps(self.null_article_data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn(response.data, error_messages['null_update'])

    def test_get_article_returns_article_read_time(self):
        """
        Tests an article read time is returned
        """
        # create the article
        slug = self.create_article().data['slug']
        # get the article
        self.client.get(self.get_verify_url(self.user_data))
        response = self.client.get(
            reverse(
                'articles:detail_article',
                kwargs={'slug': slug}
            ),
            content_type='application/json'
        )
        self.assertEqual(response.data['read_time'], '1 min read')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
| 38.427313 | 76 | 0.623524 | 964 | 8,723 | 5.400415 | 0.108921 | 0.063388 | 0.042451 | 0.079908 | 0.848828 | 0.82962 | 0.808298 | 0.781022 | 0.766423 | 0.754898 | 0 | 0.009582 | 0.270205 | 8,723 | 226 | 77 | 38.597345 | 0.8082 | 0.067637 | 0 | 0.735135 | 0 | 0 | 0.092641 | 0.028639 | 0 | 0 | 0 | 0 | 0.140541 | 1 | 0.086486 | false | 0 | 0.027027 | 0 | 0.118919 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ee307fad8d7bf66cb2f069e41bbbe724fe351e67 | 7,517 | py | Python | api/users/tests.py | jungjun-Sung/BToolTek_Project | b1e3d79d4332f00922541ef8b9db5c6a7b5a13e2 | [
"MIT"
] | null | null | null | api/users/tests.py | jungjun-Sung/BToolTek_Project | b1e3d79d4332f00922541ef8b9db5c6a7b5a13e2 | [
"MIT"
] | null | null | null | api/users/tests.py | jungjun-Sung/BToolTek_Project | b1e3d79d4332f00922541ef8b9db5c6a7b5a13e2 | [
"MIT"
] | null | null | null | import json
from django.contrib.auth import authenticate
from django.contrib.auth import get_user_model
from rest_framework.test import APITestCase
from rest_framework_jwt.settings import api_settings
User = get_user_model()
class SignUpTest(APITestCase):
    """Exercises the signup endpoint.

    Covers successful registration, password validation, duplicate
    employee/phone numbers, and every required field being missing.
    The repeated payload literals of the original are factored into
    the private helpers `_user_data` / `_signup`.
    """

    signup_url = "http://localhost:8000/users/signup"

    # Sentinel used to *remove* a key from the payload (None is a real value).
    _OMIT = object()

    @classmethod
    def _user_data(cls, **overrides):
        """Return a valid signup payload; override fields or drop them with _OMIT."""
        data = {
            "employee_number": "testuser",
            "phone_number": "000000000000",
            "password": "12345678",
            "name": "testuser",
            "department": 4,
            "job_title": 5,
        }
        data.update(overrides)
        return {k: v for k, v in data.items() if v is not cls._OMIT}

    def _signup(self, **overrides):
        """POST a (possibly modified) signup payload and return the response."""
        return self.client.post(self.signup_url, data=self._user_data(**overrides))

    def test_signupview_post_user_registration_success(self):
        """A complete, valid payload creates a user (201)."""
        response = self._signup()
        self.assertEqual(201, response.status_code)

    def test_signupview_post_invalid_password(self):
        """A password shorter than 8 characters is rejected (400)."""
        response = self._signup(password="1234")
        self.assertEqual(400, response.status_code)

    def test_signupview_post_duplicated_employee_number(self):
        """Re-registering an existing employee_number is rejected (400)."""
        self.assertEqual(201, self._signup().status_code)
        # Same employee_number but a different phone number.
        response = self._signup(phone_number="0000000000001")
        self.assertEqual(400, response.status_code)

    def test_signupview_post_duplicated_phone_number(self):
        """Re-registering an existing phone_number is rejected (400)."""
        first = self._signup(employee_number="testuser1", phone_number="00")
        self.assertEqual(201, first.status_code)
        # Different employee_number but the same phone number.
        response = self._signup(employee_number="testuser2", phone_number="00")
        self.assertEqual(400, response.status_code)

    def test_signupview_post_keyerror_employee_number(self):
        """Missing employee_number yields 400."""
        response = self._signup(employee_number=self._OMIT)
        self.assertEqual(400, response.status_code)

    def test_signupview_post_keyerror_phone_number(self):
        """Missing phone_number yields 400."""
        response = self._signup(phone_number=self._OMIT)
        self.assertEqual(400, response.status_code)

    def test_signupview_post_keyerror_password(self):
        """Missing password yields 400."""
        response = self._signup(password=self._OMIT)
        self.assertEqual(400, response.status_code)

    def test_signupview_post_keyerror_name(self):
        """Missing name yields 400."""
        response = self._signup(name=self._OMIT)
        self.assertEqual(400, response.status_code)

    def test_signupview_post_keyerror_department(self):
        """Missing department yields 400."""
        response = self._signup(department=self._OMIT)
        self.assertEqual(400, response.status_code)

    def test_signupview_post_keyerror_job_title(self):
        """Missing job_title yields 400."""
        response = self._signup(job_title=self._OMIT)
        self.assertEqual(400, response.status_code)
# rest_framework_jwt handlers used to build a token identical to the one the
# signin view issues, so the test can compare against the response body.
JWT_PAYLOAD_HANDLER = api_settings.JWT_PAYLOAD_HANDLER
JWT_ENCODE_HANDLER = api_settings.JWT_ENCODE_HANDLER
class SignInTest(APITestCase):
    """Exercises the signin endpoint: success, bad password, unknown user,
    and missing required keys."""

    signin_url = "http://localhost:8000/users/signin"

    def setUp(self):
        """Create a known user and an authenticated credential set."""
        self.user = User.objects.create_user(
            employee_number="testuser",
            phone_number="000000000000",
            password="12345678",
            name="testuser",
            department=4,
            job_title=5)
        # NOTE(review): authenticate() is passed extra kwargs (phone_number,
        # name); presumably a custom auth backend accepts them -- confirm.
        self.authentication = authenticate(
            employee_number="testuser",
            phone_number="000000000000",
            password="12345678",
            name="testuser",
        )

    def tearDown(self):
        """Remove the user created in setUp."""
        self.user.delete()

    def test_signinview_post_success(self):
        """Valid credentials return 200 with a message, name and JWT token."""
        user_data = {
            "employee_number": "testuser",
            "password": "12345678"
        }
        payload = JWT_PAYLOAD_HANDLER(self.authentication)
        jwt_token = JWT_ENCODE_HANDLER(payload)
        response = self.client.post(self.signin_url, data=user_data)
        self.assertEqual(200, response.status_code)
        # Bug fix: the original used assertTrue(response.json(), {...}),
        # where the dict is only a failure *message* and the body was never
        # actually compared. assertEqual performs the real comparison.
        self.assertEqual(response.json(),
                         {
                             "message": "SUCCESS",
                             "employee_name": "testuser",
                             "token": jwt_token
                         }
                         )

    def test_signinview_post_password_does_not_match(self):
        """A wrong password yields 400."""
        user_data = {
            "employee_number": "testuser",
            "password": "00"
        }
        response = self.client.post(self.signin_url, data=user_data)
        self.assertEqual(400, response.status_code)

    def test_signinview_post_user_does_not_exists(self):
        """An unknown employee_number yields 400."""
        user_data = {
            "employee_number": "doesnotexist",
            "password": "12345678"
        }
        response = self.client.post(self.signin_url, data=user_data)
        self.assertEqual(400, response.status_code)

    def test_signinview_post_keyerror_employee_number(self):
        """Missing employee_number yields 400."""
        user_data = {
            "password": "12345678"
        }
        response = self.client.post(self.signin_url, data=user_data)
        self.assertEqual(400, response.status_code)

    def test_signinview_post_keyerror_password(self):
        """Missing password yields 400."""
        user_data = {
            "employee_number": "testuser",
        }
        response = self.client.post(self.signin_url, data=user_data)
        self.assertEqual(400, response.status_code)
c9a12d8ac6a82f5bb7c10105cb277cac2354beca | 7,269 | py | Python | tests/conftest.py | Chandru0001/qp-driver | 8c542c4ba3873c787feb41e16f7810e06c2f22fc | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | tests/conftest.py | Chandru0001/qp-driver | 8c542c4ba3873c787feb41e16f7810e06c2f22fc | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | tests/conftest.py | Chandru0001/qp-driver | 8c542c4ba3873c787feb41e16f7810e06c2f22fc | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | import pytest
@pytest.fixture
def ue_metrics():
    """One UE's metrics sample: PDCP throughput, PRB usage and RF readings.

    Includes a deliberately bogus key that the code under test must strip.
    """
    return {
        "UEID": "12345",
        "ServingCellID": "310-680-200-555002",
        "MeasTimestampUEPDCPBytes": "2020-03-18 02:23:18.220",
        "MeasPeriodUEPDCPBytes": 20,
        "UEPDCPBytesDL": 250000,
        "UEPDCPBytesUL": 100000,
        "MeasTimestampUEPRBUsage": "2020-03-18 02:23:18.220",
        "MeasPeriodUEPRBUsage": 20,
        "UEPRBUsageDL": 10,
        "UEPRBUsageUL": 30,
        "MeasTimestampRF": "2020-03-18 02:23:18.210",
        "MeasPeriodRF": 40,
        "ServingCellRF": {"RSRP": -115, "RSRQ": -16, "RSSINR": -5},
        "NeighborCellRF": [
            {"CID": "310-680-200-555001", "CellRF": {"RSRP": -90, "RSRQ": -13, "RSSINR": -2.5}},
            {"CID": "310-680-200-555003", "CellRF": {"RSRP": -140, "RSRQ": -17, "RSSINR": -6}},
        ],
        # Unknown field -- expected to be deleted by the driver under test.
        "FAKE_BAD_DATA_TEST": "THIS SHOULD GET DELETED",
    }
@pytest.fixture
def ue_metrics_with_bad_cell():
    """UE metrics where one neighbor cell has an unknown CID ("CANTTOUCHTHIS").

    Used to verify that cells with no stored metrics are handled gracefully.
    """
    return {
        "UEID": "8675309",
        "ServingCellID": "310-680-200-555002",
        "MeasTimestampUEPDCPBytes": "2020-03-18 02:23:18.220",
        "MeasPeriodUEPDCPBytes": 20,
        "UEPDCPBytesDL": 250000,
        "UEPDCPBytesUL": 100000,
        "MeasTimestampUEPRBUsage": "2020-03-18 02:23:18.220",
        "MeasPeriodUEPRBUsage": 20,
        "UEPRBUsageDL": 10,
        "UEPRBUsageUL": 30,
        "MeasTimestampRF": "2020-03-18 02:23:18.210",
        "MeasPeriodRF": 40,
        "ServingCellRF": {"RSRP": -115, "RSRQ": -16, "RSSINR": -5},
        "NeighborCellRF": [
            {"CID": "310-680-200-555001", "CellRF": {"RSRP": -90, "RSRQ": -13, "RSSINR": -2.5}},
            {"CID": "CANTTOUCHTHIS", "CellRF": {"RSRP": -140, "RSRQ": -17, "RSSINR": -6}},
        ],
    }
@pytest.fixture
def cell_metrics_1():
    """Cell-level metrics for neighbor cell ...555001."""
    return {
        "CellID": "310-680-200-555001",
        "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
        "MeasPeriodPDCPBytes": 20,
        "PDCPBytesDL": 2000000,
        "PDCPBytesUL": 1200000,
        "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
        "MeasPeriodAvailPRB": 20,
        "AvailPRBDL": 30,
        "AvailPRBUL": 50,
    }
@pytest.fixture
def cell_metrics_2():
    """Cell-level metrics for serving cell ...555002.

    Includes a deliberately bogus key that the code under test must strip.
    """
    return {
        "CellID": "310-680-200-555002",
        "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
        "MeasPeriodPDCPBytes": 20,
        "PDCPBytesDL": 800000,
        "PDCPBytesUL": 400000,
        "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
        "MeasPeriodAvailPRB": 20,
        "AvailPRBDL": 30,
        "AvailPRBUL": 45,
        # Unknown field -- expected to be deleted by the driver under test.
        "FAKE_BAD_DATA_TEST": "THIS SHOULD GET DELETED",
    }
@pytest.fixture
def cell_metrics_3():
    """Cell-level metrics for neighbor cell ...555003."""
    return {
        "CellID": "310-680-200-555003",
        "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
        "MeasPeriodPDCPBytes": 20,
        "PDCPBytesDL": 1900000,
        "PDCPBytesUL": 1000000,
        "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
        "MeasPeriodAvailPRB": 20,
        "AvailPRBDL": 60,
        "AvailPRBUL": 80,
    }
@pytest.fixture
def qpd_to_qp():
    """Expected QP-driver output for UE "12345".

    The UE metrics joined with the stored metrics of the three known cells
    (serving cell last, as produced by the driver).
    """
    return {
        "PredictionUE": "12345",
        "UEMeasurements": {
            "ServingCellID": "310-680-200-555002",
            "MeasTimestampUEPDCPBytes": "2020-03-18 02:23:18.220",
            "MeasPeriodUEPDCPBytes": 20,
            "UEPDCPBytesDL": 250000,
            "UEPDCPBytesUL": 100000,
            "MeasTimestampUEPRBUsage": "2020-03-18 02:23:18.220",
            "MeasPeriodUEPRBUsage": 20,
            "UEPRBUsageDL": 10,
            "UEPRBUsageUL": 30,
        },
        "CellMeasurements": [
            {
                "CellID": "310-680-200-555001",
                "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
                "MeasPeriodPDCPBytes": 20,
                "PDCPBytesDL": 2000000,
                "PDCPBytesUL": 1200000,
                "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
                "MeasPeriodAvailPRB": 20,
                "AvailPRBDL": 30,
                "AvailPRBUL": 50,
                "MeasTimestampRF": "2020-03-18 02:23:18.210",
                "MeasPeriodRF": 40,
                "RFMeasurements": {"RSRP": -90, "RSRQ": -13, "RSSINR": -2.5},
            },
            {
                "CellID": "310-680-200-555003",
                "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
                "MeasPeriodPDCPBytes": 20,
                "PDCPBytesDL": 1900000,
                "PDCPBytesUL": 1000000,
                "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
                "MeasPeriodAvailPRB": 20,
                "AvailPRBDL": 60,
                "AvailPRBUL": 80,
                "MeasTimestampRF": "2020-03-18 02:23:18.210",
                "MeasPeriodRF": 40,
                "RFMeasurements": {"RSRP": -140, "RSRQ": -17, "RSSINR": -6},
            },
            {
                "CellID": "310-680-200-555002",
                "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
                "MeasPeriodPDCPBytes": 20,
                "PDCPBytesDL": 800000,
                "PDCPBytesUL": 400000,
                "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
                "MeasPeriodAvailPRB": 20,
                "AvailPRBDL": 30,
                "AvailPRBUL": 45,
                "MeasTimestampRF": "2020-03-18 02:23:18.210",
                "MeasPeriodRF": 40,
                "RFMeasurements": {"RSRP": -115, "RSRQ": -16, "RSSINR": -5},
            },
        ],
    }
@pytest.fixture
def qpd_to_qp_bad_cell():
    """Expected QP-driver output for UE "8675309".

    Only the two known cells appear; the "CANTTOUCHTHIS" neighbor has no
    stored metrics and is expected to be omitted from the output.
    """
    return {
        "PredictionUE": "8675309",
        "UEMeasurements": {
            "ServingCellID": "310-680-200-555002",
            "MeasTimestampUEPDCPBytes": "2020-03-18 02:23:18.220",
            "MeasPeriodUEPDCPBytes": 20,
            "UEPDCPBytesDL": 250000,
            "UEPDCPBytesUL": 100000,
            "MeasTimestampUEPRBUsage": "2020-03-18 02:23:18.220",
            "MeasPeriodUEPRBUsage": 20,
            "UEPRBUsageDL": 10,
            "UEPRBUsageUL": 30,
        },
        "CellMeasurements": [
            {
                "CellID": "310-680-200-555001",
                "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
                "MeasPeriodPDCPBytes": 20,
                "PDCPBytesDL": 2000000,
                "PDCPBytesUL": 1200000,
                "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
                "MeasPeriodAvailPRB": 20,
                "AvailPRBDL": 30,
                "AvailPRBUL": 50,
                "MeasTimestampRF": "2020-03-18 02:23:18.210",
                "MeasPeriodRF": 40,
                "RFMeasurements": {"RSRP": -90, "RSRQ": -13, "RSSINR": -2.5},
            },
            {
                "CellID": "310-680-200-555002",
                "MeasTimestampPDCPBytes": "2020-03-18 02:23:18.220",
                "MeasPeriodPDCPBytes": 20,
                "PDCPBytesDL": 800000,
                "PDCPBytesUL": 400000,
                "MeasTimestampAvailPRB": "2020-03-18 02:23:18.220",
                "MeasPeriodAvailPRB": 20,
                "AvailPRBDL": 30,
                "AvailPRBUL": 45,
                "MeasTimestampRF": "2020-03-18 02:23:18.210",
                "MeasPeriodRF": 40,
                "RFMeasurements": {"RSRP": -115, "RSRQ": -16, "RSSINR": -5},
            },
        ],
    }
| 35.458537 | 96 | 0.507222 | 648 | 7,269 | 5.654321 | 0.146605 | 0.050764 | 0.067686 | 0.084607 | 0.961517 | 0.929585 | 0.912664 | 0.898472 | 0.898472 | 0.898472 | 0 | 0.228518 | 0.33237 | 7,269 | 204 | 97 | 35.632353 | 0.526478 | 0 | 0 | 0.821053 | 0 | 0 | 0.457422 | 0.084743 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036842 | true | 0 | 0.005263 | 0.036842 | 0.078947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
c9b530a39afa2aa5de6e221e97e1bec49656bbab | 15,592 | py | Python | tests/test_fields_ming.py | tcmike/depot | f2999ab18f8bf3f5e6f0c3c3b058b9bd8d936d65 | [
"MIT"
] | 128 | 2015-02-18T05:53:25.000Z | 2022-03-02T10:23:27.000Z | tests/test_fields_ming.py | tcmike/depot | f2999ab18f8bf3f5e6f0c3c3b058b9bd8d936d65 | [
"MIT"
] | 68 | 2015-03-19T17:04:24.000Z | 2022-01-13T19:38:34.000Z | tests/test_fields_ming.py | tcmike/depot | f2999ab18f8bf3f5e6f0c3c3b058b9bd8d936d65 | [
"MIT"
] | 42 | 2015-04-24T15:05:42.000Z | 2022-01-13T18:23:17.000Z | # -*- coding: utf-8 -*-
import shutil
import tempfile, os, cgi, base64
from PIL import Image
from depot.fields.filters.thumbnails import WithThumbnailFilter
from depot.fields.ming import UploadedFileProperty
from depot.manager import DepotManager, get_file
from .base_ming import setup_database, clear_database, DBSession
from ming.odm.declarative import MappedClass
from ming.odm import FieldProperty
from ming import schema as s, Field
from .utils import create_cgifs
from depot.fields.specialized.image import UploadedImageWithThumb
from depot._compat import u_
from nose import SkipTest
def setup():
    """Module-level nose setup: fresh database plus two filesystem depots."""
    setup_database()
    DepotManager._clear()
    DepotManager.configure('default', {'depot.storage_path': './lfs'})
    DepotManager.configure('another', {'depot.storage_path': './lfs'})
    # 'another_alias' is what Document.targeted_content stores through.
    DepotManager.alias('another_alias', 'another')
    DepotManager.make_middleware(None)
def teardown():
    """Module-level nose teardown: remove the on-disk depot storage."""
    storage_dir = './lfs'
    # ignore_errors covers the case where no test ever created the directory.
    shutil.rmtree(storage_dir, ignore_errors=True)
class Document(MappedClass):
    """Ming mapped document exercising the UploadedFileProperty variants."""

    class __mongometa__:
        session = DBSession
        name = 'depot_test_document'

    _id = FieldProperty(s.ObjectId)
    name = FieldProperty(str)
    # Plain uploaded file, stored in the default depot.
    content = UploadedFileProperty()
    # Image upload that also produces a thumbnail (thumb_file / thumb_url).
    photo = UploadedFileProperty(upload_type=UploadedImageWithThumb)
    # Filtered upload: adds a 12x12 PNG thumbnail (thumb_12x12_* attributes).
    second_photo = UploadedFileProperty(filters=(WithThumbnailFilter((12, 12), 'PNG'),))
    # Stored through the 'another_alias' alias -> 'another' depot.
    targeted_content = UploadedFileProperty(upload_storage='another_alias')
class TestMingAttachments(object):
    """Tests for plain file uploads through UploadedFileProperty on Ming.

    Each test flushes and clears the session to force a reload from the
    database before asserting on the stored file.
    """

    def __init__(self):
        # A real temp file on disk so tests can upload via open(name, 'rb').
        self.file_content = b'this is the file content'
        self.fake_file = tempfile.NamedTemporaryFile()
        self.fake_file.write(self.file_content)
        self.fake_file.flush()

    def setup(self):
        clear_database()

    def test_accessing_class_property(self):
        # This is to check for regression in a bug in property descriptor
        prop = Document.content
        assert isinstance(prop, FieldProperty), prop

    def test_create_fromfile(self):
        """Uploading an open file stores its content and base filename."""
        doc = Document(name='Foo', content = open(self.fake_file.name, 'rb'))
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.content.file.read() == self.file_content
        assert d.content.file.filename == os.path.basename(self.fake_file.name)

    def test_create_fromfile_flush_single_document(self):
        """Same as above, but flushing only this document instead of the session."""
        doc = Document(name='Foo', content = open(self.fake_file.name, 'rb'))
        DBSession.flush(doc)
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.content.file.read() == self.file_content
        assert d.content.file.filename == os.path.basename(self.fake_file.name)

    def test_edit_existing(self):
        """Replacing the content deletes the previously stored file on flush."""
        doc = Document(name=u_('Foo2'))
        doc.content = open(self.fake_file.name, 'rb')
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo2')).first()
        old_file = d.content.path
        d.content = b'HELLO'
        new_file = d.content.path
        DBSession.flush()
        DBSession.clear()

        assert get_file(new_file).read() == b'HELLO'

        # The replaced file must be gone after the flush.
        try:
            fold = get_file(old_file)
            assert False, 'Should have raised IOError here'
        except IOError:
            pass

    def test_edit_existing_rollback(self):
        """Discarding an edit (clear without flush) keeps the old file."""
        doc = Document(name=u_('Foo3'))
        doc.content = open(self.fake_file.name, 'rb')
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo3')).first()
        old_file = d.content.path
        d.content = b'HELLO'
        new_file = d.content.path
        DBSession.clear()

        assert get_file(old_file).read() == self.file_content

        raise SkipTest("Currently Ming Doesn't provide a way to handle discarded documents")
        # Unreachable below the SkipTest: kept for when Ming supports
        # discarding documents, so the new file's deletion can be asserted.
        try:
            fold = get_file(new_file)
            assert False, 'Should have raised IOError here'
        except IOError:
            pass

    def test_create_fromfield(self):
        """Uploading a cgi.FieldStorage preserves filename and content type."""
        field = create_cgifs('image/jpeg', self.fake_file, 'test.jpg')
        doc = Document(name=u_('Foo'), content=field)
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.content.file.read() == self.file_content
        assert d.content.filename == 'test.jpg'
        assert d.content.content_type == 'image/jpeg', d.content.content_type
        assert d.content.url == '/depot/%s' % d.content.path

    def test_create_empty(self):
        """A None upload leaves the property as None."""
        doc = Document(name=u_('Foo'), content=None)
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.content is None

    def test_delete_existing(self):
        """Deleting the document deletes the stored file on flush."""
        doc = Document(name=u_('Foo2'))
        doc.content = open(self.fake_file.name, 'rb')
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo2')).first()
        old_file = d.content.path
        d.query.delete()
        DBSession.flush()
        DBSession.clear()

        try:
            fold = get_file(old_file)
            assert False, 'Should have raised IOError here'
        except IOError:
            pass

    def test_delete_existing_rollback(self):
        """A delete that is never flushed keeps the stored file."""
        doc = Document(name=u_('Foo3'))
        doc.content = open(self.fake_file.name, 'rb')
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo3')).first()
        old_file = d.content.path
        d.delete()
        DBSession.clear()

        assert get_file(old_file).read() == self.file_content

    def test_create_with_alias(self):
        """An upload_storage alias resolves to the aliased depot name."""
        doc = Document(name='Foo', targeted_content=open(self.fake_file.name, 'rb'))
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.targeted_content.file.read() == self.file_content
        assert d.targeted_content.file.filename == os.path.basename(self.fake_file.name)
        assert d.targeted_content.depot_name == 'another'
class TestMingImageAttachments(object):
    """Tests for UploadedImageWithThumb on a Ming document (GIF + thumbnail)."""

    def __init__(self):
        # Small base64-encoded GIF written to a real temp file for uploads.
        self.file_content = b'''R0lGODlhEQAUAPcAAC4uLjAwMDIyMjMzMjQ0NDU1NDY2Njk2Mzg4ODo6Oj49Ozw8PD4+PkE+OEA/PkhAN0tCNk5JPFFGNV1KMFhNNVFHOFJJPVVLPVhOPXZfKXVcK2ZQNGZXNGtZMnNcNHZeNnldMHJfOn1hKXVjOH9oO0BAQEJCQkREREVFREZGRklGQ05KQ0hISEpKSkxMTE5OTlZRSlFQT19XSFBQUFJSUlRUVGFUQmFVQ2ZZQGtdQnNiQqJ/HI1uIYBnLIllKoZrK4FqLoVqL4luLIpsLpt7J515JJ50KZhzLYFnMIFlM4ZlMIFkNI1uNoJoOoVrPIlvO49yMolwPpB2O5p4Op98PaB3IKN4JqN8J6h7I6J5LaZ+LLF+ILGGG7+JG72OGLKEI7aHIrOEJL2JI7mMN76YNcGJG8SOG8WLHMONH86eEs+aFsGSG8eQHMySG9uVFduXFdeeE9eaFdScF96YE9yaFOKcEuOdEtWgFNiiEduhE96pEuqlD+qmD+KpD+yoDu6rDuysDvCuDfCvDeuwD/SzC/a2CvGwDfKxDPi5Cfi5Cvq8CeCjEuehEOagEeijEOKoEOStFMOOK8+TLM6YNNChItGgLtylKt6gMNqgON6jPOChLfi/JOSrNeGvN9KhRtykRNWkSOCnQOCpSOawQue1T+a6Su67SOGsUO/AVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAAAAAAALAAAAAARABQAAAj+AAEIHEiwoMAACBMqXIhQgMOHDwdAfEigYsUCT6KQScNjxwGLBAyINPBhCilUmxIV6uMlw0gEMJeMGWWqFCU8hA4JEgETQYIEDcRw6qRlwgMIGvJwkfAzwYIFXQBB8qHg6VMKFawuYNBBjaIqDhhI+cGgrNmyJXooGrShBJBKcIpwiFCibl0TehDdsWBCiBxLZuIwolMGiwcTJ4gUenThBAokSVRgGFJnzhYQJ1KsyRkkhWfPK87McWPEM4sRhgItCsGitQ5PmtxYUdK6BY4rf/zwYRNmUihRpyQdaUHchQsoX/Y4amTnUqZPoG7YMO7ihfUcYNC0eRMJExMqMKweW59BfkYMEk2ykHAio3x5GvDjy58Pv4b9+/jz2w8IADs='''
        self.file_content = base64.b64decode(self.file_content)

        self.fake_file = tempfile.NamedTemporaryFile()
        self.fake_file.write(self.file_content)
        self.fake_file.flush()

    def setup(self):
        clear_database()
        # Rewind so each test can re-read/re-upload the same bytes.
        self.fake_file.seek(0)

    def test_create_fromfile(self):
        """Uploading an open image file stores its content and base filename."""
        doc = Document(name=u_('Foo'))
        doc.photo = open(self.fake_file.name, 'rb')
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.photo.file.read() == self.file_content
        assert d.photo.filename == os.path.basename(self.fake_file.name)

    def test_create_fromfield(self):
        """Non-ASCII filenames survive the upload; thumb URL differs from main."""
        field = cgi.FieldStorage()
        field.filename = u_('àèìòù.gif')
        field.file = open(self.fake_file.name, 'rb')

        doc = Document(name=u_('Foo'), photo=field)
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert d.photo.file.read() == self.file_content
        assert d.photo.filename == field.filename
        assert d.photo.url == '/depot/%s' % d.photo.path
        assert d.photo.thumb_url == '/depot/%s' % d.photo.thumb_path
        assert d.photo.url != d.photo.thumb_url

    def test_thumbnail(self):
        """The generated thumbnail exists on disk and is a valid image."""
        field = create_cgifs('image/gif', self.fake_file, 'test.gif')

        doc = Document(name=u_('Foo'), photo=field)
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        assert os.path.exists(d.photo.thumb_file._file_path)

        thumb = Image.open(d.photo.thumb_file._file_path)
        thumb.verify()
        assert thumb.format.upper() == d.photo.thumbnail_format.upper()

    def test_public_url(self):
        """When a public URL is stored, url/thumb_url return it instead."""
        doc = Document(name=u_('Foo'))
        doc.photo = open(self.fake_file.name, 'rb')
        doc.content = open(self.fake_file.name, 'rb')
        DBSession.flush()
        DBSession.clear()

        d = Document.query.find(dict(name='Foo')).first()
        # This is to edit the saved data bypassing UploadedFileProperty
        photo = FieldProperty(Field('photo', s.Anything)).__get__(d, Document)
        photo['_public_url'] = 'PUBLIC_URL'
        photo['_thumb_public_url'] = 'THUMB_PUBLIC_URL'

        # Now check that depot does the right thing when public urls are available
        assert d.photo.url == 'PUBLIC_URL'
        assert d.photo.thumb_url == 'THUMB_PUBLIC_URL'

    def test_rollback(self):
        # Skipped up-front; the body below is kept for when Ming supports
        # discarding documents, so rollback cleanup can be asserted.
        raise SkipTest("Currently Ming Doesn't provide a way to handle discarded documents")
        doc = Document(name=u_('Foo3'))
        doc.photo = open(self.fake_file.name, 'rb')
        uploaded_file = doc.photo.path
        uploaded_thumb = doc.photo.thumb_path

        DBSession.clear()
        try:
            fold = get_file(uploaded_file)
            assert False, 'Should have raised IOError here'
        except IOError:
            pass

        try:
            fold = get_file(uploaded_thumb)
            assert False, 'Should have raised IOError here'
        except IOError:
            pass
class TestMingThumbnailFilter(object):
def __init__(self):
self.file_content = b'''R0lGODlhEQAUAPcAAC4uLjAwMDIyMjMzMjQ0NDU1NDY2Njk2Mzg4ODo6Oj49Ozw8PD4+PkE+OEA/PkhAN0tCNk5JPFFGNV1KMFhNNVFHOFJJPVVLPVhOPXZfKXVcK2ZQNGZXNGtZMnNcNHZeNnldMHJfOn1hKXVjOH9oO0BAQEJCQkREREVFREZGRklGQ05KQ0hISEpKSkxMTE5OTlZRSlFQT19XSFBQUFJSUlRUVGFUQmFVQ2ZZQGtdQnNiQqJ/HI1uIYBnLIllKoZrK4FqLoVqL4luLIpsLpt7J515JJ50KZhzLYFnMIFlM4ZlMIFkNI1uNoJoOoVrPIlvO49yMolwPpB2O5p4Op98PaB3IKN4JqN8J6h7I6J5LaZ+LLF+ILGGG7+JG72OGLKEI7aHIrOEJL2JI7mMN76YNcGJG8SOG8WLHMONH86eEs+aFsGSG8eQHMySG9uVFduXFdeeE9eaFdScF96YE9yaFOKcEuOdEtWgFNiiEduhE96pEuqlD+qmD+KpD+yoDu6rDuysDvCuDfCvDeuwD/SzC/a2CvGwDfKxDPi5Cfi5Cvq8CeCjEuehEOagEeijEOKoEOStFMOOK8+TLM6YNNChItGgLtylKt6gMNqgON6jPOChLfi/JOSrNeGvN9KhRtykRNWkSOCnQOCpSOawQue1T+a6Su67SOGsUO/AVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAAAAAAALAAAAAARABQAAAj+AAEIHEiwoMAACBMqXIhQgMOHDwdAfEigYsUCT6KQScNjxwGLBAyINPBhCilUmxIV6uMlw0gEMJeMGWWqFCU8hA4JEgETQYIEDcRw6qRlwgMIGvJwkfAzwYIFXQBB8qHg6VMKFawuYNBBjaIqDhhI+cGgrNmyJXooGrShBJBKcIpwiFCibl0TehDdsWBCiBxLZuIwolMGiwcTJ4gUenThBAokSVRgGFJnzhYQJ1KsyRkkhWfPK87McWPEM4sRhgItCsGitQ5PmtxYUdK6BY4rf/zwYRNmUihRpyQdaUHchQsoX/Y4amTnUqZPoG7YMO7ihfUcYNC0eRMJExMqMKweW59BfkYMEk2ykHAio3x5GvDjy58Pv4b9+/jz2w8IADs='''
self.file_content = base64.b64decode(self.file_content)
self.fake_file = tempfile.NamedTemporaryFile()
self.fake_file.write(self.file_content)
self.fake_file.flush()
def setup(self):
clear_database()
self.fake_file.seek(0)
def test_create_fromfile(self):
doc = Document(name=u_('Foo'))
doc.second_photo = open(self.fake_file.name, 'rb')
DBSession.flush()
DBSession.clear()
d = Document.query.find(dict(name='Foo')).first()
assert d.second_photo.file.read() == self.file_content
assert d.second_photo.filename == os.path.basename(self.fake_file.name)
def test_create_fromfield(self):
    """A cgi.FieldStorage upload keeps its (unicode) filename, and the
    file and its thumbnail are served from distinct /depot/ URLs."""
    upload = cgi.FieldStorage()
    upload.filename = u_('àèìòù.gif')
    upload.file = open(self.fake_file.name, 'rb')
    document = Document(name=u_('Foo'), second_photo=upload)
    DBSession.flush()
    DBSession.clear()

    retrieved = Document.query.find(dict(name='Foo')).first()
    photo = retrieved.second_photo
    assert photo.file.read() == self.file_content
    assert photo.filename == upload.filename
    assert photo.url == '/depot/%s' % photo.path
    assert photo.thumb_12x12_url == '/depot/%s' % photo.thumb_12x12_path
    assert photo.url != photo.thumb_12x12_url
def test_thumbnail(self):
    """The 12x12 thumbnail is written to local storage as a PNG whose
    longest side is exactly 12 pixels."""
    upload = create_cgifs('image/gif', self.fake_file, 'test.gif')
    document = Document(name=u_('Foo'), second_photo=upload)
    DBSession.flush()
    DBSession.clear()

    retrieved = Document.query.find(dict(name='Foo')).first()
    thumb_path = DepotManager.get_file(retrieved.second_photo.thumb_12x12_path)._file_path
    assert os.path.exists(thumb_path)

    thumbnail = Image.open(thumb_path)
    thumbnail.verify()
    assert thumbnail.format.upper() == 'PNG'
    assert max(thumbnail.size) == 12
def test_public_url(self):
    """When the stored data carries a public URL, ``url`` must return it,
    while a thumbnail without one keeps the local /depot/ route."""
    document = Document(name=u_('Foo'))
    document.second_photo = open(self.fake_file.name, 'rb')
    document.content = open(self.fake_file.name, 'rb')
    DBSession.flush()
    DBSession.clear()

    retrieved = Document.query.find(dict(name='Foo')).first()
    # Edit the saved data directly, bypassing UploadedFileProperty.
    raw_info = FieldProperty(Field('second_photo', s.Anything)).__get__(retrieved, Document)
    raw_info['_public_url'] = 'PUBLIC_URL'

    # Depot must prefer the public URL for the file itself; the thumbnail
    # has no public URL and stays on the depot route.
    assert retrieved.second_photo.url == 'PUBLIC_URL'
    assert retrieved.second_photo.thumb_12x12_url.startswith('/depot/default/')
def test_rollback(self):
    """Files attached to a document that is discarded (never flushed)
    should be removed from the depot again.

    Skipped: Ming currently offers no hook to observe discarded documents,
    so everything below the ``raise`` is intentionally unreachable and is
    kept as documentation of the intended behaviour.
    """
    raise SkipTest("Currently Ming Doesn't provide a way to handle discarded documents")
    doc = Document(name=u_('Foo3'))
    doc.second_photo = open(self.fake_file.name, 'rb')
    uploaded_file = doc.second_photo.path
    uploaded_thumb = doc.second_photo.thumb_12x12_path
    # Discard the pending document instead of flushing it.
    DBSession.clear()
    # Both the file and its thumbnail should be gone from storage.
    try:
        fold = get_file(uploaded_file)
        assert False, 'Should have raised IOError here'
    except IOError:
        pass
    try:
        fold = get_file(uploaded_thumb)
        assert False, 'Should have raised IOError here'
    except IOError:
        pass
c9eec615fbfae02b190dcd52a9d7ca85b091270f | 213 | py | Python | drf_toolbox/serializers/fields/__init__.py | gregwym/drf-toolbox | c476388de959ecc080693331d89cfb44615e41ca | [
"BSD-3-Clause"
] | 1 | 2021-05-19T09:38:49.000Z | 2021-05-19T09:38:49.000Z | drf_toolbox/serializers/fields/__init__.py | gregwym/drf-toolbox | c476388de959ecc080693331d89cfb44615e41ca | [
"BSD-3-Clause"
] | null | null | null | drf_toolbox/serializers/fields/__init__.py | gregwym/drf-toolbox | c476388de959ecc080693331d89cfb44615e41ca | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import, unicode_literals
from drf_toolbox.serializers.fields.api import *
from drf_toolbox.serializers.fields.postgres import *
from drf_toolbox.serializers.fields.related import *
| 42.6 | 56 | 0.859155 | 28 | 213 | 6.214286 | 0.464286 | 0.12069 | 0.241379 | 0.431034 | 0.603448 | 0.425287 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079812 | 213 | 4 | 57 | 53.25 | 0.887755 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a009ff528369a6e86c29d0f696bf93c6e15c6651 | 153 | py | Python | tests/test_hello.py | ccampguilhem/python-project-template | 264bf210ca330b17a5c1905b4b8a030bc200f9ea | [
"MIT"
] | null | null | null | tests/test_hello.py | ccampguilhem/python-project-template | 264bf210ca330b17a5c1905b4b8a030bc200f9ea | [
"MIT"
] | 1 | 2020-12-28T11:33:13.000Z | 2020-12-28T11:33:13.000Z | tests/test_hello.py | ccampguilhem/python-project-template | 264bf210ca330b17a5c1905b4b8a030bc200f9ea | [
"MIT"
] | null | null | null | #!coding: utf-8
import mylib
def test_hello():
assert mylib.hello.hello("Cédric") == "Hello Cédric!"
assert mylib.hello.hello("") == "Hello !"
| 19.125 | 57 | 0.640523 | 20 | 153 | 4.85 | 0.5 | 0.309278 | 0.329897 | 0.43299 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007874 | 0.169935 | 153 | 7 | 58 | 21.857143 | 0.755906 | 0.091503 | 0 | 0 | 0 | 0 | 0.188406 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.25 | true | 0 | 0.25 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a0203b55bd68f018b62fd3000ce668d11d8f6dac | 4,050 | py | Python | day-03/part-2/badouralix.py | TPXP/adventofcode-2019 | ee653d6bfb510d14f2c2b3efc730d328c16b3f71 | [
"MIT"
] | 8 | 2019-12-01T08:56:46.000Z | 2019-12-05T21:21:12.000Z | day-03/part-2/badouralix.py | TPXP/adventofcode-2019 | ee653d6bfb510d14f2c2b3efc730d328c16b3f71 | [
"MIT"
] | 10 | 2019-11-25T09:56:20.000Z | 2021-05-10T19:57:48.000Z | day-03/part-2/badouralix.py | TPXP/adventofcode-2019 | ee653d6bfb510d14f2c2b3efc730d328c16b3f71 | [
"MIT"
] | 5 | 2019-12-01T08:19:57.000Z | 2020-11-23T09:50:19.000Z | from sys import maxsize
from tool.runners.python import SubmissionPy
from typing import Dict, Set, Tuple
class BadouralixSubmission(SubmissionPy):
def run(self, s: str) -> int:
# :param s: input in string format
# :return: solution flag
# Parse input
split_one, split_two = s.splitlines()
path_one = split_one.split(",")
path_two = split_two.split(",")
# Walk path one and record steps
locations_one: Dict[Tuple[int, int], int] = dict()
current_location = (0, 0)
current_distance = 0
for branch in path_one:
direction = branch[0]
length = int(branch[1:])
if direction == "R":
for step in range(length):
current_location = (current_location[0] + 1, current_location[1])
current_distance += 1
if current_location not in locations_one:
locations_one[current_location] = current_distance
elif direction == "L":
for step in range(length):
current_location = (current_location[0] - 1, current_location[1])
current_distance += 1
if current_location not in locations_one:
locations_one[current_location] = current_distance
elif direction == "U":
for step in range(length):
current_location = (current_location[0], current_location[1] + 1)
current_distance += 1
if current_location not in locations_one:
locations_one[current_location] = current_distance
elif direction == "D":
for step in range(length):
current_location = (current_location[0], current_location[1] - 1)
current_distance += 1
if current_location not in locations_one:
locations_one[current_location] = current_distance
# Walk path two and find intersection
closest_distance = maxsize
current_location = (0, 0)
current_distance = 0
for branch in path_two:
direction = branch[0]
length = int(branch[1:])
if direction == "R":
for step in range(length):
current_location = (current_location[0] + 1, current_location[1])
current_distance += 1
if current_location in locations_one:
distance = locations_one[current_location] + current_distance
closest_distance = min(closest_distance, distance)
elif direction == "L":
for step in range(length):
current_location = (current_location[0] - 1, current_location[1])
current_distance += 1
if current_location in locations_one:
distance = locations_one[current_location] + current_distance
closest_distance = min(closest_distance, distance)
elif direction == "U":
for step in range(length):
current_location = (current_location[0], current_location[1] + 1)
current_distance += 1
if current_location in locations_one:
distance = locations_one[current_location] + current_distance
closest_distance = min(closest_distance, distance)
elif direction == "D":
for step in range(length):
current_location = (current_location[0], current_location[1] - 1)
current_distance += 1
if current_location in locations_one:
distance = locations_one[current_location] + current_distance
closest_distance = min(closest_distance, distance)
# Return result
return closest_distance
| 46.022727 | 85 | 0.552593 | 406 | 4,050 | 5.278325 | 0.152709 | 0.29398 | 0.164256 | 0.052263 | 0.808213 | 0.808213 | 0.808213 | 0.808213 | 0.808213 | 0.808213 | 0 | 0.016627 | 0.376296 | 4,050 | 87 | 86 | 46.551724 | 0.83175 | 0.036543 | 0 | 0.821918 | 0 | 0 | 0.002567 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013699 | false | 0 | 0.041096 | 0 | 0.082192 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4e82d668fb875ea47c7ce663a43c6801e2b3bbe2 | 481 | py | Python | extract_feats.py | LiahimRatman/StacMR | b5acea55c4eb8a2fa933edd7c648938d3e7cf7d9 | [
"Apache-2.0"
] | 15 | 2021-01-06T16:29:33.000Z | 2022-03-23T02:31:54.000Z | extract_feats.py | LiahimRatman/StacMR | b5acea55c4eb8a2fa933edd7c648938d3e7cf7d9 | [
"Apache-2.0"
] | 3 | 2021-01-27T07:14:12.000Z | 2021-10-18T10:37:42.000Z | extract_feats.py | LiahimRatman/StacMR | b5acea55c4eb8a2fa933edd7c648938d3e7cf7d9 | [
"Apache-2.0"
] | 4 | 2021-02-04T16:39:20.000Z | 2022-03-13T05:16:58.000Z | from vocab import Vocabulary
import evaluation
print ('Extracting features for CTC SPLITS!')
evaluation.extract_feats("/SSD/VSRN/runs/full_model3_newfeats/model_best.pth.tar", data_path='/SSD/Datasets/Coco-Text/ST_CMR_testdataset/New_Split/Flickr_Format/', split="dev", fold5=False)
evaluation.extract_feats("/SSD/VSRN/runs/full_model3_newfeats/model_best.pth.tar", data_path='/SSD/Datasets/Coco-Text/ST_CMR_testdataset/New_Split/Flickr_Format/', split="test", fold5=False)
| 68.714286 | 191 | 0.808732 | 72 | 481 | 5.152778 | 0.541667 | 0.091644 | 0.118598 | 0.134771 | 0.722372 | 0.722372 | 0.722372 | 0.722372 | 0.722372 | 0.722372 | 0 | 0.008811 | 0.056133 | 481 | 6 | 192 | 80.166667 | 0.80837 | 0 | 0 | 0 | 0 | 0 | 0.597895 | 0.509474 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.4 | 0 | 0.4 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
14cda9bacd57fa3edfe40bf35a4e2e29b3b22c33 | 134 | py | Python | app/component/stream/__init__.py | melphi/kafkaform | 391c4c70b6aff9fd5ad703dcf4b6e863e92d53b6 | [
"MIT"
] | null | null | null | app/component/stream/__init__.py | melphi/kafkaform | 391c4c70b6aff9fd5ad703dcf4b6e863e92d53b6 | [
"MIT"
] | null | null | null | app/component/stream/__init__.py | melphi/kafkaform | 391c4c70b6aff9fd5ad703dcf4b6e863e92d53b6 | [
"MIT"
] | null | null | null | from app.component.stream.parser import *
from app.component.stream.resolver import *
from app.component.stream.transitioner import *
| 33.5 | 47 | 0.820896 | 18 | 134 | 6.111111 | 0.444444 | 0.190909 | 0.436364 | 0.6 | 0.509091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089552 | 134 | 3 | 48 | 44.666667 | 0.901639 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
14df3790e16a051a611030f87405b9addc731754 | 24,758 | py | Python | binance-spot/impl/restapirequestimpl.py | AbdeenM/binance-spot | f48ab28dd837dd66bb8373e5e4b1bf24379e46ad | [
"MIT"
] | 2 | 2021-05-05T00:25:11.000Z | 2021-08-07T23:26:55.000Z | binance-spot/impl/restapirequestimpl.py | AbdeenM/binance-spot | f48ab28dd837dd66bb8373e5e4b1bf24379e46ad | [
"MIT"
] | null | null | null | binance-spot/impl/restapirequestimpl.py | AbdeenM/binance-spot | f48ab28dd837dd66bb8373e5e4b1bf24379e46ad | [
"MIT"
] | null | null | null | from common.scripts.binance_spot.impl import RestApiRequest
from common.scripts.binance_spot.impl.utils.urlparamsbuilder import UrlParamsBuilder
from common.scripts.binance_spot.impl.utils.apisignature import create_signature
from common.scripts.binance_spot.impl.utils.inputchecker import *
from common.scripts.binance_spot.impl.utils.timeservice import *
from common.scripts.binance_spot.model import *
# For develop
from common.scripts.binance_spot.base.printobject import *
class RestApiRequestImpl(object):
def __init__(self, api_key, secret_key, server_url='https://api.binance.com', debug=False):
self.__api_key = api_key
self.__secret_key = secret_key
self.__server_url = server_url
self.__debug = debug
def __create_request_by_get(self, url, builder):
request = RestApiRequest()
request.method = 'GET'
request.host = self.__server_url
request.header.update({'Content-Type': 'application/json'})
request.url = url + '?' + builder.build_url()
if self.__debug == True:
print('====== Request ======')
print(request)
PrintMix.print_data(request)
print('=====================')
return request
def __create_request_by_get_with_apikey(self, url, builder):
request = RestApiRequest()
request.method = 'GET'
request.host = self.__server_url
request.header.update({'Content-Type': 'application/json'})
request.header.update({'X-MBX-APIKEY': self.__api_key})
request.url = url + '?' + builder.build_url()
if self.__debug == True:
print('====== Request ======')
print(request)
PrintMix.print_data(request)
print('=====================')
return request
def __create_request_by_post_with_signature(self, url, builder):
request = RestApiRequest()
request.method = 'POST'
request.host = self.__server_url
builder.put_url('recvWindow', 60000)
builder.put_url('timestamp', str(get_current_timestamp() - 1000))
create_signature(self.__secret_key, builder)
request.header.update({'Content-Type': 'application/json'})
request.header.update({'X-MBX-APIKEY': self.__api_key})
request.post_body = builder.post_map
request.url = url + '?' + builder.build_url()
if self.__debug == True:
print('====== Request ======')
print(request)
PrintMix.print_data(request)
print('=====================')
return request
def __create_request_by_delete_with_signature(self, url, builder):
request = RestApiRequest()
request.method = 'DELETE'
request.host = self.__server_url
builder.put_url('recvWindow', 60000)
builder.put_url('timestamp', str(get_current_timestamp() - 1000))
create_signature(self.__secret_key, builder)
request.header.update({'Content-Type': 'application/json'})
request.header.update({'X-MBX-APIKEY': self.__api_key})
request.url = url + '?' + builder.build_url()
if self.__debug == True:
print('====== Request ======')
print(request)
PrintMix.print_data(request)
print('=====================')
return request
def __create_request_by_get_with_signature(self, url, builder):
request = RestApiRequest()
request.method = 'GET'
request.host = self.__server_url
builder.put_url('recvWindow', 60000)
builder.put_url('timestamp', str(get_current_timestamp() - 1000))
create_signature(self.__secret_key, builder)
request.header.update(
{'Content-Type': 'application/x-www-form-urlencoded'})
request.header.update({'X-MBX-APIKEY': self.__api_key})
request.url = url + '?' + builder.build_url()
if self.__debug == True:
print('====== Request ======')
print(request)
PrintMix.print_data(request)
print('=====================')
return request
def __create_request_by_put_with_signature(self, url, builder):
request = RestApiRequest()
request.method = 'PUT'
request.host = self.__server_url
builder.put_url('recvWindow', 60000)
builder.put_url('timestamp', str(get_current_timestamp() - 1000))
create_signature(self.__secret_key, builder)
request.header.update({'Content-Type': 'application/json'})
request.header.update({'X-MBX-APIKEY': self.__api_key})
request.url = url + '?' + builder.build_url()
if self.__debug == True:
print('====== Request ======')
print(request)
PrintMix.print_data(request)
print('=====================')
return request
def test_connectivity(self):
builder = UrlParamsBuilder()
request = self.__create_request_by_get('/api/v3/ping', builder)
def parse(json_wrapper):
result = 'OK'
return result
request.json_parser = parse
return request
def get_servertime(self):
builder = UrlParamsBuilder()
request = self.__create_request_by_get('/api/v3/time', builder)
def parse(json_wrapper):
result = json_wrapper.get_int('serverTime')
return result
request.json_parser = parse
return request
def get_exchange_information(self):
builder = UrlParamsBuilder()
request = self.__create_request_by_get(
'/api/v3/exchangeInfo', builder)
def parse(json_wrapper):
result = ExchangeInformation.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_order_book(self, symbol, limit):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('limit', limit)
request = self.__create_request_by_get('/api/v3/depth', builder)
def parse(json_wrapper):
result = OrderBook.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_recent_trades_list(self, symbol, limit):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('limit', limit)
request = self.__create_request_by_get('/api/v3/trades', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = Trade.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_old_trade_lookup(self, symbol, limit, fromId):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('limit', limit)
builder.put_url('fromId', fromId)
request = self.__create_request_by_get_with_apikey(
'/api/v3/historicalTrades', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = Trade.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_aggregate_trades_list(self, symbol, fromId, startTime, endTime, limit):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('fromId', fromId)
builder.put_url('startTime', startTime)
builder.put_url('endTime', endTime)
builder.put_url('limit', limit)
request = self.__create_request_by_get('/api/v3/aggTrades', builder)
def parse(json_wrapper):
aggregate_trades_list = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
trade = AggregateTrade.json_parse(item)
aggregate_trades_list.append(trade)
return aggregate_trades_list
request.json_parser = parse
return request
def get_candlestick_data(self, symbol, interval, startTime, endTime, limit):
check_should_not_none(symbol, 'symbol')
check_should_not_none(symbol, 'interval')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('interval', interval)
builder.put_url('startTime', startTime)
builder.put_url('endTime', endTime)
builder.put_url('limit', limit)
request = self.__create_request_by_get('/api/v3/klines', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = Candlestick.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_current_average_price(self, symbol):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
request = self.__create_request_by_get(
'/api/v3/avgPrice', builder)
def parse(json_wrapper):
result = AveragePrice.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_ticker_price_change_statistics(self, symbol):
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
request = self.__create_request_by_get('/api/v3/ticker/24hr', builder)
def parse(json_wrapper):
result = list()
if symbol:
element = TickerPriceChangeStatistics.json_parse(json_wrapper)
result.append(element)
else:
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = TickerPriceChangeStatistics.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_symbol_price_ticker(self, symbol):
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
request = self.__create_request_by_get(
'/api/v3/ticker/price', builder)
def parse(json_wrapper):
result = list()
if symbol:
element = SymbolPrice.json_parse(json_wrapper)
result.append(element)
else:
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = SymbolPrice.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_symbol_orderbook_ticker(self, symbol):
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
request = self.__create_request_by_get(
'/api/v3/ticker/bookTicker', builder)
def parse(json_wrapper):
result = list()
if symbol:
element = SymbolOrderBook.json_parse(json_wrapper)
result.append(element)
else:
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = SymbolOrderBook.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def post_test_order(self, symbol, side, ordertype,
timeInForce, quantity, quoteOrderQty, price,
newClientOrderId, stopPrice, icebergQty, newOrderRespType):
check_should_not_none(symbol, 'symbol')
check_should_not_none(side, 'side')
check_should_not_none(ordertype, 'ordertype')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('side', side)
builder.put_url('type', ordertype)
builder.put_url('timeInForce', timeInForce)
builder.put_url('quantity', quantity)
builder.put_url('quoteOrderQty', quoteOrderQty)
builder.put_url('price', price)
builder.put_url('newClientOrderId', newClientOrderId)
builder.put_url('stopPrice', stopPrice)
builder.put_url('icebergQty', icebergQty)
builder.put_url('newOrderRespType', newOrderRespType)
request = self.__create_request_by_post_with_signature(
'/api/v3/order/test', builder)
def parse(json_wrapper):
result = 'OK'
return result
request.json_parser = parse
return request
def post_order(self, symbol, side, ordertype,
timeInForce, quantity, quoteOrderQty, price,
newClientOrderId, stopPrice, icebergQty, newOrderRespType):
check_should_not_none(symbol, 'symbol')
check_should_not_none(side, 'side')
check_should_not_none(ordertype, 'ordertype')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('side', side)
builder.put_url('type', ordertype)
builder.put_url('timeInForce', timeInForce)
builder.put_url('quantity', quantity)
builder.put_url('quoteOrderQty', quoteOrderQty)
builder.put_url('price', price)
builder.put_url('newClientOrderId', newClientOrderId)
builder.put_url('stopPrice', stopPrice)
builder.put_url('icebergQty', icebergQty)
builder.put_url('newOrderRespType', newOrderRespType)
request = self.__create_request_by_post_with_signature(
'/api/v3/order', builder)
def parse(json_wrapper):
result = Order.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def cancel_order(self, symbol, orderId, newClientOrderId):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('orderId', orderId)
builder.put_url('newClientOrderId', newClientOrderId)
request = self.__create_request_by_delete_with_signature(
'/api/v3/order', builder)
def parse(json_wrapper):
result = Order.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def cancel_all_orders(self, symbol):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
request = self.__create_request_by_delete_with_signature(
'api/v3/openOrders', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = Order.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_order(self, symbol, orderId):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('orderId', orderId)
request = self.__create_request_by_get_with_signature(
'/api/v3/order', builder)
def parse(json_wrapper):
result = Order.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_open_orders(self, symbol):
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
request = self.__create_request_by_get_with_signature(
'/api/v3/openOrders', builder)
def parse(json_wrapper):
result = list()
if symbol:
element = Order.json_parse(json_wrapper)
result.append(element)
else:
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = Order.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_all_orders(self, symbol, orderId, startTime, endTime, limit):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('orderId', orderId)
builder.put_url('startTime', startTime)
builder.put_url('endTime', endTime)
builder.put_url('limit', limit)
request = self.__create_request_by_get_with_signature(
'/api/v3/allOrders', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = Order.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def post_oco_order(self, symbol, listClientOrderId, side,
quantity, limitClientOrderId, price,
limitIcebergQty, stopClientOrderId,
stopPrice, stopLimitPrice, stopIcebergQty,
stopLimitTimeInForce, newOrderRespType):
check_should_not_none(symbol, 'symbol')
check_should_not_none(side, 'side')
check_should_not_none(quantity, 'quantity')
check_should_not_none(price, 'price')
check_should_not_none(stopPrice, 'stopPrice')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('listClientOrderId', listClientOrderId)
builder.put_url('side', side)
builder.put_url('quantity', quantity)
builder.put_url('limitClientOrderId', limitClientOrderId)
builder.put_url('price', price)
builder.put_url('limitIcebergQty', limitIcebergQty)
builder.put_url('stopClientOrderId', stopClientOrderId)
builder.put_url('stopPrice', stopPrice)
builder.put_url('stopLimitPrice', stopLimitPrice)
builder.put_url('stopIcebergQty', stopIcebergQty)
builder.put_url('stopLimitTimeInForce', stopLimitTimeInForce)
builder.put_url('newOrderRespType', newOrderRespType)
request = self.__create_request_by_post_with_signature(
'/api/v3/order/oco', builder)
def parse(json_wrapper):
result = OrderOCO.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def cancel_oco_order(self, symbol, orderListId, listClientOrderId, newClientOrderId):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('orderListId', orderListId)
builder.put_url('listClientOrderId', listClientOrderId)
builder.put_url('newClientOrderId', newClientOrderId)
request = self.__create_request_by_delete_with_signature(
'/api/v3/order', builder)
def parse(json_wrapper):
result = OrderOCO.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_oco_order(self, orderListId, origClientOrderId):
builder = UrlParamsBuilder()
builder.put_url('orderListId', symbol)
builder.put_url('origClientOrderId', orderId)
request = self.__create_request_by_get_with_signature(
'/api/v3/orderList', builder)
def parse(json_wrapper):
result = OrderOCO.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_all_oco_orders(self, fromId, startTime, endTime, limit):
builder = UrlParamsBuilder()
builder.put_url('fromId', fromId)
builder.put_url('startTime', startTime)
builder.put_url('endTime', endTime)
builder.put_url('limit', limit)
request = self.__create_request_by_get_with_signature(
'/api/v3/allOrderList', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = OrderOCO.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_open_oco_orders(self):
builder = UrlParamsBuilder()
request = self.__create_request_by_get_with_signature(
'/api/v3/openOrderList', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = OrderOCO.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def get_account_information(self):
builder = UrlParamsBuilder()
request = self.__create_request_by_get_with_signature(
'/api/v3/account', builder)
def parse(json_wrapper):
result = AccountInformation.json_parse(json_wrapper)
return result
request.json_parser = parse
return request
def get_account_trades(self, symbol, startTime, endTime, fromId, limit):
check_should_not_none(symbol, 'symbol')
builder = UrlParamsBuilder()
builder.put_url('symbol', symbol)
builder.put_url('startTime', startTime)
builder.put_url('endTime', endTime)
builder.put_url('fromId', fromId)
builder.put_url('limit', limit)
request = self.__create_request_by_get_with_signature(
'/api/v3/myTrades', builder)
def parse(json_wrapper):
result = list()
data_list = json_wrapper.convert_2_array()
for item in data_list.get_items():
element = MyTrade.json_parse(item)
result.append(element)
return result
request.json_parser = parse
return request
def start_user_data_stream(self):
builder = UrlParamsBuilder()
request = self.__create_request_by_post_with_signature(
'/api/v3/userDataStream', builder)
def parse(json_wrapper):
result = json_wrapper.get_string('listenKey')
return result
request.json_parser = parse
return request
def keep_user_data_stream(self, listenKey):
check_should_not_none(listenKey, 'listenKey')
builder = UrlParamsBuilder()
builder.put_url('listenKey', listenKey)
request = self.__create_request_by_put_with_signature(
'/api/v3/userDataStream', builder)
def parse(json_wrapper):
result = 'OK'
return result
request.json_parser = parse
return request
def close_user_data_stream(self, listenKey):
check_should_not_none(listenKey, 'listenKey')
builder = UrlParamsBuilder()
builder.put_url('listenKey', listenKey)
request = self.__create_request_by_delete_with_signature(
'/api/v3/userDataStream', builder)
def parse(json_wrapper):
result = 'OK'
return result
request.json_parser = parse
return request
| 36.35536 | 96 | 0.604411 | 2,522 | 24,758 | 5.633228 | 0.070182 | 0.065461 | 0.085099 | 0.049553 | 0.865137 | 0.853593 | 0.837123 | 0.824382 | 0.802984 | 0.780249 | 0 | 0.004604 | 0.298166 | 24,758 | 680 | 97 | 36.408824 | 0.813018 | 0.000444 | 0 | 0.788991 | 0 | 0 | 0.085643 | 0.012258 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119266 | false | 0 | 0.012844 | 0 | 0.251376 | 0.045872 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
14f410b036570ab04c9f3c8b7208e22639c818b3 | 3,517 | py | Python | pysad/transform/postprocessing/postprocessors.py | selimfirat/pysad | dff2ff38258eb8a85c9d34cf5f0b876fc1dc9ede | [
"BSD-3-Clause"
] | 155 | 2020-08-17T12:52:38.000Z | 2022-03-19T02:59:26.000Z | pysad/transform/postprocessing/postprocessors.py | shubhsoni/pysad | dff2ff38258eb8a85c9d34cf5f0b876fc1dc9ede | [
"BSD-3-Clause"
] | 2 | 2020-10-22T09:50:28.000Z | 2021-02-15T02:01:44.000Z | pysad/transform/postprocessing/postprocessors.py | shubhsoni/pysad | dff2ff38258eb8a85c9d34cf5f0b876fc1dc9ede | [
"BSD-3-Clause"
] | 14 | 2020-10-09T17:08:23.000Z | 2022-03-25T11:30:12.000Z | from pysad.core.base_postprocessor import BasePostprocessor
from pysad.statistics.average_meter import AverageMeter
from pysad.statistics.max_meter import MaxMeter
from pysad.statistics.median_meter import MedianMeter
from pysad.statistics.variance_meter import VarianceMeter
import numpy as np
class AveragePostprocessor(BasePostprocessor):
    """A postprocessor that converts a score to the average of all previous scores.
    """

    def __init__(self):
        # Running-average accumulator over every score passed to fit_partial.
        self.meter = AverageMeter()

    def fit_partial(self, score):
        """Fits the postprocessor to the (next) timestep's score.

        Args:
            score (float): Input score.

        Returns:
            object: self.
        """
        self.meter.update(score)
        return self

    def transform_partial(self, score=None):
        """Gets the current average. This method should be used immediately after the fit_partial method with same score.

        Args:
            score (float): The input score (unused; the running average is returned).

        Returns:
            float: Transformed score.
        """
        return self.meter.get()
class MaxPostprocessor(BasePostprocessor):
    """A postprocessor that converts a score to the maximum of all previous scores.
    """

    def __init__(self):
        # Running-maximum accumulator over every score passed to fit_partial.
        self.meter = MaxMeter()

    def fit_partial(self, score):
        """Fits the postprocessor to the (next) timestep's score.

        Args:
            score (float): Input score.

        Returns:
            object: self.
        """
        self.meter.update(score)
        return self

    def transform_partial(self, score=None):
        """Applies postprocessing to the score. This method should be used immediately after the fit_partial method with same score.

        Args:
            score (float): The input score (unused; the running maximum is returned).

        Returns:
            float: Transformed score.
        """
        return self.meter.get()
class MedianPostprocessor(BasePostprocessor):
    """A postprocessor that converts a score to the median of all previous scores.
    """

    def __init__(self):
        # Running-median accumulator over every score passed to fit_partial.
        self.meter = MedianMeter()

    def fit_partial(self, score):
        """Fits the postprocessor to the (next) timestep's score.

        Args:
            score (float): Input score.

        Returns:
            object: self.
        """
        self.meter.update(score)
        return self

    def transform_partial(self, score=None):
        """Applies postprocessing to the score.

        Args:
            score (float): The input score (unused; the running median is returned).

        Returns:
            float: Transformed score.
        """
        return self.meter.get()
class ZScorePostprocessor(BasePostprocessor):
    """A postprocessor that normalizes the score via Z-score normalization.

    The running mean and running variance of all scores seen so far are used
    as the normalization statistics.
    """

    def __init__(self):
        # Running statistics over every score passed to fit_partial.
        self.variance_meter = VarianceMeter()
        self.average_meter = AverageMeter()

    def fit_partial(self, score):
        """Fits the postprocessor to the (next) timestep's score.

        Args:
            score (float): Input score.

        Returns:
            object: self.
        """
        self.variance_meter.update(score)
        self.average_meter.update(score)
        return self

    def transform_partial(self, score):
        """Applies postprocessing to the score.

        Args:
            score (float): The input score.

        Returns:
            float: The Z-score of the input under the running statistics, or
                0.0 when the running variance is zero (e.g. only one score has
                been observed so far), which would otherwise divide by zero.
        """
        std = np.sqrt(self.variance_meter.get())
        if std == 0.0:
            # Guard against 0/0 -> nan and x/0 -> inf on the first score(s).
            return 0.0
        return (score - self.average_meter.get()) / std
| 24.943262 | 132 | 0.614728 | 386 | 3,517 | 5.505181 | 0.194301 | 0.023529 | 0.060235 | 0.071529 | 0.706353 | 0.706353 | 0.706353 | 0.706353 | 0.706353 | 0.631529 | 0 | 0 | 0.299687 | 3,517 | 140 | 133 | 25.121429 | 0.862769 | 0.412283 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.142857 | 0 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
14f583d0fcbeab01afc167f0229da3fc20794fe7 | 10,353 | py | Python | shared/xml_classes/common_messages/__init__.py | EDF-Lab/EDF | 3ab2d9e1820dfb713bbd54c91ba72d7d32d998f9 | [
"MIT"
] | 16 | 2022-02-11T14:49:04.000Z | 2022-03-30T07:33:45.000Z | shared/xml_classes/common_messages/__init__.py | EDF-Lab/EDF | 3ab2d9e1820dfb713bbd54c91ba72d7d32d998f9 | [
"MIT"
] | 1 | 2022-02-16T15:23:50.000Z | 2022-02-21T15:30:21.000Z | shared/xml_classes/common_messages/__init__.py | EDF-Lab/EDF | 3ab2d9e1820dfb713bbd54c91ba72d7d32d998f9 | [
"MIT"
] | 1 | 2022-03-24T10:52:28.000Z | 2022-03-24T10:52:28.000Z | from shared.xml_classes.common_messages.v2_g_ci_common_messages import (
AbsolutePriceScheduleType,
AdditionalServiceListType,
AdditionalServiceType,
AuthorizationReq,
AuthorizationReqType,
AuthorizationRes,
AuthorizationResType,
AuthorizationSetupReq,
AuthorizationSetupReqType,
AuthorizationSetupRes,
AuthorizationSetupResType,
CertificateChainType,
CertificateInstallationReq,
CertificateInstallationReqType,
CertificateInstallationRes,
CertificateInstallationResType,
ChargingScheduleType,
ContractCertificateChainType,
DynamicEvpptcontrolModeType,
DynamicSereqControlModeType,
DynamicSeresControlModeType,
DynamicSmdtcontrolModeType,
EimAreqAuthorizationModeType,
EimAsresAuthorizationModeType,
EmaidlistType,
EvabsolutePriceScheduleType,
EvenergyOfferType,
EvpowerProfileEntryListType,
EvpowerProfileType,
EvpowerScheduleEntryListType,
EvpowerScheduleEntryType,
EvpowerScheduleType,
EvpriceRuleStackListType,
EvpriceRuleStackType,
EvpriceRuleType,
MeteringConfirmationReq,
MeteringConfirmationReqType,
MeteringConfirmationRes,
MeteringConfirmationResType,
OverstayRuleListType,
OverstayRuleType,
ParameterSetType,
ParameterType,
PnCAreqAuthorizationModeType,
PnCAsresAuthorizationModeType,
PowerDeliveryReq,
PowerDeliveryReqType,
PowerDeliveryRes,
PowerDeliveryResType,
PowerScheduleEntryListType,
PowerScheduleEntryType,
PowerScheduleType,
PriceLevelScheduleEntryListType,
PriceLevelScheduleEntryType,
PriceLevelScheduleType,
PriceRuleStackListType,
PriceRuleStackType,
PriceRuleType,
PriceScheduleType,
ScheduleExchangeReq,
ScheduleExchangeReqType,
ScheduleExchangeRes,
ScheduleExchangeResType,
ScheduleTupleType,
ScheduledEvpptcontrolModeType,
ScheduledSereqControlModeType,
ScheduledSeresControlModeType,
ScheduledSmdtcontrolModeType,
SelectedServiceListType,
SelectedServiceType,
ServiceDetailReq,
ServiceDetailReqType,
ServiceDetailRes,
ServiceDetailResType,
ServiceDiscoveryReq,
ServiceDiscoveryReqType,
ServiceDiscoveryRes,
ServiceDiscoveryResType,
ServiceIdlistType,
ServiceListType,
ServiceParameterListType,
ServiceSelectionReq,
ServiceSelectionReqType,
ServiceSelectionRes,
ServiceSelectionResType,
ServiceType,
SessionSetupReq,
SessionSetupReqType,
SessionSetupRes,
SessionSetupResType,
SessionStopReq,
SessionStopReqType,
SessionStopRes,
SessionStopResType,
SignedCertificateChainType,
SignedInstallationData,
SignedInstallationDataType,
SignedMeteringData,
SignedMeteringDataType,
SubCertificatesType,
SupportedProvidersListType,
TargetPositionType,
TaxRuleListType,
TaxRuleType,
VehicleCheckInReq,
VehicleCheckInReqType,
VehicleCheckInRes,
VehicleCheckInResType,
VehicleCheckOutReq,
VehicleCheckOutReqType,
VehicleCheckOutRes,
VehicleCheckOutResType,
AuthorizationType,
ChannelSelectionType,
ChargeProgressType,
ChargingSessionType,
EcdhCurveType,
EvCheckInStatusType,
EvCheckOutStatusType,
EvseCheckOutStatusType,
ParkingMethodType,
PowerToleranceAcceptanceType,
)
from shared.xml_classes.common_messages.v2_g_ci_common_types import (
ClreqControlMode,
ClreqControlModeType,
ClresControlMode,
ClresControlModeType,
ChargeLoopReqType,
ChargeLoopResType,
ChargeParameterDiscoveryReqType,
ChargeParameterDiscoveryResType,
DetailedCostType,
DetailedTaxType,
DisplayParametersType,
DynamicClreqControlModeType,
DynamicClresControlModeType,
EvsestatusType,
ListOfRootCertificateIdsType,
MessageHeaderType,
MeterInfoType,
RationalNumberType,
ReceiptType,
ScheduledClreqControlModeType,
ScheduledClresControlModeType,
V2GmessageType,
V2GrequestType,
V2GresponseType,
EvseNotificationType,
ProcessingType,
ResponseCodeType,
)
from shared.xml_classes.common_messages.xmldsig_core_schema import (
CanonicalizationMethod,
CanonicalizationMethodType,
DsakeyValue,
DsakeyValueType,
DigestMethod,
DigestMethodType,
DigestValue,
KeyInfo,
KeyInfoType,
KeyName,
KeyValue,
KeyValueType,
Manifest,
ManifestType,
MgmtData,
Object,
ObjectType,
Pgpdata,
PgpdataType,
RsakeyValue,
RsakeyValueType,
Reference,
ReferenceType,
RetrievalMethod,
RetrievalMethodType,
Spkidata,
SpkidataType,
Signature,
SignatureMethod,
SignatureMethodType,
SignatureProperties,
SignaturePropertiesType,
SignatureProperty,
SignaturePropertyType,
SignatureType,
SignatureValue,
SignatureValueType,
SignedInfo,
SignedInfoType,
Transform,
TransformType,
Transforms,
TransformsType,
X509Data,
X509DataType,
X509IssuerSerialType,
)
__all__ = [
"AbsolutePriceScheduleType",
"AdditionalServiceListType",
"AdditionalServiceType",
"AuthorizationReq",
"AuthorizationReqType",
"AuthorizationRes",
"AuthorizationResType",
"AuthorizationSetupReq",
"AuthorizationSetupReqType",
"AuthorizationSetupRes",
"AuthorizationSetupResType",
"CertificateChainType",
"CertificateInstallationReq",
"CertificateInstallationReqType",
"CertificateInstallationRes",
"CertificateInstallationResType",
"ChargingScheduleType",
"ContractCertificateChainType",
"DynamicEvpptcontrolModeType",
"DynamicSereqControlModeType",
"DynamicSeresControlModeType",
"DynamicSmdtcontrolModeType",
"EimAreqAuthorizationModeType",
"EimAsresAuthorizationModeType",
"EmaidlistType",
"EvabsolutePriceScheduleType",
"EvenergyOfferType",
"EvpowerProfileEntryListType",
"EvpowerProfileType",
"EvpowerScheduleEntryListType",
"EvpowerScheduleEntryType",
"EvpowerScheduleType",
"EvpriceRuleStackListType",
"EvpriceRuleStackType",
"EvpriceRuleType",
"MeteringConfirmationReq",
"MeteringConfirmationReqType",
"MeteringConfirmationRes",
"MeteringConfirmationResType",
"OverstayRuleListType",
"OverstayRuleType",
"ParameterSetType",
"ParameterType",
"PnCAreqAuthorizationModeType",
"PnCAsresAuthorizationModeType",
"PowerDeliveryReq",
"PowerDeliveryReqType",
"PowerDeliveryRes",
"PowerDeliveryResType",
"PowerScheduleEntryListType",
"PowerScheduleEntryType",
"PowerScheduleType",
"PriceLevelScheduleEntryListType",
"PriceLevelScheduleEntryType",
"PriceLevelScheduleType",
"PriceRuleStackListType",
"PriceRuleStackType",
"PriceRuleType",
"PriceScheduleType",
"ScheduleExchangeReq",
"ScheduleExchangeReqType",
"ScheduleExchangeRes",
"ScheduleExchangeResType",
"ScheduleTupleType",
"ScheduledEvpptcontrolModeType",
"ScheduledSereqControlModeType",
"ScheduledSeresControlModeType",
"ScheduledSmdtcontrolModeType",
"SelectedServiceListType",
"SelectedServiceType",
"ServiceDetailReq",
"ServiceDetailReqType",
"ServiceDetailRes",
"ServiceDetailResType",
"ServiceDiscoveryReq",
"ServiceDiscoveryReqType",
"ServiceDiscoveryRes",
"ServiceDiscoveryResType",
"ServiceIdlistType",
"ServiceListType",
"ServiceParameterListType",
"ServiceSelectionReq",
"ServiceSelectionReqType",
"ServiceSelectionRes",
"ServiceSelectionResType",
"ServiceType",
"SessionSetupReq",
"SessionSetupReqType",
"SessionSetupRes",
"SessionSetupResType",
"SessionStopReq",
"SessionStopReqType",
"SessionStopRes",
"SessionStopResType",
"SignedCertificateChainType",
"SignedInstallationData",
"SignedInstallationDataType",
"SignedMeteringData",
"SignedMeteringDataType",
"SubCertificatesType",
"SupportedProvidersListType",
"TargetPositionType",
"TaxRuleListType",
"TaxRuleType",
"VehicleCheckInReq",
"VehicleCheckInReqType",
"VehicleCheckInRes",
"VehicleCheckInResType",
"VehicleCheckOutReq",
"VehicleCheckOutReqType",
"VehicleCheckOutRes",
"VehicleCheckOutResType",
"AuthorizationType",
"ChannelSelectionType",
"ChargeProgressType",
"ChargingSessionType",
"EcdhCurveType",
"EvCheckInStatusType",
"EvCheckOutStatusType",
"EvseCheckOutStatusType",
"ParkingMethodType",
"PowerToleranceAcceptanceType",
"ClreqControlMode",
"ClreqControlModeType",
"ClresControlMode",
"ClresControlModeType",
"ChargeLoopReqType",
"ChargeLoopResType",
"ChargeParameterDiscoveryReqType",
"ChargeParameterDiscoveryResType",
"DetailedCostType",
"DetailedTaxType",
"DisplayParametersType",
"DynamicClreqControlModeType",
"DynamicClresControlModeType",
"EvsestatusType",
"ListOfRootCertificateIdsType",
"MessageHeaderType",
"MeterInfoType",
"RationalNumberType",
"ReceiptType",
"ScheduledClreqControlModeType",
"ScheduledClresControlModeType",
"V2GmessageType",
"V2GrequestType",
"V2GresponseType",
"EvseNotificationType",
"ProcessingType",
"ResponseCodeType",
"CanonicalizationMethod",
"CanonicalizationMethodType",
"DsakeyValue",
"DsakeyValueType",
"DigestMethod",
"DigestMethodType",
"DigestValue",
"KeyInfo",
"KeyInfoType",
"KeyName",
"KeyValue",
"KeyValueType",
"Manifest",
"ManifestType",
"MgmtData",
"Object",
"ObjectType",
"Pgpdata",
"PgpdataType",
"RsakeyValue",
"RsakeyValueType",
"Reference",
"ReferenceType",
"RetrievalMethod",
"RetrievalMethodType",
"Spkidata",
"SpkidataType",
"Signature",
"SignatureMethod",
"SignatureMethodType",
"SignatureProperties",
"SignaturePropertiesType",
"SignatureProperty",
"SignaturePropertyType",
"SignatureType",
"SignatureValue",
"SignatureValueType",
"SignedInfo",
"SignedInfoType",
"Transform",
"TransformType",
"Transforms",
"TransformsType",
"X509Data",
"X509DataType",
"X509IssuerSerialType",
]
| 25.8825 | 72 | 0.732638 | 425 | 10,353 | 17.8 | 0.494118 | 0.007403 | 0.005155 | 0.007931 | 0.993258 | 0.993258 | 0.988764 | 0.988764 | 0.988764 | 0.988764 | 0 | 0.003102 | 0.190283 | 10,353 | 399 | 73 | 25.947368 | 0.89932 | 0 | 0 | 0 | 0 | 0 | 0.356901 | 0.171641 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.007538 | 0 | 0.007538 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
118e076fc11ef6b7375865b376286d5400297079 | 65,868 | py | Python | billforward/apis/analytics_api.py | billforward/bf-python | d2b812329ca3ed1fd94364d7f46f69ad74665596 | [
"Apache-2.0"
] | 2 | 2016-11-23T17:32:37.000Z | 2022-02-24T05:13:20.000Z | billforward/apis/analytics_api.py | billforward/bf-python | d2b812329ca3ed1fd94364d7f46f69ad74665596 | [
"Apache-2.0"
] | null | null | null | billforward/apis/analytics_api.py | billforward/bf-python | d2b812329ca3ed1fd94364d7f46f69ad74665596 | [
"Apache-2.0"
] | 1 | 2016-12-30T20:02:48.000Z | 2016-12-30T20:02:48.000Z | # coding: utf-8
"""
BillForward REST API
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AnalyticsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the analytics API wrapper.

    Args:
        api_client: Optional pre-configured client. When falsy, the client
            held by the global Configuration is used, being created on
            first use.
    """
    config = Configuration()
    if not api_client:
        # Fall back to (and lazily create) the shared client.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def get_account_debts(self, debts_per_account, **kwargs):
    """Gets outstanding debts per account, within a date range.

    {\"nickname\" : \"Get account debts\",\"response\" : \"getAccountDebts.html\"}

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param BillingEntityBase debts_per_account: The payments-per-account object. (required)
    :return: AccountPaymentsResultPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread.
        return self.get_account_debts_with_http_info(debts_per_account, **kwargs)
    # Synchronous mode: return the deserialized data only.
    data = self.get_account_debts_with_http_info(debts_per_account, **kwargs)
    return data
def get_account_debts_with_http_info(self, debts_per_account, **kwargs):
    """
    Gets outstanding debts per account, within a date range.
    {\"nickname\" : \"Get account debts\",\"response\" : \"getAccountDebts.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_account_debts_with_http_info(debts_per_account, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param BillingEntityBase debts_per_account: The payments-per-account object. (required)
    :return: AccountPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Recognised keyword arguments: the body object plus the framework's
    # internal 'callback' and '_return_http_data_only' flags.
    all_params = ['debts_per_account']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # locals() captures self, debts_per_account and kwargs; unknown keyword
    # arguments are rejected, known ones are merged into params.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_account_debts" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'debts_per_account' is set
    if ('debts_per_account' not in params) or (params['debts_per_account'] is None):
        raise ValueError("Missing the required parameter `debts_per_account` when calling `get_account_debts`")

    resource_path = '/analytics/payments/accounts/outstanding'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The debts-per-account object is sent as the JSON request body.
    body_params = None
    if 'debts_per_account' in params:
        body_params = params['debts_per_account']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AccountPaymentsResultPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_account_ltv(self, account_id, end_datetime, **kwargs):
    """Gets an account's life-time value, as of a given end date.

    {\"nickname\":\"Get account life-time value\",\"response\":\"getAccountLTV.html\"}

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The id of the account. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: AccountLTVResultPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread.
        return self.get_account_ltv_with_http_info(account_id, end_datetime, **kwargs)
    # Synchronous mode: return the deserialized data only.
    data = self.get_account_ltv_with_http_info(account_id, end_datetime, **kwargs)
    return data
def get_account_ltv_with_http_info(self, account_id, end_datetime, **kwargs):
    """
    Gets an account's life-time value, as of a given end date.
    {\"nickname\":\"Get account life-time value\",\"response\":\"getAccountLTV.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_account_ltv_with_http_info(account_id, end_datetime, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The id of the account. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: AccountLTVResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Recognised keyword arguments: the documented query parameters plus the
    # framework's internal 'callback' and '_return_http_data_only' flags.
    all_params = ['account_id', 'end_datetime', 'organizations', 'offset', 'records', 'order_by', 'order']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # locals() captures self and the positional arguments; unknown keyword
    # arguments are rejected, known ones are merged into params.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_account_ltv" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `get_account_ltv`")
    # verify the required parameter 'end_datetime' is set
    if ('end_datetime' not in params) or (params['end_datetime'] is None):
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_account_ltv`")

    resource_path = '/analytics/account-ltv/{account-id}/{end-datetime}'.replace('{format}', 'json')
    # Both required values are substituted into the URL path.
    path_params = {}
    if 'account_id' in params:
        path_params['account-id'] = params['account_id']
    if 'end_datetime' in params:
        path_params['end-datetime'] = params['end_datetime']

    # Optional paging/ordering/scoping values become query parameters.
    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']
    if 'offset' in params:
        query_params['offset'] = params['offset']
    if 'records' in params:
        query_params['records'] = params['records']
    if 'order_by' in params:
        query_params['order_by'] = params['order_by']
    if 'order' in params:
        query_params['order'] = params['order']

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AccountLTVResultPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_account_payments(self, payments_per_account, **kwargs):
    """Gets hourly payments per product, within a date range.

    {\"nickname\" : \"Get payments per account\",\"response\" : \"getAccountPayments.html\"}

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param BillingEntityBase payments_per_account: The payments-per-account object. (required)
    :return: AccountPaymentsResultPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread.
        return self.get_account_payments_with_http_info(payments_per_account, **kwargs)
    # Synchronous mode: return the deserialized data only.
    data = self.get_account_payments_with_http_info(payments_per_account, **kwargs)
    return data
def get_account_payments_with_http_info(self, payments_per_account, **kwargs):
    """
    Gets hourly payments per product, within a date range.
    {\"nickname\" : \"Get payments per account\",\"response\" : \"getAccountPayments.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_account_payments_with_http_info(payments_per_account, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param BillingEntityBase payments_per_account: The payments-per-account object. (required)
    :return: AccountPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Recognised keyword arguments: the body object plus the framework's
    # internal 'callback' and '_return_http_data_only' flags.
    all_params = ['payments_per_account']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # locals() captures self, payments_per_account and kwargs; unknown keyword
    # arguments are rejected, known ones are merged into params.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_account_payments" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'payments_per_account' is set
    if ('payments_per_account' not in params) or (params['payments_per_account'] is None):
        raise ValueError("Missing the required parameter `payments_per_account` when calling `get_account_payments`")

    resource_path = '/analytics/payments/accounts'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The payments-per-account object is sent as the JSON request body.
    body_params = None
    if 'payments_per_account' in params:
        body_params = params['payments_per_account']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AccountPaymentsResultPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_billforward_managed_payments(self, start_datetime, end_datetime, **kwargs):
    """Gets all payments managed by BillForward, within a date range.

    {\"nickname\":\"Get managed payments\",\"response\":\"getManagedPayments.html\"}

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: BillforwardManagedPaymentsResultPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread.
        return self.get_billforward_managed_payments_with_http_info(start_datetime, end_datetime, **kwargs)
    # Synchronous mode: return the deserialized data only.
    data = self.get_billforward_managed_payments_with_http_info(start_datetime, end_datetime, **kwargs)
    return data
def get_billforward_managed_payments_with_http_info(self, start_datetime, end_datetime, **kwargs):
    """
    Gets all payments managed by BillForward, within a date range.
    {\"nickname\":\"Get managed payments\",\"response\":\"getManagedPayments.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_billforward_managed_payments_with_http_info(start_datetime, end_datetime, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: BillforwardManagedPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Recognised keyword arguments: the documented query parameters plus the
    # framework's internal 'callback' and '_return_http_data_only' flags.
    all_params = ['start_datetime', 'end_datetime', 'organizations', 'offset', 'records', 'order_by', 'order']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # locals() captures self and the positional arguments; unknown keyword
    # arguments are rejected, known ones are merged into params.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_billforward_managed_payments" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'start_datetime' is set
    if ('start_datetime' not in params) or (params['start_datetime'] is None):
        raise ValueError("Missing the required parameter `start_datetime` when calling `get_billforward_managed_payments`")
    # verify the required parameter 'end_datetime' is set
    if ('end_datetime' not in params) or (params['end_datetime'] is None):
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_billforward_managed_payments`")

    resource_path = '/analytics/billforward-managed-payments/{start-datetime}/{end-datetime}'.replace('{format}', 'json')
    # Both required values are substituted into the URL path.
    path_params = {}
    if 'start_datetime' in params:
        path_params['start-datetime'] = params['start_datetime']
    if 'end_datetime' in params:
        path_params['end-datetime'] = params['end_datetime']

    # Optional paging/ordering/scoping values become query parameters.
    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']
    if 'offset' in params:
        query_params['offset'] = params['offset']
    if 'records' in params:
        query_params['records'] = params['records']
    if 'order_by' in params:
        query_params['order_by'] = params['order_by']
    if 'order' in params:
        query_params['order'] = params['order']

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='BillforwardManagedPaymentsResultPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_churn(self, start_datetime, end_datetime, **kwargs):
    """
    Gets churn, within a date range.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: CassChurnResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data
    # (not the full status/header tuple). Whether a callback is present or
    # not, the delegate's return value is what the caller receives.
    kwargs['_return_http_data_only'] = True
    return self.get_churn_with_http_info(start_datetime, end_datetime, **kwargs)
def get_churn_with_http_info(self, start_datetime, end_datetime, **kwargs):
    """
    Gets churn, within a date range.

    Issues ``GET /analytics/churn/{start-datetime}/{end-datetime}``.
    Synchronous by default; pass a `callback` keyword for an asynchronous
    request (the request thread is returned instead of the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: CassChurnResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('start_datetime', 'end_datetime', 'organizations', 'offset',
                'records', 'order_by', 'order', 'callback',
                '_return_http_data_only')
    params = {'start_datetime': start_datetime, 'end_datetime': end_datetime}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_churn" % name
            )
        params[name] = value
    # Both date-time bounds are mandatory path segments.
    if params.get('start_datetime') is None:
        raise ValueError("Missing the required parameter `start_datetime` when calling `get_churn`")
    if params.get('end_datetime') is None:
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_churn`")

    resource_path = '/analytics/churn/{start-datetime}/{end-datetime}'.replace('{format}', 'json')
    path_params = {
        'start-datetime': params['start_datetime'],
        'end-datetime': params['end_datetime'],
    }
    # Optional scoping/paging/ordering filters travel as query parameters.
    query_params = {name: params[name]
                    for name in ('organizations', 'offset', 'records',
                                 'order_by', 'order')
                    if name in params}

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CassChurnResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_debts(self, start_datetime, end_datetime, **kwargs):
    """
    Gets debts within a date range.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: DebtsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data.
    # The delegate's return value (data or thread) is passed straight back.
    kwargs['_return_http_data_only'] = True
    return self.get_debts_with_http_info(start_datetime, end_datetime, **kwargs)
def get_debts_with_http_info(self, start_datetime, end_datetime, **kwargs):
    """
    Gets debts within a date range.

    Issues ``GET /analytics/payments/outstanding/{start-datetime}/{end-datetime}``.
    Synchronous by default; pass a `callback` keyword for an asynchronous
    request (the request thread is returned instead of the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: DebtsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('start_datetime', 'end_datetime', 'organizations', 'offset',
                'records', 'order_by', 'order', 'callback',
                '_return_http_data_only')
    params = {'start_datetime': start_datetime, 'end_datetime': end_datetime}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_debts" % name
            )
        params[name] = value
    # Both date-time bounds are mandatory path segments.
    if params.get('start_datetime') is None:
        raise ValueError("Missing the required parameter `start_datetime` when calling `get_debts`")
    if params.get('end_datetime') is None:
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_debts`")

    resource_path = '/analytics/payments/outstanding/{start-datetime}/{end-datetime}'.replace('{format}', 'json')
    path_params = {
        'start-datetime': params['start_datetime'],
        'end-datetime': params['end_datetime'],
    }
    # Optional scoping/paging/ordering filters travel as query parameters.
    query_params = {name: params[name]
                    for name in ('organizations', 'offset', 'records',
                                 'order_by', 'order')
                    if name in params}

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DebtsResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_payments(self, start_datetime, end_datetime, **kwargs):
    """
    Gets payments within a date range.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: CassPaymentResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data.
    # The delegate's return value (data or thread) is passed straight back.
    kwargs['_return_http_data_only'] = True
    return self.get_payments_with_http_info(start_datetime, end_datetime, **kwargs)
def get_payments_with_http_info(self, start_datetime, end_datetime, **kwargs):
    """
    Gets payments within a date range.

    Issues ``GET /analytics/payments/{start-datetime}/{end-datetime}``.
    Synchronous by default; pass a `callback` keyword for an asynchronous
    request (the request thread is returned instead of the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: CassPaymentResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('start_datetime', 'end_datetime', 'organizations', 'offset',
                'records', 'order_by', 'order', 'callback',
                '_return_http_data_only')
    params = {'start_datetime': start_datetime, 'end_datetime': end_datetime}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_payments" % name
            )
        params[name] = value
    # Both date-time bounds are mandatory path segments.
    if params.get('start_datetime') is None:
        raise ValueError("Missing the required parameter `start_datetime` when calling `get_payments`")
    if params.get('end_datetime') is None:
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_payments`")

    resource_path = '/analytics/payments/{start-datetime}/{end-datetime}'.replace('{format}', 'json')
    path_params = {
        'start-datetime': params['start_datetime'],
        'end-datetime': params['end_datetime'],
    }
    # Optional scoping/paging/ordering filters travel as query parameters.
    query_params = {name: params[name]
                    for name in ('organizations', 'offset', 'records',
                                 'order_by', 'order')
                    if name in params}

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CassPaymentResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_product_payments(self, payments_per_product, **kwargs):
    """
    Gets hourly payments per product, within a date range.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param BillingEntityBase payments_per_product: The payments-per-product object. (required)
    :return: ProductPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data.
    # The delegate's return value (data or thread) is passed straight back.
    kwargs['_return_http_data_only'] = True
    return self.get_product_payments_with_http_info(payments_per_product, **kwargs)
def get_product_payments_with_http_info(self, payments_per_product, **kwargs):
    """
    Gets hourly payments per product, within a date range.

    Issues ``POST /analytics/payments-per-product`` with the given object as
    the JSON request body. Synchronous by default; pass a `callback` keyword
    for an asynchronous request (the request thread is returned instead of
    the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param BillingEntityBase payments_per_product: The payments-per-product object. (required)
    :return: ProductPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('payments_per_product', 'callback', '_return_http_data_only')
    params = {'payments_per_product': payments_per_product}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_product_payments" % name
            )
        params[name] = value
    # The request body is mandatory.
    if params.get('payments_per_product') is None:
        raise ValueError("Missing the required parameter `payments_per_product` when calling `get_product_payments`")

    resource_path = '/analytics/payments-per-product'.replace('{format}', 'json')

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    # No authentication settings; path and query parameters are unused.
    return self.api_client.call_api(
        resource_path, 'POST',
        {},
        {},
        header_params,
        body=params['payments_per_product'],
        post_params=[],
        files={},
        response_type='ProductPaymentsResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_product_rate_plan_payments(self, payments_per_product_rate_plan, **kwargs):
    """
    Gets hourly payments per product, within a date range.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param BillingEntityBase payments_per_product_rate_plan: The payments-per-product-rate-plan object. (required)
    :return: ProductRatePlanPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data.
    # The delegate's return value (data or thread) is passed straight back.
    kwargs['_return_http_data_only'] = True
    return self.get_product_rate_plan_payments_with_http_info(payments_per_product_rate_plan, **kwargs)
def get_product_rate_plan_payments_with_http_info(self, payments_per_product_rate_plan, **kwargs):
    """
    Gets hourly payments per product, within a date range.

    Issues ``POST /analytics/payments/product-rate-plan`` with the given
    object as the JSON request body. Synchronous by default; pass a
    `callback` keyword for an asynchronous request (the request thread is
    returned instead of the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param BillingEntityBase payments_per_product_rate_plan: The payments-per-product-rate-plan object. (required)
    :return: ProductRatePlanPaymentsResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('payments_per_product_rate_plan', 'callback',
                '_return_http_data_only')
    params = {'payments_per_product_rate_plan': payments_per_product_rate_plan}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_product_rate_plan_payments" % name
            )
        params[name] = value
    # The request body is mandatory.
    if params.get('payments_per_product_rate_plan') is None:
        raise ValueError("Missing the required parameter `payments_per_product_rate_plan` when calling `get_product_rate_plan_payments`")

    resource_path = '/analytics/payments/product-rate-plan'.replace('{format}', 'json')

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    # No authentication settings; path and query parameters are unused.
    return self.api_client.call_api(
        resource_path, 'POST',
        {},
        {},
        header_params,
        body=params['payments_per_product_rate_plan'],
        post_params=[],
        files={},
        response_type='ProductRatePlanPaymentsResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_subscription_ltv(self, subscription_id, end_datetime, **kwargs):
    """
    Gets a subscription's life-time value, as of a given end date.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str subscription_id: The id of the subscription. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: SubscriptionLTVResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data.
    # The delegate's return value (data or thread) is passed straight back.
    kwargs['_return_http_data_only'] = True
    return self.get_subscription_ltv_with_http_info(subscription_id, end_datetime, **kwargs)
def get_subscription_ltv_with_http_info(self, subscription_id, end_datetime, **kwargs):
    """
    Gets a subscription's life-time value, as of a given end date.

    Issues ``GET /analytics/subscription-ltv/{subscription-id}/{end-datetime}``.
    Synchronous by default; pass a `callback` keyword for an asynchronous
    request (the request thread is returned instead of the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str subscription_id: The id of the subscription. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: SubscriptionLTVResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('subscription_id', 'end_datetime', 'organizations', 'offset',
                'records', 'order_by', 'order', 'callback',
                '_return_http_data_only')
    params = {'subscription_id': subscription_id, 'end_datetime': end_datetime}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_subscription_ltv" % name
            )
        params[name] = value
    # Both the subscription id and the end bound are mandatory path segments.
    if params.get('subscription_id') is None:
        raise ValueError("Missing the required parameter `subscription_id` when calling `get_subscription_ltv`")
    if params.get('end_datetime') is None:
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_subscription_ltv`")

    resource_path = '/analytics/subscription-ltv/{subscription-id}/{end-datetime}'.replace('{format}', 'json')
    path_params = {
        'subscription-id': params['subscription_id'],
        'end-datetime': params['end_datetime'],
    }
    # Optional scoping/paging/ordering filters travel as query parameters.
    query_params = {name: params[name]
                    for name in ('organizations', 'offset', 'records',
                                 'order_by', 'order')
                    if name in params}

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SubscriptionLTVResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_upgrades(self, start_datetime, end_datetime, **kwargs):
    """
    Gets upgrades, within a date range.

    Synchronous by default; pass a `callback` keyword to make the request
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: CassUpgradeResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; request just the data.
    # The delegate's return value (data or thread) is passed straight back.
    kwargs['_return_http_data_only'] = True
    return self.get_upgrades_with_http_info(start_datetime, end_datetime, **kwargs)
def get_upgrades_with_http_info(self, start_datetime, end_datetime, **kwargs):
    """
    Gets upgrades, within a date range.

    Issues ``GET /analytics/upgrades/{start-datetime}/{end-datetime}``.
    Synchronous by default; pass a `callback` keyword for an asynchronous
    request (the request thread is returned instead of the response).

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str start_datetime: The UTC DateTime specifying the start of the result period. (required)
    :param str end_datetime: The UTC DateTime specifying the end of the result period. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first amendment to return.
    :param int records: The maximum number of amendments to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: The direction of any ordering, either ASC or DESC.
    :return: CassUpgradeResultPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ('start_datetime', 'end_datetime', 'organizations', 'offset',
                'records', 'order_by', 'order', 'callback',
                '_return_http_data_only')
    params = {'start_datetime': start_datetime, 'end_datetime': end_datetime}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_upgrades" % name
            )
        params[name] = value
    # Both date-time bounds are mandatory path segments.
    if params.get('start_datetime') is None:
        raise ValueError("Missing the required parameter `start_datetime` when calling `get_upgrades`")
    if params.get('end_datetime') is None:
        raise ValueError("Missing the required parameter `end_datetime` when calling `get_upgrades`")

    resource_path = '/analytics/upgrades/{start-datetime}/{end-datetime}'.replace('{format}', 'json')
    path_params = {
        'start-datetime': params['start_datetime'],
        'end-datetime': params['end_datetime'],
    }
    # Optional scoping/paging/ordering filters travel as query parameters.
    query_params = {name: params[name]
                    for name in ('organizations', 'offset', 'records',
                                 'order_by', 'order')
                    if name in params}

    header_params = {}
    # Only advertise an Accept header when content negotiation yields one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CassUpgradeResultPagedMetadata',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
| 47.558123 | 141 | 0.606349 | 7,145 | 65,868 | 5.390763 | 0.036949 | 0.033985 | 0.022068 | 0.020562 | 0.957785 | 0.94309 | 0.932809 | 0.924631 | 0.915336 | 0.909624 | 0 | 0.00022 | 0.309847 | 65,868 | 1,384 | 142 | 47.592486 | 0.847071 | 0.394213 | 0 | 0.781011 | 1 | 0 | 0.210159 | 0.058305 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035222 | false | 0 | 0.01072 | 0 | 0.098009 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
11cb3ae5395c2a44ec3eb17eda9a30003b2a860b | 206 | py | Python | retweetcascade/__init__.py | guglielmocola/RetweetCascade | f96319d0107473715104acceb2ff0925d35dd9e3 | [
"MIT"
] | null | null | null | retweetcascade/__init__.py | guglielmocola/RetweetCascade | f96319d0107473715104acceb2ff0925d35dd9e3 | [
"MIT"
] | null | null | null | retweetcascade/__init__.py | guglielmocola/RetweetCascade | f96319d0107473715104acceb2ff0925d35dd9e3 | [
"MIT"
] | null | null | null | from retweetcascade.rt_cascade_interactions import rt_cascade_interactions
from retweetcascade.rt_cascade_friendships import rt_cascade_friendships
from retweetcascade.rt_cascade_info import rt_cascade_info | 68.666667 | 74 | 0.932039 | 27 | 206 | 6.666667 | 0.296296 | 0.3 | 0.333333 | 0.45 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053398 | 206 | 3 | 75 | 68.666667 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
ee96b004089d788400d39b90d6a05e3dfadf4dc1 | 47 | py | Python | tardis/io/tests/test_config_validator.py | saksham-kaushal/tardis | ad91bef1a4dc40b247dfc11951bb4e517a09545f | [
"BSD-3-Clause"
] | 176 | 2015-02-26T07:26:59.000Z | 2022-03-16T18:26:22.000Z | tardis/io/tests/test_config_validator.py | saksham-kaushal/tardis | ad91bef1a4dc40b247dfc11951bb4e517a09545f | [
"BSD-3-Clause"
] | 1,474 | 2015-02-12T13:02:16.000Z | 2022-03-31T09:05:54.000Z | tardis/io/tests/test_config_validator.py | saksham-kaushal/tardis | ad91bef1a4dc40b247dfc11951bb4e517a09545f | [
"BSD-3-Clause"
] | 434 | 2015-02-07T17:15:41.000Z | 2022-03-23T04:49:38.000Z | # TODO: Write tests for the new validator
# Placeholder module body; real tests for the new validator are still TODO
# (see the header comment above).
pass
| 15.666667 | 41 | 0.765957 | 8 | 47 | 4.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191489 | 47 | 2 | 42 | 23.5 | 0.947368 | 0.829787 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.5 | 0 | 1 | 0 | true | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
eedefc4b93f92e38c8c265f1c6c61a84a85b4862 | 78 | py | Python | ttemplates/ano_utils/__init__.py | wroblewskipawel/pytorch-seg-tools | bbff85eb9665f09e10aa5205ad6f2d879eed26b7 | [
"MIT"
] | null | null | null | ttemplates/ano_utils/__init__.py | wroblewskipawel/pytorch-seg-tools | bbff85eb9665f09e10aa5205ad6f2d879eed26b7 | [
"MIT"
] | null | null | null | ttemplates/ano_utils/__init__.py | wroblewskipawel/pytorch-seg-tools | bbff85eb9665f09e10aa5205ad6f2d879eed26b7 | [
"MIT"
] | null | null | null | from . import ano_dataset
from . import ano_metrics
from . import ano_trainer
| 19.5 | 25 | 0.807692 | 12 | 78 | 5 | 0.5 | 0.5 | 0.65 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153846 | 78 | 3 | 26 | 26 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
01279bf5054e26c48fede356422f3feed7f829a2 | 47 | py | Python | SybilRanking/scraper/__init__.py | mikeitexpert/osn-sybilranking | 4f4fd65808d39f9e6d1a44ed5fe3b95f17e77aa3 | [
"MIT"
] | 3 | 2019-03-11T15:30:08.000Z | 2021-03-04T18:25:26.000Z | SybilRanking/scraper/__init__.py | mikeitexpert/osn-sybilranking | 4f4fd65808d39f9e6d1a44ed5fe3b95f17e77aa3 | [
"MIT"
] | null | null | null | SybilRanking/scraper/__init__.py | mikeitexpert/osn-sybilranking | 4f4fd65808d39f9e6d1a44ed5fe3b95f17e77aa3 | [
"MIT"
] | 3 | 2019-10-31T08:52:23.000Z | 2022-03-24T07:32:28.000Z | from .InstagramScraper import InstagramScraper
| 23.5 | 46 | 0.893617 | 4 | 47 | 10.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 47 | 1 | 47 | 47 | 0.976744 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
012d76e35ec872f4697e8260bf4765f6f918a38b | 3,259 | py | Python | ursina/editor/scenes/untitled_scene[2,0].py | LyfeOnEdge/ursina | 04795ceb567eaef51cb36baa696da6646d1eb650 | [
"MIT"
] | 2 | 2018-01-27T14:25:22.000Z | 2018-05-17T20:06:04.000Z | ursina/editor/scenes/untitled_scene[2,0].py | flutterbuddy1/ursina | 07be59f749a3778c1ca2c0a624df984616feabb4 | [
"MIT"
] | 1 | 2022-02-11T18:37:31.000Z | 2022-02-11T18:37:31.000Z | ursina/editor/scenes/untitled_scene[2,0].py | flutterbuddy1/ursina | 07be59f749a3778c1ca2c0a624df984616feabb4 | [
"MIT"
] | null | null | null |
class Scene(Entity):
    # NOTE(review): this file appears to be saved/loaded by the ursina level
    # editor ("untitled_scene[2,0].py"); the literal Vec3/Color arguments ARE
    # the scene data, so they must not be reformatted or rounded — confirm
    # before any manual edit.
    def __init__(self, **kwargs):
        """Recreate the saved scene: a textured ground plane plus a set of
        cube props, every one parented to this Scene entity and (mostly)
        given a box collider."""
        super().__init__(**kwargs)
        Entity(parent=self, position=Vec3(0.0630372, -0.108902, 3.63031), scale=Vec3(1.12607, 0.782195, 1.73937), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        # Ground: large grass-textured plane (no collider).
        Entity(parent=self, position=Vec3(-1.18075, 0.0209864, 8.00681), scale=Vec3(26.2275, 26.2275, 26.2275), model='plane', texture='grass', color=Color(1.0, 0.9399999976158142, 0.9399999976158142, 1.0), )
        Entity(parent=self, position=Vec3(-1.24483, 0, 1.68094), rotation=Vec3(0, 37.2974, -49.1139), model='cube', color=Color(1.0, 0.5, 0.0, 1.0), collider='box', )
        # Row of identical brown cubes along x, with two stacked towers.
        Entity(parent=self, position=Vec3(1.40786, -0.107902, 3.69981), scale=Vec3(1.12607, 0.782195, 1.73937), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(2.76964, -0.107902, 3.69981), scale=Vec3(1.12607, 0.782195, 1.73937), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(4.1147, -0.107902, 3.69981), scale=Vec3(1.12607, 0.782195, 1.73937), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(4.1147, 1.30049, 3.69981), scale=Vec3(0.720138, 2.449, 1.11235), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(4.1147, 2.60157, 3.69981), scale=Vec3(0.882537, 0.411635, 1.36319), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(5.48133, -0.107902, 3.69981), scale=Vec3(1.12607, 0.782195, 1.73937), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(5.48133, 1.30049, 3.69981), scale=Vec3(0.720138, 2.449, 1.11235), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(5.48133, 2.60157, 3.69981), scale=Vec3(0.895863, 0.417851, 1.38378), model='cube', color=Color(0.7900000214576721, 0.5845999717712402, 0.48190000653266907, 1.0), collider='box', )
        # Tilted cluster (shared rotation, slightly different grey-brown tint).
        Entity(parent=self, position=Vec3(8.34962, -0.105901, 4.12458), rotation=Vec3(-16.2241, 42.4637, -86.0656), scale=Vec3(1.12607, 0.782196, 1.73937), model='cube', color=Color(0.7599999904632568, 0.6287733316421509, 0.4787999987602234, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(7.23614, 0.0134851, 4.98389), rotation=Vec3(-16.2241, 42.4637, -86.0656), scale=Vec3(0.720138, 2.449, 1.11235), model='cube', color=Color(0.7599999904632568, 0.6287733316421509, 0.4787999987602234, 1.0), collider='box', )
        Entity(parent=self, position=Vec3(6.32048, 0.0726001, 5.91117), rotation=Vec3(-16.2241, 42.4637, -86.0656), scale=Vec3(1.12607, 0.782196, 1.73937), model='cube', color=Color(0.7599999904632568, 0.6287733316421509, 0.4787999987602234, 1.0), collider='box', )
| 171.526316 | 265 | 0.70359 | 494 | 3,259 | 4.625506 | 0.176113 | 0.014004 | 0.098031 | 0.147046 | 0.856018 | 0.832385 | 0.819694 | 0.800438 | 0.800438 | 0.800438 | 0 | 0.440985 | 0.103099 | 3,259 | 18 | 266 | 181.055556 | 0.340746 | 0 | 0 | 0 | 0 | 0 | 0.031001 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0 | 0 | 0.117647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
019a4b5820d6b90d7eec7045433ed135e6e2e7da | 40 | py | Python | xfntr/command_line.py | zhul9311/XFNTR | 4f2e58775c6bb0df9a90e2854e7532f15f0e341a | [
"MIT"
] | null | null | null | xfntr/command_line.py | zhul9311/XFNTR | 4f2e58775c6bb0df9a90e2854e7532f15f0e341a | [
"MIT"
] | null | null | null | xfntr/command_line.py | zhul9311/XFNTR | 4f2e58775c6bb0df9a90e2854e7532f15f0e341a | [
"MIT"
] | null | null | null | import main
def run():
    """Console-script entry point: delegates straight to ``main.main()``."""
    main.main()
| 8 | 15 | 0.6 | 6 | 40 | 4 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 40 | 4 | 16 | 10 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6dc119e86a1c92da542d4906b5f335be539533a3 | 40,425 | py | Python | swagger_client/api/api_v2___bots_api.py | Fates-List/fateslist.py-autogen | 0643434d9d0e71f781f99b2703a2ef52f49d8875 | [
"MIT"
] | null | null | null | swagger_client/api/api_v2___bots_api.py | Fates-List/fateslist.py-autogen | 0643434d9d0e71f781f99b2703a2ef52f49d8875 | [
"MIT"
] | null | null | null | swagger_client/api/api_v2___bots_api.py | Fates-List/fateslist.py-autogen | 0643434d9d0e71f781f99b2703a2ef52f49d8875 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Fates List
Current API: v2 beta 3 Default API: v2 API Docs: https://apidocs.fateslist.xyz Enum Reference: https://apidocs.fateslist.xyz/structures/enums.autogen # noqa: E501
OpenAPI spec version: 0.3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class APIV2BotsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def appeal_bot_api_v2_bots_bot_id_appeal_post(self, body, bot_id, **kwargs): # noqa: E501
"""Appeal Bot # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.appeal_bot_api_v2_bots_bot_id_appeal_post(body, bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BotAppeal body: (required)
:param int bot_id: (required)
:return: APIResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.appeal_bot_api_v2_bots_bot_id_appeal_post_with_http_info(body, bot_id, **kwargs) # noqa: E501
else:
(data) = self.appeal_bot_api_v2_bots_bot_id_appeal_post_with_http_info(body, bot_id, **kwargs) # noqa: E501
return data
def appeal_bot_api_v2_bots_bot_id_appeal_post_with_http_info(self, body, bot_id, **kwargs):  # noqa: E501
    """Appeal Bot  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.appeal_bot_api_v2_bots_bot_id_appeal_post_with_http_info(body, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotAppeal body: (required)
    :param int bot_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Declared endpoint parameters plus the call-control options every
    # generated method accepts.
    all_params = ['body', 'bot_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshot lets us validate **kwargs against the declared
    # names and reject anything unexpected with a TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method appeal_bot_api_v2_bots_bot_id_appeal_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `appeal_bot_api_v2_bots_bot_id_appeal_post`")  # noqa: E501
    # verify the required parameter 'bot_id' is set
    if ('bot_id' not in params or
            params['bot_id'] is None):
        raise ValueError("Missing the required parameter `bot_id` when calling `appeal_bot_api_v2_bots_bot_id_appeal_post`")  # noqa: E501

    collection_formats = {}

    # Assemble the pieces of the HTTP request.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bot']  # noqa: E501

    # ApiClient performs the actual HTTP call (and async dispatch).
    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/appeal', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='APIResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bot_exists_api_v2_bots_bot_id_head(self, bot_id, **kwargs): # noqa: E501
"""Bot Exists # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bot_exists_api_v2_bots_bot_id_head(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:return: Object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.bot_exists_api_v2_bots_bot_id_head_with_http_info(bot_id, **kwargs) # noqa: E501
else:
(data) = self.bot_exists_api_v2_bots_bot_id_head_with_http_info(bot_id, **kwargs) # noqa: E501
return data
def bot_exists_api_v2_bots_bot_id_head_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Bot Exists  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bot_exists_api_v2_bots_bot_id_head_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Declared endpoint parameters plus the call-control options every
    # generated method accepts.
    all_params = ['bot_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshot lets us validate **kwargs against the declared
    # names and reject anything unexpected with a TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bot_exists_api_v2_bots_bot_id_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'bot_id' is set
    if ('bot_id' not in params or
            params['bot_id'] is None):
        raise ValueError("Missing the required parameter `bot_id` when calling `bot_exists_api_v2_bots_bot_id_head`")  # noqa: E501

    collection_formats = {}

    # Assemble the pieces of the HTTP request (HEAD: no body, no query).
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ApiClient performs the actual HTTP call (and async dispatch).
    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bot_widget_api_v2_bots_bot_id_widget_get(self, bot_id, format, **kwargs): # noqa: E501
"""Bot Widget # noqa: E501
Returns a widget Unstable signifies whether an action is unstable or not. You will get a API error if this is the case and unstable is not set or the bot is not certified (only certified bots may use unstable endpoints) and the existence of the nyi key can be used to programatically detect this # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bot_widget_api_v2_bots_bot_id_widget_get(bot_id, format, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:param WidgetFormat format: (required)
:param Bgcolor bgcolor:
:param Textcolor textcolor:
:return: Object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.bot_widget_api_v2_bots_bot_id_widget_get_with_http_info(bot_id, format, **kwargs) # noqa: E501
else:
(data) = self.bot_widget_api_v2_bots_bot_id_widget_get_with_http_info(bot_id, format, **kwargs) # noqa: E501
return data
def bot_widget_api_v2_bots_bot_id_widget_get_with_http_info(self, bot_id, format, **kwargs):  # noqa: E501
    """Bot Widget  # noqa: E501

    Returns a widget  Unstable signifies whether an action is unstable or not. You will get a API error if this is the case and unstable is not set or the bot is not certified (only certified bots may use unstable endpoints) and the existence of the nyi key can be used to programatically detect this  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bot_widget_api_v2_bots_bot_id_widget_get_with_http_info(bot_id, format, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param WidgetFormat format: (required)
    :param Bgcolor bgcolor:
    :param Textcolor textcolor:
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Declared endpoint parameters plus the call-control options every
    # generated method accepts.
    all_params = ['bot_id', 'format', 'bgcolor', 'textcolor']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshot lets us validate **kwargs against the declared
    # names and reject anything unexpected with a TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bot_widget_api_v2_bots_bot_id_widget_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'bot_id' is set
    if ('bot_id' not in params or
            params['bot_id'] is None):
        raise ValueError("Missing the required parameter `bot_id` when calling `bot_widget_api_v2_bots_bot_id_widget_get`")  # noqa: E501
    # verify the required parameter 'format' is set
    if ('format' not in params or
            params['format'] is None):
        raise ValueError("Missing the required parameter `format` when calling `bot_widget_api_v2_bots_bot_id_widget_get`")  # noqa: E501

    collection_formats = {}

    # Assemble the pieces of the HTTP request; format/bgcolor/textcolor
    # travel in the query string, bot_id in the path.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []
    if 'format' in params:
        query_params.append(('format', params['format']))  # noqa: E501
    if 'bgcolor' in params:
        query_params.append(('bgcolor', params['bgcolor']))  # noqa: E501
    if 'textcolor' in params:
        query_params.append(('textcolor', params['textcolor']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ApiClient performs the actual HTTP call (and async dispatch).
    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/widget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def fetch_bot_api_v2_bots_bot_id_get(self, bot_id, **kwargs): # noqa: E501
"""Fetch Bot # noqa: E501
Fetches bot information given a bot ID. If not found, 404 will be returned. Setting compact to true (default) -> description, long_description, long_description_type, keep_banner_decor and css will be null Setting with_tags to false -> tags will be null Setting offline to true -> user will be null and no ownership info will be given. If the bot is no longer on discord, this endpoint will still return if offline is set to true # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.fetch_bot_api_v2_bots_bot_id_get(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:param bool compact:
:param bool with_tags:
:param bool offline:
:return: Bot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.fetch_bot_api_v2_bots_bot_id_get_with_http_info(bot_id, **kwargs) # noqa: E501
else:
(data) = self.fetch_bot_api_v2_bots_bot_id_get_with_http_info(bot_id, **kwargs) # noqa: E501
return data
def fetch_bot_api_v2_bots_bot_id_get_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Fetch Bot  # noqa: E501

    Fetches bot information given a bot ID. If not found, 404 will be returned.  Setting compact to true (default) -> description, long_description, long_description_type, keep_banner_decor and css will be null  Setting with_tags to false -> tags will be null  Setting offline to true -> user will be null and no ownership info will be given. If the bot is no longer on discord, this endpoint will still return if offline is set to true  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.fetch_bot_api_v2_bots_bot_id_get_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param bool compact:
    :param bool with_tags:
    :param bool offline:
    :return: Bot
             If the method is called asynchronously,
             returns the request thread.
    """
    # Declared endpoint parameters plus the call-control options every
    # generated method accepts.
    all_params = ['bot_id', 'compact', 'with_tags', 'offline']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshot lets us validate **kwargs against the declared
    # names and reject anything unexpected with a TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method fetch_bot_api_v2_bots_bot_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'bot_id' is set
    if ('bot_id' not in params or
            params['bot_id'] is None):
        raise ValueError("Missing the required parameter `bot_id` when calling `fetch_bot_api_v2_bots_bot_id_get`")  # noqa: E501

    collection_formats = {}

    # Assemble the pieces of the HTTP request; the three optional flags
    # travel in the query string, bot_id in the path.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []
    if 'compact' in params:
        query_params.append(('compact', params['compact']))  # noqa: E501
    if 'with_tags' in params:
        query_params.append(('with_tags', params['with_tags']))  # noqa: E501
    if 'offline' in params:
        query_params.append(('offline', params['offline']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ApiClient performs the actual HTTP call (and async dispatch).
    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Bot',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def fetch_random_bot_api_v2_bots_bot_id_random_get(self, bot_id, **kwargs): # noqa: E501
"""Fetch Random Bot # noqa: E501
Fetch a random bot. Bot ID should be the recursive/root bot 0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.fetch_random_bot_api_v2_bots_bot_id_random_get(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:param str lang:
:return: BotRandom
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.fetch_random_bot_api_v2_bots_bot_id_random_get_with_http_info(bot_id, **kwargs) # noqa: E501
else:
(data) = self.fetch_random_bot_api_v2_bots_bot_id_random_get_with_http_info(bot_id, **kwargs) # noqa: E501
return data
def fetch_random_bot_api_v2_bots_bot_id_random_get_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Fetch Random Bot  # noqa: E501

    Fetch a random bot. Bot ID should be the recursive/root bot 0  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.fetch_random_bot_api_v2_bots_bot_id_random_get_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str lang:
    :return: BotRandom
             If the method is called asynchronously,
             returns the request thread.
    """
    # Declared endpoint parameters plus the call-control options every
    # generated method accepts.
    all_params = ['bot_id', 'lang']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshot lets us validate **kwargs against the declared
    # names and reject anything unexpected with a TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method fetch_random_bot_api_v2_bots_bot_id_random_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'bot_id' is set
    if ('bot_id' not in params or
            params['bot_id'] is None):
        raise ValueError("Missing the required parameter `bot_id` when calling `fetch_random_bot_api_v2_bots_bot_id_random_get`")  # noqa: E501

    collection_formats = {}

    # Assemble the pieces of the HTTP request; lang travels in the query
    # string, bot_id in the path.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []
    if 'lang' in params:
        query_params.append(('lang', params['lang']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ApiClient performs the actual HTTP call (and async dispatch).
    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/random', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BotRandom',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_bot_token_api_v2_bots_bot_id_token_get(self, bot_id, user_id, **kwargs): # noqa: E501
"""Get Bot Token # noqa: E501
Gets a bot token given a user token. 401 = Invalid API Token, 403 = Forbidden (not owner of bot or staff) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_bot_token_api_v2_bots_bot_id_token_get(bot_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:param int user_id: (required)
:return: Object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_bot_token_api_v2_bots_bot_id_token_get_with_http_info(bot_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_bot_token_api_v2_bots_bot_id_token_get_with_http_info(bot_id, user_id, **kwargs) # noqa: E501
return data
def get_bot_token_api_v2_bots_bot_id_token_get_with_http_info(self, bot_id, user_id, **kwargs):  # noqa: E501
    """Get Bot Token  # noqa: E501

    Gets a bot token given a user token. 401 = Invalid API Token, 403 = Forbidden (not owner of bot or staff)  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_bot_token_api_v2_bots_bot_id_token_get_with_http_info(bot_id, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param int user_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Declared endpoint parameters plus the call-control options every
    # generated method accepts.
    all_params = ['bot_id', 'user_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshot lets us validate **kwargs against the declared
    # names and reject anything unexpected with a TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_bot_token_api_v2_bots_bot_id_token_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'bot_id' is set
    if ('bot_id' not in params or
            params['bot_id'] is None):
        raise ValueError("Missing the required parameter `bot_id` when calling `get_bot_token_api_v2_bots_bot_id_token_get`")  # noqa: E501
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params or
            params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `get_bot_token_api_v2_bots_bot_id_token_get`")  # noqa: E501

    collection_formats = {}

    # Assemble the pieces of the HTTP request; user_id travels in the
    # query string, bot_id in the path.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []
    if 'user_id' in params:
        query_params.append(('user_id', params['user_id']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['User']  # noqa: E501

    # ApiClient performs the actual HTTP call (and async dispatch).
    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/token', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_raw_bot_api_v2_bots_bot_id_raw_get(self, bot_id, **kwargs): # noqa: E501
"""Get Raw Bot # noqa: E501
Gets the raw given to the template with a few differences (bot_id being string and not int and passing auth manually to the function (coming soon) as the API aims to be as stateless as possible) Note that you likely want the Get Bot API and not this in most cases This API is prone to change as render_bot will keep changing # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_raw_bot_api_v2_bots_bot_id_raw_get(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:return: Object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_raw_bot_api_v2_bots_bot_id_raw_get_with_http_info(bot_id, **kwargs) # noqa: E501
else:
(data) = self.get_raw_bot_api_v2_bots_bot_id_raw_get_with_http_info(bot_id, **kwargs) # noqa: E501
return data
def get_raw_bot_api_v2_bots_bot_id_raw_get_with_http_info(self, bot_id, **kwargs): # noqa: E501
"""Get Raw Bot # noqa: E501
Gets the raw given to the template with a few differences (bot_id being string and not int and passing auth manually to the function (coming soon) as the API aims to be as stateless as possible) Note that you likely want the Get Bot API and not this in most cases This API is prone to change as render_bot will keep changing # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_raw_bot_api_v2_bots_bot_id_raw_get_with_http_info(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:return: Object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bot_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_raw_bot_api_v2_bots_bot_id_raw_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bot_id' is set
if ('bot_id' not in params or
params['bot_id'] is None):
raise ValueError("Missing the required parameter `bot_id` when calling `get_raw_bot_api_v2_bots_bot_id_raw_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bot_id' in params:
path_params['bot_id'] = params['bot_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/bots/{bot_id}/raw', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def regenerate_bot_token_api_v2_bots_bot_id_token_patch(self, bot_id, **kwargs): # noqa: E501
"""Regenerate Bot Token # noqa: E501
Regenerates the Bot token **Bot Token**: You can get this by clicking your bot and clicking edit and clicking Show (under API Token section) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.regenerate_bot_token_api_v2_bots_bot_id_token_patch(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:return: APIResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.regenerate_bot_token_api_v2_bots_bot_id_token_patch_with_http_info(bot_id, **kwargs) # noqa: E501
else:
(data) = self.regenerate_bot_token_api_v2_bots_bot_id_token_patch_with_http_info(bot_id, **kwargs) # noqa: E501
return data
def regenerate_bot_token_api_v2_bots_bot_id_token_patch_with_http_info(self, bot_id, **kwargs): # noqa: E501
"""Regenerate Bot Token # noqa: E501
Regenerates the Bot token **Bot Token**: You can get this by clicking your bot and clicking edit and clicking Show (under API Token section) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.regenerate_bot_token_api_v2_bots_bot_id_token_patch_with_http_info(bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int bot_id: (required)
:return: APIResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bot_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method regenerate_bot_token_api_v2_bots_bot_id_token_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bot_id' is set
if ('bot_id' not in params or
params['bot_id'] is None):
raise ValueError("Missing the required parameter `bot_id` when calling `regenerate_bot_token_api_v2_bots_bot_id_token_patch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bot_id' in params:
path_params['bot_id'] = params['bot_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bot'] # noqa: E501
return self.api_client.call_api(
'/api/v2/bots/{bot_id}/token', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APIResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_bot_stats_api_v2_bots_bot_id_stats_post(self, body, bot_id, **kwargs): # noqa: E501
"""Set Bot Stats # noqa: E501
This endpoint allows you to set the guild + shard counts for your bot # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_bot_stats_api_v2_bots_bot_id_stats_post(body, bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BotStats body: (required)
:param int bot_id: (required)
:return: APIResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_bot_stats_api_v2_bots_bot_id_stats_post_with_http_info(body, bot_id, **kwargs) # noqa: E501
else:
(data) = self.set_bot_stats_api_v2_bots_bot_id_stats_post_with_http_info(body, bot_id, **kwargs) # noqa: E501
return data
def set_bot_stats_api_v2_bots_bot_id_stats_post_with_http_info(self, body, bot_id, **kwargs): # noqa: E501
"""Set Bot Stats # noqa: E501
This endpoint allows you to set the guild + shard counts for your bot # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_bot_stats_api_v2_bots_bot_id_stats_post_with_http_info(body, bot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BotStats body: (required)
:param int bot_id: (required)
:return: APIResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'bot_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_bot_stats_api_v2_bots_bot_id_stats_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `set_bot_stats_api_v2_bots_bot_id_stats_post`") # noqa: E501
# verify the required parameter 'bot_id' is set
if ('bot_id' not in params or
params['bot_id'] is None):
raise ValueError("Missing the required parameter `bot_id` when calling `set_bot_stats_api_v2_bots_bot_id_stats_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bot_id' in params:
path_params['bot_id'] = params['bot_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bot'] # noqa: E501
return self.api_client.call_api(
'/api/v2/bots/{bot_id}/stats', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APIResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 42.597471 | 454 | 0.624391 | 5,218 | 40,425 | 4.52568 | 0.050211 | 0.049757 | 0.032395 | 0.043193 | 0.963159 | 0.953462 | 0.952403 | 0.944908 | 0.938641 | 0.932331 | 0 | 0.018626 | 0.28945 | 40,425 | 948 | 455 | 42.642405 | 0.803509 | 0.347631 | 0 | 0.781676 | 0 | 0 | 0.196881 | 0.071906 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037037 | false | 0 | 0.007797 | 0 | 0.099415 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0996fa6b9039deb6c05318e40ef3401d500c22ac | 202 | py | Python | pypro/modules/tests/test_facade.py | rodrigoddc/django-advanced-course | 098507d8111f38f8a6b914575e50861538913f6c | [
"MIT"
] | 1 | 2020-06-30T01:30:31.000Z | 2020-06-30T01:30:31.000Z | pypro/modules/tests/test_facade.py | rodrigoddc/django-advanced-course | 098507d8111f38f8a6b914575e50861538913f6c | [
"MIT"
] | 102 | 2020-06-30T01:03:27.000Z | 2021-09-22T19:26:44.000Z | pypro/modules/tests/test_facade.py | rodrigoddc/django-advanced-course | 098507d8111f38f8a6b914575e50861538913f6c | [
"MIT"
] | null | null | null | from pypro.modules.facades import facade_module
def test_modules_sorted_by_order(modules):
assert list(sorted(modules, key=lambda module: module.order)) == facade_module.modules_sorted_by_order()
| 33.666667 | 108 | 0.816832 | 29 | 202 | 5.37931 | 0.551724 | 0.153846 | 0.192308 | 0.25641 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094059 | 202 | 5 | 109 | 40.4 | 0.852459 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0997e0fb176e20d1c2da231b55b2ed264aff66bc | 39 | py | Python | OMASS4/Opt_SNR/__init__.py | DBernardes/OMASS4 | 30d2edc961463253cc120bc8ca1d74a0a73d922d | [
"MIT"
] | null | null | null | OMASS4/Opt_SNR/__init__.py | DBernardes/OMASS4 | 30d2edc961463253cc120bc8ca1d74a0a73d922d | [
"MIT"
] | null | null | null | OMASS4/Opt_SNR/__init__.py | DBernardes/OMASS4 | 30d2edc961463253cc120bc8ca1d74a0a73d922d | [
"MIT"
] | null | null | null | from .optimize_snr import Optimize_SNR
| 19.5 | 38 | 0.871795 | 6 | 39 | 5.333333 | 0.666667 | 0.6875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102564 | 39 | 1 | 39 | 39 | 0.914286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
09b51bbd9bd396f20b31714621c90a584072bd2a | 20,552 | py | Python | tests/unit/controllers/test_clone.py | senstb/aws-elastic-beanstalk-cli | ef27ae50e8be34ccbe29bc6dc421323bddc3f485 | [
"Apache-2.0"
] | 110 | 2020-01-15T22:58:46.000Z | 2022-03-27T20:47:33.000Z | tests/unit/controllers/test_clone.py | senstb/aws-elastic-beanstalk-cli | ef27ae50e8be34ccbe29bc6dc421323bddc3f485 | [
"Apache-2.0"
] | 89 | 2020-01-15T23:18:34.000Z | 2022-03-31T21:56:05.000Z | tests/unit/controllers/test_clone.py | senstb/aws-elastic-beanstalk-cli | ef27ae50e8be34ccbe29bc6dc421323bddc3f485 | [
"Apache-2.0"
] | 50 | 2020-01-15T22:58:53.000Z | 2022-02-11T17:39:28.000Z | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil
import mock
import unittest
from ebcli.controllers import clone
from ebcli.core import fileoperations
from ebcli.core.ebcore import EB
from ebcli.objects.environment import Environment
from ebcli.objects.platform import PlatformVersion
from .. import mock_responses
def environment_from_mock_responses(environment_name):
all_available_environments = Environment.json_to_environment_objects_array(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments']
)
return [
environment for environment in all_available_environments if environment.name == environment_name
][0]
class TestClone(unittest.TestCase):
platform = PlatformVersion(
'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5'
)
def setUp(self):
self.root_dir = os.getcwd()
if not os.path.exists('testDir'):
os.mkdir('testDir')
os.chdir('testDir')
fileoperations.create_config_file(
'my-application',
'us-west-2',
self.platform.name
)
def tearDown(self):
os.chdir(self.root_dir)
shutil.rmtree('testDir')
class TestErrorConditions(TestClone):
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
def test_clone__cname_specified_for_worker_tier__raises_exception(
self,
get_env_name_mock,
get_environment_mock
):
get_env_name_mock.return_value = 'environment-4'
get_environment_mock.return_value = environment_from_mock_responses('environment-4')
app = EB(argv=['clone', '--cname', 'some-cname'])
app.setup()
with self.assertRaises(clone.InvalidOptionsError) as context_manager:
app.run()
self.assertEqual(
'Worker tiers do not support a CNAME.',
str(context_manager.exception)
)
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
def test_clone__cname_already_taken(
self,
is_cname_available_mock,
get_env_name_mock,
get_environment_mock
):
is_cname_available_mock.return_value = False
get_env_name_mock.return_value = 'environment-1'
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
app = EB(argv=['clone', '--cname', 'in-use-cname'])
app.setup()
with self.assertRaises(clone.AlreadyExistsError) as context_manager:
app.run()
self.assertEqual(
'The CNAME prefix in-use-cname is already in use.',
str(context_manager.exception)
)
class TestCloneInteractive(TestClone):
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment_names')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
@mock.patch('ebcli.controllers.clone.io.prompt_for_environment_name')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
@mock.patch('ebcli.controllers.clone.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
def test_clone__clone_name_not_specified_for_webserver_tier__prompts_customer_for_clone_name(
self,
get_app_name_mock,
get_env_name_mock,
clone_environment_request_mock,
find_solution_stack_from_string_mock,
make_cloned_env_mock,
prompt_for_environment_name_mock,
is_cname_available_mock,
get_environment_names_mock,
get_environment_mock
):
is_cname_available_mock.return_value = True
find_solution_stack_from_string_mock.return_value = self.platform
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
prompt_for_environment_name_mock.return_value = 'environment-1-clone'
get_environment_names_mock.return_value = Environment.json_to_environment_objects_array(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments']
)
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=['clone', '--cname', 'some-cname'])
app.setup()
app.run()
prompt_for_environment_name_mock.assert_called_once_with(
default_name='my-application-clone',
prompt_text='Enter name for Environment Clone'
)
clone_environment_request_mock.assert_called_once_with(
app_name='environment-1',
cname='some-cname',
env_name='environment-1-clone',
original_name='my-application',
platform=self.platform,
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment_names')
@mock.patch('ebcli.controllers.clone.io.prompt_for_environment_name')
@mock.patch('ebcli.controllers.clone.get_cname_from_customer')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
@mock.patch('ebcli.controllers.clone.solution_stack_ops.find_solution_stack_from_string')
def test_clone__neither_clone_name_nor_cname_provided__customer_is_prompted_for_both(
self,
find_solution_stack_from_string_mock,
is_cname_available_mock,
get_app_name_mock,
get_env_name_mock,
make_cloned_env_mock,
clone_environment_request_mock,
get_cname_from_customer,
prompt_for_environment_name_mock,
get_environment_names_mock,
get_environment_mock
):
is_cname_available_mock.return_value = True
find_solution_stack_from_string_mock.return_value = self.platform
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
get_cname_from_customer.return_value = 'my-cname'
prompt_for_environment_name_mock.return_value = 'environment-1-clone'
get_environment_names_mock.return_value = Environment.json_to_environment_objects_array(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments']
)
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=['clone'])
app.setup()
app.run()
prompt_for_environment_name_mock.assert_called_once_with(
default_name='my-application-clone',
prompt_text='Enter name for Environment Clone'
)
clone_environment_request_mock.assert_called_once_with(
app_name='environment-1',
cname='my-cname',
env_name='environment-1-clone',
original_name='my-application',
platform=self.platform,
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment_names')
@mock.patch('ebcli.controllers.clone.io.prompt_for_environment_name')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
@mock.patch('ebcli.controllers.clone.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.controllers.clone.utils.prompt_for_item_in_list')
def test_clone__prompt_for_choice_between_current_and_latest_platform_arn_in_interactive_mode(
self,
prompt_for_item_in_list_mock,
find_solution_stack_from_string_mock,
is_cname_available_mock,
get_app_name_mock,
get_env_name_mock,
make_cloned_env_mock,
clone_environment_request_mock,
prompt_for_environment_name_mock,
get_environment_names_mock,
get_environment_mock
):
is_cname_available_mock.return_value = True
find_solution_stack_from_string_mock.return_value = PlatformVersion(
'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.2 running on 64bit Amazon Linux/2.6.5'
)
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
prompt_for_environment_name_mock.return_value = 'environment-1-clone'
get_environment_names_mock.return_value = Environment.json_to_environment_objects_array(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments']
)
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
prompt_for_item_in_list_mock.return_value = 'Latest (arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.2 running on 64bit Amazon Linux/2.6.5)'
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=['clone', '--cname', 'some-cname'])
app.setup()
app.run()
prompt_for_environment_name_mock.assert_called_once_with(
default_name='my-application-clone',
prompt_text='Enter name for Environment Clone'
)
prompt_for_item_in_list_mock.assert_called_once_with(
[
'Latest (arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.2 running on 64bit Amazon Linux/2.6.5)',
'Same (arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5)'
]
)
clone_environment_request_mock.assert_called_once_with(
app_name='environment-1',
cname='some-cname',
env_name='environment-1-clone',
original_name='my-application',
platform=PlatformVersion(
'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.2 running on 64bit Amazon Linux/2.6.5'
),
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
class TestCloneNonInteractive(TestClone):
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
def test_clone__clone_name_provided_by_customer__exact_platform_version_as_original_requested(
self,
make_cloned_env_mock,
clone_environment_request_mock,
is_cname_available_mock,
get_env_name_mock,
get_app_name_mock,
get_environment_mock
):
is_cname_available_mock.return_value = True
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=[
'clone',
'--cname', 'available-cname',
'--clone_name', 'environment-1-clone',
'--exact'
])
app.setup()
app.run()
clone_environment_request_mock.assert_called_once_with(
app_name='my-application',
cname='available-cname',
env_name='environment-1-clone',
original_name='environment-1',
platform=None,
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
def test_clone__clone_name_provided_by_customer__exact_platform_version_as_original_requested__cname_not_provided(
self,
make_cloned_env_mock,
clone_environment_request_mock,
get_env_name_mock,
get_app_name_mock,
get_environment_mock
):
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=[
'clone',
'--clone_name', 'environment-1-clone',
'--exact'
])
app.setup()
app.run()
clone_environment_request_mock.assert_called_once_with(
app_name='my-application',
cname=None,
env_name='environment-1-clone',
original_name='environment-1',
platform=None,
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
@mock.patch('ebcli.controllers.clone.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
@mock.patch('ebcli.controllers.clone.io.log_warning')
def test_clone__clone_name_provided_by_customer__exact_argument_specified__old_and_new_platforms_match(
self,
log_warning_mock,
make_cloned_env_mock,
clone_environment_request_mock,
find_solution_stack_from_string_mock,
is_cname_available_mock,
get_env_name_mock,
get_app_name_mock,
get_environment_mock
):
find_solution_stack_from_string_mock.return_value = self.platform
is_cname_available_mock.return_value = True
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=[
'clone',
'--cname', 'available-cname',
'--clone_name', 'environment-1-clone'
])
app.setup()
app.run()
log_warning_mock.assert_not_called()
clone_environment_request_mock.assert_called_once_with(
app_name='my-application',
cname='available-cname',
env_name='environment-1-clone',
original_name='environment-1',
platform=self.platform,
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.get_environment')
@mock.patch('ebcli.controllers.clone.CloneController.get_app_name')
@mock.patch('ebcli.controllers.clone.CloneController.get_env_name')
@mock.patch('ebcli.controllers.clone.elasticbeanstalk.is_cname_available')
@mock.patch('ebcli.controllers.clone.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.controllers.clone.CloneEnvironmentRequest')
@mock.patch('ebcli.controllers.clone.cloneops.make_cloned_env')
@mock.patch('ebcli.controllers.clone.io.log_warning')
def test_clone__clone_name_provided_by_customer__exact_argument_not_specified__old_and_new_platforms_match(
self,
log_warning_mock,
make_cloned_env_mock,
clone_environment_request_mock,
find_solution_stack_from_string_mock,
is_cname_available_mock,
get_env_name_mock,
get_app_name_mock,
get_environment_mock
):
find_solution_stack_from_string_mock.return_value = 'some dummy platform'
is_cname_available_mock.return_value = True
get_app_name_mock.return_value = 'my-application'
get_env_name_mock.return_value = 'environment-1'
get_environment_mock.return_value = environment_from_mock_responses('environment-1')
clone_environment_request_mock.return_value = mock.MagicMock()
clone_environment_request = clone_environment_request_mock.return_value
app = EB(argv=[
'clone',
'--cname', 'available-cname',
'--clone_name', 'environment-1-clone'
])
app.setup()
app.run()
log_warning_mock.assert_called_once_with(
'Launching environment clone on most recent platform version. Override this behavior by using the "--exact" option.'
)
clone_environment_request_mock.assert_called_once_with(
app_name='my-application',
cname='available-cname',
env_name='environment-1-clone',
original_name='environment-1',
platform='some dummy platform',
scale=None,
tags=[]
)
make_cloned_env_mock.assert_called_once_with(
clone_environment_request,
nohang=False,
timeout=None
)
| 42.288066 | 152 | 0.690687 | 2,377 | 20,552 | 5.580143 | 0.091292 | 0.074789 | 0.064385 | 0.114973 | 0.86437 | 0.854343 | 0.845823 | 0.830368 | 0.828559 | 0.817024 | 0 | 0.006067 | 0.222022 | 20,552 | 485 | 153 | 42.375258 | 0.823504 | 0.02608 | 0 | 0.771562 | 0 | 0.013986 | 0.273359 | 0.179923 | 0 | 0 | 0 | 0 | 0.055944 | 1 | 0.027972 | false | 0 | 0.02331 | 0 | 0.065268 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1110cebc95949cfc6c68414348a276198f6a7d52 | 10,858 | py | Python | src/envs/smart_man_sim/scenes.py | jh3ex/cirp | 9484de6f7b31e2d81da5efc999649f60a5f97bfe | [
"Apache-1.1"
] | null | null | null | src/envs/smart_man_sim/scenes.py | jh3ex/cirp | 9484de6f7b31e2d81da5efc999649f60a5f97bfe | [
"Apache-1.1"
] | null | null | null | src/envs/smart_man_sim/scenes.py | jh3ex/cirp | 9484de6f7b31e2d81da5efc999649f60a5f97bfe | [
"Apache-1.1"
] | null | null | null | import numpy as np
"""
buffer_size: 48
limit: 48
buffer_size: 32
limit: 48
buffer_size: 32
limit: 128
"""
# Placeholder; immediately replaced by the populated registry defined below.
# Kept so the name exists even if the literal below is edited/moved.
map_param_registry = {}


def get_map_params(map_name):
    """Return the parameter dict registered under ``map_name``.

    :param map_name: registry key (an int scenario id such as 26, 226, 261).
    :return: the configuration dict for that scenario.
    :raises KeyError: if ``map_name`` is not registered; the message lists
        the available keys to ease debugging of config typos.
    """
    try:
        return map_param_registry[map_name]
    except KeyError:
        raise KeyError(
            "Unknown map name %r; available maps: %s"
            % (map_name, sorted(map_param_registry, key=str))
        )
# Per-map environment configuration, keyed by map ID.  Keys observed below:
#   'cells', 'machines'   -- indices of production cells and machines
#   'actions'             -- one value per action (original note: action 0, 1, 2)
#   'costs'               -- per original note: cost of running, cost of not
#                            working, cost of maintaining, cost of breakdown
#   'n_agents', 'n_cells', 'n_actions', 'limit', 'sale_price'
#   'continuous_trans'    -- when True the entry carries 'dist', 'first_params',
#                            'second_params', 'lower_bounds' instead of
#                            discrete 'transitions'
#   'transitions'         -- one (n_actions, 4, 4) array per cell; each 4x4
#                            slice appears to be a state-transition probability
#                            matrix (rows sum to 1) -- confirm against env code.
# NOTE(review): this assignment replaces the empty registry defined above.
map_param_registry = {226:{'cells': [0, 1],
                          'machines': [0, 1, 2, 3, 4, 5],
                          'actions': [1000, 0],  # action 0, 1, 2
                          'costs': [100, 50, 2000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                          'n_agents': 6,
                          'n_cells': 2,
                          'n_actions': 2,
                          'limit': 128,
                          'sale_price':.5,
                          'continuous_trans': False,
                          'transitions': [np.array([[[.7, .29, .009, .001],
                                                     [0, .9, .09, .01],
                                                     [0, 0, .6, .4],
                                                     [0, 0, 0, 1]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]]),
                                          np.array([[[.65, .34, .009, .001],
                                                     [0, .9, .099, .001],
                                                     [0, 0, .55, .45],
                                                     [0, 0, 0, 1]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]])]},
                      26:{'cells': [0, 1],
                          'machines': [0, 1, 2, 3, 4, 5],
                          'actions': [1000, 0, 0],  # action 0, 1, 2
                          'costs': [100, 80, 50, 2000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                          'n_agents': 6,
                          'n_cells': 2,
                          'n_actions': 3,
                          'limit': 128,
                          'sale_price':.5,
                          'continuous_trans': False,
                          'transitions': [np.array([[[.7, .29, .009, .001],
                                                     [0, .95, .049, .001],
                                                     [0, 0, .6, .4],
                                                     [0, 0, 0, 1]],
                                                    [[.8, .19, .009, .001],
                                                     [0, 0.98, .019, .001],
                                                     [0, 0, 0.8, .2],
                                                     [0, 0, 0, 0]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]]),
                                          np.array([[[.65, .34, .009, .001],
                                                     [0, .9, .099, .001],
                                                     [0, 0, .55, .45],
                                                     [0, 0, 0, 1]],
                                                    [[.75, .24, .009, .001],
                                                     [0, 0.93, .069, .001],
                                                     [0, 0, 0.75, .25],
                                                     [0, 0, 0, 0]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]])]},
                      261:{'cells': [0, 1],
                           'machines': [0, 1, 2, 3, 4, 5],
                           'actions': [1000, 0, 0],  # action 0, 1, 2
                           'costs': [100, 40, 65, 2000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                           'n_agents': 6,
                           'n_cells': 2,
                           'n_actions': 3,
                           'limit': 48,
                           'sale_price':.5,
                           'continuous_trans': True,
                           'dist': 'gamma',
                           'first_params': [4, 12, 4, 11],
                           'second_params':[1, 1, 1, 0.5],
                           'lower_bounds':[1, 1, 1, 9]},
                      262:{'cells': [0, 1],
                           'machines': [0, 1, 2, 3, 4, 5],
                           'actions': [1000, 0, 0],  # action 0, 1, 2
                           'costs': [100, 40, 65, 2000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                           'n_agents': 6,
                           'n_cells': 2,
                           'n_actions': 3,
                           'limit': 48,
                           'sale_price':.5,
                           'continuous_trans': True,
                           'dist': 'gamma',
                           'first_params': [8, 24, 8, 11],
                           'second_params':[.5, .5, .5, 0.5],
                           'lower_bounds':[1, 1, 1, 9]},
                      263:{'cells': [0, 1],
                           'machines': [0, 1, 2, 3, 4, 5],
                           'actions': [1000, 0, 0],  # action 0, 1, 2
                           'costs': [100, 40, 65, 2000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                           'n_agents': 6,
                           'n_cells': 2,
                           'n_actions': 3,
                           'limit': 48,
                           'sale_price':.5,
                           'continuous_trans': True,
                           'dist': 'static',
                           'first_params': [4, 12, 4, 11],
                           'second_params':[1, 1, 1, 0.5],
                           'lower_bounds':[1, 1, 1, 9]},
                      361:{'cells': [0, 1, 2],
                           'machines': [0, 1, 2, 3, 4, 5, 6, 7, 8],
                           'actions': [1000, 0, 0],  # action 0, 1, 2
                           'costs': [100, 40, 65, 2000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                           'n_agents': 9,
                           'n_cells': 3,
                           'n_actions': 3,
                           'limit': 48,
                           'sale_price':.5,
                           'continuous_trans': True,
                           'dist': 'gamma',
                           'first_params': [4, 2, 4, 11],
                           'second_params':[1, 6, 1, 0.5],
                           'lower_bounds':[1, 1, 1, 9]},
                      39:{'cells': list(range(3)),
                          'machines': list(range(9)),
                          'actions': [1000, 0, 0],  # action 0, 1, 2
                          'costs': [100, 50, 50, 1000],  # cost of running, cost of not working, cost of maintaining, cost of system breaking down
                          'limit': 48,
                          'n_agents': 9,
                          'n_actions': 3,
                          'n_cells': 3,
                          'sale_price':.9,
                          'transitions': [np.array([[[.7, .29, .009, .001],
                                                     [0, .95, .049, .001],
                                                     [0, 0, .6, .4],
                                                     [0, 0, 0, 1]],
                                                    [[.8, .19, .009, .001],
                                                     [0, 0.98, .019, .001],
                                                     [0, 0, 0.8, .2],
                                                     [0, 0, 0, 0]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]]),
                                          np.array([[[.7, .29, .009, .001],
                                                     [0, .95, .049, .001],
                                                     [0, 0, .6, .4],
                                                     [0, 0, 0, 1]],
                                                    [[.8, .19, .009, .001],
                                                     [0, 0.98, .019, .001],
                                                     [0, 0, 0.8, .2],
                                                     [0, 0, 0, 0]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]]),
                                          np.array([[[.65, .34, .009, .001],
                                                     [0, .9, .099, .001],
                                                     [0, 0, .55, .45],
                                                     [0, 0, 0, 1]],
                                                    [[.75, .24, .009, .001],
                                                     [0, 0.93, .069, .001],
                                                     [0, 0, 0.75, .25],
                                                     [0, 0, 0, 0]],
                                                    [[1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0],
                                                     [1, 0, 0, 0]]])]}}
if __name__ == '__main__':
    # Manual smoke check: dump the configuration of map 26.
    params = get_map_params(26)
    print(params)
| 56.848168 | 147 | 0.216522 | 849 | 10,858 | 2.687868 | 0.116608 | 0.099036 | 0.065732 | 0.057844 | 0.869851 | 0.86284 | 0.843996 | 0.837862 | 0.820333 | 0.812445 | 0 | 0.210311 | 0.662369 | 10,858 | 190 | 148 | 57.147368 | 0.412166 | 0.065758 | 0 | 0.826347 | 0 | 0 | 0.076678 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005988 | false | 0 | 0.005988 | 0.005988 | 0.017964 | 0.005988 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
1115f3e039c83b86b02c632b85a29ab940850529 | 3,774 | py | Python | bugs/issue-201-object-files/test.py | xbabka01/retdec-regression-tests | 1ac40cca5165740364e6f7fb72b20820eac9bc7c | [
"MIT"
] | 8 | 2017-12-14T14:25:17.000Z | 2019-03-09T03:29:12.000Z | bugs/issue-201-object-files/test.py | xbabka01/retdec-regression-tests | 1ac40cca5165740364e6f7fb72b20820eac9bc7c | [
"MIT"
] | 10 | 2019-06-14T09:12:55.000Z | 2021-10-01T12:15:43.000Z | bugs/issue-201-object-files/test.py | xbabka01/retdec-regression-tests | 1ac40cca5165740364e6f7fb72b20820eac9bc7c | [
"MIT"
] | 8 | 2019-05-10T14:59:48.000Z | 2022-03-07T16:34:23.000Z | from regression_tests import *
class Test_arm_elf(Test):
    """Decompilation checks for the ARM ELF object file."""

    settings = TestSettings(
        input='hello-arm-elf.o'
    )

    def test_c(self):
        # Every string literal from the source program must be recovered.
        for literal in ['foo', 'bar', 'Hello World!', '%d %d\\n']:
            assert self.out_c.has_string_literal(literal)

        funcs = self.out_c.funcs
        assert funcs['foo'].calls('puts')
        # Check disabled in the original suite:
        #assert funcs['foo'].calls('sqrt')
        assert funcs['bar'].calls('puts')
        for callee in ['puts', 'foo', 'bar', 'printf']:
            assert funcs['main'].calls(callee)

    def test_dsm(self):
        # Function boundaries recovered in the disassembly listing.
        for marker in ['; function: foo at 0x0 -- 0x60',
                       '; function: bar at 0x64 -- 0x9c',
                       '; function: main at 0xa0 -- 0xe4']:
            assert self.out_dsm.contains(marker)
class Test_mips_elf(Test):
    """Decompilation checks for the MIPS ELF object file."""

    settings = TestSettings(
        input='hello-mips-elf.o'
    )

    def test_c(self):
        # Every string literal from the source program must be recovered.
        for literal in ['foo', 'bar', 'Hello World!', '%d %d\\n']:
            assert self.out_c.has_string_literal(literal)

        funcs = self.out_c.funcs
        for callee in ['puts', 'sqrt']:
            assert funcs['foo'].calls(callee)
        assert funcs['bar'].calls('puts')
        for callee in ['puts', 'foo', 'bar', 'printf']:
            assert funcs['main'].calls(callee)

    def test_dsm(self):
        # Function boundaries recovered in the disassembly listing.
        for marker in ['; function: foo at 0x0 -- 0x7c',
                       '; function: bar at 0x7c -- 0xc8',
                       '; function: main at 0xc8 -- 0x140']:
            assert self.out_dsm.contains(marker)
class Test_ppc_elf(Test):
    """Decompilation checks for the PowerPC ELF object file."""

    settings = TestSettings(
        input='hello-ppc-elf.o'
    )

    def test_c(self):
        # Check disabled in the original suite:
        #assert self.out_c.has_string_literal('bar')
        for literal in ['foo', 'Hello World!', '%d %d\\n']:
            assert self.out_c.has_string_literal(literal)

        funcs = self.out_c.funcs
        assert funcs['foo'].calls('puts')
        # Check disabled in the original suite:
        #assert funcs['foo'].calls('sqrt')
        assert funcs['bar'].calls('puts')
        for callee in ['puts', 'foo', 'bar', 'printf']:
            assert funcs['main'].calls(callee)

    def test_dsm(self):
        # Function boundaries recovered in the disassembly listing.
        for marker in ['; function: foo at 0x0 -- 0x88',
                       '; function: bar at 0x88 -- 0xdc',
                       '; function: main at 0xdc -- 0x160']:
            assert self.out_dsm.contains(marker)
class Test_x86_elf(Test):
    """Decompilation checks for the x86 ELF object file."""

    settings = TestSettings(
        input='hello-x86-elf.o'
    )

    def test_c(self):
        # Every string literal from the source program must be recovered.
        for literal in ['foo', 'bar', 'Hello World!', '%d %d\\n']:
            assert self.out_c.has_string_literal(literal)

        funcs = self.out_c.funcs
        for callee in ['puts', 'sqrt']:
            assert funcs['foo'].calls(callee)
        assert funcs['bar'].calls('puts')
        for callee in ['puts', 'foo', 'bar', 'printf']:
            assert funcs['main'].calls(callee)

    def test_dsm(self):
        # Function boundaries recovered in the disassembly listing.
        for marker in ['; function: foo at 0x0 -- 0x28',
                       '; function: bar at 0x28 -- 0x42',
                       '; function: main at 0x42 -- 0x90']:
            assert self.out_dsm.contains(marker)
| 31.983051 | 73 | 0.608903 | 528 | 3,774 | 4.183712 | 0.102273 | 0.126754 | 0.16478 | 0.101403 | 0.923495 | 0.923495 | 0.856496 | 0.856496 | 0.720688 | 0.720688 | 0 | 0.02223 | 0.249073 | 3,774 | 117 | 74 | 32.25641 | 0.757234 | 0.024112 | 0 | 0.722222 | 0 | 0 | 0.184783 | 0 | 0 | 0 | 0.025543 | 0 | 0.588889 | 1 | 0.088889 | false | 0 | 0.011111 | 0 | 0.188889 | 0.044444 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1138cf9e69ffaf02e5a10933a33ce08efe5e7aa3 | 56,093 | py | Python | demo/migrations/0001_initial.py | marceloboth/wagtail-cms | 610d74f18782fa05983952051c795c643db54cf9 | [
"BSD-3-Clause"
] | 1 | 2015-08-06T15:00:59.000Z | 2015-08-06T15:00:59.000Z | demo/migrations/0001_initial.py | marceloboth/wagtail-cms | 610d74f18782fa05983952051c795c643db54cf9 | [
"BSD-3-Clause"
] | null | null | null | demo/migrations/0001_initial.py | marceloboth/wagtail-cms | 610d74f18782fa05983952051c795c643db54cf9 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Create all tables for the demo app (auto-generated South migration).

        Builds the page models (home, standard, blog, person, contact, event)
        plus their carousel-item / related-link / tag child tables.  Each
        create_table is followed by send_create_signal so South emits the
        post-create signal for the model.
        """
        # Adding model 'AdvertPlacement'
        db.create_table(u'demo_advertplacement', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='advert_placements', to=orm['wagtailcore.Page'])),
            ('advert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['demo.Advert'])),
        ))
        db.send_create_signal(u'demo', ['AdvertPlacement'])

        # Adding model 'Advert'
        db.create_table(u'demo_advert', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='adverts', null=True, to=orm['wagtailcore.Page'])),
            ('url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
            ('text', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal(u'demo', ['Advert'])

        # Adding model 'HomePageCarouselItem'
        db.create_table(u'demo_homepagecarouselitem', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
            ('embed_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('caption', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='carousel_items', to=orm['demo.HomePage'])),
        ))
        db.send_create_signal(u'demo', ['HomePageCarouselItem'])

        # Adding model 'HomePageRelatedLink'
        db.create_table(u'demo_homepagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.HomePage'])),
        ))
        db.send_create_signal(u'demo', ['HomePageRelatedLink'])

        # Adding model 'HomePage'
        db.create_table(u'demo_homepage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('body', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
        ))
        db.send_create_signal(u'demo', ['HomePage'])

        # Adding model 'StandardIndexPageRelatedLink'
        db.create_table(u'demo_standardindexpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.StandardIndexPage'])),
        ))
        db.send_create_signal(u'demo', ['StandardIndexPageRelatedLink'])

        # Adding model 'StandardIndexPage'
        db.create_table(u'demo_standardindexpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('intro', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('feed_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['StandardIndexPage'])

        # Adding model 'StandardPageCarouselItem'
        db.create_table(u'demo_standardpagecarouselitem', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
            ('embed_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('caption', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='carousel_items', to=orm['demo.StandardPage'])),
        ))
        db.send_create_signal(u'demo', ['StandardPageCarouselItem'])

        # Adding model 'StandardPageRelatedLink'
        db.create_table(u'demo_standardpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.StandardPage'])),
        ))
        db.send_create_signal(u'demo', ['StandardPageRelatedLink'])

        # Adding model 'StandardPage'
        db.create_table(u'demo_standardpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('intro', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('body', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('feed_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['StandardPage'])

        # Adding model 'BlogIndexPageRelatedLink'
        db.create_table(u'demo_blogindexpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.BlogIndexPage'])),
        ))
        db.send_create_signal(u'demo', ['BlogIndexPageRelatedLink'])

        # Adding model 'BlogIndexPage'
        db.create_table(u'demo_blogindexpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('intro', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
        ))
        db.send_create_signal(u'demo', ['BlogIndexPage'])

        # Adding model 'BlogPageCarouselItem'
        db.create_table(u'demo_blogpagecarouselitem', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
            ('embed_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('caption', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='carousel_items', to=orm['demo.BlogPage'])),
        ))
        db.send_create_signal(u'demo', ['BlogPageCarouselItem'])

        # Adding model 'BlogPageRelatedLink'
        db.create_table(u'demo_blogpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.BlogPage'])),
        ))
        db.send_create_signal(u'demo', ['BlogPageRelatedLink'])

        # Adding model 'BlogPageTag'
        db.create_table(u'demo_blogpagetag', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('tag', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'demo_blogpagetag_items', to=orm['taggit.Tag'])),
            ('content_object', self.gf('modelcluster.fields.ParentalKey')(related_name='tagged_items', to=orm['demo.BlogPage'])),
        ))
        db.send_create_signal(u'demo', ['BlogPageTag'])

        # Adding model 'BlogPage'
        db.create_table(u'demo_blogpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('body', self.gf('wagtail.wagtailcore.fields.RichTextField')()),
            ('date', self.gf('django.db.models.fields.DateField')()),
            ('feed_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['BlogPage'])

        # Adding model 'PersonPageRelatedLink'
        db.create_table(u'demo_personpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.PersonPage'])),
        ))
        db.send_create_signal(u'demo', ['PersonPageRelatedLink'])

        # Adding model 'PersonPage'
        db.create_table(u'demo_personpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('telephone', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
            ('address_1', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('address_2', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('city', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('country', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('post_code', self.gf('django.db.models.fields.CharField')(max_length=10, blank=True)),
            ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('intro', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('biography', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
            ('feed_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['PersonPage'])

        # Adding model 'ContactPage'
        db.create_table(u'demo_contactpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('telephone', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
            ('address_1', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('address_2', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('city', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('country', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('post_code', self.gf('django.db.models.fields.CharField')(max_length=10, blank=True)),
            ('body', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('feed_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['ContactPage'])

        # Adding model 'EventIndexPageRelatedLink'
        db.create_table(u'demo_eventindexpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.EventIndexPage'])),
        ))
        db.send_create_signal(u'demo', ['EventIndexPageRelatedLink'])

        # Adding model 'EventIndexPage'
        db.create_table(u'demo_eventindexpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('intro', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
        ))
        db.send_create_signal(u'demo', ['EventIndexPage'])

        # Adding model 'EventPageCarouselItem'
        db.create_table(u'demo_eventpagecarouselitem', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
            ('embed_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('caption', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='carousel_items', to=orm['demo.EventPage'])),
        ))
        db.send_create_signal(u'demo', ['EventPageCarouselItem'])

        # Adding model 'EventPageRelatedLink'
        db.create_table(u'demo_eventpagerelatedlink', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='related_links', to=orm['demo.EventPage'])),
        ))
        db.send_create_signal(u'demo', ['EventPageRelatedLink'])

        # Adding model 'EventPageSpeaker'
        db.create_table(u'demo_eventpagespeaker', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('link_external', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('link_page', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtailcore.Page'])),
            ('link_document', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['wagtaildocs.Document'])),
            ('page', self.gf('modelcluster.fields.ParentalKey')(related_name='speakers', to=orm['demo.EventPage'])),
            ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['EventPageSpeaker'])

        # Adding model 'EventPage'
        db.create_table(u'demo_eventpage', (
            (u'page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wagtailcore.Page'], unique=True, primary_key=True)),
            ('date_from', self.gf('django.db.models.fields.DateField')()),
            ('date_to', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
            ('time_from', self.gf('django.db.models.fields.TimeField')(null=True, blank=True)),
            ('time_to', self.gf('django.db.models.fields.TimeField')(null=True, blank=True)),
            ('audience', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('location', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('body', self.gf('wagtail.wagtailcore.fields.RichTextField')(blank=True)),
            ('cost', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('signup_link', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('feed_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['wagtailimages.Image'])),
        ))
        db.send_create_signal(u'demo', ['EventPage'])
def backwards(self, orm):
# Deleting model 'AdvertPlacement'
db.delete_table(u'demo_advertplacement')
# Deleting model 'Advert'
db.delete_table(u'demo_advert')
# Deleting model 'HomePageCarouselItem'
db.delete_table(u'demo_homepagecarouselitem')
# Deleting model 'HomePageRelatedLink'
db.delete_table(u'demo_homepagerelatedlink')
# Deleting model 'HomePage'
db.delete_table(u'demo_homepage')
# Deleting model 'StandardIndexPageRelatedLink'
db.delete_table(u'demo_standardindexpagerelatedlink')
# Deleting model 'StandardIndexPage'
db.delete_table(u'demo_standardindexpage')
# Deleting model 'StandardPageCarouselItem'
db.delete_table(u'demo_standardpagecarouselitem')
# Deleting model 'StandardPageRelatedLink'
db.delete_table(u'demo_standardpagerelatedlink')
# Deleting model 'StandardPage'
db.delete_table(u'demo_standardpage')
# Deleting model 'BlogIndexPageRelatedLink'
db.delete_table(u'demo_blogindexpagerelatedlink')
# Deleting model 'BlogIndexPage'
db.delete_table(u'demo_blogindexpage')
# Deleting model 'BlogPageCarouselItem'
db.delete_table(u'demo_blogpagecarouselitem')
# Deleting model 'BlogPageRelatedLink'
db.delete_table(u'demo_blogpagerelatedlink')
# Deleting model 'BlogPageTag'
db.delete_table(u'demo_blogpagetag')
# Deleting model 'BlogPage'
db.delete_table(u'demo_blogpage')
# Deleting model 'PersonPageRelatedLink'
db.delete_table(u'demo_personpagerelatedlink')
# Deleting model 'PersonPage'
db.delete_table(u'demo_personpage')
# Deleting model 'ContactPage'
db.delete_table(u'demo_contactpage')
# Deleting model 'EventIndexPageRelatedLink'
db.delete_table(u'demo_eventindexpagerelatedlink')
# Deleting model 'EventIndexPage'
db.delete_table(u'demo_eventindexpage')
# Deleting model 'EventPageCarouselItem'
db.delete_table(u'demo_eventpagecarouselitem')
# Deleting model 'EventPageRelatedLink'
db.delete_table(u'demo_eventpagerelatedlink')
# Deleting model 'EventPageSpeaker'
db.delete_table(u'demo_eventpagespeaker')
# Deleting model 'EventPage'
db.delete_table(u'demo_eventpage')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'demo.advert': {
'Meta': {'object_name': 'Advert'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'adverts'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'demo.advertplacement': {
'Meta': {'object_name': 'AdvertPlacement'},
'advert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['demo.Advert']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'advert_placements'", 'to': u"orm['wagtailcore.Page']"})
},
u'demo.blogindexpage': {
'Meta': {'object_name': 'BlogIndexPage', '_ormbases': [u'wagtailcore.Page']},
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'demo.blogindexpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'BlogIndexPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.BlogIndexPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.blogpage': {
'Meta': {'object_name': 'BlogPage', '_ormbases': [u'wagtailcore.Page']},
'body': ('wagtail.wagtailcore.fields.RichTextField', [], {}),
'date': ('django.db.models.fields.DateField', [], {}),
'feed_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'demo.blogpagecarouselitem': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'BlogPageCarouselItem'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'embed_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'carousel_items'", 'to': u"orm['demo.BlogPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'demo.blogpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'BlogPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.BlogPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.blogpagetag': {
'Meta': {'object_name': 'BlogPageTag'},
'content_object': ('modelcluster.fields.ParentalKey', [], {'related_name': "'tagged_items'", 'to': u"orm['demo.BlogPage']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'demo_blogpagetag_items'", 'to': u"orm['taggit.Tag']"})
},
u'demo.contactpage': {
'Meta': {'object_name': 'ContactPage', '_ormbases': [u'wagtailcore.Page']},
'address_1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'address_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'body': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'feed_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'}),
'post_code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'demo.eventindexpage': {
'Meta': {'object_name': 'EventIndexPage', '_ormbases': [u'wagtailcore.Page']},
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'demo.eventindexpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'EventIndexPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.EventIndexPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.eventpage': {
'Meta': {'object_name': 'EventPage', '_ormbases': [u'wagtailcore.Page']},
'audience': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'body': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'cost': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'date_from': ('django.db.models.fields.DateField', [], {}),
'date_to': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'feed_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'}),
'signup_link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'time_from': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'time_to': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'})
},
u'demo.eventpagecarouselitem': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'EventPageCarouselItem'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'embed_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'carousel_items'", 'to': u"orm['demo.EventPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'demo.eventpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'EventPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.EventPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.eventpagespeaker': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'EventPageSpeaker'},
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'speakers'", 'to': u"orm['demo.EventPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'demo.homepage': {
'Meta': {'object_name': 'HomePage', '_ormbases': [u'wagtailcore.Page']},
'body': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'demo.homepagecarouselitem': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'HomePageCarouselItem'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'embed_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'carousel_items'", 'to': u"orm['demo.HomePage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'demo.homepagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'HomePageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.HomePage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.personpage': {
'Meta': {'object_name': 'PersonPage', '_ormbases': [u'wagtailcore.Page']},
'address_1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'address_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'biography': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'feed_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'}),
'post_code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'demo.personpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'PersonPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.PersonPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.standardindexpage': {
'Meta': {'object_name': 'StandardIndexPage', '_ormbases': [u'wagtailcore.Page']},
'feed_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'demo.standardindexpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'StandardIndexPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.StandardIndexPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'demo.standardpage': {
'Meta': {'object_name': 'StandardPage', '_ormbases': [u'wagtailcore.Page']},
'body': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'feed_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'demo.standardpagecarouselitem': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'StandardPageCarouselItem'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'embed_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['wagtailimages.Image']"}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'carousel_items'", 'to': u"orm['demo.StandardPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'demo.standardpagerelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'StandardPageRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_external': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['demo.StandardPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'wagtailcore.page': {
'Meta': {'object_name': 'Page'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pages'", 'to': u"orm['contenttypes.ContentType']"}),
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'has_unpublished_changes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_pages'", 'null': 'True', 'to': u"orm['auth.User']"}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'seo_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'show_in_menus': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
u'wagtaildocs.document': {
'Meta': {'object_name': 'Document'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uploaded_by_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'wagtailimages.image': {
'Meta': {'object_name': 'Image'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'height': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uploaded_by_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['demo'] | 80.709353 | 204 | 0.608596 | 6,255 | 56,093 | 5.328537 | 0.033413 | 0.078728 | 0.137354 | 0.19622 | 0.849985 | 0.813591 | 0.793819 | 0.775248 | 0.761026 | 0.734473 | 0 | 0.008071 | 0.169486 | 56,093 | 695 | 205 | 80.709353 | 0.70738 | 0.030289 | 0 | 0.501701 | 0 | 0 | 0.526048 | 0.312073 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003401 | false | 0.001701 | 0.006803 | 0 | 0.015306 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fecae8bd96b5e24de8e13152826645e87ec75362 | 123 | py | Python | implementations/__init__.py | grecoe/CliSkin | 536b555a52e00b6a3b52ae7ab0da6e708ee8b65b | [
"MIT"
] | null | null | null | implementations/__init__.py | grecoe/CliSkin | 536b555a52e00b6a3b52ae7ab0da6e708ee8b65b | [
"MIT"
] | null | null | null | implementations/__init__.py | grecoe/CliSkin | 536b555a52e00b6a3b52ae7ab0da6e708ee8b65b | [
"MIT"
] | null | null | null | from implementations.utils.commandline import CmdUtils
from implementations.utils.credentials import Defaults, UserContext
| 41 | 67 | 0.886179 | 13 | 123 | 8.384615 | 0.692308 | 0.348624 | 0.440367 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073171 | 123 | 2 | 68 | 61.5 | 0.95614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
fedc09b4687c17bbb00cebae85775852f584c122 | 10,817 | py | Python | tests/test_flock.py | ProfessorQu/Boids | d5af843d6dda92ade41607f2c929c0c9097fb8df | [
"MIT"
] | null | null | null | tests/test_flock.py | ProfessorQu/Boids | d5af843d6dda92ade41607f2c929c0c9097fb8df | [
"MIT"
] | null | null | null | tests/test_flock.py | ProfessorQu/Boids | d5af843d6dda92ade41607f2c929c0c9097fb8df | [
"MIT"
] | null | null | null | from pygame import Vector2
import numpy as np
from boids import Flock
def test_create_boid():
flock = Flock(
num_boids=0,
num_types=1,
world_size=(100, 100),
cell_size=10,
max_speed=1,
perception=1,
field_of_view=360,
avoid_dist=0,
other_avoid_mult=1,
other_avoid_dist=0,
alignment_factor=0,
cohesion_factor=0,
seperation_factor=0,
turn_margin=100,
turn_factor=0,
loop_bounds=True
)
for _ in range(100):
boid = flock.create_boid(1, 360)
assert -flock.max_speed <= boid.dir.x <= flock.max_speed
assert -flock.max_speed <= boid.dir.y <= flock.max_speed
assert 0 <= boid.pos.x <= 100
assert 0 <= boid.pos.y <= 100
assert boid.type == 0
def test_keep_in_bounds_loop():
flock = Flock(
num_boids=0,
num_types=1,
world_size=(100, 100),
cell_size=10,
max_speed=1,
perception=1,
field_of_view=360,
avoid_dist=0,
other_avoid_mult=1,
other_avoid_dist=0,
alignment_factor=0,
cohesion_factor=0,
seperation_factor=0,
turn_margin=100,
turn_factor=0,
loop_bounds=True
)
boid = flock.create_boid(1, 360)
boid.pos = Vector2(0, 0)
boid.dir = Vector2(-1, 0)
boid.pos += boid.dir
flock.keep_in_bounds_loop(boid)
assert boid.pos == Vector2(100, 0)
boid.pos = Vector2(100, 0)
boid.dir = Vector2(1, 0)
boid.pos += boid.dir
flock.keep_in_bounds_loop(boid)
assert boid.pos == Vector2(0, 0)
boid.pos = Vector2(0, 0)
boid.dir = Vector2(0, -1)
boid.pos += boid.dir
flock.keep_in_bounds_loop(boid)
assert boid.pos == Vector2(0, 100)
boid.pos = Vector2(0, 100)
boid.dir = Vector2(0, 1)
boid.pos += boid.dir
flock.keep_in_bounds_loop(boid)
assert boid.pos == Vector2(0, 0)
def test_keep_in_bounds_turn():
    """Inside the turn margin the boid's velocity is steered back inward."""
    flock = Flock(**dict(
        num_boids=0, num_types=1, world_size=(100, 100), cell_size=10,
        max_speed=1, perception=1, field_of_view=360, avoid_dist=0,
        other_avoid_mult=1, other_avoid_dist=0, alignment_factor=0,
        cohesion_factor=0, seperation_factor=0, turn_margin=10,
        turn_factor=1, loop_bounds=False,
    ))

    def check(boid, pos, heading, expected):
        # Place the boid, apply the turn rule, and compare the heading.
        boid.pos = Vector2(pos)
        boid.dir = Vector2(heading)
        flock.keep_in_bounds_turn(boid)
        assert boid.dir == expected

    boid = flock.create_boid(1, 360)
    # Horizontal margin: inside the 10-unit margin the x velocity is
    # corrected; exactly on the margin it is left untouched.
    check(boid, (9, 50), (-1, 0), Vector2(0, 0))
    check(boid, (10, 50), (-1, 0), Vector2(-1, 0))
    check(boid, (90, 50), (1, 0), Vector2(1, 0))
    check(boid, (91, 50), (1, 0), Vector2(0, 0))

    boid = flock.create_boid(1, 360)
    # Vertical margin behaves symmetrically.
    check(boid, (50, 9), (0, -1), Vector2(0, 0))
    check(boid, (50, 10), (0, -1), Vector2(0, -1))
    check(boid, (50, 90), (0, 1), Vector2(0, 1))
    check(boid, (50, 91), (0, 1), Vector2(0, 0))
    # A corner overshoot is corrected on both axes at once.
    check(boid, (100, 100), (2, 2), Vector2(1, 1))
def test_limit_speed():
    """limit_speed must slow down any boid moving faster than max_speed."""
    flock = Flock(**dict(
        num_boids=0, num_types=1, world_size=(100, 100), cell_size=10,
        max_speed=5, perception=1, field_of_view=360, avoid_dist=0,
        other_avoid_mult=1, other_avoid_dist=0, alignment_factor=0,
        cohesion_factor=0, seperation_factor=0, turn_margin=10,
        turn_factor=1, loop_bounds=True,
    ))
    boid = flock.create_boid(1, 360)
    for _ in range(100):
        boid.dir = Vector2(np.random.uniform(-10, 10),
                           np.random.uniform(-10, 10))
        before = np.sqrt(boid.dir.x ** 2 + boid.dir.y ** 2)
        if before <= flock.max_speed:
            # Only over-speed samples are interesting for this check.
            continue
        flock.limit_speed(boid)
        after = np.sqrt(boid.dir.x ** 2 + boid.dir.y ** 2)
        assert after < before
def test_alignment():
    """With alignment_factor=1 a boid fully copies its neighbour's heading."""
    flock = Flock(**dict(
        num_boids=0, num_types=1, world_size=(100, 100), cell_size=10,
        max_speed=5, perception=1, field_of_view=360, avoid_dist=0,
        other_avoid_mult=1, other_avoid_dist=0, alignment_factor=1,
        cohesion_factor=0, seperation_factor=0, turn_margin=10,
        turn_factor=1, loop_bounds=True,
    ))
    for _ in range(100):
        follower = flock.create_boid(1, 360)
        neighbour = flock.create_boid(1, 360)
        follower.dir = Vector2(np.random.uniform(-5, 5),
                               np.random.uniform(-5, 5))
        neighbour.dir = Vector2(np.random.uniform(-5, 5),
                                np.random.uniform(-5, 5))
        flock.alignment(follower, [neighbour])
        # Full alignment factor: headings must now be identical.
        assert follower.dir == neighbour.dir
def test_alignment_multiple_types():
    """Alignment must ignore a neighbour of a different type.

    Fixes two defects in the original test:
    * the flock was created with ``num_types=1`` even though ``boid2``
      is assigned type 1 — the other ``*_multiple_types`` tests use
      ``num_types=2``, and type 1 does not exist in a one-type flock;
    * ``prev_dir`` aliased ``boid1.dir``, so the final assert could
      never fail if the heading were mutated in place.  It is now a copy.
    """
    flock = Flock(
        num_boids=0,
        num_types=2,  # was 1: type 1 below would not be a valid type
        world_size=(100, 100),
        cell_size=10,
        max_speed=5,
        perception=1,
        field_of_view=360,
        avoid_dist=0,
        other_avoid_mult=1,
        other_avoid_dist=0,
        alignment_factor=1,
        cohesion_factor=0,
        seperation_factor=0,
        turn_margin=10,
        turn_factor=1,
        loop_bounds=True
    )
    for _ in range(100):
        boid1 = flock.create_boid(1, 360)
        boid2 = flock.create_boid(1, 360)
        boid1.dir = Vector2(np.random.uniform(-5, 5),
                            np.random.uniform(-5, 5))
        boid2.dir = Vector2(np.random.uniform(-5, 5),
                            np.random.uniform(-5, 5))
        boid1.type = 0
        boid2.type = 1
        prev_dir = Vector2(boid1.dir)  # copy, not an alias
        flock.alignment(boid1, [boid2])
        # A different-type neighbour must not change the heading.
        assert boid1.dir == prev_dir
def test_cohesion():
    """Cohesion steers a boid toward its same-type neighbour."""
    flock = Flock(**dict(
        num_boids=0, num_types=1, world_size=(100, 100), cell_size=10,
        max_speed=5, perception=2, field_of_view=360, avoid_dist=0,
        other_avoid_mult=1, other_avoid_dist=0, alignment_factor=0,
        cohesion_factor=1, seperation_factor=0, turn_margin=10,
        turn_factor=1, loop_bounds=True,
    ))
    for _ in range(100):
        boid1 = flock.create_boid(1, 360)
        boid2 = flock.create_boid(1, 360)
        boid1.pos = Vector2(np.random.uniform(0, 50), np.random.uniform(0, 50))
        boid2.pos = Vector2(np.random.uniform(0, 50), np.random.uniform(0, 50))
        boid1.dir = Vector2(0, 0)
        boid2.dir = Vector2(0, 0)
        before = boid1.pos.distance_to(boid2.pos)
        flock.cohesion(boid1, [boid2])
        # After one step along the cohesion-adjusted heading the boids
        # must be strictly closer together.
        boid1.pos += boid1.dir
        assert boid1.pos.distance_to(boid2.pos) < before
def test_cohesion_multiple_types():
    """Cohesion must ignore a neighbour of a different type.

    ``boid1`` is given a heading pointing away from ``boid2``.  Because
    ``boid2`` has a different type, cohesion must leave that heading
    alone, so after one step the distance between the two has grown.
    """
    flock = Flock(
        num_boids=0,
        num_types=2,
        world_size=(100, 100),
        cell_size=10,
        max_speed=5,
        perception=2,
        field_of_view=360,
        avoid_dist=0,
        other_avoid_mult=1,
        other_avoid_dist=0,
        alignment_factor=0,
        cohesion_factor=1,
        seperation_factor=0,
        turn_margin=10,
        turn_factor=1,
        loop_bounds=True
    )
    for _ in range(100):
        boid1 = flock.create_boid(1, 360)
        boid2 = flock.create_boid(1, 360)
        boid1.pos = Vector2(10, np.random.uniform(0, 50))
        boid2.pos = Vector2(-10, np.random.uniform(0, 50))
        boid1.dir = Vector2(1, 0)
        boid1.type = 0
        boid2.type = 1
        dist = boid1.pos.distance_to(boid2.pos)
        # Bug fix: pass the *other* boid.  The original passed [boid1],
        # which exercised cohesion against the boid itself and therefore
        # never tested the type filtering at all.
        flock.cohesion(boid1, [boid2])
        boid1.pos += boid1.dir
        current_dist = boid1.pos.distance_to(boid2.pos)
        assert current_dist > dist
def test_seperation():
    """Separation pushes boids apart, but only within avoid_dist."""
    flock = Flock(**dict(
        num_boids=0, num_types=1, world_size=(100, 100), cell_size=10,
        max_speed=5, perception=2, field_of_view=360, avoid_dist=50,
        other_avoid_mult=1, other_avoid_dist=0, alignment_factor=0,
        cohesion_factor=0, seperation_factor=1, turn_margin=10,
        turn_factor=1, loop_bounds=True,
    ))
    for _ in range(100):
        boid1 = flock.create_boid(1, 360)
        boid2 = flock.create_boid(1, 360)
        boid1.pos = Vector2(np.random.uniform(0, 50), np.random.uniform(0, 50))
        boid2.pos = Vector2(np.random.uniform(0, 50), np.random.uniform(0, 50))
        boid1.dir = Vector2(0, 0)
        boid2.dir = Vector2(0, 0)
        before = boid1.pos.distance_to(boid2.pos)
        flock.seperation(boid1, [boid2])
        boid1.pos += boid1.dir
        after = boid1.pos.distance_to(boid2.pos)
        if before <= flock.avoid_dist:
            # Close enough to trigger avoidance: they must move apart.
            assert after > before
        else:
            # Out of range: the heading (and thus the distance) is untouched.
            assert after == before
def test_seperation_multiple_types():
    """Separation also avoids boids of *other* types (other_avoid_dist)."""
    flock = Flock(**dict(
        num_boids=0, num_types=2, world_size=(100, 100), cell_size=10,
        max_speed=5, perception=2, field_of_view=360, avoid_dist=50,
        other_avoid_mult=1, other_avoid_dist=50, alignment_factor=0,
        cohesion_factor=0, seperation_factor=1, turn_margin=10,
        turn_factor=1, loop_bounds=True,
    ))
    for _ in range(100):
        boid1 = flock.create_boid(1, 360)
        boid2 = flock.create_boid(1, 360)
        boid1.pos = Vector2(np.random.uniform(0, 50), np.random.uniform(0, 50))
        boid2.pos = Vector2(np.random.uniform(0, 50), np.random.uniform(0, 50))
        boid1.dir = Vector2(0, 0)
        boid2.dir = Vector2(0, 0)
        boid1.type = 0
        boid2.type = 1
        before = boid1.pos.distance_to(boid2.pos)
        flock.seperation(boid1, [boid2])
        boid1.pos += boid1.dir
        after = boid1.pos.distance_to(boid2.pos)
        # avoid_dist and other_avoid_dist are both 50 here, so the single
        # threshold check below covers the cross-type case as well.
        if before <= flock.avoid_dist:
            assert after > before
        else:
            assert after == before
| 23.566449 | 70 | 0.562263 | 1,458 | 10,817 | 3.967078 | 0.049383 | 0.058783 | 0.062241 | 0.047026 | 0.933783 | 0.922026 | 0.891598 | 0.871542 | 0.856501 | 0.831777 | 0 | 0.093313 | 0.325321 | 10,817 | 458 | 71 | 23.617904 | 0.699233 | 0 | 0 | 0.815341 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076705 | 1 | 0.028409 | false | 0 | 0.008523 | 0 | 0.036932 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
28da3c84b9bd1b5d4bd903b1ecab41e9d3238de7 | 2,536 | py | Python | app/bns/migrations/0045_wbi_livelihood.py | dianedetoeuf/django_kobo | d437a289e1952bb55fb7004fddbff6b978aa15d6 | [
"MIT"
] | 1 | 2018-12-20T07:59:55.000Z | 2018-12-20T07:59:55.000Z | app/bns/migrations/0045_wbi_livelihood.py | dianedetoeuf/django_kobo | d437a289e1952bb55fb7004fddbff6b978aa15d6 | [
"MIT"
] | 9 | 2018-11-06T01:51:28.000Z | 2018-12-21T22:19:42.000Z | app/bns/migrations/0045_wbi_livelihood.py | dianedetoeuf/django_kobo | d437a289e1952bb55fb7004fddbff6b978aa15d6 | [
"MIT"
] | 2 | 2018-11-21T15:13:32.000Z | 2020-02-19T08:39:37.000Z | from django.db import migrations
class Migration(migrations.Migration):
    # Adds three reporting views over bns_wbi_hh that aggregate the
    # well-being index (WBI) per primary livelihood (livelihood_1) at
    # village, district and landscape granularity.  Each view exposes a
    # synthetic row-number id, the mean and sample standard deviation of
    # wbi (rounded to 2 decimals) and the household count n.
    dependencies = [
        ('bns', '0044_wbi_hh_type'),
    ]
    operations = [
        # Village-level aggregation (finest grouping).
        migrations.RunSQL(
            """
            CREATE OR REPLACE VIEW bns_wbi_village_livelihood AS
 SELECT row_number() OVER () AS id,
    bns_wbi_hh.dataset_uuid_id,
    bns_wbi_hh.dataset_year,
    bns_wbi_hh.livelihood_1,
    bns_wbi_hh.village,
    bns_wbi_hh.district,
    bns_wbi_hh.landscape,
    round(avg(bns_wbi_hh.wbi), 2) AS avg_wbi,
    round(stddev_samp(bns_wbi_hh.wbi), 2) AS stddev_wbi,
    count(bns_wbi_hh.hh_id) AS n
   FROM bns_wbi_hh
  GROUP BY bns_wbi_hh.dataset_uuid_id, bns_wbi_hh.dataset_year, bns_wbi_hh.livelihood_1, bns_wbi_hh.village, bns_wbi_hh.district, bns_wbi_hh.landscape;
            """,
            reverse_sql="DROP VIEW bns_wbi_village_livelihood;"),
        # District-level aggregation (village dropped from the grouping).
        migrations.RunSQL(
            """
            CREATE OR REPLACE VIEW bns_wbi_district_livelihood AS
 SELECT row_number() OVER () AS id,
    bns_wbi_hh.dataset_uuid_id,
    bns_wbi_hh.dataset_year,
    bns_wbi_hh.livelihood_1,
    bns_wbi_hh.district,
    bns_wbi_hh.landscape,
    round(avg(bns_wbi_hh.wbi), 2) AS avg_wbi,
    round(stddev_samp(bns_wbi_hh.wbi), 2) AS stddev_wbi,
    count(bns_wbi_hh.hh_id) AS n
   FROM bns_wbi_hh
  GROUP BY bns_wbi_hh.dataset_uuid_id, bns_wbi_hh.dataset_year, bns_wbi_hh.livelihood_1, bns_wbi_hh.district, bns_wbi_hh.landscape;
            """,
            reverse_sql="DROP VIEW bns_wbi_district_livelihood;"),
        # Landscape-level aggregation (coarsest grouping).
        migrations.RunSQL(
            """
            CREATE OR REPLACE VIEW bns_wbi_landscape_livelihood AS
 SELECT row_number() OVER () AS id,
    bns_wbi_hh.dataset_uuid_id,
    bns_wbi_hh.dataset_year,
    bns_wbi_hh.livelihood_1,
    bns_wbi_hh.landscape,
    round(avg(bns_wbi_hh.wbi), 2) AS avg_wbi,
    round(stddev_samp(bns_wbi_hh.wbi), 2) AS stddev_wbi,
    count(bns_wbi_hh.hh_id) AS n
   FROM bns_wbi_hh
  GROUP BY bns_wbi_hh.dataset_uuid_id, bns_wbi_hh.dataset_year, bns_wbi_hh.livelihood_1, bns_wbi_hh.landscape;
            """,
            reverse_sql="DROP VIEW bns_wbi_landscape_livelihood;")
    ]
| 42.266667 | 163 | 0.587145 | 340 | 2,536 | 3.947059 | 0.132353 | 0.214605 | 0.250373 | 0.134128 | 0.92623 | 0.88301 | 0.88301 | 0.88301 | 0.852459 | 0.776453 | 0 | 0.009535 | 0.338328 | 2,536 | 59 | 164 | 42.983051 | 0.790226 | 0 | 0 | 0.1875 | 0 | 0 | 0.270325 | 0.170732 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
28db3ba1865577c25308fb9e72d40826343e0d62 | 92 | py | Python | 20160302.py | JaeGyu/PythonEx_1 | e67053db6ca7431c3dd66351c190c53229e3f141 | [
"MIT"
] | null | null | null | 20160302.py | JaeGyu/PythonEx_1 | e67053db6ca7431c3dd66351c190c53229e3f141 | [
"MIT"
] | null | null | null | 20160302.py | JaeGyu/PythonEx_1 | e67053db6ca7431c3dd66351c190c53229e3f141 | [
"MIT"
] | null | null | null | #_*_ coding: utf-8 _*_
l = [1,2,3,4,5]
print type(l)
print 2/4.0
print 2//4.0 #부동소수점 나눗셈
| 11.5 | 24 | 0.586957 | 22 | 92 | 2.272727 | 0.636364 | 0.24 | 0.28 | 0.32 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162162 | 0.195652 | 92 | 7 | 25 | 13.142857 | 0.513514 | 0.326087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.75 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
e907cb88417537757ab560699bb1bcfd8d9414cd | 5,254 | py | Python | datatools/test/url.py | hayj/DataTools | a779a533afe8e176c33b91d17974468ef7c6b17e | [
"MIT"
] | null | null | null | datatools/test/url.py | hayj/DataTools | a779a533afe8e176c33b91d17974468ef7c6b17e | [
"MIT"
] | null | null | null | datatools/test/url.py | hayj/DataTools | a779a533afe8e176c33b91d17974468ef7c6b17e | [
"MIT"
] | null | null | null | # coding: utf-8
# pew in datatools-venv python ./test/url.py
import os
import sys
sys.path.append('../')
import unittest
import doctest
from datatools import url
from datatools.url import *
# The level allows the unit-test execution to choose only the desired test
# groups: a group below runs when its level lies in the [min, max] range.
# NOTE(review): `min` and `max` shadow the Python builtins here; renaming
# them (e.g. MIN_LEVEL/MAX_LEVEL) would require touching every guard below.
min = 0
max = 1
assert min <= max
print("==============\nStarting unit tests...")
if min <= 0 <= max:
    class DocTest(unittest.TestCase):
        """Level-0 group: run the doctests embedded in the `url` module."""

        def testDoctests(self):
            """Execute every doctest defined in datatools.url."""
            doctest.testmod(m=url)
if min <= 1 <= max:
    class Test1(unittest.TestCase):
        """Level-1 group: URLParser.normalize behaviour on a battery of URLs."""

        def test2(self):
            """Normalize a series of URLs and compare against expectations.

            An expectation of ``None`` means the case is only printed:
            those are the normalizations whose asserts were disabled
            (marked FAILED) in the original suite.
            """
            cases = [
                ("\ngoogle.fr ", "http://google.fr/"),
                ("google.fr", "http://google.fr/"),
                ("https://google.fr", "https://google.fr/"),
                ("http://google.fr", "http://google.fr/"),
                ("http://google.fr/", "http://google.fr/"),
                ("http://google.fr/toto/tutu", "http://google.fr/toto/tutu"),
                # trailing-slash collapsing used to FAIL; assert disabled:
                ("http://google.fr/toto/tutu/", None),
                ("http://google.fr/toto/tutu/truc.html",
                 "http://google.fr/toto/tutu/truc.html"),
                # trailing-slash collapsing used to FAIL; assert disabled:
                ("http://google.fr/toto/tutu/truc.html/", None),
                ("http://google.fr?t=1", "http://google.fr/?t=1"),
                ("http://google.fr/test?t=1&b=2", "http://google.fr/test?t=1&b=2"),
                ("http://google.fr/test?t=1&b=2/", "http://google.fr/test?t=1&b=2/"),
                ("http://google.fr/test?b=2&t=1/", "http://google.fr/test?b=2&t=1/"),
                # the original exercised this exact input twice in a row:
                ("http://google.fr/test?b=2&t=1/", "http://google.fr/test?b=2&t=1/"),
                ("http://goOgle.fr/test?b=2&t=1/", "http://google.fr/test?b=2&t=1/"),
                ("http://www.cnn.co.uk//test?B=2&t=1",
                 "http://www.cnn.co.uk/test?B=2&t=1"),
                ("http://www.cnn.co.uk:8080//test?B=2&t=1",
                 "http://www.cnn.co.uk:8080/test?B=2&t=1"),
                ("http://www.cnn.co.uk:80//test?B=2&t=1",
                 "http://www.cnn.co.uk/test?B=2&t=1"),
            ]
            urlParser = URLParser()
            for raw, expected in cases:
                url = urlParser.normalize(raw)
                print(url)
                if expected is not None:
                    self.assertTrue(url == expected)
if min <= 2 <= max:
    # Level-2 group: an empty placeholder (inactive while max == 1 above).
    class Test2(unittest.TestCase):
        """Placeholder for future level-2 tests."""

        def test1(self):
            """Intentionally empty."""
            pass
if __name__ == '__main__':
    # exit=False stops unittest.main() from calling sys.exit(), so the
    # closing banner below is actually reachable (it never printed before).
    # Can also be executed as a Python unit-test in an IDE such as Eclipse.
    unittest.main(exit=False)
    print("Unit tests done.\n==============")
e93b7ca9e5a2b087af77f86ba884266ca60838de | 566 | py | Python | tmp/cython_class.py | fluiddyn/transonic | a460e9f6d1139f79b668cb3306d1e8a7e190b72d | [
"BSD-3-Clause"
] | 88 | 2019-01-08T16:39:08.000Z | 2022-02-06T14:19:23.000Z | tmp/cython_class.py | fluiddyn/transonic | a460e9f6d1139f79b668cb3306d1e8a7e190b72d | [
"BSD-3-Clause"
] | 13 | 2019-06-20T15:53:10.000Z | 2021-02-09T11:03:29.000Z | tmp/cython_class.py | fluiddyn/transonic | a460e9f6d1139f79b668cb3306d1e8a7e190b72d | [
"BSD-3-Clause"
] | 1 | 2019-11-05T03:03:14.000Z | 2019-11-05T03:03:14.000Z | from transonic import boost
@boost
class MyClass:
    # Transonic-boosted example class; the class-level `a : int` annotation
    # declares the attribute's type for the @boost backends.
    a : int
    def __init__(self, a):
        # `a` is the value added by the boosted methods below.
        self.a = a
    @boost
    def method(self, b : int):
        # Returns self.a + b; compiled by the transonic backend.
        return self.a + b
    @boost
    def method2(self, b : int):
        # Same body as `method`; kept as a second boosted entry point.
        return self.a + b
@boost
class MyClass2:
    # Second transonic-boosted example; `method` additionally declares a
    # typed local variable, and `method3` is deliberately left unboosted.
    a : int
    def __init__(self, a):
        self.a = a
    @boost
    def method(self, b : int):
        # `o : int` is a transonic-style local type declaration.
        o : int
        o = 1
        return self.a + b + o
    @boost
    def method2(self, b : int):
        return self.a + b
    def method3(self, b : int):
        # Not decorated with @boost: runs as plain Python.
        return self.a + b
c95177d47a83c05da5e5517aa98e9a601ed74540 | 19,634 | py | Python | devel/lib/python2.7/dist-packages/industrial_msgs/srv/_GetRobotInfo.py | Pontiky/yaskawa-hc10-moveit | 2a6031f9404d285aa662636ccc941485b339e7fd | [
"BSD-2-Clause"
] | 1 | 2021-05-19T04:09:29.000Z | 2021-05-19T04:09:29.000Z | devel/lib/python2.7/dist-packages/industrial_msgs/srv/_GetRobotInfo.py | Pontiky/yaskawa-hc10-moveit | 2a6031f9404d285aa662636ccc941485b339e7fd | [
"BSD-2-Clause"
] | null | null | null | devel/lib/python2.7/dist-packages/industrial_msgs/srv/_GetRobotInfo.py | Pontiky/yaskawa-hc10-moveit | 2a6031f9404d285aa662636ccc941485b339e7fd | [
"BSD-2-Clause"
] | null | null | null | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from industrial_msgs/GetRobotInfoRequest.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetRobotInfoRequest(genpy.Message):
  # Autogenerated genpy message ("Do not edit" -- regenerate from the .srv
  # file instead).  The request carries no fields, so the md5sum is the
  # hash of an empty definition and all (de)serializers are no-ops.
  _md5sum = "d41d8cd98f00b204e9800998ecf8427e"
  _type = "industrial_msgs/GetRobotInfoRequest"
  _has_header = False # flag to mark the presence of a Header object
  _full_text = """
"""
  __slots__ = []
  _slot_types = []
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are: (none for this message)
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GetRobotInfoRequest, self).__init__(*args, **kwds)
  def _get_types(self):
    """
    internal API method
    """
    # Maps __slots__ to their ROS types (both empty for this message).
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      # No fields to write; the except clauses are kept for parity with
      # other generated messages.
      pass
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      # No fields to read; nothing is consumed from the buffer.
      end = 0
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      # Identical to serialize(): this message has no array fields.
      pass
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      end = 0
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
# Cached struct for the little-endian uint32 ('<I') used for length prefixes.
_struct_I = genpy.struct_I
def _get_struct_I():
    # Accessor kept for symmetry with the other generated _get_struct_* helpers.
    global _struct_I
    return _struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from industrial_msgs/GetRobotInfoResponse.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import industrial_msgs.msg
class GetRobotInfoResponse(genpy.Message):
  # Autogenerated genpy message ("Do not edit" -- regenerate from the .srv
  # definition instead).  Carries the controller's DeviceInfo, a list of
  # per-robot DeviceInfo entries and a ServiceReturnCode.
  _md5sum = "5db3230b3e61c85a320b999ffd7f3b3f"
  _type = "industrial_msgs/GetRobotInfoResponse"
  _has_header = False # flag to mark the presence of a Header object
  _full_text = """industrial_msgs/DeviceInfo controller
industrial_msgs/DeviceInfo[] robots
industrial_msgs/ServiceReturnCode code
================================================================================
MSG: industrial_msgs/DeviceInfo
# Device info captures device agnostic information about a piece of hardware.
# This message is meant as a generic as possible. Items that don't apply should
# be left blank. This message is not meant to replace diagnostic messages, but
# rather provide a standard service message that can be used to populate standard
# components (like a GUI for example)
string model
string serial_number
string hw_version
string sw_version
string address
================================================================================
MSG: industrial_msgs/ServiceReturnCode
# Service return codes for simple requests. All ROS-Industrial service
# replies are required to have a return code indicating success or failure
# Specific return codes for different failure should be negative.
int8 val
int8 SUCCESS = 1
int8 FAILURE = -1
"""
  __slots__ = ['controller','robots','code']
  _slot_types = ['industrial_msgs/DeviceInfo','industrial_msgs/DeviceInfo[]','industrial_msgs/ServiceReturnCode']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       controller,robots,code
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GetRobotInfoResponse, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.controller is None:
        self.controller = industrial_msgs.msg.DeviceInfo()
      if self.robots is None:
        self.robots = []
      if self.code is None:
        self.code = industrial_msgs.msg.ServiceReturnCode()
    else:
      # No arguments given: every field gets its default value.
      self.controller = industrial_msgs.msg.DeviceInfo()
      self.robots = []
      self.code = industrial_msgs.msg.ServiceReturnCode()
  def _get_types(self):
    """
    internal API method
    """
    # Maps each name in __slots__ to its ROS message type.
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      # Each string field is written as a uint32 length prefix followed by
      # the utf-8 encoded bytes.
      _x = self.controller.model
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.serial_number
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.hw_version
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.sw_version
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.address
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      # Variable-length array: element count first, then each DeviceInfo.
      length = len(self.robots)
      buff.write(_struct_I.pack(length))
      for val1 in self.robots:
        _x = val1.model
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.serial_number
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.hw_version
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.sw_version
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.address
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      # code.val is a single signed byte ('b').
      _x = self.code.val
      buff.write(_get_struct_b().pack(_x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      if self.controller is None:
        self.controller = industrial_msgs.msg.DeviceInfo()
      if self.robots is None:
        # genpy quirk: robots is set to None here and rebuilt as a list below.
        self.robots = None
      if self.code is None:
        self.code = industrial_msgs.msg.ServiceReturnCode()
      end = 0
      # Each string field: read a uint32 length, then that many bytes.
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.controller.model = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.controller.model = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.controller.serial_number = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.controller.serial_number = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.controller.hw_version = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.controller.hw_version = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.controller.sw_version = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.controller.sw_version = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.controller.address = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.controller.address = str[start:end]
      # Robots array: element count, then one DeviceInfo per element.
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.robots = []
      for i in range(0, length):
        val1 = industrial_msgs.msg.DeviceInfo()
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.model = str[start:end].decode('utf-8', 'rosmsg')
        else:
          val1.model = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.serial_number = str[start:end].decode('utf-8', 'rosmsg')
        else:
          val1.serial_number = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.hw_version = str[start:end].decode('utf-8', 'rosmsg')
        else:
          val1.hw_version = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.sw_version = str[start:end].decode('utf-8', 'rosmsg')
        else:
          val1.sw_version = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.address = str[start:end].decode('utf-8', 'rosmsg')
        else:
          val1.address = str[start:end]
        self.robots.append(val1)
      # Trailing int8: the service return code.
      start = end
      end += 1
      (self.code.val,) = _get_struct_b().unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      # This message has no numpy-backed array fields, so the body is
      # identical to serialize(): length-prefixed utf-8 strings, a
      # length-prefixed DeviceInfo array, then the int8 return code.
      _x = self.controller.model
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.serial_number
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.hw_version
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.sw_version
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.controller.address
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      length = len(self.robots)
      buff.write(_struct_I.pack(length))
      for val1 in self.robots:
        _x = val1.model
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.serial_number
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.hw_version
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.sw_version
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
        _x = val1.address
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.code.val
      buff.write(_get_struct_b().pack(_x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
    """
    Unpack the serialized message in ``str`` into this message instance,
    using numpy for array types.

    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module (unused here: this message has no
        primitive-array fields, but the signature is fixed by genpy)
    :returns: self, with ``controller``, ``robots`` and ``code`` populated
    :raises: genpy.DeserializationError on a truncated/malformed buffer
    """
    # Tag the "rosmsg" codecs error handler with this message type so
    # decode failures are reported against the right message.
    codecs.lookup_error("rosmsg").msg_type = self._type

    def _read_string(buf, offset):
        # Read one uint32-length-prefixed string at *offset*; return
        # (value, new_offset).  Decoded to unicode on Python 3, returned
        # as raw bytes on Python 2 — the standard genpy convention.
        (n,) = _struct_I.unpack(buf[offset:offset + 4])
        offset += 4
        raw = buf[offset:offset + n]
        if python3:
            return raw.decode('utf-8', 'rosmsg'), offset + n
        return raw, offset + n

    # Wire layout of a DeviceInfo sub-message: five strings, in order.
    _device_fields = ('model', 'serial_number', 'hw_version',
                      'sw_version', 'address')
    try:
        if self.controller is None:
            self.controller = industrial_msgs.msg.DeviceInfo()
        if self.robots is None:
            self.robots = None  # genpy idiom; replaced by a list below
        if self.code is None:
            self.code = industrial_msgs.msg.ServiceReturnCode()
        end = 0
        # --- controller: DeviceInfo (5 length-prefixed strings) ---
        for field in _device_fields:
            value, end = _read_string(str, end)
            setattr(self.controller, field, value)
        # --- robots: DeviceInfo[] (uint32 count, then per-element strings) ---
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        self.robots = []
        for _ in range(length):
            val1 = industrial_msgs.msg.DeviceInfo()
            for field in _device_fields:
                value, end = _read_string(str, end)
                setattr(val1, field, value)
            self.robots.append(val1)
        # --- code: ServiceReturnCode (single signed byte) ---
        start = end
        end += 1
        (self.code.val,) = _get_struct_b().unpack(str[start:end])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e)  # most likely buffer underfill
# Shared pre-compiled Struct for a little-endian uint32 ('<I'), taken
# straight from genpy so every message module reuses one instance.
_struct_I = genpy.struct_I

def _get_struct_I():
    """Return the module-wide little-endian uint32 Struct."""
    # Read-only access: no ``global`` declaration required.
    return _struct_I
_struct_b = None
def _get_struct_b():
global _struct_b
if _struct_b is None:
_struct_b = struct.Struct("<b")
return _struct_b
class GetRobotInfo(object):
    """Service stub binding the GetRobotInfo request/response message pair."""
    # ROS service identity.
    _type = 'industrial_msgs/GetRobotInfo'
    _md5sum = '5db3230b3e61c85a320b999ffd7f3b3f'
    # Concrete message classes used by service clients/servers.
    _request_class = GetRobotInfoRequest
    _response_class = GetRobotInfoResponse
| 33.968858 | 145 | 0.60385 | 2,563 | 19,634 | 4.472103 | 0.088178 | 0.07538 | 0.06142 | 0.053045 | 0.871401 | 0.865992 | 0.858751 | 0.858751 | 0.858751 | 0.858751 | 0 | 0.016228 | 0.262453 | 19,634 | 577 | 146 | 34.02773 | 0.775292 | 0.120098 | 0 | 0.903361 | 1 | 0 | 0.120128 | 0.03483 | 0 | 0 | 0.001185 | 0 | 0 | 1 | 0.031513 | false | 0.004202 | 0.018908 | 0 | 0.109244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.