hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7fa5e14fa548e86ae6533ead8e6779b2df82b39 | 1,510 | py | Python | data/menus/options_menu.py | ChristianD37/PyRacer | 8f096723bdea95c3e918b2f70b01e3ed0b3f9fe1 | [
"MIT"
] | 4 | 2021-01-12T10:22:45.000Z | 2021-02-17T04:49:59.000Z | data/menus/options_menu.py | ChristianD37/PyRacer | 8f096723bdea95c3e918b2f70b01e3ed0b3f9fe1 | [
"MIT"
] | null | null | null | data/menus/options_menu.py | ChristianD37/PyRacer | 8f096723bdea95c3e918b2f70b01e3ed0b3f9fe1 | [
"MIT"
] | 1 | 2021-01-17T04:32:39.000Z | 2021-01-17T04:32:39.000Z | from data.menus.menu import Menu
class OptionsMenu(Menu):
    """Options screen offering the Volume and Controls entries."""

    def __init__(self, game):
        Menu.__init__(self, game)
        self.states = {0: "Volume", 1: "Controls"}
        self.index = 0
        self.newline = 40
        self.cursor_rect.center = (self.game.DISPLAY_W / 2, self.game.DISPLAY_H / 2)

    def display_menu(self):
        """Run the draw/input loop until another menu takes over."""
        self.run_display = True
        while self.run_display:
            game = self.game
            game.get_events()
            self.handle_input()
            game.display.fill((0, 0, 0))
            half_w = game.DISPLAY_W / 2
            half_h = game.DISPLAY_H / 2
            white = (255, 255, 255)
            self.draw_text("Options", 40, white, half_w, game.DISPLAY_H / 4)
            self.draw_text("Volume", 40, white, half_w, half_h)
            self.draw_text("Controls", 40, white, half_w, half_h + self.newline)
            self.draw_cursor()
            game.draw_screen()
            game.reset_keys()

    def handle_input(self):
        """React to the start/run actions for the highlighted entry."""
        self.move_cursor()
        actions = self.game.actions
        if actions['start']:
            selected = self.states[self.index]
            if selected == "Volume":
                print("Volume Menu")
            if selected == "Controls":
                print("Controls Menu")
                self.game.current_menu = self.game.controls_menu
                self.run_display = False
        if actions['run']:
            self.game.current_menu = self.game.main_menu
            self.run_display = False
class OptionsMenu(Menu):
    """Options screen offering the Volume and Controls entries."""

    def __init__(self, game):
        Menu.__init__(self, game)
        self.states = {0: "Volume", 1: "Controls"}
        self.index = 0
        self.newline = 40
        self.cursor_rect.center = (self.game.DISPLAY_W / 2, self.game.DISPLAY_H / 2)

    def display_menu(self):
        """Run the draw/input loop until another menu takes over."""
        self.run_display = True
        while self.run_display:
            game = self.game
            game.get_events()
            self.handle_input()
            game.display.fill((0, 0, 0))
            half_w = game.DISPLAY_W / 2
            half_h = game.DISPLAY_H / 2
            white = (255, 255, 255)
            self.draw_text("Options", 40, white, half_w, game.DISPLAY_H / 4)
            self.draw_text("Volume", 40, white, half_w, half_h)
            self.draw_text("Controls", 40, white, half_w, half_h + self.newline)
            self.draw_cursor()
            game.draw_screen()
            game.reset_keys()

    def handle_input(self):
        """React to the start/run actions for the highlighted entry."""
        self.move_cursor()
        actions = self.game.actions
        if actions['start']:
            selected = self.states[self.index]
            if selected == "Volume":
                print("Volume Menu")
            if selected == "Controls":
                print("Controls Menu")
                self.game.current_menu = self.game.controls_menu
                self.run_display = False
        if actions['run']:
            self.game.current_menu = self.game.main_menu
            self.run_display = False
f7fa5e91400000b4953ab8022408df2a80e3be82 | 3,388 | py | Python | pypoca/cogs/general.py | leandcesar/PyPoca | 416f690faad0b511ca9d04b012af35256ee95089 | [
"MIT"
] | 1 | 2021-11-22T04:22:08.000Z | 2021-11-22T04:22:08.000Z | pypoca/cogs/general.py | leandcesar/PyPoca | 416f690faad0b511ca9d04b012af35256ee95089 | [
"MIT"
] | null | null | null | pypoca/cogs/general.py | leandcesar/PyPoca | 416f690faad0b511ca9d04b012af35256ee95089 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import disnake
from disnake.ext import commands
from pypoca.config import COLOR, URLS
from pypoca.database import Server
from pypoca.ext import ALL, DEFAULT, Choice, Option
class General(commands.Cog):
    """General-purpose bot commands: /ping and /help."""

    def __init__(self, bot: commands.Bot):
        self.bot = bot

    @commands.slash_command(name="ping", description=DEFAULT["COMMAND_PING_DESC"])
    async def slash_ping(self, inter: disnake.ApplicationCommandInteraction, hide: Choice.boolean = Option.hide):
        """Reply with the gateway latency in milliseconds."""
        # Use the server's configured locale, falling back to the default.
        server = Server.get_by_id(inter.guild.id)
        locale = ALL[server.language] if server else DEFAULT
        # Latency is reported in seconds; convert to whole milliseconds.
        latency = int(self.bot.latency * 1000)
        description = locale["COMMAND_PING_REPLY"] + f": {latency}ms"
        embed = disnake.Embed(description=description, color=COLOR)
        await inter.send(embed=embed, ephemeral=hide)

    @commands.slash_command(name="help", description=DEFAULT["COMMAND_HELP_DESC"])
    async def slash_help(self, inter: disnake.ApplicationCommandInteraction, hide: Choice.boolean = Option.hide):
        """Send the localized command overview plus link buttons."""
        server = Server.get_by_id(inter.guild.id)
        locale = ALL[server.language] if server else DEFAULT
        # Invisible custom emoji used to indent sub-commands in the embed.
        BLANK = "<:blank:914183315056111627>"
        description = f"""
**/movie**
{BLANK} **discover** {locale["COMMAND_MOVIE_DISCOVER_DESC"]}
{BLANK} **find** {locale["COMMAND_MOVIE_FIND_DESC"]}
{BLANK} **popular** {locale["COMMAND_MOVIE_POPULAR_DESC"]}
{BLANK} **search** {locale["COMMAND_MOVIE_SEARCH_DESC"]}
{BLANK} **top** {locale["COMMAND_MOVIE_TOP_DESC"]}
{BLANK} **trending** {locale["COMMAND_MOVIE_TRENDING_DESC"]}
{BLANK} **upcoming** {locale["COMMAND_MOVIE_UPCOMING_DESC"]}
**/tv**
{BLANK} **discover** {locale["COMMAND_TV_DISCOVER_DESC"]}
{BLANK} **popular** {locale["COMMAND_TV_POPULAR_DESC"]}
{BLANK} **search** {locale["COMMAND_TV_SEARCH_DESC"]}
{BLANK} **top** {locale["COMMAND_TV_TOP_DESC"]}
{BLANK} **trending** {locale["COMMAND_TV_TRENDING_DESC"]}
{BLANK} **upcoming** {locale["COMMAND_TV_UPCOMING_DESC"]}
**/people**
{BLANK} **popular** {locale["COMMAND_PERSON_POPULAR_DESC"]}
{BLANK} **search** {locale["COMMAND_PERSON_SEARCH_DESC"]}
{BLANK} **trending** {locale["COMMAND_PERSON_TRENDING_DESC"]}
**/game**
{BLANK} **frame** {locale["COMMAND_GAME_FRAME_DESC"]}
{BLANK} **higher** {locale["COMMAND_GAME_HIGHER_DESC"]}
**/setting**
{BLANK} **language** {locale["COMMAND_LANGUAGE_DESC"]}
"""
        # style 5 = link button (requires a url instead of a custom_id).
        buttons = [
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_INVITE"], "url": URLS["invite"]},
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_VOTE"], "url": URLS["vote"]},
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_SERVER"], "url": URLS["server"]},
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_SITE"], "url": URLS["site"]},
        ]
        embed = disnake.Embed(description=description, color=COLOR)
        view = disnake.ui.View()
        [view.add_item(disnake.ui.Button(**button)) for button in buttons]
        await inter.send(embed=embed, view=view, ephemeral=hide)
def setup(bot: commands.Bot) -> None:
    """Entry point used by disnake's extension loader to register this cog."""
    bot.add_cog(General(bot))
| 47.055556 | 113 | 0.626328 |
import disnake
from disnake.ext import commands
from pypoca.config import COLOR, URLS
from pypoca.database import Server
from pypoca.ext import ALL, DEFAULT, Choice, Option
class General(commands.Cog):
    """General-purpose bot commands: /ping and /help."""

    def __init__(self, bot: commands.Bot):
        self.bot = bot

    @commands.slash_command(name="ping", description=DEFAULT["COMMAND_PING_DESC"])
    async def slash_ping(self, inter: disnake.ApplicationCommandInteraction, hide: Choice.boolean = Option.hide):
        """Reply with the gateway latency in milliseconds."""
        # Use the server's configured locale, falling back to the default.
        server = Server.get_by_id(inter.guild.id)
        locale = ALL[server.language] if server else DEFAULT
        # Latency is reported in seconds; convert to whole milliseconds.
        latency = int(self.bot.latency * 1000)
        description = locale["COMMAND_PING_REPLY"] + f": {latency}ms"
        embed = disnake.Embed(description=description, color=COLOR)
        await inter.send(embed=embed, ephemeral=hide)

    @commands.slash_command(name="help", description=DEFAULT["COMMAND_HELP_DESC"])
    async def slash_help(self, inter: disnake.ApplicationCommandInteraction, hide: Choice.boolean = Option.hide):
        """Send the localized command overview plus link buttons."""
        server = Server.get_by_id(inter.guild.id)
        locale = ALL[server.language] if server else DEFAULT
        # Invisible custom emoji used to indent sub-commands in the embed.
        BLANK = "<:blank:914183315056111627>"
        description = f"""
**/movie**
{BLANK} **discover** {locale["COMMAND_MOVIE_DISCOVER_DESC"]}
{BLANK} **find** {locale["COMMAND_MOVIE_FIND_DESC"]}
{BLANK} **popular** {locale["COMMAND_MOVIE_POPULAR_DESC"]}
{BLANK} **search** {locale["COMMAND_MOVIE_SEARCH_DESC"]}
{BLANK} **top** {locale["COMMAND_MOVIE_TOP_DESC"]}
{BLANK} **trending** {locale["COMMAND_MOVIE_TRENDING_DESC"]}
{BLANK} **upcoming** {locale["COMMAND_MOVIE_UPCOMING_DESC"]}
**/tv**
{BLANK} **discover** {locale["COMMAND_TV_DISCOVER_DESC"]}
{BLANK} **popular** {locale["COMMAND_TV_POPULAR_DESC"]}
{BLANK} **search** {locale["COMMAND_TV_SEARCH_DESC"]}
{BLANK} **top** {locale["COMMAND_TV_TOP_DESC"]}
{BLANK} **trending** {locale["COMMAND_TV_TRENDING_DESC"]}
{BLANK} **upcoming** {locale["COMMAND_TV_UPCOMING_DESC"]}
**/people**
{BLANK} **popular** {locale["COMMAND_PERSON_POPULAR_DESC"]}
{BLANK} **search** {locale["COMMAND_PERSON_SEARCH_DESC"]}
{BLANK} **trending** {locale["COMMAND_PERSON_TRENDING_DESC"]}
**/game**
{BLANK} **frame** {locale["COMMAND_GAME_FRAME_DESC"]}
{BLANK} **higher** {locale["COMMAND_GAME_HIGHER_DESC"]}
**/setting**
{BLANK} **language** {locale["COMMAND_LANGUAGE_DESC"]}
"""
        # style 5 = link button (requires a url instead of a custom_id).
        buttons = [
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_INVITE"], "url": URLS["invite"]},
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_VOTE"], "url": URLS["vote"]},
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_SERVER"], "url": URLS["server"]},
            {"style": 5, "label": locale["COMMAND_HELP_BUTTON_SITE"], "url": URLS["site"]},
        ]
        embed = disnake.Embed(description=description, color=COLOR)
        view = disnake.ui.View()
        [view.add_item(disnake.ui.Button(**button)) for button in buttons]
        await inter.send(embed=embed, view=view, ephemeral=hide)
def setup(bot: commands.Bot) -> None:
    """Entry point used by disnake's extension loader to register this cog."""
    bot.add_cog(General(bot))
| true | true |
f7fa5ebc1de24bb058fb900810372a06231c2c5a | 1,279 | py | Python | ___Python/Jonas/Python/p05_random/m01_wuerfeln.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | [
"Apache-2.0"
] | null | null | null | ___Python/Jonas/Python/p05_random/m01_wuerfeln.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | [
"Apache-2.0"
] | null | null | null | ___Python/Jonas/Python/p05_random/m01_wuerfeln.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | [
"Apache-2.0"
] | null | null | null | import random
r = random.Random()


def wuerfeln():
    """Roll a fair die: return a pip count between 1 and 6."""
    return r.randint(1, 6)


def muenzwurf():
    """Flip a coin: 0 = heads, 1 = tails."""
    return r.randint(0, 1)


print(wuerfeln())

# Tally the pip-count frequencies of 10000 rolls.
d = {}
for i in range(10000):
    augenzahl = wuerfeln()
    if augenzahl in d:
        d[augenzahl] += 1
    else:
        d[augenzahl] = 1
print(d)


# 1) Draw lottery numbers "6 out of 49"  ==> e.g. [2, 7, 13, 17, 19, 42]
def kugel():
    """Draw one ball numbered 1 to 49."""
    return r.randint(1, 49)


# Solution a: redraw until 6 distinct balls have been seen.
zahlen = []
d = {}
kugeln = 0
while kugeln < 6:
    ziehung = kugel()  # number of the drawn ball
    if ziehung not in d:  # this ball has not been drawn before
        d[ziehung] = 1
        zahlen.append(ziehung)  # a ball must never be drawn twice
        kugeln += 1
print(sorted(zahlen))

# Solution b: remove each drawn ball from an urn.
urne = list(range(1, 50))  # urne = [1, 2, 3, ..., 49]
lottoziehung = []
for i in range(6):
    ziehung = urne.pop(r.randint(0, len(urne) - 1))
    lottoziehung.append(ziehung)
print(sorted(lottoziehung))

# Solution c: sample without replacement via the standard library.
lottoziehung = r.sample(range(1, 50), 6)
print(sorted(lottoziehung))


# 2) Write a function wuerfeln2 that rolls a fair die.
# Only the function muenzwurf may be used in the implementation.
def wuerfeln2():
    """Roll a fair die using only coin flips (rejection sampling).

    Three flips form a uniform value in 0..7; 0 and 7 are rejected and
    redrawn, leaving 1..6 with equal probability.
    """
    while True:
        wurf = 4 * muenzwurf() + 2 * muenzwurf() + muenzwurf()
        if 1 <= wurf <= 6:
            return wurf
r = random.Random()


def wuerfeln():
    """Return a fair die roll in 1..6."""
    return r.randint(1, 6)


def muenzwurf():
    """Return a coin flip result: 0 or 1."""
    return r.randint(0, 1)


print(wuerfeln())

# Frequency tally of 10000 die rolls.
d = {}
for i in range(10000):
    augenzahl = wuerfeln()
    d[augenzahl] = d.get(augenzahl, 0) + 1
print(d)


def kugel():
    """Draw one ball numbered 1 to 49."""
    return r.randint(1, 49)


# Draw six distinct balls by redrawing duplicates.
zahlen = []
d = {}
kugeln = 0
while kugeln < 6:
    ziehung = kugel()
    if ziehung not in d:
        d[ziehung] = 1
        zahlen.append(ziehung)
        kugeln += 1
print(sorted(zahlen))

# Remove drawn balls from an urn so repeats cannot happen.
urne = list(range(1, 50))
lottoziehung = []
for i in range(6):
    lottoziehung.append(urne.pop(r.randint(0, len(urne) - 1)))
print(sorted(lottoziehung))

# Sample six numbers without replacement via the standard library.
lottoziehung = r.sample(range(1, 50), 6)
print(sorted(lottoziehung))
| true | true |
f7fa5f1642a150279cadce1f48333bb3cecfcb27 | 955 | py | Python | button.py | DamianS6/Jordan-Catch | ff786527503ec3ddaa3856bc77eb8b919e69048d | [
"MIT"
] | null | null | null | button.py | DamianS6/Jordan-Catch | ff786527503ec3ddaa3856bc77eb8b919e69048d | [
"MIT"
] | null | null | null | button.py | DamianS6/Jordan-Catch | ff786527503ec3ddaa3856bc77eb8b919e69048d | [
"MIT"
] | null | null | null | import pygame.font
class Button:
def __init__(self, screen, msg):
"""Initialize button size and position."""
self.screen = screen
self.screen_rect = screen.get_rect()
self.width, self.height = 200, 50
self.button_color = (0, 0, 0)
self.font_color = (200, 83, 65)
self.font = pygame.font.SysFont(None, 48)
self.rect = pygame.Rect(0, 0, self.width, self.height)
self.rect.center = self.screen_rect.center
self.prep_msg(msg)
def prep_msg(self, msg):
self.msg_image = self.font.render(msg, True, self.font_color,
self.button_color)
self.msg_image_rect = self.msg_image.get_rect()
self.msg_image_rect.center = self.rect.center
def draw_button(self):
self.screen.fill(self.button_color, self.rect)
self.screen.blit(self.msg_image, self.msg_image_rect)
| 32.931034 | 70 | 0.597906 | import pygame.font
class Button:
    """A labelled rectangular button centered on the screen."""

    def __init__(self, screen, msg):
        """Set up the button geometry, colors and label."""
        self.screen = screen
        self.screen_rect = screen.get_rect()
        self.width = 200
        self.height = 50
        self.button_color = (0, 0, 0)
        self.font_color = (200, 83, 65)
        self.font = pygame.font.SysFont(None, 48)
        self.rect = pygame.Rect(0, 0, self.width, self.height)
        self.rect.center = self.screen_rect.center
        self.prep_msg(msg)

    def prep_msg(self, msg):
        """Render msg into an image and center it on the button."""
        self.msg_image = self.font.render(msg, True, self.font_color,
                                          self.button_color)
        self.msg_image_rect = self.msg_image.get_rect()
        self.msg_image_rect.center = self.rect.center

    def draw_button(self):
        """Draw the button rectangle, then blit its label on top."""
        self.screen.fill(self.button_color, self.rect)
        self.screen.blit(self.msg_image, self.msg_image_rect)
f7fa5f37f12354bcee4297263876bb8a2f70c161 | 394 | py | Python | examples/tutorials/04_1_line_2.py | strakam/PyEasyGraphics | 57a586aa92385d26725d4ec3d61b2bbbe970195d | [
"BSD-3-Clause"
] | 5 | 2019-09-23T05:15:47.000Z | 2021-01-17T08:06:47.000Z | examples/tutorials/04_1_line_2.py | strakam/PyEasyGraphics | 57a586aa92385d26725d4ec3d61b2bbbe970195d | [
"BSD-3-Clause"
] | 3 | 2019-05-03T05:25:17.000Z | 2021-04-15T04:53:16.000Z | examples/tutorials/04_1_line_2.py | strakam/PyEasyGraphics | 57a586aa92385d26725d4ec3d61b2bbbe970195d | [
"BSD-3-Clause"
] | 4 | 2019-05-04T13:42:40.000Z | 2021-04-15T10:38:48.000Z | from easygraphics import *
import math as m
def main():
    """Plot y = sin(x) for x in [-3, 3] on a 600x400 canvas."""
    init_graph(600, 400)
    translate(300, 200)  # move the origin to the canvas center
    scale(100, -100)  # zoom 100x per axis; flip y so it grows upward
    x = -3
    step = 0.01
    move_to(x, m.sin(x))
    while x <= 3:
        line_to(x, m.sin(x))
        x += step
    pause()
    close_graph()
easy_run(main) | 21.888889 | 90 | 0.593909 | from easygraphics import *
import math as m
def main():
    """Plot y = sin(x) for x in [-3, 3] on a 600x400 canvas."""
    init_graph(600, 400)
    translate(300, 200)  # move the origin to the canvas center
    scale(100, -100)  # zoom 100x per axis; flip y so it grows upward
    x = -3
    step = 0.01
    move_to(x, m.sin(x))
    while x <= 3:
        line_to(x, m.sin(x))
        x += step
    pause()
    close_graph()
easy_run(main) | true | true |
f7fa5fb35a032d36136d6c301dc8ea08f4be8ca9 | 172 | py | Python | wrangle/utils/connection_check.py | abhijithneilabraham/wrangle | cff8f3232b6ec31a94f533f41a5f05a9aa1d4273 | [
"MIT"
] | 17 | 2018-07-29T20:02:55.000Z | 2022-02-27T20:58:48.000Z | wrangle/utils/connection_check.py | abhijithneilabraham/wrangle | cff8f3232b6ec31a94f533f41a5f05a9aa1d4273 | [
"MIT"
] | 24 | 2018-07-10T14:41:40.000Z | 2022-01-31T19:44:32.000Z | wrangle/utils/connection_check.py | abhijithneilabraham/wrangle | cff8f3232b6ec31a94f533f41a5f05a9aa1d4273 | [
"MIT"
] | 10 | 2019-07-29T03:36:47.000Z | 2022-03-05T12:29:59.000Z | import socket
def is_connected():
    """Return True when an outbound TCP connection to www.google.com:80 succeeds.

    Used as a quick internet-connectivity probe.
    """
    try:
        conn = socket.create_connection(("www.google.com", 80))
    except OSError:
        return False
    conn.close()  # fix: close the probe socket instead of leaking it
    return True
| 15.636364 | 56 | 0.610465 | import socket
def is_connected():
    """Return True when an outbound TCP connection to www.google.com:80 succeeds.

    Used as a quick internet-connectivity probe.
    """
    try:
        conn = socket.create_connection(("www.google.com", 80))
    except OSError:
        return False
    conn.close()  # fix: close the probe socket instead of leaking it
    return True
| true | true |
f7fa60b5e1ff37188d7b2c8d751957108f76b262 | 8,134 | py | Python | satchmo/caching/__init__.py | sankroh/satchmo | e48df0c2a4be4ce14785d0a5d6dd1e516c57a838 | [
"BSD-3-Clause"
] | 1 | 2016-05-09T12:21:04.000Z | 2016-05-09T12:21:04.000Z | satchmo/caching/__init__.py | sankroh/satchmo | e48df0c2a4be4ce14785d0a5d6dd1e516c57a838 | [
"BSD-3-Clause"
] | null | null | null | satchmo/caching/__init__.py | sankroh/satchmo | e48df0c2a4be4ce14785d0a5d6dd1e516c57a838 | [
"BSD-3-Clause"
] | null | null | null | """A full cache system written on top of Django's rudimentary one."""
from django.conf import settings
from django.core.cache import cache
from django.utils.encoding import smart_str
import cPickle as pickle
import md5
import types
import logging
from satchmo.utils import is_string_like, is_list_or_tuple
log = logging.getLogger('caching')

# Bookkeeping for keys this process has written to the cache, plus
# hit/call counters reported in debug logging.
CACHED_KEYS = {}
CACHE_CALLS = 0
CACHE_HITS = 0
KEY_DELIM = "::"

try:
    CACHE_PREFIX = settings.CACHE_PREFIX
except AttributeError:
    # Fall back to the site id so keys stay distinct per site.
    CACHE_PREFIX = str(settings.SITE_ID)
    log.warn("No CACHE_PREFIX found in settings, using SITE_ID. Please update your settings to add a CACHE_PREFIX")

# A zero (or negative) CACHE_TIMEOUT disables the whole cache layer.
_CACHE_ENABLED = settings.CACHE_TIMEOUT > 0
class CacheWrapper(object):
    """Envelope stored in the cache; can flag values still being computed."""

    def __init__(self, val, inprocess=False):
        self.val = val
        self.inprocess = inprocess

    def __str__(self):
        return str(self.val)

    def __repr__(self):
        return repr(self.val)

    def wrap(cls, obj):
        """Return obj unchanged if already wrapped, otherwise wrap it."""
        if isinstance(obj, cls):
            return obj
        return cls(obj)
    wrap = classmethod(wrap)
class MethodNotFinishedError(Exception):
    """Raised when a cached value is still being computed by another caller."""
    def __init__(self, f):
        self.func = f  # the in-process marker value found in the cache


class NotCachedError(Exception):
    """Raised by cache_get when no value is stored under the derived key."""
    def __init__(self, k):
        self.key = k  # the fully-qualified cache key that missed


class CacheNotRespondingError(Exception):
    """Raised when a set/get round-trip through the cache backend fails."""
    pass
def cache_delete(*keys, **kwargs):
    """Delete one derived key (optionally with its children) or, when
    called with no arguments, everything this process has cached.

    Returns the list of keys that were removed.
    """
    removed = []
    if cache_enabled():
        global CACHED_KEYS
        log.debug('cache_delete')
        children = kwargs.pop('children', False)
        if (keys or kwargs):
            key = cache_key(*keys, **kwargs)
            if key in CACHED_KEYS:  # fix: dict.has_key() is deprecated and gone in Py3
                del CACHED_KEYS[key]
                removed.append(key)
                cache.delete(key)
            if children:
                # Children share the parent key as a "::"-delimited prefix.
                key = key + KEY_DELIM
                children = [x for x in CACHED_KEYS.keys() if x.startswith(key)]
                for k in children:
                    del CACHED_KEYS[k]
                    cache.delete(k)
                    removed.append(k)
        else:
            key = "All Keys"
            deleteneeded = _cache_flush_all()
            removed = CACHED_KEYS.keys()
            if deleteneeded:
                # Backend has no bulk flush; delete tracked keys one by one.
                for k in CACHED_KEYS:
                    cache.delete(k)
            CACHED_KEYS = {}
        if removed:
            log.debug("Cache delete: %s", removed)
        else:
            log.debug("No cached objects to delete for %s", key)
    return removed
def cache_delete_function(func):
    """Delete the cached entries (and their children) stored for ``func``."""
    return cache_delete(['func', func.__name__, func.__module__], children=True)
def cache_enabled():
    """Report whether the cache layer is currently active."""
    return _CACHE_ENABLED


def cache_enable(state=True):
    """Turn the cache layer on (default) or off."""
    global _CACHE_ENABLED
    _CACHE_ENABLED = state
def _cache_flush_all():
    """Flush memcached directly; return True when keys must instead be
    deleted one at a time by the caller."""
    if not is_memcached_backend():
        return True
    cache._cache.flush_all()
    return False
def cache_function(length=settings.CACHE_TIMEOUT):
    """
    A variant of the snippet posted by Jeff Wheeler at
    http://www.djangosnippets.org/snippets/109/

    Caches a function, using the function and its arguments as the key, and the return
    value as the value saved. It passes all arguments on to the function, as
    it should.

    The decorator itself takes a length argument, which is the number of
    seconds the cache will keep the result around.

    It will put a temp value in the cache while the function is
    processing. This should not matter in most cases, but if the app is using
    threads, you won't be able to get the previous value, and will need to
    wait until the function finishes. If this is not desired behavior, you can
    remove the first two lines after the ``else``.
    """
    def decorator(func):
        def inner_func(*args, **kwargs):
            if not cache_enabled():
                value = func(*args, **kwargs)
            else:
                try:
                    value = cache_get('func', func.__name__, func.__module__, args, kwargs)
                # fix: "except E, e" is Python-2-only; "as" works on 2.6+ and 3.x
                except NotCachedError as e:
                    # Set a temporary value while ``func`` is being processed,
                    # so concurrent threads can tell the work is in flight.
                    funcwrapper = CacheWrapper(".".join([func.__module__, func.__name__]), inprocess=True)
                    cache_set(e.key, value=funcwrapper, length=length, skiplog=True)
                    value = func(*args, **kwargs)
                    cache_set(e.key, value=value, length=length)
                except MethodNotFinishedError:
                    # Another caller is computing the value; just recompute.
                    value = func(*args, **kwargs)
            return value
        return inner_func
    return decorator
def cache_get(*keys, **kwargs):
    """Fetch a cached value; raise NotCachedError on a miss unless a
    ``default`` keyword was supplied.

    Raises MethodNotFinishedError when another caller is still computing
    the value (see cache_function).
    """
    if 'default' in kwargs:  # fix: dict.has_key() is deprecated and gone in Py3
        default_value = kwargs.pop('default')
        use_default = True
    else:
        use_default = False
    key = cache_key(keys, **kwargs)
    if not cache_enabled():
        raise NotCachedError(key)
    else:
        global CACHE_CALLS, CACHE_HITS
        CACHE_CALLS += 1
        if CACHE_CALLS == 1:
            # First use: verify the backend answers at all.
            cache_require()
        obj = cache.get(key)
        if obj and isinstance(obj, CacheWrapper):
            CACHE_HITS += 1
            CACHED_KEYS[key] = True
            log.debug('got cached [%i/%i]: %s', CACHE_CALLS, CACHE_HITS, key)
            if obj.inprocess:
                raise MethodNotFinishedError(obj.val)
            return obj.val
        else:
            # Stale bookkeeping entry: the backend no longer has the key.
            try:
                del CACHED_KEYS[key]
            except KeyError:
                pass
            if use_default:
                return default_value
            raise NotCachedError(key)
def cache_set(*keys, **kwargs):
    """Set an object into the cache under the key derived from ``keys``.

    Keyword arguments: ``value`` (required), ``length`` (seconds, defaults
    to settings.CACHE_TIMEOUT) and ``skiplog`` to silence debug logging.
    """
    if not cache_enabled():
        return
    global CACHED_KEYS
    obj = kwargs.pop('value')
    length = kwargs.pop('length', settings.CACHE_TIMEOUT)
    skiplog = kwargs.pop('skiplog', False)
    key = cache_key(keys, **kwargs)
    wrapped = CacheWrapper.wrap(obj)
    if not skiplog:
        log.debug('setting cache: %s', key)
    cache.set(key, wrapped, length)
    CACHED_KEYS[key] = True
def _hash_or_string(key):
    """Render ``key`` as a cache-key fragment: plain text for scalar
    values, the primary key for model-like objects, otherwise an md5
    digest of the pickled object."""
    if is_string_like(key) or isinstance(key, (types.IntType, types.LongType, types.FloatType)):
        return smart_str(key)
    else:
        try:
            # if it has a PK, use it (Django model instances).
            return str(key._get_pk_val())
        except AttributeError:
            return md5_hash(key)
def cache_contains(*keys, **kwargs):
    """Return True when the derived key is tracked as cached by this process."""
    key = cache_key(keys, **kwargs)
    return key in CACHED_KEYS  # fix: dict.has_key() is deprecated and gone in Py3
def cache_key(*keys, **pairs):
    """Smart key maker, returns the object itself if a key, else a list
    delimited by ':', automatically hashing any non-scalar objects."""
    if is_string_like(keys):
        keys = [keys]
    if is_list_or_tuple(keys):
        if len(keys) == 1 and is_list_or_tuple(keys[0]):
            # Unwrap a single nested list/tuple argument.
            keys = keys[0]
    else:
        # NOTE(review): non-sequence key objects are collapsed to a hash --
        # confirm this branch is reachable given *keys always arrives as a tuple.
        keys = [md5_hash(keys)]
    if pairs:
        keys = list(keys)
        klist = pairs.keys()
        klist.sort()  # sort for a deterministic key regardless of kwarg order
        for k in klist:
            keys.append(k)
            keys.append(pairs[k])
    key = KEY_DELIM.join([_hash_or_string(x) for x in keys])
    prefix = CACHE_PREFIX + KEY_DELIM
    if not key.startswith(prefix):
        key = prefix + key
    return key.replace(" ", ".")  # memcached forbids spaces in keys
def md5_hash(obj):
    """Return the 32-char hex MD5 digest of ``obj``'s pickled form.

    Used to build cache-key fragments from arbitrary objects.
    """
    import hashlib  # local import: replaces the deprecated `md5` module (removed in Py3)
    pickled = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
    return hashlib.md5(pickled).hexdigest()
def is_memcached_backend():
    """Detect whether Django's cache backend is memcached."""
    try:
        backend_module = cache._cache.__module__
    except AttributeError:
        return False
    return backend_module.endswith('memcache')
def cache_require():
    """Error if caching isn't running."""
    if cache_enabled():
        # Round-trip a sentinel value through the cache backend.
        key = cache_key('require_cache')
        cache_set(key, value='1')
        v = cache_get(key, default='0')
        if v != '1':
            raise CacheNotRespondingError()
        else:
            log.debug("Cache responding OK")
            return True
    # NOTE(review): returns None when caching is disabled -- confirm
    # callers treat a falsy result as "nothing to verify".
| 29.05 | 116 | 0.592328 | """A full cache system written on top of Django's rudimentary one."""
from django.conf import settings
from django.core.cache import cache
from django.utils.encoding import smart_str
import cPickle as pickle
import md5
import types
import logging
from satchmo.utils import is_string_like, is_list_or_tuple
log = logging.getLogger('caching')

# Bookkeeping for keys this process has written to the cache, plus
# hit/call counters reported in debug logging.
CACHED_KEYS = {}
CACHE_CALLS = 0
CACHE_HITS = 0
KEY_DELIM = "::"

try:
    CACHE_PREFIX = settings.CACHE_PREFIX
except AttributeError:
    # Fall back to the site id so keys stay distinct per site.
    CACHE_PREFIX = str(settings.SITE_ID)
    log.warn("No CACHE_PREFIX found in settings, using SITE_ID. Please update your settings to add a CACHE_PREFIX")

# A zero (or negative) CACHE_TIMEOUT disables the whole cache layer.
_CACHE_ENABLED = settings.CACHE_TIMEOUT > 0
class CacheWrapper(object):
    """Envelope stored in the cache; can flag values still being computed."""

    def __init__(self, val, inprocess=False):
        self.val = val
        self.inprocess = inprocess

    def __str__(self):
        return str(self.val)

    def __repr__(self):
        return repr(self.val)

    def wrap(cls, obj):
        """Return obj unchanged if already wrapped, otherwise wrap it."""
        if isinstance(obj, cls):
            return obj
        return cls(obj)
    wrap = classmethod(wrap)
class MethodNotFinishedError(Exception):
    """Raised when a cached value is still being computed by another caller."""
    def __init__(self, f):
        self.func = f  # the in-process marker value found in the cache


class NotCachedError(Exception):
    """Raised by cache_get when no value is stored under the derived key."""
    def __init__(self, k):
        self.key = k  # the fully-qualified cache key that missed


class CacheNotRespondingError(Exception):
    """Raised when a set/get round-trip through the cache backend fails."""
    pass
def cache_delete(*keys, **kwargs):
    """Delete one derived key (optionally with its children) or, when
    called with no arguments, everything this process has cached.

    Returns the list of keys that were removed.
    """
    removed = []
    if cache_enabled():
        global CACHED_KEYS
        log.debug('cache_delete')
        children = kwargs.pop('children', False)
        if (keys or kwargs):
            key = cache_key(*keys, **kwargs)
            if key in CACHED_KEYS:  # fix: dict.has_key() is deprecated and gone in Py3
                del CACHED_KEYS[key]
                removed.append(key)
                cache.delete(key)
            if children:
                # Children share the parent key as a "::"-delimited prefix.
                key = key + KEY_DELIM
                children = [x for x in CACHED_KEYS.keys() if x.startswith(key)]
                for k in children:
                    del CACHED_KEYS[k]
                    cache.delete(k)
                    removed.append(k)
        else:
            key = "All Keys"
            deleteneeded = _cache_flush_all()
            removed = CACHED_KEYS.keys()
            if deleteneeded:
                # Backend has no bulk flush; delete tracked keys one by one.
                for k in CACHED_KEYS:
                    cache.delete(k)
            CACHED_KEYS = {}
        if removed:
            log.debug("Cache delete: %s", removed)
        else:
            log.debug("No cached objects to delete for %s", key)
    return removed
def cache_delete_function(func):
    """Delete the cached entries (and their children) stored for ``func``."""
    return cache_delete(['func', func.__name__, func.__module__], children=True)
def cache_enabled():
    """Report whether the cache layer is currently active."""
    return _CACHE_ENABLED


def cache_enable(state=True):
    """Turn the cache layer on (default) or off."""
    global _CACHE_ENABLED
    _CACHE_ENABLED = state
def _cache_flush_all():
    """Flush memcached directly; return True when keys must instead be
    deleted one at a time by the caller."""
    if not is_memcached_backend():
        return True
    cache._cache.flush_all()
    return False
def cache_function(length=settings.CACHE_TIMEOUT):
    """
    A variant of the snippet posted by Jeff Wheeler at
    http://www.djangosnippets.org/snippets/109/

    Caches a function, using the function and its arguments as the key, and the return
    value as the value saved. It passes all arguments on to the function, as
    it should.

    The decorator itself takes a length argument, which is the number of
    seconds the cache will keep the result around.

    It will put a temp value in the cache while the function is
    processing. This should not matter in most cases, but if the app is using
    threads, you won't be able to get the previous value, and will need to
    wait until the function finishes. If this is not desired behavior, you can
    remove the first two lines after the ``else``.
    """
    def decorator(func):
        def inner_func(*args, **kwargs):
            if not cache_enabled():
                value = func(*args, **kwargs)
            else:
                try:
                    value = cache_get('func', func.__name__, func.__module__, args, kwargs)
                # fix: "except E, e" is Python-2-only; "as" works on 2.6+ and 3.x
                except NotCachedError as e:
                    # Set a temporary value while ``func`` is being processed,
                    # so concurrent threads can tell the work is in flight.
                    funcwrapper = CacheWrapper(".".join([func.__module__, func.__name__]), inprocess=True)
                    cache_set(e.key, value=funcwrapper, length=length, skiplog=True)
                    value = func(*args, **kwargs)
                    cache_set(e.key, value=value, length=length)
                except MethodNotFinishedError:
                    # Another caller is computing the value; just recompute.
                    value = func(*args, **kwargs)
            return value
        return inner_func
    return decorator
def cache_get(*keys, **kwargs):
    """Fetch a cached value; raise NotCachedError on a miss unless a
    ``default`` keyword was supplied.

    Raises MethodNotFinishedError when another caller is still computing
    the value (see cache_function).
    """
    if 'default' in kwargs:  # fix: dict.has_key() is deprecated and gone in Py3
        default_value = kwargs.pop('default')
        use_default = True
    else:
        use_default = False
    key = cache_key(keys, **kwargs)
    if not cache_enabled():
        raise NotCachedError(key)
    else:
        global CACHE_CALLS, CACHE_HITS
        CACHE_CALLS += 1
        if CACHE_CALLS == 1:
            # First use: verify the backend answers at all.
            cache_require()
        obj = cache.get(key)
        if obj and isinstance(obj, CacheWrapper):
            CACHE_HITS += 1
            CACHED_KEYS[key] = True
            log.debug('got cached [%i/%i]: %s', CACHE_CALLS, CACHE_HITS, key)
            if obj.inprocess:
                raise MethodNotFinishedError(obj.val)
            return obj.val
        else:
            # Stale bookkeeping entry: the backend no longer has the key.
            try:
                del CACHED_KEYS[key]
            except KeyError:
                pass
            if use_default:
                return default_value
            raise NotCachedError(key)
def cache_set(*keys, **kwargs):
    """Set an object into the cache under the key derived from ``keys``.

    Keyword arguments: ``value`` (required), ``length`` (seconds, defaults
    to settings.CACHE_TIMEOUT) and ``skiplog`` to silence debug logging.
    """
    if not cache_enabled():
        return
    global CACHED_KEYS
    obj = kwargs.pop('value')
    length = kwargs.pop('length', settings.CACHE_TIMEOUT)
    skiplog = kwargs.pop('skiplog', False)
    key = cache_key(keys, **kwargs)
    wrapped = CacheWrapper.wrap(obj)
    if not skiplog:
        log.debug('setting cache: %s', key)
    cache.set(key, wrapped, length)
    CACHED_KEYS[key] = True
def _hash_or_string(key):
    """Render ``key`` as a cache-key fragment: plain text for scalar
    values, the primary key for model-like objects, otherwise an md5
    digest of the pickled object."""
    if is_string_like(key) or isinstance(key, (types.IntType, types.LongType, types.FloatType)):
        return smart_str(key)
    else:
        try:
            # if it has a PK, use it (Django model instances).
            return str(key._get_pk_val())
        except AttributeError:
            return md5_hash(key)
def cache_contains(*keys, **kwargs):
    """Return True when the derived key is tracked as cached by this process."""
    key = cache_key(keys, **kwargs)
    return key in CACHED_KEYS  # fix: dict.has_key() is deprecated and gone in Py3
def cache_key(*keys, **pairs):
    """Smart key maker, returns the object itself if a key, else a list
    delimited by ':', automatically hashing any non-scalar objects."""
    if is_string_like(keys):
        keys = [keys]
    if is_list_or_tuple(keys):
        if len(keys) == 1 and is_list_or_tuple(keys[0]):
            # Unwrap a single nested list/tuple argument.
            keys = keys[0]
    else:
        # NOTE(review): non-sequence key objects are collapsed to a hash --
        # confirm this branch is reachable given *keys always arrives as a tuple.
        keys = [md5_hash(keys)]
    if pairs:
        keys = list(keys)
        klist = pairs.keys()
        klist.sort()  # sort for a deterministic key regardless of kwarg order
        for k in klist:
            keys.append(k)
            keys.append(pairs[k])
    key = KEY_DELIM.join([_hash_or_string(x) for x in keys])
    prefix = CACHE_PREFIX + KEY_DELIM
    if not key.startswith(prefix):
        key = prefix + key
    return key.replace(" ", ".")  # memcached forbids spaces in keys
def md5_hash(obj):
    """Return the 32-char hex MD5 digest of ``obj``'s pickled form.

    Used to build cache-key fragments from arbitrary objects.
    """
    import hashlib  # local import: replaces the deprecated `md5` module (removed in Py3)
    pickled = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
    return hashlib.md5(pickled).hexdigest()
def is_memcached_backend():
    """Detect whether Django's cache backend is memcached."""
    try:
        backend_module = cache._cache.__module__
    except AttributeError:
        return False
    return backend_module.endswith('memcache')
def cache_require():
    """Error if caching isn't running."""
    if cache_enabled():
        # Round-trip a sentinel value through the cache backend.
        key = cache_key('require_cache')
        cache_set(key, value='1')
        v = cache_get(key, default='0')
        if v != '1':
            raise CacheNotRespondingError()
        else:
            log.debug("Cache responding OK")
            return True
    # NOTE(review): returns None when caching is disabled -- confirm
    # callers treat a falsy result as "nothing to verify".
| false | true |
f7fa60c2eb33aa41af6762f478a7a41957396632 | 208 | py | Python | projects/serializers.py | KeoH/orchestrapi | 575e66a86c42b5c249fd943bb5f40c8c310139aa | [
"MIT"
] | 1 | 2021-07-05T19:37:37.000Z | 2021-07-05T19:37:37.000Z | projects/serializers.py | KeoH/orchestrapi | 575e66a86c42b5c249fd943bb5f40c8c310139aa | [
"MIT"
] | 6 | 2020-06-05T19:30:52.000Z | 2021-07-05T19:28:53.000Z | projects/serializers.py | KeoH/orchestrapi | 575e66a86c42b5c249fd943bb5f40c8c310139aa | [
"MIT"
] | 1 | 2020-05-15T23:58:24.000Z | 2020-05-15T23:58:24.000Z | from rest_framework import serializers
from .models import Project
class ProjectSerializer(serializers.ModelSerializer):
    """DRF serializer exposing a Project's id, name and network."""
    class Meta:
        model = Project
        fields = ['id', 'name', 'network']
| 18.909091 | 53 | 0.701923 | from rest_framework import serializers
from .models import Project
class ProjectSerializer(serializers.ModelSerializer):
    """DRF serializer exposing a Project's id, name and network."""
    class Meta:
        model = Project
        fields = ['id', 'name', 'network']
| true | true |
f7fa60ee824b460f2c2497316b8fd85faa86e2e2 | 9,238 | py | Python | transtory/shanghaimetro/publicdata.py | feynfan13/transtory | 4dd48033fdcd95b4a0dbc19f5b26d15b4b533979 | [
"MIT"
] | 1 | 2018-12-20T08:05:11.000Z | 2018-12-20T08:05:11.000Z | transtory/shanghaimetro/publicdata.py | feynfan13/transtory | 4dd48033fdcd95b4a0dbc19f5b26d15b4b533979 | [
"MIT"
] | null | null | null | transtory/shanghaimetro/publicdata.py | feynfan13/transtory | 4dd48033fdcd95b4a0dbc19f5b26d15b4b533979 | [
"MIT"
] | null | null | null | import os
import pandas as pd
from transtory.common import singleton
from .configs import get_configs, ShmSysConfigs
class ShmPublicData(object):
    """Public data, including
    -- Lines
    -- Stations
    -- Trains
    """
    # File name of the bundled train roster, looked up under the
    # configured publicdata folder (see _load_train_table_from_json).
    train_json_name = 'trains.json'
    def __init__(self):
        # Lazily-built train-vs-type table; see get_train_vs_type_table().
        self.train_vs_type = None
@staticmethod
def _get_train_sn_from_line_and_seq(line, seq, gen=None):
if gen is None:
return '{:s}{:03d}'.format(line, seq)
else:
return '{:s}{:03d}-{:02d}'.format(line, seq, gen)
def _add_train_and_type_in_sn_range(self, line, train_type: str, sn_range, gen=None):
seq_list = range(sn_range[0], sn_range[1] + 1)
for seq in seq_list:
self.train_line_type_list[0].append(self._get_train_sn_from_line_and_seq(line, seq, gen))
self.train_line_type_list[1].append(line)
self.train_line_type_list[2].append(train_type)
def get_train_vs_type_table(self):
if self.train_vs_type is None:
self.train_vs_type = self._make_train_vs_type_table()
return self.train_vs_type
def _load_train_table_from_json(self):
configs: ShmSysConfigs = get_configs()
json_path = os.sep.join([configs.publicdata_folder, self.train_json_name])
def _make_train_vs_type_table(self):
self.train_line_type_list = [[], [], []]
# Line 01
self._add_train_and_type_in_sn_range('01', "01A01-01", (1, 1))
self._add_train_and_type_in_sn_range('01', "01A01-02", (2, 2))
self._add_train_and_type_in_sn_range('01', "01A01-01", (3, 10))
self._add_train_and_type_in_sn_range('01', "01A03", (11, 13))
self._add_train_and_type_in_sn_range('01', "01A01-01", (14, 14))
self._add_train_and_type_in_sn_range('01', "01A03", (15, 16))
self._add_train_and_type_in_sn_range('01', "01A02-02", (17, 17))
self._add_train_and_type_in_sn_range('01', "01A02-01", (18, 25))
self._add_train_and_type_in_sn_range('01', "01A04-01", (26, 29))
self._add_train_and_type_in_sn_range('01', "01A04-02", (30, 37))
self._add_train_and_type_in_sn_range('01', "01A05", (40, 55))
self._add_train_and_type_in_sn_range('01', "01A06", (56, 66))
self._add_train_and_type_in_sn_range('01', "01A06", (67, 86))
# Line 02
self._add_train_and_type_in_sn_range('02', '02A01', (1, 16))
self._add_train_and_type_in_sn_range('02', '02A02', (33, 53))
self._add_train_and_type_in_sn_range('02', '02A03', (54, 69))
self._add_train_and_type_in_sn_range('02', '02A04-01', (70, 85), 1)
self._add_train_and_type_in_sn_range('02', '02A04', (70, 85))
self._add_train_and_type_in_sn_range('02', '02A05', (86, 116))
# Line 03
self._add_train_and_type_in_sn_range('03', "03A01", (1, 28))
self._add_train_and_type_in_sn_range('03', "03A02&04A02", (29, 36))
# Train 37-49 is borrowed from Line 04 and patched to 03xxx
# We use 04xxx when registering the respective trips
# Line 04
self._add_train_and_type_in_sn_range('04', "04A01", (1, 2)) # Siemens
self._add_train_and_type_in_sn_range('04', "04A01", (3, 28)) # 南车株洲
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (29, 29)) # 中车长春
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (30, 36)) # Alstom上海
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (37, 49)) # Alstom上海
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (50, 55)) # Alstom上海
# Line 05
self._add_train_and_type_in_sn_range('05', "05C01", (1, 13))
self._add_train_and_type_in_sn_range('05', "05C01", (15, 18))
self._add_train_and_type_in_sn_range('05', "05C02", (19, 51))
# Line 06
self._add_train_and_type_in_sn_range('06', "06C01", (1, 3))
self._add_train_and_type_in_sn_range('06', "06C01", (5, 13))
self._add_train_and_type_in_sn_range('06', "06C01", (15, 23))
self._add_train_and_type_in_sn_range('06', "06C02", (25, 33))
self._add_train_and_type_in_sn_range('06', "06C02", (35, 36))
self._add_train_and_type_in_sn_range('06', "06C03", (37, 43))
self._add_train_and_type_in_sn_range('06', "06C03", (45, 53))
self._add_train_and_type_in_sn_range('06', "06C03", (55, 56))
self._add_train_and_type_in_sn_range('06', "06C04", (57, 82))
# Line 07
self._add_train_and_type_in_sn_range('07', "07A01", (1, 42))
self._add_train_and_type_in_sn_range('07', "07A02", (43, 72))
self._add_train_and_type_in_sn_range('07', "07A03", (73, 79))
# Line 08
self._add_train_and_type_in_sn_range('08', "08C01", (1, 28))
self._add_train_and_type_in_sn_range('08', "08C02", (29, 45))
self._add_train_and_type_in_sn_range('08', "08C03", (46, 66))
self._add_train_and_type_in_sn_range('08', "08C04", (67, 90))
# Line 09
self._add_train_and_type_in_sn_range('09', "09A01", (1, 10))
self._add_train_and_type_in_sn_range('09', "09A02", (11, 51))
self._add_train_and_type_in_sn_range('09', "09A03", (53, 88))
self._add_train_and_type_in_sn_range('09', "09A04", (89, 105))
# Line 10
self._add_train_and_type_in_sn_range('10', "10A01", (1, 41))
self._add_train_and_type_in_sn_range('10', "10A02", (42, 67))
# Line 11
self._add_train_and_type_in_sn_range('11', "11A01", (1, 66))
self._add_train_and_type_in_sn_range('11', "11A02", (67, 72))
self._add_train_and_type_in_sn_range('11', "11A03", (73, 82))
# Line 12
self._add_train_and_type_in_sn_range('12', "12A01", (1, 41))
self._add_train_and_type_in_sn_range('12', "12A02", (42, 56))
self._add_train_and_type_in_sn_range('12', "12A03", (57, 75))
# Line 13
self._add_train_and_type_in_sn_range('13', "13A01", (1, 24))
self._add_train_and_type_in_sn_range('13', "13A02", (25, 62))
# Line 14
self._add_train_and_type_in_sn_range('14', "14A01", (1, 49))
# Line 15
self._add_train_and_type_in_sn_range('15', "15A01", (1, 54))
# Line 16
self._add_train_and_type_in_sn_range('16', "16A01", (1, 46))
self._add_train_and_type_in_sn_range('16', "16A02", (47, 61))
# Line 17
self._add_train_and_type_in_sn_range('17', "17A01", (1, 5))
self._add_train_and_type_in_sn_range('17', "17A01", (6, 28))
# Line 18
self._add_train_and_type_in_sn_range('18', "18A01", (1, 50))
# Line T01
self._add_train_and_type_in_sn_range('T01', 'APM300', (1, 11))
train_vs_type_df = pd.DataFrame.from_dict(data={'train': self.train_line_type_list[0],
'line': self.train_line_type_list[1],
'type': self.train_line_type_list[2]})
train_vs_type_df.index = train_vs_type_df['train']
return train_vs_type_df
get_public_data = singleton(ShmPublicData)
class ShmPublicDataApp(object):
instance = None
def __init__(self):
self.public_data: ShmPublicData = get_public_data()
@classmethod
def get_instance(cls):
if cls.instance is None:
cls.instance = ShmPublicDataApp()
return cls.instance
def get_type_of_train(self, train_sn):
query_table = self.public_data.get_train_vs_type_table()
# Include the case of updated trains
if '-' in train_sn:
train_sn = train_sn.split('-')[0]
return query_table.loc[train_sn, 'type']
def get_train_type_list(self):
train_table = self.public_data.get_train_vs_type_table()
train_type_list = train_table.groupby(by='type')['train'].count()
return train_type_list
def get_train_df(self):
return self.public_data.train_vs_type
def get_line_list(self):
train_table = self.public_data.get_train_vs_type_table()
line_list = train_table['line'].unique()
return line_list
def get_trains_of_line(self, line_str):
train_df = self.public_data.get_train_vs_type_table()
return train_df[train_df['line'] == int(line_str)]
@staticmethod
def get_train_sn(line: str, seq: int):
"""Get train sn from line and number in line
Before 2017-12, number takes two digits, such as 0101.
Currently, with expanding rolling stocks and lines, number can be
-- for main lines, 2-digit line number + 3-digit train sequence number, such as 01001.
-- for minor lines, 3-alphadigit line number + 3-digit train sequence number, such as T01001.
"""
# return "{:2d}{:2d}".format(line, number)
return '{:s}{:3d}'.format(line, seq)
@staticmethod
def get_line_and_seq_from_train_sn(train_sn):
if train_sn[0].isalpha(): # minor lines, such as APM
sep_loc = 3
else: # main lines, such as Line 1
sep_loc = 2
return train_sn[0:sep_loc], int(train_sn[sep_loc:])
get_public_data_app = singleton(ShmPublicDataApp)
| 45.960199 | 103 | 0.638991 | import os
import pandas as pd
from transtory.common import singleton
from .configs import get_configs, ShmSysConfigs
class ShmPublicData(object):
train_json_name = 'trains.json'
def __init__(self):
self.train_vs_type = None
@staticmethod
def _get_train_sn_from_line_and_seq(line, seq, gen=None):
if gen is None:
return '{:s}{:03d}'.format(line, seq)
else:
return '{:s}{:03d}-{:02d}'.format(line, seq, gen)
def _add_train_and_type_in_sn_range(self, line, train_type: str, sn_range, gen=None):
seq_list = range(sn_range[0], sn_range[1] + 1)
for seq in seq_list:
self.train_line_type_list[0].append(self._get_train_sn_from_line_and_seq(line, seq, gen))
self.train_line_type_list[1].append(line)
self.train_line_type_list[2].append(train_type)
def get_train_vs_type_table(self):
if self.train_vs_type is None:
self.train_vs_type = self._make_train_vs_type_table()
return self.train_vs_type
def _load_train_table_from_json(self):
configs: ShmSysConfigs = get_configs()
json_path = os.sep.join([configs.publicdata_folder, self.train_json_name])
def _make_train_vs_type_table(self):
self.train_line_type_list = [[], [], []]
self._add_train_and_type_in_sn_range('01', "01A01-01", (1, 1))
self._add_train_and_type_in_sn_range('01', "01A01-02", (2, 2))
self._add_train_and_type_in_sn_range('01', "01A01-01", (3, 10))
self._add_train_and_type_in_sn_range('01', "01A03", (11, 13))
self._add_train_and_type_in_sn_range('01', "01A01-01", (14, 14))
self._add_train_and_type_in_sn_range('01', "01A03", (15, 16))
self._add_train_and_type_in_sn_range('01', "01A02-02", (17, 17))
self._add_train_and_type_in_sn_range('01', "01A02-01", (18, 25))
self._add_train_and_type_in_sn_range('01', "01A04-01", (26, 29))
self._add_train_and_type_in_sn_range('01', "01A04-02", (30, 37))
self._add_train_and_type_in_sn_range('01', "01A05", (40, 55))
self._add_train_and_type_in_sn_range('01', "01A06", (56, 66))
self._add_train_and_type_in_sn_range('01', "01A06", (67, 86))
self._add_train_and_type_in_sn_range('02', '02A01', (1, 16))
self._add_train_and_type_in_sn_range('02', '02A02', (33, 53))
self._add_train_and_type_in_sn_range('02', '02A03', (54, 69))
self._add_train_and_type_in_sn_range('02', '02A04-01', (70, 85), 1)
self._add_train_and_type_in_sn_range('02', '02A04', (70, 85))
self._add_train_and_type_in_sn_range('02', '02A05', (86, 116))
self._add_train_and_type_in_sn_range('03', "03A01", (1, 28))
self._add_train_and_type_in_sn_range('03', "03A02&04A02", (29, 36))
self._add_train_and_type_in_sn_range('04', "04A01", (1, 2))
self._add_train_and_type_in_sn_range('04', "04A01", (3, 28))
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (29, 29))
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (30, 36))
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (37, 49))
self._add_train_and_type_in_sn_range('04', "03A02&04A02", (50, 55))
self._add_train_and_type_in_sn_range('05', "05C01", (1, 13))
self._add_train_and_type_in_sn_range('05', "05C01", (15, 18))
self._add_train_and_type_in_sn_range('05', "05C02", (19, 51))
self._add_train_and_type_in_sn_range('06', "06C01", (1, 3))
self._add_train_and_type_in_sn_range('06', "06C01", (5, 13))
self._add_train_and_type_in_sn_range('06', "06C01", (15, 23))
self._add_train_and_type_in_sn_range('06', "06C02", (25, 33))
self._add_train_and_type_in_sn_range('06', "06C02", (35, 36))
self._add_train_and_type_in_sn_range('06', "06C03", (37, 43))
self._add_train_and_type_in_sn_range('06', "06C03", (45, 53))
self._add_train_and_type_in_sn_range('06', "06C03", (55, 56))
self._add_train_and_type_in_sn_range('06', "06C04", (57, 82))
self._add_train_and_type_in_sn_range('07', "07A01", (1, 42))
self._add_train_and_type_in_sn_range('07', "07A02", (43, 72))
self._add_train_and_type_in_sn_range('07', "07A03", (73, 79))
self._add_train_and_type_in_sn_range('08', "08C01", (1, 28))
self._add_train_and_type_in_sn_range('08', "08C02", (29, 45))
self._add_train_and_type_in_sn_range('08', "08C03", (46, 66))
self._add_train_and_type_in_sn_range('08', "08C04", (67, 90))
self._add_train_and_type_in_sn_range('09', "09A01", (1, 10))
self._add_train_and_type_in_sn_range('09', "09A02", (11, 51))
self._add_train_and_type_in_sn_range('09', "09A03", (53, 88))
self._add_train_and_type_in_sn_range('09', "09A04", (89, 105))
self._add_train_and_type_in_sn_range('10', "10A01", (1, 41))
self._add_train_and_type_in_sn_range('10', "10A02", (42, 67))
self._add_train_and_type_in_sn_range('11', "11A01", (1, 66))
self._add_train_and_type_in_sn_range('11', "11A02", (67, 72))
self._add_train_and_type_in_sn_range('11', "11A03", (73, 82))
self._add_train_and_type_in_sn_range('12', "12A01", (1, 41))
self._add_train_and_type_in_sn_range('12', "12A02", (42, 56))
self._add_train_and_type_in_sn_range('12', "12A03", (57, 75))
self._add_train_and_type_in_sn_range('13', "13A01", (1, 24))
self._add_train_and_type_in_sn_range('13', "13A02", (25, 62))
self._add_train_and_type_in_sn_range('14', "14A01", (1, 49))
self._add_train_and_type_in_sn_range('15', "15A01", (1, 54))
self._add_train_and_type_in_sn_range('16', "16A01", (1, 46))
self._add_train_and_type_in_sn_range('16', "16A02", (47, 61))
self._add_train_and_type_in_sn_range('17', "17A01", (1, 5))
self._add_train_and_type_in_sn_range('17', "17A01", (6, 28))
self._add_train_and_type_in_sn_range('18', "18A01", (1, 50))
self._add_train_and_type_in_sn_range('T01', 'APM300', (1, 11))
train_vs_type_df = pd.DataFrame.from_dict(data={'train': self.train_line_type_list[0],
'line': self.train_line_type_list[1],
'type': self.train_line_type_list[2]})
train_vs_type_df.index = train_vs_type_df['train']
return train_vs_type_df
get_public_data = singleton(ShmPublicData)
class ShmPublicDataApp(object):
instance = None
def __init__(self):
self.public_data: ShmPublicData = get_public_data()
@classmethod
def get_instance(cls):
if cls.instance is None:
cls.instance = ShmPublicDataApp()
return cls.instance
def get_type_of_train(self, train_sn):
query_table = self.public_data.get_train_vs_type_table()
if '-' in train_sn:
train_sn = train_sn.split('-')[0]
return query_table.loc[train_sn, 'type']
def get_train_type_list(self):
train_table = self.public_data.get_train_vs_type_table()
train_type_list = train_table.groupby(by='type')['train'].count()
return train_type_list
def get_train_df(self):
return self.public_data.train_vs_type
def get_line_list(self):
train_table = self.public_data.get_train_vs_type_table()
line_list = train_table['line'].unique()
return line_list
def get_trains_of_line(self, line_str):
train_df = self.public_data.get_train_vs_type_table()
return train_df[train_df['line'] == int(line_str)]
@staticmethod
def get_train_sn(line: str, seq: int):
return '{:s}{:3d}'.format(line, seq)
@staticmethod
def get_line_and_seq_from_train_sn(train_sn):
if train_sn[0].isalpha():
sep_loc = 3
else:
sep_loc = 2
return train_sn[0:sep_loc], int(train_sn[sep_loc:])
get_public_data_app = singleton(ShmPublicDataApp)
| true | true |
f7fa6325c0c66eb6dbf2dfdbe962e844e057f342 | 347 | py | Python | docs/examples/expected_exceptions.py | vlcinsky/nameko | 88d7e5211de4fcc1c34cd7f84d7c77f0619c5f5d | [
"Apache-2.0"
] | 3,425 | 2016-11-10T17:12:42.000Z | 2022-03-31T19:07:49.000Z | docs/examples/expected_exceptions.py | vlcinsky/nameko | 88d7e5211de4fcc1c34cd7f84d7c77f0619c5f5d | [
"Apache-2.0"
] | 371 | 2020-03-04T21:51:56.000Z | 2022-03-31T20:59:11.000Z | docs/examples/expected_exceptions.py | vlcinsky/nameko | 88d7e5211de4fcc1c34cd7f84d7c77f0619c5f5d | [
"Apache-2.0"
] | 420 | 2016-11-17T05:46:42.000Z | 2022-03-23T12:36:06.000Z | from nameko.rpc import rpc
from .auth import Auth, Unauthorized
class Service:
name = "service"
auth = Auth()
@rpc(expected_exceptions=Unauthorized)
def update(self, data):
if not self.auth.has_role("admin"):
raise Unauthorized()
# perform update
raise TypeError("Whoops, genuine error.")
| 19.277778 | 49 | 0.639769 | from nameko.rpc import rpc
from .auth import Auth, Unauthorized
class Service:
name = "service"
auth = Auth()
@rpc(expected_exceptions=Unauthorized)
def update(self, data):
if not self.auth.has_role("admin"):
raise Unauthorized()
raise TypeError("Whoops, genuine error.")
| true | true |
f7fa64a912a6fdf7458fa98f32593eb327eddb1b | 7,960 | py | Python | pyKinectTools/utils/AlignCameras.py | colincsl/pyKinectTools | a84bb5b7ff9dd613576415932865c2ad435520b3 | [
"BSD-2-Clause-FreeBSD"
] | 33 | 2015-04-07T16:28:04.000Z | 2021-11-22T00:28:43.000Z | pyKinectTools/utils/AlignCameras.py | colincsl/pyKinectTools | a84bb5b7ff9dd613576415932865c2ad435520b3 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | pyKinectTools/utils/AlignCameras.py | colincsl/pyKinectTools | a84bb5b7ff9dd613576415932865c2ad435520b3 | [
"BSD-2-Clause-FreeBSD"
] | 13 | 2015-04-07T16:28:34.000Z | 2021-04-26T08:04:36.000Z | ''' Use this file to hand register multiple depth cameras with the 3D visualizer
Procedure:
1) Modify the scrip below for your files
2) After adding points, click the mayavi button in the window and add Transformation to the scene. Drag the second points to the transformation.
3) Manually match the two scenes
4) Click red button "Start/Stop Script Recording". Transform a tiny bit so that you see the transformation matrix
5) Write down/save the transformation matrix
'''
from pyKinectTools.utils.DepthUtils import *
from scipy.misc import imread
from mayavi import mlab
from mayavi.api import Engine
import cPickle as pickle
''' ---- Transforms ---
ICUDec2012 data:
#3->2: transform_data.transform.matrix.__setstate__({'elements': [0.9553782053802112, -0.09691967661345026, 0.27903236545178867, -392.81878278215254, 0.09283849668727677, 0.9952919671849423, 0.02783726980083738, 231.6724797545669, -0.2804166511056782, -0.0006901755293638524, 0.9598781305147085, -118.84124965680712, 0.0, 0.0, 0.0, 1.0]})
#1->2: transform_data2.transform.matrix.__setstate__({'elements': [-0.8531195226064485, -0.08215320378328564, 0.5152066878990207, 761.2299809410998, 0.3177589268248827, 0.7014041249433673, 0.6380137286418792, 1427.5420972165339, -0.4137829679564377, 0.7080134918351199, -0.5722766383564786, -3399.696025885259, 0.0, 0.0, 0.0, 1.0]})
Office 23Feb2013
top->bottom view
#1->2 [0.9955555989899513, 0.03914715069837748, 0.08565366257179756, 240.34720254711863,
-0.08535684788048972, 0.7593599156829527, 0.6450478485925556, 1305.7428154935583,
-0.039790172651939335, -0.6494921239292868, 0.759326493093817, -237.20556423494145,
0.0, 0.0, 0.0, 1.0]})
'''
'''
ICUDec2012
base_dir1 = '/media/Data/ICU_Dec2012/ICU_Dec2012_r40_c1/depth/356/14/1/'
base_dir23 = '/media/Data/ICU_Dec2012/ICU_Dec2012_r40_c2/depth/356/14/0/'
depthFile1 = base_dir1+'device_1/'+'depth_356_14_1_55_00_95.png'
depthFile2 = base_dir23+'device_1/'+'depth_356_14_0_10_01_44.png'
depthFile3 = base_dir23+'device_2/'+'depth_356_14_0_10_00_48.png'
'''
'''
# Office_25Feb2013
base_dir = '/Users/colin/Data/Office_25Feb2013/depth/56/17/31/'
depthFile1 = base_dir+'device_1/'+'depth_56_17_31_0_00_506677.png'
depthFile2 = base_dir+'device_2/'+'depth_56_17_31_0_00_510469.png'
'''
'''
# CIRL_28Feb2013
base_dir = '/Users/colin/Data/CIRL_28Feb2013/depth/59/13/42/'
depthFile1 = base_dir+'device_1/'+'depth_59_13_42_0_00_364016.png'
depthFile2 = base_dir+'device_2/'+'depth_59_13_42_0_00_645072.png'
result:
T = np.array([[0.857551855717905, 0.11935353392976167, 0.5003594195108932, -1053.586999301418],
[0.1430128492517155, 0.8790419590510106, -0.45478847740098743, 1081.8626448851123],
[-0.4941175363248235, 0.4615625289885183, 0.736754974282534, 1295.7083313896273],
[0.0, 0.0, 0.0, 1.0]])
'''
'''
# JHU CIRL pod
base_dir = '/Users/colin/Data/JHU_RGBD_Pose/CIRL_P1/depth/100/11/15/'
depthFile1 = base_dir+'device_1/'+'depth_100_11_15_59_13_725918.png'
depthFile2 = base_dir+'device_2/'+'depth_100_11_15_59_13_395133.png'
result:
T = np.array([[0.857551855717905, 0.11935353392976167, 0.5003594195108932, -1053.586999301418],
[0.1430128492517155, 0.8790419590510106, -0.45478847740098743, 1081.8626448851123],
[-0.4941175363248235, 0.4615625289885183, 0.736754974282534, 1295.7083313896273],
[0.0, 0.0, 0.0, 1.0]])
'''
''' --------------------Main setup-------------------------------- '''
depthIm1 = imread(depthFile1)
depthIm2 = imread(depthFile2)
pts1 = depthIm2XYZ(depthIm1).astype(np.int)
pts2 = depthIm2XYZ(depthIm2).astype(np.int)
engine = Engine()
engine.start()
figure = mlab.figure(1, bgcolor=(0,0,0), fgcolor=(1,1,1))
mlab.clf()
figure.scene.disable_render = True
interval = 40 # Don't show all points (otherwise it's slow!)
pts = np.array([x for x in pts1 if x[2] > -93500])
pts = np.vstack([[0,0,1], pts])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=30., colormap='summer')
pts = np.array([x for x in pts2 if x[2] > -93500])
pts = np.vstack([[0,0,1], pts])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=30., colormap='Blues')
# Copy description and transform here as 4x4 matrix
# e.g.
filename = 'Registration.dat'
description = "JHU CIRL Pod Bottom (good) cam to top (bad) cam"
T = np.array([[0.857551855717905, 0.11935353392976167, 0.5003594195108932, -1053.586999301418],
[0.1430128492517155, 0.8790419590510106, -0.45478847740098743, 1081.8626448851123],
[-0.4941175363248235, 0.4615625289885183, 0.736754974282534, 1295.7083313896273],
[0.0, 0.0, 0.0, 1.0]])
# save
pickle.dump({'description':description, 'transform':T}, open(filename, 'w'))
''' --------------------------------------------------------- '''
''' ------------------- Fine Tuning ------------------------- '''
''' --------------------------------------------------------- '''
from mayavi.filters.transform_data import TransformData
depthIm1 = imread(depthFile1)
depthIm2 = imread(depthFile2)
depthIm3 = imread(depthFile3)
''' Put all in the frame of 2 '''
pts1 = depthIm2XYZ(depthIm1)#.astype(np.int)
pts2 = depthIm2XYZ(depthIm2)#.astype(np.int)
pts3 = depthIm2XYZ(depthIm3).astype(np.int)
p1 = depthIm2PosIm(depthIm1)
p2 = depthIm2PosIm(depthIm2)
p3 = depthIm2PosIm(depthIm3)
'''3DViz'''
engine = Engine()
engine.start()
figure = mlab.figure(1, bgcolor=(0,0,0), fgcolor=(1,1,1))
mlab.clf()
figure.scene.disable_render = True
interval = 15
'''2'''
pts = np.array([x for x in pts2 if x[2] > -93500])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=10., colormap='Blues')
'''3'''
pts = np.array([x for x in pts3 if x[2] > -93500])
ptsViz2 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=10., colormap='PuOr')
transform_data = TransformData()
engine.add_filter(transform_data, engine.scenes[0].children[1])
transform_data.children = [engine.scenes[0].children[1].children[0]]
# engine.scenes[0].children[1].children[0]=[]
transform_data.transform.matrix.__setstate__({'elements': [0.9553782053802112, -0.09691967661345026, 0.27903236545178867, -392.81878278215254, 0.09283849668727677, 0.9952919671849423, 0.02783726980083738, 231.6724797545669, -0.2804166511056782, -0.0006901755293638524, 0.9598781305147085, -118.84124965680712, 0.0, 0.0, 0.0, 1.0]})
transform_data.widget.set_transform(transform_data.transform)
transform_data.filter.update()
transform_data.widget.enabled = False
'''1'''
pts = np.array([x for x in pts1 if x[2] > -93500])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=10., colormap='summer')
mlab.view(azimuth=0, elevation=0, distance=3000., focalpoint=(0,0,0), figure=figure)#, reset_roll=False)
figure.scene.disable_render = False
transform_data2 = TransformData()
engine.add_filter(transform_data2, engine.scenes[0].children[2])
transform_data2.children = [engine.scenes[0].children[2].children[0]]
# engine.scenes[0].children[2].children[0]=[]
transform_data2.transform.matrix.__setstate__({'elements': [-0.8531195226064485, -0.08215320378328564, 0.5152066878990207, 761.2299809410998, 0.3177589268248827, 0.7014041249433673, 0.6380137286418792, 1427.5420972165339, -0.4137829679564377, 0.7080134918351199, -0.5722766383564786, -3399.696025885259, 0.0, 0.0, 0.0, 1.0]})
transform_data2.widget.set_transform(transform_data1.transform)
transform_data2.filter.update()
transform_data2.widget.enabled = False
'''
mlab.view(azimuth=0, elevation=0, distance=3000., focalpoint=(0,0,0), figure=figure)#, reset_roll=False)
'''
| 44.222222 | 338 | 0.72098 |
from pyKinectTools.utils.DepthUtils import *
from scipy.misc import imread
from mayavi import mlab
from mayavi.api import Engine
import cPickle as pickle
depthIm1 = imread(depthFile1)
depthIm2 = imread(depthFile2)
pts1 = depthIm2XYZ(depthIm1).astype(np.int)
pts2 = depthIm2XYZ(depthIm2).astype(np.int)
engine = Engine()
engine.start()
figure = mlab.figure(1, bgcolor=(0,0,0), fgcolor=(1,1,1))
mlab.clf()
figure.scene.disable_render = True
interval = 40
pts = np.array([x for x in pts1 if x[2] > -93500])
pts = np.vstack([[0,0,1], pts])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=30., colormap='summer')
pts = np.array([x for x in pts2 if x[2] > -93500])
pts = np.vstack([[0,0,1], pts])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=30., colormap='Blues')
filename = 'Registration.dat'
description = "JHU CIRL Pod Bottom (good) cam to top (bad) cam"
T = np.array([[0.857551855717905, 0.11935353392976167, 0.5003594195108932, -1053.586999301418],
[0.1430128492517155, 0.8790419590510106, -0.45478847740098743, 1081.8626448851123],
[-0.4941175363248235, 0.4615625289885183, 0.736754974282534, 1295.7083313896273],
[0.0, 0.0, 0.0, 1.0]])
pickle.dump({'description':description, 'transform':T}, open(filename, 'w'))
from mayavi.filters.transform_data import TransformData
depthIm1 = imread(depthFile1)
depthIm2 = imread(depthFile2)
depthIm3 = imread(depthFile3)
pts1 = depthIm2XYZ(depthIm1)
pts2 = depthIm2XYZ(depthIm2)
pts3 = depthIm2XYZ(depthIm3).astype(np.int)
p1 = depthIm2PosIm(depthIm1)
p2 = depthIm2PosIm(depthIm2)
p3 = depthIm2PosIm(depthIm3)
engine = Engine()
engine.start()
figure = mlab.figure(1, bgcolor=(0,0,0), fgcolor=(1,1,1))
mlab.clf()
figure.scene.disable_render = True
interval = 15
pts = np.array([x for x in pts2 if x[2] > -93500])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=10., colormap='Blues')
pts = np.array([x for x in pts3 if x[2] > -93500])
ptsViz2 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=10., colormap='PuOr')
transform_data = TransformData()
engine.add_filter(transform_data, engine.scenes[0].children[1])
transform_data.children = [engine.scenes[0].children[1].children[0]]
transform_data.transform.matrix.__setstate__({'elements': [0.9553782053802112, -0.09691967661345026, 0.27903236545178867, -392.81878278215254, 0.09283849668727677, 0.9952919671849423, 0.02783726980083738, 231.6724797545669, -0.2804166511056782, -0.0006901755293638524, 0.9598781305147085, -118.84124965680712, 0.0, 0.0, 0.0, 1.0]})
transform_data.widget.set_transform(transform_data.transform)
transform_data.filter.update()
transform_data.widget.enabled = False
pts = np.array([x for x in pts1 if x[2] > -93500])
ptsViz1 = mlab.points3d(pts[::interval,0], pts[::interval,1], pts[::interval,2], 2.-(np.minimum(pts[::interval,2], 5000)/float((-pts[:,2]).max()))/1000., scale_factor=10., colormap='summer')
mlab.view(azimuth=0, elevation=0, distance=3000., focalpoint=(0,0,0), figure=figure)
figure.scene.disable_render = False
transform_data2 = TransformData()
engine.add_filter(transform_data2, engine.scenes[0].children[2])
transform_data2.children = [engine.scenes[0].children[2].children[0]]
transform_data2.transform.matrix.__setstate__({'elements': [-0.8531195226064485, -0.08215320378328564, 0.5152066878990207, 761.2299809410998, 0.3177589268248827, 0.7014041249433673, 0.6380137286418792, 1427.5420972165339, -0.4137829679564377, 0.7080134918351199, -0.5722766383564786, -3399.696025885259, 0.0, 0.0, 0.0, 1.0]})
transform_data2.widget.set_transform(transform_data1.transform)
transform_data2.filter.update()
transform_data2.widget.enabled = False
| true | true |
f7fa64f8c250a5640d137418842647c5bfc14206 | 17,076 | py | Python | env/lib/python3.6/site-packages/nacl/bindings/crypto_pwhash.py | escacan/GymTracker | cda8f821bf9e77fa442f778661fc2123cb590dc5 | [
"Apache-2.0"
] | 3 | 2018-07-04T12:21:31.000Z | 2020-10-27T09:07:00.000Z | nacl/bindings/crypto_pwhash.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1 | 2018-04-04T12:13:40.000Z | 2018-05-03T07:57:52.000Z | nacl/bindings/crypto_pwhash.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1 | 2020-05-03T01:13:16.000Z | 2020-05-03T01:13:16.000Z | # Copyright 2013 Donald Stufft and individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import sys
from six import integer_types
import nacl.exceptions as exc
from nacl._sodium import ffi, lib
from nacl.exceptions import ensure
crypto_pwhash_scryptsalsa208sha256_STRPREFIX = \
ffi.string(ffi.cast("char *",
lib.crypto_pwhash_scryptsalsa208sha256_strprefix()
)
)[:]
crypto_pwhash_scryptsalsa208sha256_SALTBYTES = \
lib.crypto_pwhash_scryptsalsa208sha256_saltbytes()
crypto_pwhash_scryptsalsa208sha256_STRBYTES = \
lib.crypto_pwhash_scryptsalsa208sha256_strbytes()
crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_passwd_min()
crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_passwd_max()
crypto_pwhash_scryptsalsa208sha256_BYTES_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_bytes_min()
crypto_pwhash_scryptsalsa208sha256_BYTES_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_bytes_max()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_min()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_max()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_min()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_max()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_interactive()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_interactive()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_sensitive()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_sensitive()
crypto_pwhash_ALG_ARGON2I13 = lib.crypto_pwhash_alg_argon2i13()
crypto_pwhash_ALG_ARGON2ID13 = lib.crypto_pwhash_alg_argon2id13()
crypto_pwhash_ALG_DEFAULT = lib.crypto_pwhash_alg_default()
crypto_pwhash_SALTBYTES = lib.crypto_pwhash_saltbytes()
crypto_pwhash_STRBYTES = lib.crypto_pwhash_strbytes()
crypto_pwhash_PASSWD_MIN = lib.crypto_pwhash_passwd_min()
crypto_pwhash_PASSWD_MAX = lib.crypto_pwhash_passwd_max()
crypto_pwhash_BYTES_MIN = lib.crypto_pwhash_bytes_min()
crypto_pwhash_BYTES_MAX = lib.crypto_pwhash_bytes_max()
crypto_pwhash_argon2i_STRPREFIX = \
ffi.string(ffi.cast("char *",
lib.crypto_pwhash_argon2i_strprefix()
)
)[:]
crypto_pwhash_argon2i_MEMLIMIT_MIN = \
lib.crypto_pwhash_argon2i_memlimit_min()
crypto_pwhash_argon2i_MEMLIMIT_MAX = \
lib.crypto_pwhash_argon2i_memlimit_max()
crypto_pwhash_argon2i_OPSLIMIT_MIN = \
lib.crypto_pwhash_argon2i_opslimit_min()
crypto_pwhash_argon2i_OPSLIMIT_MAX = \
lib.crypto_pwhash_argon2i_opslimit_max()
crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2i_opslimit_interactive()
crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2i_memlimit_interactive()
crypto_pwhash_argon2i_OPSLIMIT_MODERATE = \
lib.crypto_pwhash_argon2i_opslimit_moderate()
crypto_pwhash_argon2i_MEMLIMIT_MODERATE = \
lib.crypto_pwhash_argon2i_memlimit_moderate()
crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2i_opslimit_sensitive()
crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2i_memlimit_sensitive()
crypto_pwhash_argon2id_STRPREFIX = \
ffi.string(ffi.cast("char *",
lib.crypto_pwhash_argon2id_strprefix()
)
)[:]
crypto_pwhash_argon2id_MEMLIMIT_MIN = \
lib.crypto_pwhash_argon2id_memlimit_min()
crypto_pwhash_argon2id_MEMLIMIT_MAX = \
lib.crypto_pwhash_argon2id_memlimit_max()
crypto_pwhash_argon2id_OPSLIMIT_MIN = \
lib.crypto_pwhash_argon2id_opslimit_min()
crypto_pwhash_argon2id_OPSLIMIT_MAX = \
lib.crypto_pwhash_argon2id_opslimit_max()
crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2id_opslimit_interactive()
crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2id_memlimit_interactive()
crypto_pwhash_argon2id_OPSLIMIT_MODERATE = \
lib.crypto_pwhash_argon2id_opslimit_moderate()
crypto_pwhash_argon2id_MEMLIMIT_MODERATE = \
lib.crypto_pwhash_argon2id_memlimit_moderate()
crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2id_opslimit_sensitive()
crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2id_memlimit_sensitive()
SCRYPT_OPSLIMIT_INTERACTIVE = \
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE
SCRYPT_MEMLIMIT_INTERACTIVE = \
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE
SCRYPT_OPSLIMIT_SENSITIVE = \
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE
SCRYPT_MEMLIMIT_SENSITIVE = \
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE
SCRYPT_SALTBYTES = \
crypto_pwhash_scryptsalsa208sha256_SALTBYTES
SCRYPT_STRBYTES = \
crypto_pwhash_scryptsalsa208sha256_STRBYTES
SCRYPT_PR_MAX = ((1 << 30) - 1)
LOG2_UINT64_MAX = 63
UINT64_MAX = (1 << 64) - 1
SCRYPT_MAX_MEM = 32 * (1024 * 1024)
def _check_memory_occupation(n, r, p, maxmem=SCRYPT_MAX_MEM):
    """Validate scrypt cost parameters ``n``, ``r``, ``p`` against ``maxmem``.

    Mirrors libsodium's integer bounds checks; raises
    :class:`nacl.exceptions.ValueError` when the parameters are invalid or
    the required working memory would exceed ``maxmem`` bytes.
    """
    ensure(r != 0, 'Invalid block size',
           raising=exc.ValueError)
    ensure(p != 0, 'Invalid parallelization factor',
           raising=exc.ValueError)
    # n must be a power of two and at least 2
    ensure((n & (n - 1)) == 0, 'Cost factor must be a power of 2',
           raising=exc.ValueError)
    ensure(n > 1, 'Cost factor must be at least 2',
           raising=exc.ValueError)
    # Use floor division so the bound is exact; float division can round
    # near large values and admit an off-by-one at the boundary.
    ensure(p <= SCRYPT_PR_MAX // r,
           'p*r is greater than {0}'.format(SCRYPT_PR_MAX),
           raising=exc.ValueError)
    ensure(n < (1 << (16 * r)),
           raising=exc.ValueError)
    Blen = p * 128 * r
    i = UINT64_MAX // 128
    ensure(n + 2 <= i // r,
           raising=exc.ValueError)
    Vlen = 32 * r * (n + 2) * 4
    ensure(Blen <= UINT64_MAX - Vlen,
           raising=exc.ValueError)
    ensure(Blen <= sys.maxsize - Vlen,
           raising=exc.ValueError)
    ensure(Blen + Vlen <= maxmem,
           'Memory limit would be exceeded with the choosen n, r, p',
           raising=exc.ValueError)
def nacl_bindings_pick_scrypt_params(opslimit, memlimit):
    """Python port of libsodium's ``pickparams``.

    Translate an (opslimit, memlimit) pair into scrypt parameters,
    returning the tuple ``(n_log2, r, p)``.
    """
    opslimit = max(opslimit, 32768)
    r = 8
    if opslimit < memlimit // 32:
        # CPU-bound: fix p at 1 and derive n from the operation budget.
        p = 1
        half_maxn = (opslimit // (4 * r)) // 2
        n_log2 = 1
        while n_log2 < 62 and (1 << n_log2) <= half_maxn:
            n_log2 += 1
    else:
        # Memory-bound: derive n from the memory budget, then spread the
        # remaining operations across the parallelization factor p.
        half_maxn = (memlimit // (r * 128)) // 2
        n_log2 = 1
        while n_log2 < 62 and (1 << n_log2) <= half_maxn:
            n_log2 += 1
        maxrp = min((opslimit // 4) >> n_log2, 0x3fffffff)
        p = maxrp // r
    return n_log2, r, p
def crypto_pwhash_scryptsalsa208sha256_ll(passwd, salt, n, r, p, dklen=64,
                                          maxmem=SCRYPT_MAX_MEM):
    """
    Derive a cryptographic key using the ``passwd`` and ``salt``
    given as input.

    The work factor can be tuned by picking different
    values for the parameters ``n``, ``r`` and ``p``.

    :param bytes passwd:
    :param bytes salt:
    :param int n: CPU/memory cost factor; must be a power of 2 and > 1
    :param int r: block size
    :param int p: the parallelism factor
    :param int dklen: length in bytes of the derived key
    :param int maxmem: the maximum available memory available for scrypt's
                       operations
    :rtype: bytes
    """
    ensure(isinstance(n, integer_types),
           raising=TypeError)
    ensure(isinstance(r, integer_types),
           raising=TypeError)
    ensure(isinstance(p, integer_types),
           raising=TypeError)
    ensure(isinstance(passwd, bytes),
           raising=TypeError)
    ensure(isinstance(salt, bytes),
           raising=TypeError)
    # Reject parameter combinations whose working set would exceed maxmem.
    _check_memory_occupation(n, r, p, maxmem)
    buf = ffi.new("uint8_t[]", dklen)
    ret = lib.crypto_pwhash_scryptsalsa208sha256_ll(passwd, len(passwd),
                                                    salt, len(salt),
                                                    n, r, p,
                                                    buf, dklen)
    ensure(ret == 0, 'Unexpected failure in key derivation',
           raising=exc.RuntimeError)
    return ffi.buffer(ffi.cast("char *", buf), dklen)[:]
def crypto_pwhash_scryptsalsa208sha256_str(
        passwd, opslimit=SCRYPT_OPSLIMIT_INTERACTIVE,
        memlimit=SCRYPT_MEMLIMIT_INTERACTIVE):
    """
    Derive a cryptographic key using the ``passwd`` and ``salt``
    given as input, returning a string representation which includes
    the salt and the tuning parameters.

    The returned string can be directly stored as a password hash.

    :param bytes passwd:
    :param int opslimit: computational cost
    :param int memlimit: memory cost
    :return: serialized key hash, including salt and tuning parameters
    :rtype: bytes
    """
    # Enforce the bytes contract explicitly, matching the sibling
    # scrypt functions in this module.
    ensure(isinstance(passwd, bytes),
           raising=TypeError)
    buf = ffi.new("char[]", SCRYPT_STRBYTES)
    ret = lib.crypto_pwhash_scryptsalsa208sha256_str(buf, passwd,
                                                     len(passwd),
                                                     opslimit,
                                                     memlimit)
    ensure(ret == 0, 'Unexpected failure in password hashing',
           raising=exc.RuntimeError)
    return ffi.string(buf)
def crypto_pwhash_scryptsalsa208sha256_str_verify(passwd_hash, passwd):
    """
    Verifies the ``passwd`` against the ``passwd_hash`` that was generated.

    Returns True on success, raises
    :class:`nacl.exceptions.InvalidkeyError` on mismatch.

    :param passwd_hash: bytes; serialized hash produced by
        :func:`crypto_pwhash_scryptsalsa208sha256_str`
    :param passwd: bytes
    :rtype: boolean
    """
    # STRBYTES counts the trailing NUL, which ffi.string strips; hence -1.
    ensure(len(passwd_hash) == SCRYPT_STRBYTES - 1, 'Invalid password hash',
           raising=exc.ValueError)
    ret = lib.crypto_pwhash_scryptsalsa208sha256_str_verify(passwd_hash,
                                                            passwd,
                                                            len(passwd))
    ensure(ret == 0,
           "Wrong password",
           raising=exc.InvalidkeyError)
    # all went well, therefore:
    return True
def _check_argon2_limits_alg(opslimit, memlimit, alg):
    """Validate ``opslimit``/``memlimit`` for the given argon2 variant.

    Raises nacl.exceptions.ValueError when a limit is out of range and
    nacl.exceptions.TypeError for an unsupported ``alg``.
    """
    bounds = {
        crypto_pwhash_ALG_ARGON2I13: (
            crypto_pwhash_argon2i_MEMLIMIT_MIN,
            crypto_pwhash_argon2i_MEMLIMIT_MAX,
            crypto_pwhash_argon2i_OPSLIMIT_MIN,
            crypto_pwhash_argon2i_OPSLIMIT_MAX,
        ),
        crypto_pwhash_ALG_ARGON2ID13: (
            crypto_pwhash_argon2id_MEMLIMIT_MIN,
            crypto_pwhash_argon2id_MEMLIMIT_MAX,
            crypto_pwhash_argon2id_OPSLIMIT_MIN,
            crypto_pwhash_argon2id_OPSLIMIT_MAX,
        ),
    }
    if alg not in bounds:
        raise exc.TypeError('Unsupported algorithm')
    mem_min, mem_max, ops_min, ops_max = bounds[alg]
    # Memory limit is checked before the operation limit, matching the
    # original per-algorithm branches.
    if memlimit < mem_min:
        raise exc.ValueError('memlimit must be at least {0} bytes'.format(
            mem_min))
    elif memlimit > mem_max:
        raise exc.ValueError('memlimit must be at most {0} bytes'.format(
            mem_max))
    if opslimit < ops_min:
        raise exc.ValueError('opslimit must be at least {0}'.format(
            ops_min))
    elif opslimit > ops_max:
        raise exc.ValueError('opslimit must be at most {0}'.format(
            ops_max))
def crypto_pwhash_alg(outlen, passwd, salt, opslimit, memlimit, alg):
    """
    Derive a raw cryptographic key using the ``passwd`` and the ``salt``
    given as input to the ``alg`` algorithm.

    :param outlen: the length of the derived key
    :type outlen: int
    :param passwd: The input password
    :type passwd: bytes
    :param salt: the salt; must be exactly ``crypto_pwhash_SALTBYTES`` long
    :type salt: bytes
    :param opslimit: computational cost
    :type opslimit: int
    :param memlimit: memory cost
    :type memlimit: int
    :param alg: algorithm identifier
    :type alg: int
    :return: derived key
    :rtype: bytes
    """
    ensure(isinstance(outlen, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(opslimit, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(memlimit, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(alg, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(passwd, bytes),
           raising=exc.TypeError)
    # Check the salt's type symmetrically with passwd before measuring it.
    ensure(isinstance(salt, bytes),
           raising=exc.TypeError)
    if len(salt) != crypto_pwhash_SALTBYTES:
        raise exc.ValueError("salt must be exactly {0} bytes long".format(
            crypto_pwhash_SALTBYTES))
    if outlen < crypto_pwhash_BYTES_MIN:
        raise exc.ValueError(
            'derived key must be at least {0} bytes long'.format(
                crypto_pwhash_BYTES_MIN))
    elif outlen > crypto_pwhash_BYTES_MAX:
        raise exc.ValueError(
            'derived key must be at most {0} bytes long'.format(
                crypto_pwhash_BYTES_MAX))
    _check_argon2_limits_alg(opslimit, memlimit, alg)
    outbuf = ffi.new("unsigned char[]", outlen)
    ret = lib.crypto_pwhash(outbuf, outlen, passwd, len(passwd),
                            salt, opslimit, memlimit, alg)
    ensure(ret == 0, 'Unexpected failure in key derivation',
           raising=exc.RuntimeError)
    return ffi.buffer(outbuf, outlen)[:]
def crypto_pwhash_str_alg(passwd, opslimit, memlimit, alg):
    """
    Derive a cryptographic key using the ``passwd`` given as input
    and a random ``salt``, returning a string representation which
    includes the salt, the tuning parameters and the used algorithm.

    :param passwd: The input password
    :type passwd: bytes
    :param opslimit: computational cost
    :type opslimit: int
    :param memlimit: memory cost
    :type memlimit: int
    :param alg: The algorithm to use
    :type alg: int
    :return: serialized derived key and parameters
    :rtype: bytes
    """
    ensure(isinstance(opslimit, integer_types),
           raising=TypeError)
    ensure(isinstance(memlimit, integer_types),
           raising=TypeError)
    ensure(isinstance(passwd, bytes),
           raising=TypeError)
    _check_argon2_limits_alg(opslimit, memlimit, alg)
    # Use the library constant instead of a hard-coded 128; libsodium
    # guarantees the serialized hash (incl. NUL) fits in STRBYTES.
    outbuf = ffi.new("char[]", crypto_pwhash_STRBYTES)
    ret = lib.crypto_pwhash_str_alg(outbuf, passwd, len(passwd),
                                    opslimit, memlimit, alg)
    ensure(ret == 0, 'Unexpected failure in key derivation',
           raising=exc.RuntimeError)
    return ffi.string(outbuf)
def crypto_pwhash_str_verify(passwd_hash, passwd):
    """
    Verifies the ``passwd`` against a given password hash.

    Wraps::

        int crypto_pwhash_str_verify(const char str[128],
                                     const char * const passwd,
                                     unsigned long long passwdlen);

    Returns True on success, raises InvalidkeyError on failure.

    :param passwd_hash: saved password hash
    :type passwd_hash: bytes
    :param passwd: password to be checked
    :type passwd: bytes
    :return: success
    :rtype: boolean
    """
    # Type checks run in the same order as before: hash first, then passwd.
    for value in (passwd_hash, passwd):
        ensure(isinstance(value, bytes),
               raising=TypeError)
    # 127 content bytes + terminating NUL fill the 128-byte C buffer.
    ensure(len(passwd_hash) <= 127,
           "Hash must be at most 127 bytes long",
           raising=exc.ValueError)
    rc = lib.crypto_pwhash_str_verify(passwd_hash, passwd, len(passwd))
    ensure(rc == 0,
           "Wrong password",
           raising=exc.InvalidkeyError)
    return True
crypto_pwhash_argon2i_str_verify = crypto_pwhash_str_verify
| 36.025316 | 78 | 0.685406 |
from __future__ import absolute_import, division, print_function
import sys
from six import integer_types
import nacl.exceptions as exc
from nacl._sodium import ffi, lib
from nacl.exceptions import ensure
crypto_pwhash_scryptsalsa208sha256_STRPREFIX = \
ffi.string(ffi.cast("char *",
lib.crypto_pwhash_scryptsalsa208sha256_strprefix()
)
)[:]
crypto_pwhash_scryptsalsa208sha256_SALTBYTES = \
lib.crypto_pwhash_scryptsalsa208sha256_saltbytes()
crypto_pwhash_scryptsalsa208sha256_STRBYTES = \
lib.crypto_pwhash_scryptsalsa208sha256_strbytes()
crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_passwd_min()
crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_passwd_max()
crypto_pwhash_scryptsalsa208sha256_BYTES_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_bytes_min()
crypto_pwhash_scryptsalsa208sha256_BYTES_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_bytes_max()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_min()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_max()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_min()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_max()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_interactive()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_interactive()
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_opslimit_sensitive()
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE = \
lib.crypto_pwhash_scryptsalsa208sha256_memlimit_sensitive()
crypto_pwhash_ALG_ARGON2I13 = lib.crypto_pwhash_alg_argon2i13()
crypto_pwhash_ALG_ARGON2ID13 = lib.crypto_pwhash_alg_argon2id13()
crypto_pwhash_ALG_DEFAULT = lib.crypto_pwhash_alg_default()
crypto_pwhash_SALTBYTES = lib.crypto_pwhash_saltbytes()
crypto_pwhash_STRBYTES = lib.crypto_pwhash_strbytes()
crypto_pwhash_PASSWD_MIN = lib.crypto_pwhash_passwd_min()
crypto_pwhash_PASSWD_MAX = lib.crypto_pwhash_passwd_max()
crypto_pwhash_BYTES_MIN = lib.crypto_pwhash_bytes_min()
crypto_pwhash_BYTES_MAX = lib.crypto_pwhash_bytes_max()
crypto_pwhash_argon2i_STRPREFIX = \
ffi.string(ffi.cast("char *",
lib.crypto_pwhash_argon2i_strprefix()
)
)[:]
crypto_pwhash_argon2i_MEMLIMIT_MIN = \
lib.crypto_pwhash_argon2i_memlimit_min()
crypto_pwhash_argon2i_MEMLIMIT_MAX = \
lib.crypto_pwhash_argon2i_memlimit_max()
crypto_pwhash_argon2i_OPSLIMIT_MIN = \
lib.crypto_pwhash_argon2i_opslimit_min()
crypto_pwhash_argon2i_OPSLIMIT_MAX = \
lib.crypto_pwhash_argon2i_opslimit_max()
crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2i_opslimit_interactive()
crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2i_memlimit_interactive()
crypto_pwhash_argon2i_OPSLIMIT_MODERATE = \
lib.crypto_pwhash_argon2i_opslimit_moderate()
crypto_pwhash_argon2i_MEMLIMIT_MODERATE = \
lib.crypto_pwhash_argon2i_memlimit_moderate()
crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2i_opslimit_sensitive()
crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2i_memlimit_sensitive()
crypto_pwhash_argon2id_STRPREFIX = \
ffi.string(ffi.cast("char *",
lib.crypto_pwhash_argon2id_strprefix()
)
)[:]
crypto_pwhash_argon2id_MEMLIMIT_MIN = \
lib.crypto_pwhash_argon2id_memlimit_min()
crypto_pwhash_argon2id_MEMLIMIT_MAX = \
lib.crypto_pwhash_argon2id_memlimit_max()
crypto_pwhash_argon2id_OPSLIMIT_MIN = \
lib.crypto_pwhash_argon2id_opslimit_min()
crypto_pwhash_argon2id_OPSLIMIT_MAX = \
lib.crypto_pwhash_argon2id_opslimit_max()
crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2id_opslimit_interactive()
crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE = \
lib.crypto_pwhash_argon2id_memlimit_interactive()
crypto_pwhash_argon2id_OPSLIMIT_MODERATE = \
lib.crypto_pwhash_argon2id_opslimit_moderate()
crypto_pwhash_argon2id_MEMLIMIT_MODERATE = \
lib.crypto_pwhash_argon2id_memlimit_moderate()
crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2id_opslimit_sensitive()
crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE = \
lib.crypto_pwhash_argon2id_memlimit_sensitive()
SCRYPT_OPSLIMIT_INTERACTIVE = \
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE
SCRYPT_MEMLIMIT_INTERACTIVE = \
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE
SCRYPT_OPSLIMIT_SENSITIVE = \
crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE
SCRYPT_MEMLIMIT_SENSITIVE = \
crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE
SCRYPT_SALTBYTES = \
crypto_pwhash_scryptsalsa208sha256_SALTBYTES
SCRYPT_STRBYTES = \
crypto_pwhash_scryptsalsa208sha256_STRBYTES
SCRYPT_PR_MAX = ((1 << 30) - 1)
LOG2_UINT64_MAX = 63
UINT64_MAX = (1 << 64) - 1
SCRYPT_MAX_MEM = 32 * (1024 * 1024)
def _check_memory_occupation(n, r, p, maxmem=SCRYPT_MAX_MEM):
    """Validate scrypt cost parameters ``n``, ``r``, ``p`` against ``maxmem``.

    Raises nacl.exceptions.ValueError when a parameter is invalid or the
    required working memory would exceed ``maxmem`` bytes.
    """
    ensure(r != 0, 'Invalid block size',
           raising=exc.ValueError)
    ensure(p != 0, 'Invalid parallelization factor',
           raising=exc.ValueError)
    # n must be a power of two and at least 2
    ensure((n & (n - 1)) == 0, 'Cost factor must be a power of 2',
           raising=exc.ValueError)
    ensure(n > 1, 'Cost factor must be at least 2',
           raising=exc.ValueError)
    ensure(p <= SCRYPT_PR_MAX / r,
           'p*r is greater than {0}'.format(SCRYPT_PR_MAX),
           raising=exc.ValueError)
    ensure(n < (1 << (16 * r)),
           raising=exc.ValueError)
    # B and V are scrypt's two big allocations; reject overflow / maxmem.
    Blen = p * 128 * r
    i = UINT64_MAX / 128
    ensure(n + 2 <= i / r,
           raising=exc.ValueError)
    Vlen = 32 * r * (n + 2) * 4
    ensure(Blen <= UINT64_MAX - Vlen,
           raising=exc.ValueError)
    ensure(Blen <= sys.maxsize - Vlen,
           raising=exc.ValueError)
    ensure(Blen + Vlen <= maxmem,
           'Memory limit would be exceeded with the choosen n, r, p',
           raising=exc.ValueError)
def nacl_bindings_pick_scrypt_params(opslimit, memlimit):
    """Python implementation of libsodium's ``pickparams``.

    Translates an (opslimit, memlimit) pair into scrypt parameters,
    returning the tuple ``(n_log2, r, p)``.
    """
    if opslimit < 32768:
        opslimit = 32768
    r = 8
    if opslimit < (memlimit // 32):
        # CPU-bound: fix p at 1 and derive n from the operation budget.
        p = 1
        maxn = opslimit // (4 * r)
        for n_log2 in range(1, 63):
            if (2 ** n_log2) > (maxn // 2):
                break
    else:
        # Memory-bound: derive n from the memory budget, then spread the
        # remaining operations across the parallelization factor p.
        maxn = memlimit // (r * 128)
        for n_log2 in range(1, 63):
            if (2 ** n_log2) > maxn // 2:
                break
        maxrp = (opslimit // 4) // (2 ** n_log2)
        if maxrp > 0x3fffffff:
            maxrp = 0x3fffffff
        p = maxrp // r
    return n_log2, r, p
def crypto_pwhash_scryptsalsa208sha256_ll(passwd, salt, n, r, p, dklen=64,
                                          maxmem=SCRYPT_MAX_MEM):
    """Derive a ``dklen``-byte key from ``passwd`` and ``salt`` via scrypt.

    :param bytes passwd:
    :param bytes salt:
    :param int n: CPU/memory cost factor; must be a power of 2 and > 1
    :param int r: block size
    :param int p: parallelism factor
    :param int dklen: length in bytes of the derived key
    :param int maxmem: maximum memory scrypt may use
    :rtype: bytes
    """
    ensure(isinstance(n, integer_types),
           raising=TypeError)
    ensure(isinstance(r, integer_types),
           raising=TypeError)
    ensure(isinstance(p, integer_types),
           raising=TypeError)
    ensure(isinstance(passwd, bytes),
           raising=TypeError)
    ensure(isinstance(salt, bytes),
           raising=TypeError)
    # Reject parameter combinations whose working set would exceed maxmem.
    _check_memory_occupation(n, r, p, maxmem)
    buf = ffi.new("uint8_t[]", dklen)
    ret = lib.crypto_pwhash_scryptsalsa208sha256_ll(passwd, len(passwd),
                                                    salt, len(salt),
                                                    n, r, p,
                                                    buf, dklen)
    ensure(ret == 0, 'Unexpected failure in key derivation',
           raising=exc.RuntimeError)
    return ffi.buffer(ffi.cast("char *", buf), dklen)[:]
def crypto_pwhash_scryptsalsa208sha256_str(
        passwd, opslimit=SCRYPT_OPSLIMIT_INTERACTIVE,
        memlimit=SCRYPT_MEMLIMIT_INTERACTIVE):
    """Hash ``passwd`` with scrypt, returning a self-describing string.

    The result embeds the salt and tuning parameters and can be stored
    directly as a password hash.

    :param bytes passwd:
    :param int opslimit: computational cost
    :param int memlimit: memory cost
    :rtype: bytes
    """
    buf = ffi.new("char[]", SCRYPT_STRBYTES)
    ret = lib.crypto_pwhash_scryptsalsa208sha256_str(buf, passwd,
                                                     len(passwd),
                                                     opslimit,
                                                     memlimit)
    ensure(ret == 0, 'Unexpected failure in password hashing',
           raising=exc.RuntimeError)
    return ffi.string(buf)
def crypto_pwhash_scryptsalsa208sha256_str_verify(passwd_hash, passwd):
    """Verify ``passwd`` against a serialized scrypt ``passwd_hash``.

    Returns True on success; raises
    nacl.exceptions.InvalidkeyError on mismatch.

    :param passwd_hash: bytes
    :param passwd: bytes
    :rtype: boolean
    """
    # STRBYTES counts the trailing NUL, which ffi.string strips; hence -1.
    ensure(len(passwd_hash) == SCRYPT_STRBYTES - 1, 'Invalid password hash',
           raising=exc.ValueError)
    ret = lib.crypto_pwhash_scryptsalsa208sha256_str_verify(passwd_hash,
                                                            passwd,
                                                            len(passwd))
    ensure(ret == 0,
           "Wrong password",
           raising=exc.InvalidkeyError)
    return True
def _check_argon2_limits_alg(opslimit, memlimit, alg):
    """Validate ``opslimit``/``memlimit`` for the given argon2 variant.

    Raises nacl.exceptions.ValueError when a limit is out of range and
    nacl.exceptions.TypeError for an unsupported ``alg``.
    """
    if (alg == crypto_pwhash_ALG_ARGON2I13):
        if memlimit < crypto_pwhash_argon2i_MEMLIMIT_MIN:
            raise exc.ValueError('memlimit must be at least {0} bytes'.format(
                crypto_pwhash_argon2i_MEMLIMIT_MIN))
        elif memlimit > crypto_pwhash_argon2i_MEMLIMIT_MAX:
            raise exc.ValueError('memlimit must be at most {0} bytes'.format(
                crypto_pwhash_argon2i_MEMLIMIT_MAX))
        if opslimit < crypto_pwhash_argon2i_OPSLIMIT_MIN:
            raise exc.ValueError('opslimit must be at least {0}'.format(
                crypto_pwhash_argon2i_OPSLIMIT_MIN))
        elif opslimit > crypto_pwhash_argon2i_OPSLIMIT_MAX:
            raise exc.ValueError('opslimit must be at most {0}'.format(
                crypto_pwhash_argon2i_OPSLIMIT_MAX))
    elif (alg == crypto_pwhash_ALG_ARGON2ID13):
        if memlimit < crypto_pwhash_argon2id_MEMLIMIT_MIN:
            raise exc.ValueError('memlimit must be at least {0} bytes'.format(
                crypto_pwhash_argon2id_MEMLIMIT_MIN))
        elif memlimit > crypto_pwhash_argon2id_MEMLIMIT_MAX:
            raise exc.ValueError('memlimit must be at most {0} bytes'.format(
                crypto_pwhash_argon2id_MEMLIMIT_MAX))
        if opslimit < crypto_pwhash_argon2id_OPSLIMIT_MIN:
            raise exc.ValueError('opslimit must be at least {0}'.format(
                crypto_pwhash_argon2id_OPSLIMIT_MIN))
        elif opslimit > crypto_pwhash_argon2id_OPSLIMIT_MAX:
            raise exc.ValueError('opslimit must be at most {0}'.format(
                crypto_pwhash_argon2id_OPSLIMIT_MAX))
    else:
        raise exc.TypeError('Unsupported algorithm')
def crypto_pwhash_alg(outlen, passwd, salt, opslimit, memlimit, alg):
    """Derive a raw ``outlen``-byte key from ``passwd`` and ``salt``
    using the argon2 variant selected by ``alg``.

    :param int outlen: length of the derived key
    :param bytes passwd: the input password
    :param bytes salt: must be exactly ``crypto_pwhash_SALTBYTES`` long
    :param int opslimit: computational cost
    :param int memlimit: memory cost
    :param int alg: algorithm identifier
    :rtype: bytes
    """
    ensure(isinstance(outlen, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(opslimit, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(memlimit, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(alg, integer_types),
           raising=exc.TypeError)
    ensure(isinstance(passwd, bytes),
           raising=exc.TypeError)
    if len(salt) != crypto_pwhash_SALTBYTES:
        raise exc.ValueError("salt must be exactly {0} bytes long".format(
            crypto_pwhash_SALTBYTES))
    if outlen < crypto_pwhash_BYTES_MIN:
        raise exc.ValueError(
            'derived key must be at least {0} bytes long'.format(
                crypto_pwhash_BYTES_MIN))
    elif outlen > crypto_pwhash_BYTES_MAX:
        raise exc.ValueError(
            'derived key must be at most {0} bytes long'.format(
                crypto_pwhash_BYTES_MAX))
    _check_argon2_limits_alg(opslimit, memlimit, alg)
    outbuf = ffi.new("unsigned char[]", outlen)
    ret = lib.crypto_pwhash(outbuf, outlen, passwd, len(passwd),
                            salt, opslimit, memlimit, alg)
    ensure(ret == 0, 'Unexpected failure in key derivation',
           raising=exc.RuntimeError)
    return ffi.buffer(outbuf, outlen)[:]
def crypto_pwhash_str_alg(passwd, opslimit, memlimit, alg):
    """Hash ``passwd`` with a random salt, returning a self-describing
    string that embeds the salt, tuning parameters and algorithm.

    :param bytes passwd: the input password
    :param int opslimit: computational cost
    :param int memlimit: memory cost
    :param int alg: the algorithm to use
    :rtype: bytes
    """
    ensure(isinstance(opslimit, integer_types),
           raising=TypeError)
    ensure(isinstance(memlimit, integer_types),
           raising=TypeError)
    ensure(isinstance(passwd, bytes),
           raising=TypeError)
    _check_argon2_limits_alg(opslimit, memlimit, alg)
    # 128 bytes holds the serialized hash plus trailing NUL.
    outbuf = ffi.new("char[]", 128)
    ret = lib.crypto_pwhash_str_alg(outbuf, passwd, len(passwd),
                                    opslimit, memlimit, alg)
    ensure(ret == 0, 'Unexpected failure in key derivation',
           raising=exc.RuntimeError)
    return ffi.string(outbuf)
def crypto_pwhash_str_verify(passwd_hash, passwd):
    """Verify ``passwd`` against a serialized argon2 ``passwd_hash``.

    Returns True on success; raises
    nacl.exceptions.InvalidkeyError on mismatch.

    :param bytes passwd_hash: saved password hash
    :param bytes passwd: password to be checked
    :rtype: boolean
    """
    ensure(isinstance(passwd_hash, bytes),
           raising=TypeError)
    ensure(isinstance(passwd, bytes),
           raising=TypeError)
    # 127 content bytes + terminating NUL fill the 128-byte C buffer.
    ensure(len(passwd_hash) <= 127,
           "Hash must be at most 127 bytes long",
           raising=exc.ValueError)
    ret = lib.crypto_pwhash_str_verify(passwd_hash, passwd, len(passwd))
    ensure(ret == 0,
           "Wrong password",
           raising=exc.InvalidkeyError)
    return True
| true | true |
f7fa6aa837d5ab35ff608cdc236096f65ff30d1e | 1,373 | py | Python | kbc/regularizers/__init__.py | AgaDob/concept_formation_in_knowledge_graphs | 9d68981207dddecb5a5d7de2d016333fd44ee6dc | [
"MIT"
] | null | null | null | kbc/regularizers/__init__.py | AgaDob/concept_formation_in_knowledge_graphs | 9d68981207dddecb5a5d7de2d016333fd44ee6dc | [
"MIT"
] | null | null | null | kbc/regularizers/__init__.py | AgaDob/concept_formation_in_knowledge_graphs | 9d68981207dddecb5a5d7de2d016333fd44ee6dc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from kbc.regularizers.base import Regularizer
from kbc.regularizers.base import F2
from kbc.regularizers.base import L1
from kbc.regularizers.base import N3
from kbc.regularizers.base import NX
from kbc.regularizers.adaptive import AdaptiveRegularizer
from kbc.regularizers.adaptive import NXAdaptiveRegularizer
from kbc.regularizers.adaptive import FixedLambdaNXAdaptiveRegularizer
from kbc.regularizers.adaptive import ConstantAdaptiveRegularizer
from kbc.regularizers.adaptive import EmbeddingAdaptiveRegularizer
from kbc.regularizers.adaptive import LinearAdaptiveRegularizer
from kbc.regularizers.adaptive import GatedLinearAdaptiveRegularizer
from kbc.regularizers.adaptive import GatedLinearSigmoidAdaptiveRegularizer
from kbc.regularizers.bregman import DiagonalMahalanobisAdaptiveRegularizer
from kbc.regularizers.bregman import ProjectedDiagonalMahalanobisAdaptiveRegularizer
__all__ = [
'Regularizer',
'F2',
'L1',
'N3',
'NX',
'AdaptiveRegularizer',
'NXAdaptiveRegularizer',
'FixedLambdaNXAdaptiveRegularizer',
'ConstantAdaptiveRegularizer',
'EmbeddingAdaptiveRegularizer',
'LinearAdaptiveRegularizer',
'GatedLinearAdaptiveRegularizer',
'GatedLinearSigmoidAdaptiveRegularizer',
'DiagonalMahalanobisAdaptiveRegularizer',
'ProjectedDiagonalMahalanobisAdaptiveRegularizer'
]
| 33.487805 | 84 | 0.827385 |
from kbc.regularizers.base import Regularizer
from kbc.regularizers.base import F2
from kbc.regularizers.base import L1
from kbc.regularizers.base import N3
from kbc.regularizers.base import NX
from kbc.regularizers.adaptive import AdaptiveRegularizer
from kbc.regularizers.adaptive import NXAdaptiveRegularizer
from kbc.regularizers.adaptive import FixedLambdaNXAdaptiveRegularizer
from kbc.regularizers.adaptive import ConstantAdaptiveRegularizer
from kbc.regularizers.adaptive import EmbeddingAdaptiveRegularizer
from kbc.regularizers.adaptive import LinearAdaptiveRegularizer
from kbc.regularizers.adaptive import GatedLinearAdaptiveRegularizer
from kbc.regularizers.adaptive import GatedLinearSigmoidAdaptiveRegularizer
from kbc.regularizers.bregman import DiagonalMahalanobisAdaptiveRegularizer
from kbc.regularizers.bregman import ProjectedDiagonalMahalanobisAdaptiveRegularizer
__all__ = [
'Regularizer',
'F2',
'L1',
'N3',
'NX',
'AdaptiveRegularizer',
'NXAdaptiveRegularizer',
'FixedLambdaNXAdaptiveRegularizer',
'ConstantAdaptiveRegularizer',
'EmbeddingAdaptiveRegularizer',
'LinearAdaptiveRegularizer',
'GatedLinearAdaptiveRegularizer',
'GatedLinearSigmoidAdaptiveRegularizer',
'DiagonalMahalanobisAdaptiveRegularizer',
'ProjectedDiagonalMahalanobisAdaptiveRegularizer'
]
| true | true |
f7fa6adce65db2cbd5a4497551024c1b89fa5087 | 1,645 | py | Python | end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py | kmray/openapi-python-client | 19dd9d8c2407e9f01d73bdb833d8dedf428d897c | [
"MIT"
] | 172 | 2020-02-15T20:14:16.000Z | 2021-06-09T07:09:15.000Z | end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py | kmray/openapi-python-client | 19dd9d8c2407e9f01d73bdb833d8dedf428d897c | [
"MIT"
] | 410 | 2020-02-15T19:39:29.000Z | 2021-06-09T19:28:57.000Z | end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py | kmray/openapi-python-client | 19dd9d8c2407e9f01d73bdb833d8dedf428d897c | [
"MIT"
] | 38 | 2020-04-12T09:36:27.000Z | 2021-06-11T08:57:07.000Z | from typing import Any, Dict
import httpx
from ...client import Client
from ...models.a_form_data import AFormData
from ...types import Response
def _get_kwargs(
    *,
    client: Client,
    form_data: AFormData,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for POST /tests/post_form_data."""
    return {
        "url": "{}/tests/post_form_data".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "data": form_data.to_dict(),
    }
def _build_response(*, response: httpx.Response) -> Response[Any]:
    """Wrap a raw httpx response in the generated Response container
    (this endpoint does not parse a body, so ``parsed`` is None)."""
    wrapped = Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=None,
    )
    return wrapped
def sync_detailed(
    *,
    client: Client,
    form_data: AFormData,
) -> Response[Any]:
    """Post form data (blocking).

    Sends ``form_data`` as an HTTP form body to /tests/post_form_data.

    Returns:
        Response[Any]
    """

    kwargs = _get_kwargs(
        client=client,
        form_data=form_data,
    )

    response = httpx.post(
        verify=client.verify_ssl,
        **kwargs,
    )

    return _build_response(response=response)
async def asyncio_detailed(
    *,
    client: Client,
    form_data: AFormData,
) -> Response[Any]:
    """Post form data (async).

    Sends ``form_data`` as an HTTP form body to /tests/post_form_data.

    Returns:
        Response[Any]
    """

    kwargs = _get_kwargs(
        client=client,
        form_data=form_data,
    )

    # A fresh AsyncClient per call keeps the connection scoped and closed.
    async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
        response = await _client.post(**kwargs)

    return _build_response(response=response)
| 19.127907 | 70 | 0.617629 | from typing import Any, Dict
import httpx
from ...client import Client
from ...models.a_form_data import AFormData
from ...types import Response
def _get_kwargs(
    *,
    client: Client,
    form_data: AFormData,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for POST /tests/post_form_data."""
    url = "{}/tests/post_form_data".format(client.base_url)
    headers: Dict[str, Any] = client.get_headers()
    cookies: Dict[str, Any] = client.get_cookies()
    return {
        "url": url,
        "headers": headers,
        "cookies": cookies,
        "timeout": client.get_timeout(),
        "data": form_data.to_dict(),
    }
def _build_response(*, response: httpx.Response) -> Response[Any]:
    """Wrap a raw httpx response in the generated Response container
    (this endpoint does not parse a body, so ``parsed`` is None)."""
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=None,
    )
def sync_detailed(
    *,
    client: Client,
    form_data: AFormData,
) -> Response[Any]:
    """Post form data to /tests/post_form_data (blocking).

    Returns:
        Response[Any]
    """
    kwargs = _get_kwargs(
        client=client,
        form_data=form_data,
    )
    response = httpx.post(
        verify=client.verify_ssl,
        **kwargs,
    )
    return _build_response(response=response)
async def asyncio_detailed(
    *,
    client: Client,
    form_data: AFormData,
) -> Response[Any]:
    """Post form data to /tests/post_form_data (async).

    Returns:
        Response[Any]
    """
    kwargs = _get_kwargs(
        client=client,
        form_data=form_data,
    )
    # A fresh AsyncClient per call keeps the connection scoped and closed.
    async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
        response = await _client.post(**kwargs)
    return _build_response(response=response)
f7fa6af0aa46bdc831e530ff8b6e10db81c4c085 | 4,621 | py | Python | fetch_papers.py | pepicello/quant-arxiv-sanity-preserver | 9a6920df359085c38133ee22a56a3b9610e3b8c2 | [
"MIT"
] | 6 | 2018-08-22T12:58:09.000Z | 2021-05-28T17:33:29.000Z | fetch_papers.py | pepicello/quant-arxiv-sanity-preserver | 9a6920df359085c38133ee22a56a3b9610e3b8c2 | [
"MIT"
] | null | null | null | fetch_papers.py | pepicello/quant-arxiv-sanity-preserver | 9a6920df359085c38133ee22a56a3b9610e3b8c2 | [
"MIT"
] | null | null | null | """
Queries arxiv API and downloads papers (the query is a parameter).
The script is intended to enrich an existing database pickle (by default db.p),
so this file will be loaded first, and then new results will be added to it.
"""
import os
import time
import pickle
import random
import argparse
import urllib.request
import feedparser
from utils import Config, safe_pickle_dump
def encode_feedparser_dict(d):
  """
  Recursively deep-copy a feedparser result into plain dicts and lists,
  stripping feedparser's FeedParserDict wrappers so the structure can be
  pickled/serialised like ordinary Python data.
  """
  if isinstance(d, feedparser.FeedParserDict) or isinstance(d, dict):
    # keep the original keys()/[] access pattern; only the container changes
    return {key: encode_feedparser_dict(d[key]) for key in d.keys()}
  if isinstance(d, list):
    return [encode_feedparser_dict(item) for item in d]
  return d
def parse_arxiv_url(url):
  """
  Extract the raw arxiv id and version from an abstract URL.

  Example: http://arxiv.org/abs/1512.08756v2 -> ('1512.08756', 2)

  Returns:
    (raw_id, version) tuple where version is an int.
  Raises:
    AssertionError: if the trailing URL component is not '<id>v<N>'.
    ValueError: if the version suffix is not an integer.
  """
  idversion = url[url.rfind('/') + 1:]  # just the id (and the version)
  # split only on the *last* 'v' so any stray 'v' earlier in the id
  # portion cannot produce extra parts and trip the assert below
  parts = idversion.rsplit('v', 1)
  assert len(parts) == 2, 'error parsing url ' + url
  return parts[0], int(parts[1])
if __name__ == "__main__":
  # parse input arguments
  parser = argparse.ArgumentParser()
  parser.add_argument('--search-query', type=str,
                      default='cat:q-fin.PR+OR+cat:q-fin.PM+OR+cat:q-fin.TR+OR+cat:q-fin.MF+OR+cat:q-fin.CP+OR+cat:q-fin.ST+OR+cat:q-fin.GN',
                      help='query used for arxiv API. See http://arxiv.org/help/api/user-manual#detailed_examples')
  parser.add_argument('--start-index', type=int, default=0, help='0 = most recent API result')
  parser.add_argument('--max-index', type=int, default=10000, help='upper bound on paper index we will fetch')
  parser.add_argument('--results-per-iteration', type=int, default=100, help='passed to arxiv API')
  parser.add_argument('--wait-time', type=float, default=5.0, help='lets be gentle to arxiv API (in number of seconds)')
  parser.add_argument('--break-on-no-added', type=int, default=1, help='break out early if all returned query papers are already in db? 1=yes, 0=no')
  args = parser.parse_args()
  # misc hardcoded variables
  base_url = 'http://export.arxiv.org/api/query?' # base api query url
  print('Searching arXiv for %s' % (args.search_query, ))
  # lets load the existing database to memory
  # (any load failure -- missing file, corrupt pickle -- falls back to empty)
  try:
    db = pickle.load(open(Config.db_path, 'rb'))
  except Exception as e:
    print('error loading existing database:')
    print(e)
    print('starting from an empty database')
    db = {}
  # -----------------------------------------------------------------------------
  # main loop where we fetch the new results, one API page per iteration
  print('database has %d entries at start' % (len(db), ))
  num_added_total = 0
  for i in range(args.start_index, args.max_index, args.results_per_iteration):
    print("Results %i - %i" % (i,i+args.results_per_iteration))
    query = 'search_query=%s&sortBy=lastUpdatedDate&start=%i&max_results=%i' % (args.search_query,
                                                         i, args.results_per_iteration)
    with urllib.request.urlopen(base_url+query) as url:
      response = url.read()
    parse = feedparser.parse(response)
    num_added = 0
    num_skipped = 0
    for e in parse.entries:
      # strip feedparser wrappers so the entry is a plain picklable dict
      j = encode_feedparser_dict(e)
      # extract just the raw arxiv id and version for this paper
      rawid, version = parse_arxiv_url(j['id'])
      j['_rawid'] = rawid
      j['_version'] = version
      # add to our database if we didn't have it before, or if this is a new version
      if not rawid in db or j['_version'] > db[rawid]['_version']:
        db[rawid] = j
        print('Updated %s added %s' % (j['updated'].encode('utf-8'), j['title'].encode('utf-8')))
        num_added += 1
        num_added_total += 1
      else:
        num_skipped += 1
    # print some information
    print('Added %d papers, already had %d.' % (num_added, num_skipped))
    if len(parse.entries) == 0:
      print('Received no results from arxiv. Rate limiting? Exiting. Restart later maybe.')
      print(response)
      break
    if num_added == 0 and args.break_on_no_added == 1:
      print('No new papers were added. Assuming no new papers exist. Exiting.')
      break
    # random jitter on top of the base wait keeps requests polite/irregular
    print('Sleeping for %i seconds' % (args.wait_time , ))
    time.sleep(args.wait_time + random.uniform(0, 3))
  # save the database before we quit, if we found anything new
  if num_added_total > 0:
    print('Saving database with %d papers to %s' % (len(db), Config.db_path))
    safe_pickle_dump(db, Config.db_path)
| 36.968 | 149 | 0.652889 |
import os
import time
import pickle
import random
import argparse
import urllib.request
import feedparser
from utils import Config, safe_pickle_dump
def encode_feedparser_dict(d):
if isinstance(d, feedparser.FeedParserDict) or isinstance(d, dict):
j = {}
for k in d.keys():
j[k] = encode_feedparser_dict(d[k])
return j
elif isinstance(d, list):
l = []
for k in d:
l.append(encode_feedparser_dict(k))
return l
else:
return d
def parse_arxiv_url(url):
ix = url.rfind('/')
idversion = url[ix+1:]
parts = idversion.split('v')
assert len(parts) == 2, 'error parsing url ' + url
return parts[0], int(parts[1])
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--search-query', type=str,
default='cat:q-fin.PR+OR+cat:q-fin.PM+OR+cat:q-fin.TR+OR+cat:q-fin.MF+OR+cat:q-fin.CP+OR+cat:q-fin.ST+OR+cat:q-fin.GN',
help='query used for arxiv API. See http://arxiv.org/help/api/user-manual#detailed_examples')
parser.add_argument('--start-index', type=int, default=0, help='0 = most recent API result')
parser.add_argument('--max-index', type=int, default=10000, help='upper bound on paper index we will fetch')
parser.add_argument('--results-per-iteration', type=int, default=100, help='passed to arxiv API')
parser.add_argument('--wait-time', type=float, default=5.0, help='lets be gentle to arxiv API (in number of seconds)')
parser.add_argument('--break-on-no-added', type=int, default=1, help='break out early if all returned query papers are already in db? 1=yes, 0=no')
args = parser.parse_args()
base_url = 'http://export.arxiv.org/api/query?'
print('Searching arXiv for %s' % (args.search_query, ))
try:
db = pickle.load(open(Config.db_path, 'rb'))
except Exception as e:
print('error loading existing database:')
print(e)
print('starting from an empty database')
db = {}
print('database has %d entries at start' % (len(db), ))
num_added_total = 0
for i in range(args.start_index, args.max_index, args.results_per_iteration):
print("Results %i - %i" % (i,i+args.results_per_iteration))
query = 'search_query=%s&sortBy=lastUpdatedDate&start=%i&max_results=%i' % (args.search_query,
i, args.results_per_iteration)
with urllib.request.urlopen(base_url+query) as url:
response = url.read()
parse = feedparser.parse(response)
num_added = 0
num_skipped = 0
for e in parse.entries:
j = encode_feedparser_dict(e)
rawid, version = parse_arxiv_url(j['id'])
j['_rawid'] = rawid
j['_version'] = version
if not rawid in db or j['_version'] > db[rawid]['_version']:
db[rawid] = j
print('Updated %s added %s' % (j['updated'].encode('utf-8'), j['title'].encode('utf-8')))
num_added += 1
num_added_total += 1
else:
num_skipped += 1
# print some information
print('Added %d papers, already had %d.' % (num_added, num_skipped))
if len(parse.entries) == 0:
print('Received no results from arxiv. Rate limiting? Exiting. Restart later maybe.')
print(response)
break
if num_added == 0 and args.break_on_no_added == 1:
print('No new papers were added. Assuming no new papers exist. Exiting.')
break
print('Sleeping for %i seconds' % (args.wait_time , ))
time.sleep(args.wait_time + random.uniform(0, 3))
# save the database before we quit, if we found anything new
if num_added_total > 0:
print('Saving database with %d papers to %s' % (len(db), Config.db_path))
safe_pickle_dump(db, Config.db_path)
| true | true |
f7fa6af52227c61161fccc5f9a1d62a0c2f15438 | 14,085 | py | Python | models/pointcnn.py | mhwasil/pointcloud_classification | c4dd36db556087cc90dca16e31958adfd3641482 | [
"MIT"
] | 4 | 2021-11-14T14:54:03.000Z | 2022-01-26T14:47:17.000Z | models/pointcnn.py | mhwasil/cnn_pointcloud_classification | c4dd36db556087cc90dca16e31958adfd3641482 | [
"MIT"
] | 1 | 2019-09-17T09:54:34.000Z | 2019-09-17T09:54:34.000Z | models/pointcnn.py | mhwasil/pointcloud_classification | c4dd36db556087cc90dca16e31958adfd3641482 | [
"MIT"
] | 1 | 2019-12-05T19:46:16.000Z | 2019-12-05T19:46:16.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, '../../utils/pointcnn'))
import math
import pointfly as pf
import tensorflow as tf
def xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation, depth_multiplier,
          sorting_method=None, with_global=False):
    """One X-Conv operator (PointCNN-style point convolution).

    For each of the P query points in `qrs`, gathers K neighbours from `pts`
    (K*D candidates, dilated by stride D), lifts the neighbours' local
    coordinates to C_pts_fts point features, optionally applies a learned
    K x K "X-transformation" to the neighbourhood features, and finishes
    with a separable convolution producing C output channels per query.

    Args:
        pts: input point coordinates; inline comments below note the
            expected (N, P, K, 3)-style shapes.
        fts: input point features, or None for the first layer.
        qrs: query (representative) points the output is computed at.
        tag: name prefix for all created ops/variables.
        N, K, D, P, C, C_pts_fts: batch size, neighbourhood size, dilation,
            query-point count, output channels, lifted-feature channels.
        with_global: if True, also embed the query coordinates themselves
            and concatenate that global feature to the output.

    Returns:
        A per-query feature tensor (with the global features appended when
        `with_global` is set).
    """
    # K*D candidate neighbours, then keep every D-th one (dilated kNN)
    _, indices_dilated = pf.knn_indices_general(qrs, pts, K * D, True)
    indices = indices_dilated[:, :, ::D, :]
    if sorting_method is not None:
        indices = pf.sort_points(pts, indices, sorting_method)
    nn_pts = tf.gather_nd(pts, indices, name=tag + 'nn_pts') # (N, P, K, 3)
    nn_pts_center = tf.expand_dims(qrs, axis=2, name=tag + 'nn_pts_center') # (N, P, 1, 3)
    # neighbour coordinates relative to their query point
    nn_pts_local = tf.subtract(nn_pts, nn_pts_center, name=tag + 'nn_pts_local') # (N, P, K, 3)
    # Prepare features to be transformed
    nn_fts_from_pts_0 = pf.dense(nn_pts_local, C_pts_fts, tag + 'nn_fts_from_pts_0', is_training)
    nn_fts_from_pts = pf.dense(nn_fts_from_pts_0, C_pts_fts, tag + 'nn_fts_from_pts', is_training)
    if fts is None:
        nn_fts_input = nn_fts_from_pts
    else:
        # concatenate lifted coordinate features with the previous layer's features
        nn_fts_from_prev = tf.gather_nd(fts, indices, name=tag + 'nn_fts_from_prev')
        nn_fts_input = tf.concat([nn_fts_from_pts, nn_fts_from_prev], axis=-1, name=tag + 'nn_fts_input')
    if with_X_transformation:
        ######################## X-transformation #########################
        # learn a K x K matrix per query point from the local geometry and
        # apply it to the neighbourhood features before convolving
        X_0 = pf.conv2d(nn_pts_local, K * K, tag + 'X_0', is_training, (1, K))
        X_0_KK = tf.reshape(X_0, (N, P, K, K), name=tag + 'X_0_KK')
        X_1 = pf.depthwise_conv2d(X_0_KK, K, tag + 'X_1', is_training, (1, K))
        X_1_KK = tf.reshape(X_1, (N, P, K, K), name=tag + 'X_1_KK')
        X_2 = pf.depthwise_conv2d(X_1_KK, K, tag + 'X_2', is_training, (1, K), activation=None)
        X_2_KK = tf.reshape(X_2, (N, P, K, K), name=tag + 'X_2_KK')
        fts_X = tf.matmul(X_2_KK, nn_fts_input, name=tag + 'fts_X')
        ###################################################################
    else:
        fts_X = nn_fts_input
    fts_conv = pf.separable_conv2d(fts_X, C, tag + 'fts_conv', is_training, (1, K), depth_multiplier=depth_multiplier)
    fts_conv_3d = tf.squeeze(fts_conv, axis=2, name=tag + 'fts_conv_3d')
    if with_global:
        # embed the absolute query coordinates as an extra C//4-dim feature
        fts_global_0 = pf.dense(qrs, C // 4, tag + 'fts_global_0', is_training)
        fts_global = pf.dense(fts_global_0, C // 4, tag + 'fts_global', is_training)
        return tf.concat([fts_global, fts_conv_3d], axis=-1, name=tag + 'fts_conv_3d_with_global')
    else:
        return fts_conv_3d
class PointCNN:
    """PointCNN classification network built from X-Conv layers.

    Builds the graph eagerly in ``__init__`` according to ``setting``:
    a stack of xconv encoder layers, optional xdconv decoder layers
    (``setting.xdconv_params``), and a fully-connected head.

    Attributes:
        layer_pts: per-layer query point tensors (input points first).
        layer_fts: per-layer feature tensors, parallel to ``layer_pts``.
        fc_layers: fully-connected head outputs (last layer's features first).
    """
    def __init__(self, points, features, is_training, setting):
        """Construct the network graph.

        Args:
            points: input point coordinates tensor.
            features: extra per-point features beyond xyz, or None
                (shape implied by ``setting.data_dim - 3``).
            is_training: bool tensor controlling dropout/batch-norm.
            setting: hyper-parameter module/object (xconv_params, fc_params,
                sampling, with_X_transformation, ...).
        """
        xconv_params = setting.xconv_params
        fc_params = setting.fc_params
        with_X_transformation = setting.with_X_transformation
        sorting_method = setting.sorting_method
        N = tf.shape(points)[0]
        # tf_sampling has a compiled CUDA op, so only import it when needed
        if setting.sampling == 'fps':
            from sampling import tf_sampling
        self.layer_pts = [points]
        if features is None:
            self.layer_fts = [features]
        else:
            # lift the non-xyz input features to C/2 channels before layer 1
            features = tf.reshape(features, (N, -1, setting.data_dim - 3), name='features_reshape')
            C_fts = xconv_params[0]['C'] // 2
            features_hd = pf.dense(features, C_fts, 'features_hd', is_training)
            self.layer_fts = [features_hd]
        # ---- encoder: one xconv layer per entry in xconv_params ----
        for layer_idx, layer_param in enumerate(xconv_params):
            tag = 'xconv_' + str(layer_idx + 1) + '_'
            K = layer_param['K']
            D = layer_param['D']
            P = layer_param['P']
            C = layer_param['C']
            links = layer_param['links']
            if setting.sampling != 'random' and links:
                print('Error: flexible links are supported only when random sampling is used!')
                exit()
            # get k-nearest points
            pts = self.layer_pts[-1]
            fts = self.layer_fts[-1]
            # P == -1 (or unchanged P) means: keep all points as queries
            if P == -1 or (layer_idx > 0 and P == xconv_params[layer_idx - 1]['P']):
                qrs = self.layer_pts[-1]
            else:
                if setting.sampling == 'fps':
                    fps_indices = tf_sampling.farthest_point_sample(P, pts)
                    batch_indices = tf.tile(tf.reshape(tf.range(N), (-1, 1, 1)), (1, P, 1))
                    indices = tf.concat([batch_indices, tf.expand_dims(fps_indices,-1)], axis=-1)
                    qrs = tf.gather_nd(pts, indices, name= tag + 'qrs') # (N, P, 3)
                elif setting.sampling == 'ids':
                    indices = pf.inverse_density_sampling(pts, K, P)
                    qrs = tf.gather_nd(pts, indices)
                elif setting.sampling == 'random':
                    qrs = tf.slice(pts, (0, 0, 0), (-1, P, -1), name=tag + 'qrs') # (N, P, 3)
                else:
                    print('Unknown sampling method!')
                    exit()
            self.layer_pts.append(qrs)
            if layer_idx == 0:
                C_pts_fts = C // 2 if fts is None else C // 4
                depth_multiplier = 4
            else:
                C_prev = xconv_params[layer_idx - 1]['C']
                C_pts_fts = C_prev // 4
                depth_multiplier = math.ceil(C / C_prev)
            # global query-coordinate features only on the last encoder layer
            with_global = (setting.with_global and layer_idx == len(xconv_params) - 1)
            fts_xconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
                              depth_multiplier, sorting_method, with_global)
            # optional dense links: concatenate features from earlier layers
            fts_list = []
            for link in links:
                fts_from_link = self.layer_fts[link]
                if fts_from_link is not None:
                    fts_slice = tf.slice(fts_from_link, (0, 0, 0), (-1, P, -1), name=tag + 'fts_slice_' + str(-link))
                    fts_list.append(fts_slice)
            if fts_list:
                fts_list.append(fts_xconv)
                self.layer_fts.append(tf.concat(fts_list, axis=-1, name=tag + 'fts_list_concat'))
            else:
                self.layer_fts.append(fts_xconv)
        # ---- optional decoder: xdconv layers fuse encoder features back ----
        if hasattr(setting, 'xdconv_params'):
            for layer_idx, layer_param in enumerate(setting.xdconv_params):
                tag = 'xdconv_' + str(layer_idx + 1) + '_'
                K = layer_param['K']
                D = layer_param['D']
                pts_layer_idx = layer_param['pts_layer_idx']
                qrs_layer_idx = layer_param['qrs_layer_idx']
                pts = self.layer_pts[pts_layer_idx + 1]
                fts = self.layer_fts[pts_layer_idx + 1] if layer_idx == 0 else self.layer_fts[-1]
                qrs = self.layer_pts[qrs_layer_idx + 1]
                fts_qrs = self.layer_fts[qrs_layer_idx + 1]
                P = xconv_params[qrs_layer_idx]['P']
                C = xconv_params[qrs_layer_idx]['C']
                C_prev = xconv_params[pts_layer_idx]['C']
                C_pts_fts = C_prev // 4
                depth_multiplier = 1
                fts_xdconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
                                   depth_multiplier, sorting_method)
                # fuse upsampled features with the matching encoder features
                fts_concat = tf.concat([fts_xdconv, fts_qrs], axis=-1, name=tag + 'fts_concat')
                fts_fuse = pf.dense(fts_concat, C, tag + 'fts_fuse', is_training)
                self.layer_pts.append(qrs)
                self.layer_fts.append(fts_fuse)
        # ---- fully-connected head with per-layer dropout ----
        self.fc_layers = [self.layer_fts[-1]]
        for layer_idx, layer_param in enumerate(fc_params):
            C = layer_param['C']
            dropout_rate = layer_param['dropout_rate']
            fc = pf.dense(self.fc_layers[-1], C, 'fc{:d}'.format(layer_idx), is_training)
            fc_drop = tf.layers.dropout(fc, dropout_rate, training=is_training, name='fc{:d}_drop'.format(layer_idx))
            self.fc_layers.append(fc_drop)
class PointCNN_SEG:
def __init__(self, points, features, is_training, setting):
xconv_params = setting.xconv_params
fc_params_classification = setting.fc_params_classification
fc_params_segmentation = setting.fc_params_segmentation
with_X_transformation = setting.with_X_transformation
sorting_method = setting.sorting_method
N = tf.shape(points)[0]
if setting.sampling == 'fps':
from sampling import tf_sampling
self.layer_pts = [points]
if features is None:
self.layer_fts = [features]
else:
features = tf.reshape(features, (N, -1, setting.data_dim - 3), name='features_reshape')
C_fts = xconv_params[0]['C'] // 2
features_hd = pf.dense(features, C_fts, 'features_hd', is_training)
self.layer_fts = [features_hd]
for layer_idx, layer_param in enumerate(xconv_params):
tag = 'xconv_' + str(layer_idx + 1) + '_'
K = layer_param['K']
D = layer_param['D']
P = layer_param['P']
C = layer_param['C']
links = layer_param['links']
if setting.sampling != 'random' and links:
print('Error: flexible links are supported only when random sampling is used!')
exit()
# get k-nearest points
pts = self.layer_pts[-1]
fts = self.layer_fts[-1]
if P == -1 or (layer_idx > 0 and P == xconv_params[layer_idx - 1]['P']):
qrs = self.layer_pts[-1]
else:
if setting.sampling == 'fps':
fps_indices = tf_sampling.farthest_point_sample(P, pts)
batch_indices = tf.tile(tf.reshape(tf.range(N), (-1, 1, 1)), (1, P, 1))
indices = tf.concat([batch_indices, tf.expand_dims(fps_indices,-1)], axis=-1)
qrs = tf.gather_nd(pts, indices, name= tag + 'qrs') # (N, P, 3)
elif setting.sampling == 'ids':
indices = pf.inverse_density_sampling(pts, K, P)
qrs = tf.gather_nd(pts, indices)
elif setting.sampling == 'random':
qrs = tf.slice(pts, (0, 0, 0), (-1, P, -1), name=tag + 'qrs') # (N, P, 3)
else:
print('Unknown sampling method!')
exit()
self.layer_pts.append(qrs)
if layer_idx == 0:
C_pts_fts = C // 2 if fts is None else C // 4
depth_multiplier = 4
else:
C_prev = xconv_params[layer_idx - 1]['C']
C_pts_fts = C_prev // 4
depth_multiplier = math.ceil(C / C_prev)
with_global = (setting.with_global and layer_idx == len(xconv_params) - 1)
fts_xconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
depth_multiplier, sorting_method, with_global)
fts_list = []
for link in links:
fts_from_link = self.layer_fts[link]
if fts_from_link is not None:
fts_slice = tf.slice(fts_from_link, (0, 0, 0), (-1, P, -1), name=tag + 'fts_slice_' + str(-link))
fts_list.append(fts_slice)
if fts_list:
fts_list.append(fts_xconv)
self.layer_fts.append(tf.concat(fts_list, axis=-1, name=tag + 'fts_list_concat'))
else:
self.layer_fts.append(fts_xconv)
#######Classification Branch
self.fc_layers_classification = [self.layer_fts[-1]]
for layer_idx, layer_param in enumerate(fc_params_classification):
C = layer_param['C']
dropout_rate = layer_param['dropout_rate']
fc = pf.dense(self.fc_layers_classification[-1], C, 'fc_class_{:d}'.format(layer_idx), is_training)
fc_drop = tf.layers.dropout(fc, dropout_rate, training=is_training, name='fc_class_{:d}_drop'.format(layer_idx))
self.fc_layers_classification.append(fc_drop)
#######Segmentation Branch
if hasattr(setting, 'xdconv_params'):
for layer_idx, layer_param in enumerate(setting.xdconv_params):
tag = 'xdconv_' + str(layer_idx + 1) + '_'
K = layer_param['K']
D = layer_param['D']
pts_layer_idx = layer_param['pts_layer_idx']
qrs_layer_idx = layer_param['qrs_layer_idx']
pts = self.layer_pts[pts_layer_idx + 1]
fts = self.layer_fts[pts_layer_idx + 1] if layer_idx == 0 else self.layer_fts[-1]
qrs = self.layer_pts[qrs_layer_idx + 1]
fts_qrs = self.layer_fts[qrs_layer_idx + 1]
P = xconv_params[qrs_layer_idx]['P']
C = xconv_params[qrs_layer_idx]['C']
C_prev = xconv_params[pts_layer_idx]['C']
C_pts_fts = C_prev // 4
depth_multiplier = 1
fts_xdconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
depth_multiplier, sorting_method)
fts_concat = tf.concat([fts_xdconv, fts_qrs], axis=-1, name=tag + 'fts_concat')
fts_fuse = pf.dense(fts_concat, C, tag + 'fts_fuse', is_training)
self.layer_pts.append(qrs)
self.layer_fts.append(fts_fuse)
self.fc_layers_segmentation = [self.layer_fts[-1]]
for layer_idx, layer_param in enumerate(fc_params_segmentation):
C = layer_param['C']
dropout_rate = layer_param['dropout_rate']
fc = pf.dense(self.fc_layers_segmentation[-1], C, 'fc_seg_{:d}'.format(layer_idx), is_training)
fc_drop = tf.layers.dropout(fc, dropout_rate, training=is_training, name='fc_seg_{:d}_drop'.format(layer_idx))
self.fc_layers_segmentation.append(fc_drop) | 49.770318 | 124 | 0.567483 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, '../../utils/pointcnn'))
import math
import pointfly as pf
import tensorflow as tf
def xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation, depth_multiplier,
sorting_method=None, with_global=False):
_, indices_dilated = pf.knn_indices_general(qrs, pts, K * D, True)
indices = indices_dilated[:, :, ::D, :]
if sorting_method is not None:
indices = pf.sort_points(pts, indices, sorting_method)
nn_pts = tf.gather_nd(pts, indices, name=tag + 'nn_pts')
nn_pts_center = tf.expand_dims(qrs, axis=2, name=tag + 'nn_pts_center')
nn_pts_local = tf.subtract(nn_pts, nn_pts_center, name=tag + 'nn_pts_local')
nn_fts_from_pts_0 = pf.dense(nn_pts_local, C_pts_fts, tag + 'nn_fts_from_pts_0', is_training)
nn_fts_from_pts = pf.dense(nn_fts_from_pts_0, C_pts_fts, tag + 'nn_fts_from_pts', is_training)
if fts is None:
nn_fts_input = nn_fts_from_pts
else:
nn_fts_from_prev = tf.gather_nd(fts, indices, name=tag + 'nn_fts_from_prev')
nn_fts_input = tf.concat([nn_fts_from_pts, nn_fts_from_prev], axis=-1, name=tag + 'nn_fts_input')
if with_X_transformation:
s_xconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
depth_multiplier, sorting_method, with_global)
fts_list = []
for link in links:
fts_from_link = self.layer_fts[link]
if fts_from_link is not None:
fts_slice = tf.slice(fts_from_link, (0, 0, 0), (-1, P, -1), name=tag + 'fts_slice_' + str(-link))
fts_list.append(fts_slice)
if fts_list:
fts_list.append(fts_xconv)
self.layer_fts.append(tf.concat(fts_list, axis=-1, name=tag + 'fts_list_concat'))
else:
self.layer_fts.append(fts_xconv)
if hasattr(setting, 'xdconv_params'):
for layer_idx, layer_param in enumerate(setting.xdconv_params):
tag = 'xdconv_' + str(layer_idx + 1) + '_'
K = layer_param['K']
D = layer_param['D']
pts_layer_idx = layer_param['pts_layer_idx']
qrs_layer_idx = layer_param['qrs_layer_idx']
pts = self.layer_pts[pts_layer_idx + 1]
fts = self.layer_fts[pts_layer_idx + 1] if layer_idx == 0 else self.layer_fts[-1]
qrs = self.layer_pts[qrs_layer_idx + 1]
fts_qrs = self.layer_fts[qrs_layer_idx + 1]
P = xconv_params[qrs_layer_idx]['P']
C = xconv_params[qrs_layer_idx]['C']
C_prev = xconv_params[pts_layer_idx]['C']
C_pts_fts = C_prev // 4
depth_multiplier = 1
fts_xdconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
depth_multiplier, sorting_method)
fts_concat = tf.concat([fts_xdconv, fts_qrs], axis=-1, name=tag + 'fts_concat')
fts_fuse = pf.dense(fts_concat, C, tag + 'fts_fuse', is_training)
self.layer_pts.append(qrs)
self.layer_fts.append(fts_fuse)
self.fc_layers = [self.layer_fts[-1]]
for layer_idx, layer_param in enumerate(fc_params):
C = layer_param['C']
dropout_rate = layer_param['dropout_rate']
fc = pf.dense(self.fc_layers[-1], C, 'fc{:d}'.format(layer_idx), is_training)
fc_drop = tf.layers.dropout(fc, dropout_rate, training=is_training, name='fc{:d}_drop'.format(layer_idx))
self.fc_layers.append(fc_drop)
class PointCNN_SEG:
def __init__(self, points, features, is_training, setting):
xconv_params = setting.xconv_params
fc_params_classification = setting.fc_params_classification
fc_params_segmentation = setting.fc_params_segmentation
with_X_transformation = setting.with_X_transformation
sorting_method = setting.sorting_method
N = tf.shape(points)[0]
if setting.sampling == 'fps':
from sampling import tf_sampling
self.layer_pts = [points]
if features is None:
self.layer_fts = [features]
else:
features = tf.reshape(features, (N, -1, setting.data_dim - 3), name='features_reshape')
C_fts = xconv_params[0]['C'] // 2
features_hd = pf.dense(features, C_fts, 'features_hd', is_training)
self.layer_fts = [features_hd]
for layer_idx, layer_param in enumerate(xconv_params):
tag = 'xconv_' + str(layer_idx + 1) + '_'
K = layer_param['K']
D = layer_param['D']
P = layer_param['P']
C = layer_param['C']
links = layer_param['links']
if setting.sampling != 'random' and links:
print('Error: flexible links are supported only when random sampling is used!')
exit()
pts = self.layer_pts[-1]
fts = self.layer_fts[-1]
if P == -1 or (layer_idx > 0 and P == xconv_params[layer_idx - 1]['P']):
qrs = self.layer_pts[-1]
else:
if setting.sampling == 'fps':
fps_indices = tf_sampling.farthest_point_sample(P, pts)
batch_indices = tf.tile(tf.reshape(tf.range(N), (-1, 1, 1)), (1, P, 1))
indices = tf.concat([batch_indices, tf.expand_dims(fps_indices,-1)], axis=-1)
qrs = tf.gather_nd(pts, indices, name= tag + 'qrs')
elif setting.sampling == 'ids':
indices = pf.inverse_density_sampling(pts, K, P)
qrs = tf.gather_nd(pts, indices)
elif setting.sampling == 'random':
qrs = tf.slice(pts, (0, 0, 0), (-1, P, -1), name=tag + 'qrs')
else:
print('Unknown sampling method!')
exit()
self.layer_pts.append(qrs)
if layer_idx == 0:
C_pts_fts = C // 2 if fts is None else C // 4
depth_multiplier = 4
else:
C_prev = xconv_params[layer_idx - 1]['C']
C_pts_fts = C_prev // 4
depth_multiplier = math.ceil(C / C_prev)
with_global = (setting.with_global and layer_idx == len(xconv_params) - 1)
fts_xconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
depth_multiplier, sorting_method, with_global)
fts_list = []
for link in links:
fts_from_link = self.layer_fts[link]
if fts_from_link is not None:
fts_slice = tf.slice(fts_from_link, (0, 0, 0), (-1, P, -1), name=tag + 'fts_slice_' + str(-link))
fts_list.append(fts_slice)
if fts_list:
fts_list.append(fts_xconv)
self.layer_fts.append(tf.concat(fts_list, axis=-1, name=tag + 'fts_list_concat'))
else:
self.layer_fts.append(fts_xconv)
C = layer_param['C']
dropout_rate = layer_param['dropout_rate']
fc = pf.dense(self.fc_layers_classification[-1], C, 'fc_class_{:d}'.format(layer_idx), is_training)
fc_drop = tf.layers.dropout(fc, dropout_rate, training=is_training, name='fc_class_{:d}_drop'.format(layer_idx))
self.fc_layers_classification.append(fc_drop)
tag = 'xdconv_' + str(layer_idx + 1) + '_'
K = layer_param['K']
D = layer_param['D']
pts_layer_idx = layer_param['pts_layer_idx']
qrs_layer_idx = layer_param['qrs_layer_idx']
pts = self.layer_pts[pts_layer_idx + 1]
fts = self.layer_fts[pts_layer_idx + 1] if layer_idx == 0 else self.layer_fts[-1]
qrs = self.layer_pts[qrs_layer_idx + 1]
fts_qrs = self.layer_fts[qrs_layer_idx + 1]
P = xconv_params[qrs_layer_idx]['P']
C = xconv_params[qrs_layer_idx]['C']
C_prev = xconv_params[pts_layer_idx]['C']
C_pts_fts = C_prev // 4
depth_multiplier = 1
fts_xdconv = xconv(pts, fts, qrs, tag, N, K, D, P, C, C_pts_fts, is_training, with_X_transformation,
depth_multiplier, sorting_method)
fts_concat = tf.concat([fts_xdconv, fts_qrs], axis=-1, name=tag + 'fts_concat')
fts_fuse = pf.dense(fts_concat, C, tag + 'fts_fuse', is_training)
self.layer_pts.append(qrs)
self.layer_fts.append(fts_fuse)
self.fc_layers_segmentation = [self.layer_fts[-1]]
for layer_idx, layer_param in enumerate(fc_params_segmentation):
C = layer_param['C']
dropout_rate = layer_param['dropout_rate']
fc = pf.dense(self.fc_layers_segmentation[-1], C, 'fc_seg_{:d}'.format(layer_idx), is_training)
fc_drop = tf.layers.dropout(fc, dropout_rate, training=is_training, name='fc_seg_{:d}_drop'.format(layer_idx))
self.fc_layers_segmentation.append(fc_drop) | true | true |
f7fa6af915f71ef76d4daaa41ed41a3c60ba2778 | 118 | py | Python | PythonClient/gym-car/gym_car/__init__.py | Yuchi94/CustomAirSim | 8eec882e603c8c6cfdab8969b29daa02e2babc87 | [
"MIT"
] | 3 | 2020-12-18T07:07:44.000Z | 2021-10-18T02:43:01.000Z | PythonClient/gym-car/gym_car/__init__.py | Yuchi94/CustomAirSim | 8eec882e603c8c6cfdab8969b29daa02e2babc87 | [
"MIT"
] | null | null | null | PythonClient/gym-car/gym_car/__init__.py | Yuchi94/CustomAirSim | 8eec882e603c8c6cfdab8969b29daa02e2babc87 | [
"MIT"
] | null | null | null | from gym.envs.registration import register
# Register the custom car environment with gym so it can be created
# via gym.make('carsim-v0').
register(
    id='carsim-v0',
    entry_point='gym_car.envs:CarSimEnv',
)
| 16.857143 | 42 | 0.728814 | from gym.envs.registration import register
register(
id='carsim-v0',
entry_point='gym_car.envs:CarSimEnv',
)
| true | true |
f7fa6b0f47dd67c4cc969147764ee845e362bd9d | 11,089 | py | Python | tests/terraform_compliance/common/test_helper.py | rileydak/terraform-compliance | 6397b4b9b3ce7e23850c0ac0fb77d4bef044c266 | [
"MIT"
] | 1 | 2019-12-07T01:04:27.000Z | 2019-12-07T01:04:27.000Z | tests/terraform_compliance/common/test_helper.py | rileydak/terraform-compliance | 6397b4b9b3ce7e23850c0ac0fb77d4bef044c266 | [
"MIT"
] | null | null | null | tests/terraform_compliance/common/test_helper.py | rileydak/terraform-compliance | 6397b4b9b3ce7e23850c0ac0fb77d4bef044c266 | [
"MIT"
] | null | null | null | from unittest import TestCase
from terraform_compliance.common.helper import (
flatten_list,
check_if_cidr,
is_ip_in_cidr,
assign_sg_params,
validate_sg_rule,
seek_key_in_dict,
find_root_by_key,
are_networks_same,
convert_resource_type,
seek_regex_key_in_dict_values,
jsonify,
remove_mounted_resources,
get_resource_name_from_stash
)
from terraform_compliance.common.exceptions import Failure
from tests.mocks import MockedData
class TestHelperFunctions(TestCase):
def test_flatten_single_dimensional_list(self):
a = ['a', 'b', 'c']
b = ['a', 'b', 'c']
self.assertEqual(flatten_list(a), b)
def test_flatten_multi_dimensional_list(self):
a = ['a', 'b', ['c']]
b = ['a', 'b', 'c']
self.assertEqual(flatten_list(a), b)
def test_flatten_multi_dimensional_nested_list(self):
a = ['a', 'b', ['c', ['d', 'e'], 'f'], 'g', 'h', 'i', ['j', 'k']]
b = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k']
self.assertEqual(flatten_list(a), b)
def test_check_if_cidr_success(self):
self.assertTrue(check_if_cidr('10.0.0.0/8'))
self.assertTrue(check_if_cidr('10.14.0.0/16'))
self.assertTrue(check_if_cidr('10.0.0.0/24'))
self.assertTrue(check_if_cidr('10.0.0.7/32'))
def test_check_if_cidr_failure(self):
self.assertFalse(check_if_cidr('256.0.0.0/8'))
self.assertFalse(check_if_cidr('10.256.0.0/16'))
self.assertFalse(check_if_cidr('10.0.256.0/24'))
self.assertFalse(check_if_cidr('10.0.0.256/32'))
self.assertFalse(check_if_cidr('10.0.0.256/33'))
def test_is_ip_in_cidr_success(self):
self.assertTrue(is_ip_in_cidr('10.0.0.0/8', ['0.0.0.0/0']))
self.assertTrue(is_ip_in_cidr('10.0.0.0/16', ['10.0.0.0/8']))
self.assertTrue(is_ip_in_cidr('10.0.200.0/24', ['10.0.0.0/16']))
self.assertTrue(is_ip_in_cidr('10.0.0.1/32', ['10.0.0.0/24']))
def test_is_ip_in_cidr_failure(self):
self.assertFalse(is_ip_in_cidr('200.0.0.0/16', ['10.0.0.0/8']))
self.assertFalse(is_ip_in_cidr('10.200.0.0/24', ['10.0.0.0/16']))
self.assertFalse(is_ip_in_cidr('10.0.1.1/32', ['10.0.0.0/24']))
    def test_assign_sg_params_one_port_with_two_cidrs(self):
        # SSH rule with two CIDR blocks parses into the expected param structure.
        self.assertEqual(MockedData.sg_params_ssh_with_2_cidrs, assign_sg_params(MockedData.sg_ssh_with_2_cidrs))
    def test_assign_sg_params_one_port_two_cidrs_any_proto(self):
        # Any-protocol variant of the two-CIDR SSH rule parses as expected.
        self.assertEqual(MockedData.sg_params_ssh_with_2_cidrs_any_proto, assign_sg_params(MockedData.sg_ssh_with_2_cidrs_any_proto))
    def test_assign_sg_params_all_ports_with_all_ips(self):
        # Wide-open rule (all ports, all IPs) parses into the expected params.
        self.assertEqual(MockedData.sg_params_all_port_all_ip, assign_sg_params(MockedData.sg_all_port_all_ip))
    def test_assign_sg_params_no_data_given_in_rules(self):
        # A rule without port/CIDR data still parses into the expected params.
        self.assertEqual(MockedData.sg_params_all_port_no_ip, assign_sg_params(MockedData.sg_all_port_no_ip))
def test_assign_sg_params_from_port_bigger_than_to_port(self):
with self.assertRaises(Failure) as context:
assign_sg_params(MockedData.sg_invalid)
self.assertTrue('Invalid configuration from_port can not be bigger than to_port.' in context.exception)
def test_validate_sg_rule_port_found_in_cidr(self):
with self.assertRaises(Failure) as context:
params = dict(from_port=22, to_port=22, cidr='0.0.0.0/0', ports='', proto='tcp')
validate_sg_rule(MockedData.sg_params_all_port_all_ip, params, False)
self.assertTrue('Found' in context.exception)
def test_validate_sg_rule_invalid_port_range_within_scenario(self):
with self.assertRaises(AssertionError) as context:
params = dict(from_port=43, to_port=42, cidr=None, ports='', proto='tcp')
validate_sg_rule(None, params, False)
self.assertTrue('Port range is defined incorrectly within the Scenario.' in context.exception)
def test_validate_sg_rule_port_range_found_in_cidr_fail(self):
scenario_list = ['22-80', '21-22', '21-23', '70-72', '79-80', '79-81']
for scenario in scenario_list:
with self.assertRaises(Failure) as context:
from_port, to_port = scenario.split('-')
params = dict(proto='tcp', from_port=from_port, to_port=to_port, cidr='0.0.0.0/0', ports='')
validate_sg_rule(MockedData.sg_params_list_range_public, params, False)
self.assertTrue('Found' in context.exception)
    def test_validate_sg_rule_port_range_found_in_cidr_success_due_to_cidr_mismatch(self):
        # Same ranges as the failure test, but the SG only exposes private
        # CIDRs, so matching against 0.0.0.0/0 must pass validation.
        scenario_list = ['22-80', '21-22', '21-23', '70-72', '79-80', '79-81']
        for scenario in scenario_list:
            from_port, to_port = scenario.split('-')
            params = dict(proto='tcp', from_port=from_port, to_port=to_port, ports='', cidr='0.0.0.0/0')
            self.assertTrue(validate_sg_rule(MockedData.sg_params_list_range_private, params, False))
    def test_validate_sg_rule_port_not_found_in_comma_delimited_scenario(self):
        """Comma-delimited scenario ports ('22,443') against a public SG rule.

        NOTE(review): the structure is internally inconsistent - the method
        name says "not found", yet the body wraps everything in
        assertRaises(Failure) AND asserts a falsy return on the same call; if
        validate_sg_rule raises, the assertFalse line never runs, and if it
        does not raise, assertRaises fails. Confirm the intended contract of
        validate_sg_rule for this fixture before relying on this test.
        """
        with self.assertRaises(Failure) as context:
            ports = '22,443'.split(',')
            params = dict(proto='tcp', from_port=0, to_port=0, ports=ports, cidr='0.0.0.0/0')
            self.assertFalse(validate_sg_rule(MockedData.sg_params_list_range_public, params, True))
    def test_validate_sg_rule_port_found_in_comma_delimited_scenario(self):
        """Every port in 22..79 (as strings) against a public SG rule.

        NOTE(review): same structural ambiguity as the "not found" sibling -
        assertRaises(Failure) around an assertFalse on the validated call
        means the assertFalse is unreachable when the Failure fires. Only the
        raise is actually being tested here; verify that is the intent.
        """
        with self.assertRaises(Failure) as context:
            ports = range(22,80)
            ports = [str(i) for i in ports]
            params = dict(proto='tcp', from_port=0, to_port=0, ports=ports, cidr='0.0.0.0/0')
            self.assertFalse(validate_sg_rule(MockedData.sg_params_list_range_public, params, True))
def test_seek_in_dict_finding_a_key_in_root(self):
dictionary = dict(search_key=dict(something=[]))
search_key = 'search_key'
expected = [{'search_key': {'something': []}}]
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_a_key_in_nested_dict(self):
dictionary = dict(search_key=dict(something=dict(something_else=None)))
search_key = 'something'
expected = [{'something': {'something_else': None}}]
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_multiple_keys_in_nested_dict(self):
dictionary = dict(search_key=dict(something=dict(something_else=None, something=['something_else'])), something=[])
search_key = 'something'
expected = [{'something': {'something_else': None, 'something': ['something_else']}}, {'something': []}]
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_values_in_non_dicts(self):
dictionary = dict(search_key=dict(something=[]))
search_key = 'something_else'
expected = []
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_values_in_non_dicts_on_root(self):
dictionary = 'something_else'
search_key = 'something_else'
expected = []
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_find_root_by_key_single_return(self):
haystack = dict(some_key=dict(values=dict(tags=[], something_else='something')))
search_key = 'tags'
expected = ['some_key']
self.assertEqual(find_root_by_key(haystack, search_key), expected)
def test_find_root_by_key_multiple_return(self):
haystack = dict(some_key=dict(values=dict(tags=[], something_else='something')), other_key=dict(values=dict(tags=[], something_else='something')))
search_key = 'tags'
expected = ['other_key', 'some_key']
self.assertEqual(sorted(find_root_by_key(haystack, search_key)), sorted(expected))
def test_find_root_by_key_multiple_return_02(self):
haystack = dict(some_key=dict(values=dict(tags=[], something_else='loki'), find_me='bingo'),
other_key=dict(values=dict(tags=[], something_else='thor'), find_me='b i n g o'))
search_key = 'tags'
return_key = 'find_me'
expected = ['bingo', 'b i n g o']
self.assertEqual(sorted(find_root_by_key(haystack, search_key, return_key)), sorted(expected))
def test_are_networks_same_success(self):
network_a = '192.168.0.0/24'
networks = ['192.168.0.0/24']
self.assertTrue(are_networks_same(network_a, networks))
def test_are_networks_same_false(self):
network_a = '192.168.0.0/24'
networks = ['192.168.0.0/23']
self.assertFalse(are_networks_same(network_a, networks))
def test_convert_resource_type_success(self):
self.assertEqual(convert_resource_type('AWS Security Group'), 'aws_security_group')
def test_convert_resource_type_failure(self):
self.assertEqual(convert_resource_type('test_resource'), 'test_resource')
def test_seek_regex_in_dict_value_nested_dict(self):
haystack = dict(search_key=dict(something='value'))
key_name = 'something'
needle = 'val.*'
expected = ['value']
self.assertEqual(seek_regex_key_in_dict_values(haystack, key_name, needle), expected)
def test_jsonify(self):
self.assertEqual(jsonify({}), {})
self.assertEqual(jsonify([]), [])
self.assertEqual(jsonify(12), 12)
self.assertEqual(jsonify('something'), 'something')
self.assertEqual(jsonify('{"test": "test_value"}'), {'test': 'test_value'})
    def test_remove_mounted_resources(self, *args):
        """remove_mounted_resources strips every mounted entry from 'values'.

        The fixture is a subnet whose 'values' contains a mounted 'aws_vpc'
        branch (itself carrying a mounted child) plus the bookkeeping key
        'terraform-compliance.mounted_resources'; after the call only the
        subnet's own data ({'tags': None}) must remain under 'values'.

        NOTE(review): the *args parameter is unused here - presumably a
        leftover from a removed mock.patch decorator; confirm and drop.
        """
        resource_list = {
            'address': 'aws_subnet.test',
            'type': 'aws_subnet',
            'name': 'test',
            'values': {
                'tags': None,
                'aws_vpc': [
                    {
                        'tags': {
                            'Environment': 'Sandbox',
                            'Name': 'mcrilly-sandbox'
                        },
                        'aws_subnet': [
                            {
                                'tags': None,
                                'terraform-compliance.mounted': True
                            }
                        ], 'terraform-compliance.mounted': True
                    }
                ]
            },
            'terraform-compliance.mounted_resources': [
                'aws_vpc'
            ]
        }

        output = remove_mounted_resources([resource_list])
        self.assertEqual({'tags': None}, output[0]['values'])
def test_get_resource_name_from_stash_address_exists(self):
stash = {}
self.assertEqual({'address': 'test'}, get_resource_name_from_stash(stash=stash, address='test'))
def test_get_resource_name_from_stash(self):
stash = [
{
'address': 'test'
}
]
self.assertEqual({'address': 'test'}, get_resource_name_from_stash(stash=stash)) | 43.83004 | 154 | 0.649202 | from unittest import TestCase
from terraform_compliance.common.helper import (
flatten_list,
check_if_cidr,
is_ip_in_cidr,
assign_sg_params,
validate_sg_rule,
seek_key_in_dict,
find_root_by_key,
are_networks_same,
convert_resource_type,
seek_regex_key_in_dict_values,
jsonify,
remove_mounted_resources,
get_resource_name_from_stash
)
from terraform_compliance.common.exceptions import Failure
from tests.mocks import MockedData
class TestHelperFunctions(TestCase):
def test_flatten_single_dimensional_list(self):
a = ['a', 'b', 'c']
b = ['a', 'b', 'c']
self.assertEqual(flatten_list(a), b)
def test_flatten_multi_dimensional_list(self):
a = ['a', 'b', ['c']]
b = ['a', 'b', 'c']
self.assertEqual(flatten_list(a), b)
def test_flatten_multi_dimensional_nested_list(self):
a = ['a', 'b', ['c', ['d', 'e'], 'f'], 'g', 'h', 'i', ['j', 'k']]
b = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k']
self.assertEqual(flatten_list(a), b)
def test_check_if_cidr_success(self):
self.assertTrue(check_if_cidr('10.0.0.0/8'))
self.assertTrue(check_if_cidr('10.14.0.0/16'))
self.assertTrue(check_if_cidr('10.0.0.0/24'))
self.assertTrue(check_if_cidr('10.0.0.7/32'))
def test_check_if_cidr_failure(self):
self.assertFalse(check_if_cidr('256.0.0.0/8'))
self.assertFalse(check_if_cidr('10.256.0.0/16'))
self.assertFalse(check_if_cidr('10.0.256.0/24'))
self.assertFalse(check_if_cidr('10.0.0.256/32'))
self.assertFalse(check_if_cidr('10.0.0.256/33'))
def test_is_ip_in_cidr_success(self):
self.assertTrue(is_ip_in_cidr('10.0.0.0/8', ['0.0.0.0/0']))
self.assertTrue(is_ip_in_cidr('10.0.0.0/16', ['10.0.0.0/8']))
self.assertTrue(is_ip_in_cidr('10.0.200.0/24', ['10.0.0.0/16']))
self.assertTrue(is_ip_in_cidr('10.0.0.1/32', ['10.0.0.0/24']))
def test_is_ip_in_cidr_failure(self):
self.assertFalse(is_ip_in_cidr('200.0.0.0/16', ['10.0.0.0/8']))
self.assertFalse(is_ip_in_cidr('10.200.0.0/24', ['10.0.0.0/16']))
self.assertFalse(is_ip_in_cidr('10.0.1.1/32', ['10.0.0.0/24']))
def test_assign_sg_params_one_port_with_two_cidrs(self):
self.assertEqual(MockedData.sg_params_ssh_with_2_cidrs, assign_sg_params(MockedData.sg_ssh_with_2_cidrs))
def test_assign_sg_params_one_port_two_cidrs_any_proto(self):
self.assertEqual(MockedData.sg_params_ssh_with_2_cidrs_any_proto, assign_sg_params(MockedData.sg_ssh_with_2_cidrs_any_proto))
def test_assign_sg_params_all_ports_with_all_ips(self):
self.assertEqual(MockedData.sg_params_all_port_all_ip, assign_sg_params(MockedData.sg_all_port_all_ip))
def test_assign_sg_params_no_data_given_in_rules(self):
self.assertEqual(MockedData.sg_params_all_port_no_ip, assign_sg_params(MockedData.sg_all_port_no_ip))
def test_assign_sg_params_from_port_bigger_than_to_port(self):
with self.assertRaises(Failure) as context:
assign_sg_params(MockedData.sg_invalid)
self.assertTrue('Invalid configuration from_port can not be bigger than to_port.' in context.exception)
def test_validate_sg_rule_port_found_in_cidr(self):
with self.assertRaises(Failure) as context:
params = dict(from_port=22, to_port=22, cidr='0.0.0.0/0', ports='', proto='tcp')
validate_sg_rule(MockedData.sg_params_all_port_all_ip, params, False)
self.assertTrue('Found' in context.exception)
def test_validate_sg_rule_invalid_port_range_within_scenario(self):
with self.assertRaises(AssertionError) as context:
params = dict(from_port=43, to_port=42, cidr=None, ports='', proto='tcp')
validate_sg_rule(None, params, False)
self.assertTrue('Port range is defined incorrectly within the Scenario.' in context.exception)
def test_validate_sg_rule_port_range_found_in_cidr_fail(self):
scenario_list = ['22-80', '21-22', '21-23', '70-72', '79-80', '79-81']
for scenario in scenario_list:
with self.assertRaises(Failure) as context:
from_port, to_port = scenario.split('-')
params = dict(proto='tcp', from_port=from_port, to_port=to_port, cidr='0.0.0.0/0', ports='')
validate_sg_rule(MockedData.sg_params_list_range_public, params, False)
self.assertTrue('Found' in context.exception)
def test_validate_sg_rule_port_range_found_in_cidr_success_due_to_cidr_mismatch(self):
scenario_list = ['22-80', '21-22', '21-23', '70-72', '79-80', '79-81']
for scenario in scenario_list:
from_port, to_port = scenario.split('-')
params = dict(proto='tcp', from_port=from_port, to_port=to_port, ports='', cidr='0.0.0.0/0')
self.assertTrue(validate_sg_rule(MockedData.sg_params_list_range_private, params, False))
def test_validate_sg_rule_port_not_found_in_comma_delimited_scenario(self):
with self.assertRaises(Failure) as context:
ports = '22,443'.split(',')
params = dict(proto='tcp', from_port=0, to_port=0, ports=ports, cidr='0.0.0.0/0')
self.assertFalse(validate_sg_rule(MockedData.sg_params_list_range_public, params, True))
def test_validate_sg_rule_port_found_in_comma_delimited_scenario(self):
with self.assertRaises(Failure) as context:
ports = range(22,80)
ports = [str(i) for i in ports]
params = dict(proto='tcp', from_port=0, to_port=0, ports=ports, cidr='0.0.0.0/0')
self.assertFalse(validate_sg_rule(MockedData.sg_params_list_range_public, params, True))
def test_seek_in_dict_finding_a_key_in_root(self):
dictionary = dict(search_key=dict(something=[]))
search_key = 'search_key'
expected = [{'search_key': {'something': []}}]
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_a_key_in_nested_dict(self):
dictionary = dict(search_key=dict(something=dict(something_else=None)))
search_key = 'something'
expected = [{'something': {'something_else': None}}]
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_multiple_keys_in_nested_dict(self):
dictionary = dict(search_key=dict(something=dict(something_else=None, something=['something_else'])), something=[])
search_key = 'something'
expected = [{'something': {'something_else': None, 'something': ['something_else']}}, {'something': []}]
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_values_in_non_dicts(self):
dictionary = dict(search_key=dict(something=[]))
search_key = 'something_else'
expected = []
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_seek_in_dict_finding_values_in_non_dicts_on_root(self):
dictionary = 'something_else'
search_key = 'something_else'
expected = []
self.assertEqual(seek_key_in_dict(dictionary, search_key), expected)
def test_find_root_by_key_single_return(self):
haystack = dict(some_key=dict(values=dict(tags=[], something_else='something')))
search_key = 'tags'
expected = ['some_key']
self.assertEqual(find_root_by_key(haystack, search_key), expected)
def test_find_root_by_key_multiple_return(self):
haystack = dict(some_key=dict(values=dict(tags=[], something_else='something')), other_key=dict(values=dict(tags=[], something_else='something')))
search_key = 'tags'
expected = ['other_key', 'some_key']
self.assertEqual(sorted(find_root_by_key(haystack, search_key)), sorted(expected))
def test_find_root_by_key_multiple_return_02(self):
haystack = dict(some_key=dict(values=dict(tags=[], something_else='loki'), find_me='bingo'),
other_key=dict(values=dict(tags=[], something_else='thor'), find_me='b i n g o'))
search_key = 'tags'
return_key = 'find_me'
expected = ['bingo', 'b i n g o']
self.assertEqual(sorted(find_root_by_key(haystack, search_key, return_key)), sorted(expected))
def test_are_networks_same_success(self):
network_a = '192.168.0.0/24'
networks = ['192.168.0.0/24']
self.assertTrue(are_networks_same(network_a, networks))
def test_are_networks_same_false(self):
network_a = '192.168.0.0/24'
networks = ['192.168.0.0/23']
self.assertFalse(are_networks_same(network_a, networks))
def test_convert_resource_type_success(self):
self.assertEqual(convert_resource_type('AWS Security Group'), 'aws_security_group')
def test_convert_resource_type_failure(self):
self.assertEqual(convert_resource_type('test_resource'), 'test_resource')
def test_seek_regex_in_dict_value_nested_dict(self):
haystack = dict(search_key=dict(something='value'))
key_name = 'something'
needle = 'val.*'
expected = ['value']
self.assertEqual(seek_regex_key_in_dict_values(haystack, key_name, needle), expected)
def test_jsonify(self):
self.assertEqual(jsonify({}), {})
self.assertEqual(jsonify([]), [])
self.assertEqual(jsonify(12), 12)
self.assertEqual(jsonify('something'), 'something')
self.assertEqual(jsonify('{"test": "test_value"}'), {'test': 'test_value'})
def test_remove_mounted_resources(self, *args):
resource_list = {
'address': 'aws_subnet.test',
'type': 'aws_subnet',
'name': 'test',
'values': {
'tags': None,
'aws_vpc': [
{
'tags': {
'Environment': 'Sandbox',
'Name': 'mcrilly-sandbox'
},
'aws_subnet': [
{
'tags': None,
'terraform-compliance.mounted': True
}
], 'terraform-compliance.mounted': True
}
]
},
'terraform-compliance.mounted_resources': [
'aws_vpc'
]
}
output = remove_mounted_resources([resource_list])
self.assertEqual({'tags': None}, output[0]['values'])
def test_get_resource_name_from_stash_address_exists(self):
stash = {}
self.assertEqual({'address': 'test'}, get_resource_name_from_stash(stash=stash, address='test'))
def test_get_resource_name_from_stash(self):
stash = [
{
'address': 'test'
}
]
self.assertEqual({'address': 'test'}, get_resource_name_from_stash(stash=stash)) | true | true |
f7fa6b1c4d8f9c99d2440254d038c5e0584a4b01 | 1,372 | py | Python | logs/urls.py | huseyinyilmaz/worklogger | 3ad66bd7f96e977f5d4a1adb2d3e419a08622cd0 | [
"MIT"
] | 1 | 2017-04-25T10:02:53.000Z | 2017-04-25T10:02:53.000Z | logs/urls.py | huseyinyilmaz/worklogger | 3ad66bd7f96e977f5d4a1adb2d3e419a08622cd0 | [
"MIT"
] | null | null | null | logs/urls.py | huseyinyilmaz/worklogger | 3ad66bd7f96e977f5d4a1adb2d3e419a08622cd0 | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, url
from django.contrib import admin
from logs import views
# Register ModelAdmin classes from every installed app's admin module.
admin.autodiscover()

# URL routes for the work-log app.
# NOTE(review): ``patterns('', ...)`` is the pre-Django-1.8 style (deprecated
# in 1.8, removed in 1.10); the empty first argument means no common view
# prefix. Migrate to a plain list of url()/path() entries when upgrading.
urlpatterns = patterns(
    '',
    # Date-based archive views: year -> month -> day drill-down.
    url(r'^(?P<year>[0-9]{4})/(?P<month>[0-9]+)/(?P<day>[0-9]+)/$',
        views.LogDayArchiveView.as_view(),
        name="logs-day"),
    url(r'^(?P<year>[0-9]{4})/(?P<month>[0-9]+)/$',
        views.LogMonthArchiveView.as_view(),
        name="logs-month"),
    url(r'^(?P<year>[0-9]{4})/$',
        views.LogYearArchiveView.as_view(),
        name="logs-year"),
    # CRUD for individual log entries; <pk> is the log's primary key.
    url(r'^log/create/$',
        views.CreateLogView.as_view(),
        name="logs-log-create"),
    url(r'^log/update/(?P<pk>[\w-]+)$',
        views.UpdateLogView.as_view(),
        name="logs-log-update"),
    url(r'^log/delete/(?P<pk>[\w-]+)$',
        views.DeleteLogView.as_view(),
        name="logs-log-delete"),
    # CRUD for jobs that logs are recorded against.
    url(r'^job/create/$',
        views.CreateJobView.as_view(),
        name="logs-job-create"),
    url(r'^job/update/(?P<pk>[\w-]+)$',
        views.UpdateJobView.as_view(),
        name="logs-job-update"),
    url(r'^job/delete/(?P<pk>[\w-]+)$',
        views.DeleteJobView.as_view(),
        name="logs-job-delete"),
    # Flat listing of all jobs.
    url(r'^jobs/$',
        views.JobListView.as_view(),
        name="logs-jobs"),
)
| 28.583333 | 67 | 0.548834 | from django.conf.urls import patterns, url
from django.contrib import admin
from logs import views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^(?P<year>[0-9]{4})/(?P<month>[0-9]+)/(?P<day>[0-9]+)/$',
views.LogDayArchiveView.as_view(),
name="logs-day"),
url(r'^(?P<year>[0-9]{4})/(?P<month>[0-9]+)/$',
views.LogMonthArchiveView.as_view(),
name="logs-month"),
url(r'^(?P<year>[0-9]{4})/$',
views.LogYearArchiveView.as_view(),
name="logs-year"),
url(r'^log/create/$',
views.CreateLogView.as_view(),
name="logs-log-create"),
url(r'^log/update/(?P<pk>[\w-]+)$',
views.UpdateLogView.as_view(),
name="logs-log-update"),
url(r'^log/delete/(?P<pk>[\w-]+)$',
views.DeleteLogView.as_view(),
name="logs-log-delete"),
url(r'^job/create/$',
views.CreateJobView.as_view(),
name="logs-job-create"),
url(r'^job/update/(?P<pk>[\w-]+)$',
views.UpdateJobView.as_view(),
name="logs-job-update"),
url(r'^job/delete/(?P<pk>[\w-]+)$',
views.DeleteJobView.as_view(),
name="logs-job-delete"),
url(r'^jobs/$',
views.JobListView.as_view(),
name="logs-jobs"),
)
| true | true |
f7fa6c8f5eed64b921de94a8e17b58be55e8dfd7 | 1,339 | py | Python | sam-application/lambda_restapi/core/config.py | gbourniq/lambda-restapi | 11d59bb7182fb8d2a0508399607724b9ac3c1601 | [
"MIT"
] | 2 | 2020-12-25T20:13:04.000Z | 2021-02-04T19:09:11.000Z | sam-application/lambda_restapi/core/config.py | gbourniq/lambda-restapi | 11d59bb7182fb8d2a0508399607724b9ac3c1601 | [
"MIT"
] | null | null | null | sam-application/lambda_restapi/core/config.py | gbourniq/lambda-restapi | 11d59bb7182fb8d2a0508399607724b9ac3c1601 | [
"MIT"
] | null | null | null | """Main configuration parameters for FastAPI and Lambda powertools"""
from pathlib import Path
from typing import List
from starlette.config import Config
from starlette.datastructures import URL, CommaSeparatedStrings, Secret
# Paths
# SAM_APP_DIR resolves three levels up from this module (the sam-application
# directory); the .env file is expected at its root.
SAM_APP_DIR = Path(__file__).resolve().parent.parent.parent
ENV_PATH = SAM_APP_DIR / ".env"
print(f"Loading configs from {ENV_PATH}")
# Starlette Config reads real environment variables first, then falls back to
# the .env file; every config("NAME") below follows that precedence.
config = Config(env_file=ENV_PATH)
# Lambda config (Powertools)
# https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
LOG_FULL_EVENT: bool = config("LOG_FULL_EVENT", cast=bool)
LOG_LEVEL: str = config("LOG_LEVEL")
POWERTOOLS_METRICS_NAMESPACE: str = config("POWERTOOLS_METRICS_NAMESPACE")
POWERTOOLS_SERVICE_NAME: str = config("POWERTOOLS_SERVICE_NAME")
# FastAPI config
API_PREFIX: str = config("API_PREFIX")
# cast=CommaSeparatedStrings yields a CommaSeparatedStrings (a Sequence[str]);
# the List[str] annotation is the intended consumer-facing shape.
ALLOWED_HOSTS: List[str] = config("ALLOWED_HOSTS", cast=CommaSeparatedStrings)
DEBUG: bool = config("DEBUG", cast=bool)
DESCRIPTION: str = config("DESCRIPTION")
PROJECT_NAME: str = config("PROJECT_NAME")
ROOT_PATH: str = config("ROOT_PATH", default="/")  # Set in sam-template.yaml
SECRET_KEY_HEADER: Secret = config("SECRET_KEY_HEADER", cast=Secret)
# Annotation fixed: cast=URL returns a starlette URL, not a str.
TEST_SERVER: URL = config("TEST_SERVER", cast=URL)
VERSION: str = config("VERSION")
# Static assets (currently disabled)
# ASSETS_PATH: str = config("ASSETS_PATH")
# ASSETS_PATH.mkdir(exist_ok=True)
| 38.257143 | 79 | 0.78118 | from pathlib import Path
from typing import List
from starlette.config import Config
from starlette.datastructures import URL, CommaSeparatedStrings, Secret
SAM_APP_DIR = Path(__file__).resolve().parent.parent.parent
ENV_PATH = SAM_APP_DIR / ".env"
print(f"Loading configs from {ENV_PATH}")
config = Config(env_file=ENV_PATH)
= config("LOG_FULL_EVENT", cast=bool)
LOG_LEVEL: str = config("LOG_LEVEL")
POWERTOOLS_METRICS_NAMESPACE: str = config("POWERTOOLS_METRICS_NAMESPACE")
POWERTOOLS_SERVICE_NAME: str = config("POWERTOOLS_SERVICE_NAME")
API_PREFIX: str = config("API_PREFIX")
ALLOWED_HOSTS: List[str] = config("ALLOWED_HOSTS", cast=CommaSeparatedStrings)
DEBUG: bool = config("DEBUG", cast=bool)
DESCRIPTION: str = config("DESCRIPTION")
PROJECT_NAME: str = config("PROJECT_NAME")
ROOT_PATH: str = config("ROOT_PATH", default="/")
SECRET_KEY_HEADER: Secret = config("SECRET_KEY_HEADER", cast=Secret)
TEST_SERVER: str = config("TEST_SERVER", cast=URL)
VERSION: str = config("VERSION")
| true | true |
f7fa6cb3492812d61ef4505d7530e8195aca21d3 | 9,760 | py | Python | sawtooth/transaction_processor/enclave_registry_handler.py | cheevo/private-data-objects | a73f13633dc9e5d77cbd402645eb22f6d4086786 | [
"Apache-2.0"
] | null | null | null | sawtooth/transaction_processor/enclave_registry_handler.py | cheevo/private-data-objects | a73f13633dc9e5d77cbd402645eb22f6d4086786 | [
"Apache-2.0"
] | null | null | null | sawtooth/transaction_processor/enclave_registry_handler.py | cheevo/private-data-objects | a73f13633dc9e5d77cbd402645eb22f6d4086786 | [
"Apache-2.0"
] | 1 | 2018-06-20T18:15:32.000Z | 2018-06-20T18:15:32.000Z | # Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import hashlib
import base64
import json
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.exceptions import InvalidSignature
from sawtooth_sdk.processor.handler import TransactionHandler
from sawtooth_sdk.messaging.future import FutureTimeoutError
from sawtooth_sdk.processor.exceptions import InvalidTransaction
from sawtooth_sdk.processor.exceptions import InternalError
from sawtooth_sdk.protobuf.setting_pb2 import Setting
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveInfo
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveTransaction
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveRegister
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveUpdate
from sawtooth.helpers.pdo_debug import PdoDbgDump
from common.pdo_signing import verify_enclave_registration_info
from common.pdo_connect_tp import PdoTpConnectHelper
LOGGER = logging.getLogger(__name__)
STATE_TIMEOUT_SEC = 10
class ContractEnclaveRegistryTransactionHandler(TransactionHandler):
    """Sawtooth transaction processor handler for the PDO contract-enclave
    registry transaction family.

    Handles three verbs carried in PdoContractEnclaveTransaction:
    ``register`` (create a PdoContractEnclaveInfo entry after verifying the
    enclave's registration/attestation info), ``update`` (owner-only refresh
    of the registration block context) and ``delete`` (only permitted when
    debug support is enabled).
    """

    def __init__(self, debug_on, dbg_dump_to_logger=True):
        """Create the handler.

        debug_on: when True, allows simulation mode (empty proof data) and
            the 'delete' verb.
        dbg_dump_to_logger: route debug dumps through LOGGER instead of
            PdoDbgDump's default sink.
        """
        self.connect = PdoTpConnectHelper()
        self._debug_on = debug_on
        if dbg_dump_to_logger:
            self.dbg_dump = PdoDbgDump(LOGGER)
        else:
            self.dbg_dump = PdoDbgDump()
        LOGGER.debug("Enclave namespace prefix: %s",
                     self.connect.get_enclave_prefix())

    @property
    def family_name(self):
        """Transaction family name, as supplied by the connect helper."""
        family = self.connect.get_enclave_registry_family_name()
        LOGGER.debug("Enclave family name: %s", family)
        return family

    @property
    def family_versions(self):
        """Family versions this handler supports."""
        return ['1.0']

    @property
    def namespaces(self):
        """State address prefixes owned by this handler."""
        return self.connect.get_enclave_prefix()

    def _get_enclave_state(self, context, enclave_id):
        """Read the PdoContractEnclaveInfo stored under enclave_id's address."""
        address = self.connect.get_enclave_address(enclave_id)
        return self.connect.get_state(context, address, PdoContractEnclaveInfo)

    def _delete_enclave_state(self, context, enclave_id):
        """Remove the state entry stored under enclave_id's address."""
        address = self.connect.get_enclave_address(enclave_id)
        return self.connect.delete_state(context, address)

    def _set_enclave_state(self, context, enclave_id, data):
        """Write serialized enclave info to enclave_id's state address."""
        address = self.connect.get_enclave_address(enclave_id)
        return self.connect.set_state(context, address, data)

    def _verify_registration_info(self,
                                  payload,
                                  details,
                                  public_key_hash,
                                  context):
        """Verify the enclave's registration (attestation) information.

        Raises InvalidTransaction when simulation mode is used with debug
        support off, and ValueError when a required on-chain configuration
        setting (report key, measurements, basenames) is missing; the actual
        cryptographic verification is delegated to
        verify_enclave_registration_info, whose exceptions propagate.
        """
        # TODO: Allowing no proof data should be removed in the production version
        if not details.proof_data:
            LOGGER.debug("*** Enclave proof data is empty - simulation mode")
            if not self._debug_on:
                raise InvalidTransaction(
                    'Simulation mode is not allowed when the debug support is OFF')
            return

        # Try to get the report key from the configuration setting.
        # If it is not there, fail verification.
        try:
            report_public_key_pem = self.connect.get_report_public_key(context)
        except KeyError:
            raise \
                ValueError(
                    'Failed to get report public key configuration setting {}'.format(
                        self.connect.get_report_public_key_setting_name()))

        # Retrieve the valid enclave measurement values, converting the comma-
        # delimited list. If it is not there, fail verification.
        try:
            valid_measurements = self.connect.get_valid_measurements(context)
        except KeyError:
            raise \
                ValueError(
                    'Failed to get enclave measurements setting {}'.format(
                        self.connect.get_valid_measurements_setting_name()))

        # Retrieve the valid enclave basename value. If it is not there,
        # fail verification.
        try:
            valid_basenames = self.connect.get_valid_basenames(context)
        except KeyError:
            raise \
                ValueError(
                    'Failed to get enclave basenames setting {}'.format(
                        self.connect.get_valid_basenames_setting_name()))

        verify_enclave_registration_info(self.connect,
                                         payload,
                                         details,
                                         public_key_hash,
                                         context,
                                         report_public_key_pem,
                                         valid_measurements,
                                         valid_basenames)

    def check_address(self, context, key, register_new):
        """Fetch existing enclave state and enforce existence rules.

        For register_new=True the enclave must NOT already exist (a fresh
        PdoContractEnclaveInfo is returned when the address is empty).
        Otherwise the enclave MUST exist and its current state is returned.
        InternalError from get_state indicates the address holds no data.
        """
        try:
            state = self._get_enclave_state(context, key)
            if register_new:
                if state.verifying_key:
                    raise InvalidTransaction(
                        'Contract enclave already exist with signing_key {}'\
                        .format(key))
                else:
                    return state
            else:
                if not state.verifying_key:
                    raise InvalidTransaction(
                        'Enclave does not exist: {0}'.format(key))
                else:
                    return state

        except InternalError:
            if not register_new:
                raise InvalidTransaction(
                    'Enclave does not exist: {0}'.format(key))
            else:
                return PdoContractEnclaveInfo()

    def apply(self, transaction, context):
        """Entry point called by the Sawtooth validator for each transaction.

        Decodes the PdoContractEnclaveTransaction payload and dispatches on
        its verb; raises InvalidTransaction for any rule violation.
        """
        txn_header = transaction.header
        txn_signer_public_key = txn_header.signer_public_key

        payload = PdoContractEnclaveTransaction()
        payload.ParseFromString(transaction.payload)
        self.dbg_dump.dump_contract_enclave_transaction(payload)

        # 'register' requires the enclave to be absent; other verbs require
        # it to exist (see check_address).
        info = self.check_address(context,
                                  payload.verifying_key,
                                  payload.verb == 'register')

        if payload.verb == 'register':
            # The attestation binds to the hash of the signer's public key.
            public_key_hash = hashlib.sha256(txn_signer_public_key.encode()).hexdigest()
            details = PdoContractEnclaveRegister()
            details.ParseFromString(payload.transaction_details)
            try:
                self._verify_registration_info(payload,
                                               details,
                                               public_key_hash,
                                               context)
            except ValueError as error:
                raise InvalidTransaction\
                    ('Invalid Signup Info: {}'.format(error))

            info.verifying_key = payload.verifying_key
            info.encryption_key = details.encryption_key
            info.last_registration_block_context = \
                details.registration_block_context
            info.owner_id = txn_signer_public_key
            info.registration_transaction_id = transaction.signature
            info.proof_data = details.proof_data

            self.dbg_dump.dump_contract_enclave_state(info, "Setting new PdoContractEnclaveInfo")
            self._set_enclave_state(context,
                                    payload.verifying_key,
                                    info.SerializeToString())

        elif payload.verb == 'delete' or payload.verb == 'update':
            self.dbg_dump.dump_contract_enclave_state(info, "Received PdoContractEnclaveInfo")
            if payload.verb == 'delete':
                # Delete is a debug-only operation.
                if not self._debug_on:
                    raise InvalidTransaction('Delete is not allowed, debug support is OFF')
                LOGGER.info("Deleting PdoContractEnclaveInfo %s", payload.verifying_key)
                self._delete_enclave_state(context, payload.verifying_key)
            else:
                # Check the contract enclave owner matches transaction signer.
                # NOTE(review): the message formats info.verifying_key as
                # 'signer' - the actual signer is txn_signer_public_key and
                # the owner is info.owner_id; the arguments look mislabeled.
                if info.owner_id != txn_signer_public_key:
                    raise InvalidTransaction(
                        'Owner signature mismatch signer {}, owner {}'
                        .format(info.verifying_key, txn_signer_public_key))
                details = PdoContractEnclaveUpdate()
                details.ParseFromString(payload.transaction_details)
                info.last_registration_block_context = \
                    details.registration_block_context
                self.dbg_dump.dump_contract_enclave_state(info, "Updating existing PdoContractEnclaveInfo")
                self._set_enclave_state(context,
                                        payload.verifying_key,
                                        info.SerializeToString())
        else:
            raise InvalidTransaction('Invalid transaction action {}'
                                     .format(payload.verb))
import logging
import hashlib
import base64
import json
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.exceptions import InvalidSignature
from sawtooth_sdk.processor.handler import TransactionHandler
from sawtooth_sdk.messaging.future import FutureTimeoutError
from sawtooth_sdk.processor.exceptions import InvalidTransaction
from sawtooth_sdk.processor.exceptions import InternalError
from sawtooth_sdk.protobuf.setting_pb2 import Setting
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveInfo
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveTransaction
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveRegister
from sawtooth.pdo_protos.pdo_contract_enclave_registry_pb2 import PdoContractEnclaveUpdate
from sawtooth.helpers.pdo_debug import PdoDbgDump
from common.pdo_signing import verify_enclave_registration_info
from common.pdo_connect_tp import PdoTpConnectHelper
LOGGER = logging.getLogger(__name__)
STATE_TIMEOUT_SEC = 10
class ContractEnclaveRegistryTransactionHandler(TransactionHandler):
def __init__(self, debug_on, dbg_dump_to_logger=True):
self.connect = PdoTpConnectHelper()
self._debug_on = debug_on
if dbg_dump_to_logger:
self.dbg_dump = PdoDbgDump(LOGGER)
else:
self.dbg_dump = PdoDbgDump()
LOGGER.debug("Enclave namespace prefix: %s",
self.connect.get_enclave_prefix())
@property
def family_name(self):
family = self.connect.get_enclave_registry_family_name()
LOGGER.debug("Enclave family name: %s", family)
return family
@property
def family_versions(self):
return ['1.0']
@property
def namespaces(self):
return self.connect.get_enclave_prefix()
def _get_enclave_state(self, context, enclave_id):
address = self.connect.get_enclave_address(enclave_id)
return self.connect.get_state(context, address, PdoContractEnclaveInfo)
def _delete_enclave_state(self, context, enclave_id):
address = self.connect.get_enclave_address(enclave_id)
return self.connect.delete_state(context, address)
def _set_enclave_state(self, context, enclave_id, data):
address = self.connect.get_enclave_address(enclave_id)
return self.connect.set_state(context, address, data)
def _verify_registration_info(self,
payload,
details,
public_key_hash,
context):
if not details.proof_data:
LOGGER.debug("*** Enclave proof data is empty - simulation mode")
if not self._debug_on:
raise InvalidTransaction(
'Simulation mode is not allowed when the debug support is OFF')
return
try:
report_public_key_pem = self.connect.get_report_public_key(context)
except KeyError:
raise \
ValueError(
'Failed to get report public key configuration setting {}'.format(
self.connect.get_report_public_key_setting_name()))
try:
valid_measurements = self.connect.get_valid_measurements(context)
except KeyError:
raise \
ValueError(
'Failed to get enclave measurements setting {}'.format(
self.connect.get_valid_measurements_setting_name()))
try:
valid_basenames = self.connect.get_valid_basenames(context)
except KeyError:
raise \
ValueError(
'Failed to get enclave basenames setting {}'.format(
self.connect.get_valid_basenames_setting_name()))
verify_enclave_registration_info(self.connect,
payload,
details,
public_key_hash,
context,
report_public_key_pem,
valid_measurements,
valid_basenames)
def check_address(self, context, key, register_new):
try:
state = self._get_enclave_state(context, key)
if register_new:
if state.verifying_key:
raise InvalidTransaction(
'Contract enclave already exist with signing_key {}'\
.format(key))
else:
return state
else:
if not state.verifying_key:
raise InvalidTransaction(
'Enclave does not exist: {0}'.format(key))
else:
return state
except InternalError:
if not register_new:
raise InvalidTransaction(
'Enclave does not exist: {0}'.format(key))
else:
return PdoContractEnclaveInfo()
def apply(self, transaction, context):
    """Transaction-processor entry point for contract-enclave transactions.

    Dispatches on payload.verb:
      - 'register': validates the signup info and writes a new enclave
        record keyed by the enclave's verifying key;
      - 'update': lets the registering owner refresh the record's
        registration block context;
      - 'delete': removes the record (allowed only with debug support on).
    Raises InvalidTransaction on any validation failure.
    """
    txn_header = transaction.header
    txn_signer_public_key = txn_header.signer_public_key
    payload = PdoContractEnclaveTransaction()
    payload.ParseFromString(transaction.payload)
    self.dbg_dump.dump_contract_enclave_transaction(payload)
    # For 'register' the address must be new (or a stale empty record);
    # for the other verbs the record must already exist.
    info = self.check_address(context,
                              payload.verifying_key,
                              payload.verb == 'register')
    if payload.verb == 'register':
        public_key_hash = hashlib.sha256(txn_signer_public_key.encode()).hexdigest()
        details = PdoContractEnclaveRegister()
        details.ParseFromString(payload.transaction_details)
        try:
            self._verify_registration_info(payload,
                                           details,
                                           public_key_hash,
                                           context)
        except ValueError as error:
            raise InvalidTransaction\
                ('Invalid Signup Info: {}'.format(error))
        info.verifying_key = payload.verifying_key
        info.encryption_key = details.encryption_key
        info.last_registration_block_context = \
            details.registration_block_context
        info.owner_id = txn_signer_public_key
        info.registration_transaction_id = transaction.signature
        info.proof_data = details.proof_data
        self.dbg_dump.dump_contract_enclave_state(info, "Setting new PdoContractEnclaveInfo")
        self._set_enclave_state(context,
                                payload.verifying_key,
                                info.SerializeToString())
    elif payload.verb == 'delete' or payload.verb == 'update':
        self.dbg_dump.dump_contract_enclave_state(info, "Received PdoContractEnclaveInfo")
        if payload.verb == 'delete':
            # Deletion is a debug-only facility.
            if not self._debug_on:
                raise InvalidTransaction('Delete is not allowed, debug support is OFF')
            LOGGER.info("Deleting PdoContractEnclaveInfo %s", payload.verifying_key)
            self._delete_enclave_state(context, payload.verifying_key)
        else:
            # 'update': only the registering owner may refresh the record.
            if info.owner_id != txn_signer_public_key:
                raise InvalidTransaction(
                    'Owner signature mismatch signer {}, owner {}'
                    .format(info.verifying_key, txn_signer_public_key))
            details = PdoContractEnclaveUpdate()
            details.ParseFromString(payload.transaction_details)
            info.last_registration_block_context = \
                details.registration_block_context
            self.dbg_dump.dump_contract_enclave_state(info, "Updating existing PdoContractEnclaveInfo")
            self._set_enclave_state(context,
                                    payload.verifying_key,
                                    info.SerializeToString())
    else:
        raise InvalidTransaction('Invalid transaction action {}'
                                 .format(payload.verb))
| true | true |
f7fa6d040b4db47c1bf0a94459d8562527bc40ad | 1,982 | py | Python | model_zoo/official/cv/resnet/postprocess.py | kungfu-team/mindspore-bert | 71501cf52ae01db9d6a73fb64bcfe68a6509dc32 | [
"Apache-2.0"
] | 2 | 2021-07-08T13:10:42.000Z | 2021-11-08T02:48:57.000Z | model_zoo/official/cv/resnet/postprocess.py | peixinhou/mindspore | fcb2ec2779b753e95c762cf292b23bd81d1f561b | [
"Apache-2.0"
] | null | null | null | model_zoo/official/cv/resnet/postprocess.py | peixinhou/mindspore | fcb2ec2779b753e95c762cf292b23bd81d1f561b | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""post process for 310 inference"""
import os
import json
import argparse
import numpy as np
from src.config import config2 as config
# Inference was exported with batch size 1; each .bin result file holds
# one (batch_size, class_num) float32 logits array.
batch_size = 1
parser = argparse.ArgumentParser(description="resnet inference")
parser.add_argument("--result_path", type=str, required=True, help="result files path.")
parser.add_argument("--label_path", type=str, required=True, help="image file path.")
args = parser.parse_args()
def get_result(result_path, label_path):
    """Compute and print top-1/top-5 accuracy of 310-inference results.

    result_path: directory of "<image-id>_0.bin" files, each a float32
        array of shape (batch_size, config.class_num).
    label_path: JSON file mapping "<image-id>.JPEG" to the true class id.
    """
    files = os.listdir(result_path)
    with open(label_path, "r") as label:
        labels = json.load(label)
    top1 = 0
    top5 = 0
    total_data = len(files)
    if total_data == 0:
        # Guard: the original crashed with ZeroDivisionError here.
        print(f"No result files found in {result_path}.")
        return
    for fname in files:  # renamed from 'file' to avoid shadowing the builtin
        img_ids_name = fname.split('_0.')[0]
        data_path = os.path.join(result_path, img_ids_name + "_0.bin")
        result = np.fromfile(data_path, dtype=np.float32).reshape(batch_size, config.class_num)
        for batch in range(batch_size):
            # Class ids sorted by descending logit.
            predict = np.argsort(-result[batch], axis=-1)
            if labels[img_ids_name+".JPEG"] == predict[0]:
                top1 += 1
            if labels[img_ids_name+".JPEG"] in predict[:5]:
                top5 += 1
    print(f"Total data: {total_data}, top1 accuracy: {top1/total_data}, top5 accuracy: {top5/total_data}.")
if __name__ == '__main__':
    # Script entry point: paths come from the CLI flags parsed above.
    get_result(args.result_path, args.label_path)
| 38.115385 | 107 | 0.67003 |
import os
import json
import argparse
import numpy as np
from src.config import config2 as config
batch_size = 1
parser = argparse.ArgumentParser(description="resnet inference")
parser.add_argument("--result_path", type=str, required=True, help="result files path.")
parser.add_argument("--label_path", type=str, required=True, help="image file path.")
args = parser.parse_args()
def get_result(result_path, label_path):
files = os.listdir(result_path)
with open(label_path, "r") as label:
labels = json.load(label)
top1 = 0
top5 = 0
total_data = len(files)
for file in files:
img_ids_name = file.split('_0.')[0]
data_path = os.path.join(result_path, img_ids_name + "_0.bin")
result = np.fromfile(data_path, dtype=np.float32).reshape(batch_size, config.class_num)
for batch in range(batch_size):
predict = np.argsort(-result[batch], axis=-1)
if labels[img_ids_name+".JPEG"] == predict[0]:
top1 += 1
if labels[img_ids_name+".JPEG"] in predict[:5]:
top5 += 1
print(f"Total data: {total_data}, top1 accuracy: {top1/total_data}, top5 accuracy: {top5/total_data}.")
if __name__ == '__main__':
get_result(args.result_path, args.label_path)
| true | true |
f7fa6da328997d03e1055b10ed84423e5c467e8c | 6,975 | py | Python | my_vim_files/python27/Lib/idlelib/aboutDialog.py | satsaeid/dotfiles | 401c3213b31dd941b44e553c6f0441187b01c19a | [
"MIT"
] | 81 | 2017-03-13T08:24:01.000Z | 2021-04-02T09:48:38.000Z | my_vim_files/python27/Lib/idlelib/aboutDialog.py | satsaeid/dotfiles | 401c3213b31dd941b44e553c6f0441187b01c19a | [
"MIT"
] | 6 | 2017-04-30T08:36:55.000Z | 2017-09-22T01:37:28.000Z | my_vim_files/python27/Lib/idlelib/aboutDialog.py | satsaeid/dotfiles | 401c3213b31dd941b44e553c6f0441187b01c19a | [
"MIT"
] | 41 | 2017-03-18T14:11:58.000Z | 2021-04-14T05:06:09.000Z | """About Dialog for IDLE
"""
from Tkinter import *
import os
from idlelib import textView
from idlelib import idlever
class AboutDialog(Toplevel):
    """Modal about dialog for idle
    """
    def __init__(self,parent,title):
        # Build the dialog, place it near the parent window, make it
        # modal (grab_set) and block until dismissed (wait_window).
        Toplevel.__init__(self, parent)
        self.configure(borderwidth=5)
        self.geometry("+%d+%d" % (parent.winfo_rootx()+30,
                                  parent.winfo_rooty()+30))
        self.bg = "#707070"  # dialog background colour
        self.fg = "#ffffff"  # dialog text colour
        self.CreateWidgets()
        self.resizable(height=FALSE, width=FALSE)
        self.title(title)
        self.transient(parent)
        self.grab_set()
        self.protocol("WM_DELETE_WINDOW", self.Ok)
        self.parent = parent
        self.buttonOk.focus_set()
        self.bind('<Return>',self.Ok) #dismiss dialog
        self.bind('<Escape>',self.Ok) #dismiss dialog
        self.wait_window()
    def CreateWidgets(self):
        """Lay out title, byline, contact info, version labels and the
        button rows that open the license/credits/readme viewers.

        NOTE(review): 'sys' and 'TkVersion' are not imported here --
        presumably they arrive via 'from Tkinter import *'; verify.
        """
        frameMain = Frame(self, borderwidth=2, relief=SUNKEN)
        frameButtons = Frame(self)
        frameButtons.pack(side=BOTTOM, fill=X)
        frameMain.pack(side=TOP, expand=TRUE, fill=BOTH)
        self.buttonOk = Button(frameButtons, text='Close',
                               command=self.Ok)
        self.buttonOk.pack(padx=5, pady=5)
        #self.picture = Image('photo', data=self.pictureData)
        frameBg = Frame(frameMain, bg=self.bg)
        frameBg.pack(expand=TRUE, fill=BOTH)
        labelTitle = Label(frameBg, text='IDLE', fg=self.fg, bg=self.bg,
                           font=('courier', 24, 'bold'))
        labelTitle.grid(row=0, column=0, sticky=W, padx=10, pady=10)
        #labelPicture = Label(frameBg, text='[picture]')
        #image=self.picture, bg=self.bg)
        #labelPicture.grid(row=1, column=1, sticky=W, rowspan=2,
        #                  padx=0, pady=3)
        byline = "Python's Integrated DeveLopment Environment" + 5*'\n'
        labelDesc = Label(frameBg, text=byline, justify=LEFT,
                          fg=self.fg, bg=self.bg)
        labelDesc.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5)
        labelEmail = Label(frameBg, text='email: idle-dev@python.org',
                           justify=LEFT, fg=self.fg, bg=self.bg)
        labelEmail.grid(row=6, column=0, columnspan=2,
                        sticky=W, padx=10, pady=0)
        labelWWW = Label(frameBg, text='www: http://www.python.org/idle/',
                         justify=LEFT, fg=self.fg, bg=self.bg)
        labelWWW.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0)
        # Horizontal separator between contact info and version info.
        Frame(frameBg, borderwidth=1, relief=SUNKEN,
              height=2, bg=self.bg).grid(row=8, column=0, sticky=EW,
                                         columnspan=3, padx=5, pady=5)
        labelPythonVer = Label(frameBg, text='Python version: ' + \
                               sys.version.split()[0], fg=self.fg, bg=self.bg)
        labelPythonVer.grid(row=9, column=0, sticky=W, padx=10, pady=0)
        # handle weird tk version num in windoze python >= 1.6 (?!?)
        tkVer = repr(TkVersion).split('.')
        tkVer[len(tkVer)-1] = str('%.3g' % (float('.'+tkVer[len(tkVer)-1])))[2:]
        if tkVer[len(tkVer)-1] == '':
            tkVer[len(tkVer)-1] = '0'
        tkVer = '.'.join(tkVer)
        labelTkVer = Label(frameBg, text='Tk version: '+
                           tkVer, fg=self.fg, bg=self.bg)
        labelTkVer.grid(row=9, column=1, sticky=W, padx=2, pady=0)
        # Python-level buttons: license / copyright / interpreter credits.
        py_button_f = Frame(frameBg, bg=self.bg)
        py_button_f.grid(row=10, column=0, columnspan=2, sticky=NSEW)
        buttonLicense = Button(py_button_f, text='License', width=8,
                               highlightbackground=self.bg,
                               command=self.ShowLicense)
        buttonLicense.pack(side=LEFT, padx=10, pady=10)
        buttonCopyright = Button(py_button_f, text='Copyright', width=8,
                                 highlightbackground=self.bg,
                                 command=self.ShowCopyright)
        buttonCopyright.pack(side=LEFT, padx=10, pady=10)
        buttonCredits = Button(py_button_f, text='Credits', width=8,
                               highlightbackground=self.bg,
                               command=self.ShowPythonCredits)
        buttonCredits.pack(side=LEFT, padx=10, pady=10)
        Frame(frameBg, borderwidth=1, relief=SUNKEN,
              height=2, bg=self.bg).grid(row=11, column=0, sticky=EW,
                                         columnspan=3, padx=5, pady=5)
        idle_v = Label(frameBg, text='IDLE version: ' + idlever.IDLE_VERSION,
                       fg=self.fg, bg=self.bg)
        idle_v.grid(row=12, column=0, sticky=W, padx=10, pady=0)
        # IDLE-level buttons: README / NEWS / IDLE credits file viewers.
        idle_button_f = Frame(frameBg, bg=self.bg)
        idle_button_f.grid(row=13, column=0, columnspan=3, sticky=NSEW)
        idle_about_b = Button(idle_button_f, text='README', width=8,
                              highlightbackground=self.bg,
                              command=self.ShowIDLEAbout)
        idle_about_b.pack(side=LEFT, padx=10, pady=10)
        idle_news_b = Button(idle_button_f, text='NEWS', width=8,
                             highlightbackground=self.bg,
                             command=self.ShowIDLENEWS)
        idle_news_b.pack(side=LEFT, padx=10, pady=10)
        idle_credits_b = Button(idle_button_f, text='Credits', width=8,
                                highlightbackground=self.bg,
                                command=self.ShowIDLECredits)
        idle_credits_b.pack(side=LEFT, padx=10, pady=10)
    def ShowLicense(self):
        # 'license'/'copyright'/'credits' are the interactive-help objects
        # installed by the 'site' module (site._Printer instances, per the
        # name-mangled attributes used in display_printer_text).
        self.display_printer_text('About - License', license)
    def ShowCopyright(self):
        self.display_printer_text('About - Copyright', copyright)
    def ShowPythonCredits(self):
        self.display_printer_text('About - Python Credits', credits)
    def ShowIDLECredits(self):
        self.display_file_text('About - Credits', 'CREDITS.txt', 'iso-8859-1')
    def ShowIDLEAbout(self):
        self.display_file_text('About - Readme', 'README.txt')
    def ShowIDLENEWS(self):
        self.display_file_text('About - NEWS', 'NEWS.txt')
    def display_printer_text(self, title, printer):
        # Force the lazy _Printer to build its text, then show it.
        printer._Printer__setup()
        text = '\n'.join(printer._Printer__lines)
        textView.view_text(self, title, text)
    def display_file_text(self, title, filename, encoding=None):
        # Show a text file that lives alongside this module.
        fn = os.path.join(os.path.abspath(os.path.dirname(__file__)), filename)
        textView.view_file(self, title, fn, encoding)
    def Ok(self, event=None):
        # Accepts an optional event so it can serve as a key binding too.
        self.destroy()
if __name__ == '__main__':
    # test the dialog
    # Manual smoke test: clicking the button opens the modal About dialog.
    root = Tk()
    def run():
        from idlelib import aboutDialog
        aboutDialog.AboutDialog(root, 'About')
    Button(root, text='Dialog', command=run).pack()
    root.mainloop()
| 46.192053 | 81 | 0.567742 |
from Tkinter import *
import os
from idlelib import textView
from idlelib import idlever
class AboutDialog(Toplevel):
def __init__(self,parent,title):
Toplevel.__init__(self, parent)
self.configure(borderwidth=5)
self.geometry("+%d+%d" % (parent.winfo_rootx()+30,
parent.winfo_rooty()+30))
self.bg = "#707070"
self.fg = "#ffffff"
self.CreateWidgets()
self.resizable(height=FALSE, width=FALSE)
self.title(title)
self.transient(parent)
self.grab_set()
self.protocol("WM_DELETE_WINDOW", self.Ok)
self.parent = parent
self.buttonOk.focus_set()
self.bind('<Return>',self.Ok)
self.bind('<Escape>',self.Ok)
self.wait_window()
def CreateWidgets(self):
frameMain = Frame(self, borderwidth=2, relief=SUNKEN)
frameButtons = Frame(self)
frameButtons.pack(side=BOTTOM, fill=X)
frameMain.pack(side=TOP, expand=TRUE, fill=BOTH)
self.buttonOk = Button(frameButtons, text='Close',
command=self.Ok)
self.buttonOk.pack(padx=5, pady=5)
frameBg = Frame(frameMain, bg=self.bg)
frameBg.pack(expand=TRUE, fill=BOTH)
labelTitle = Label(frameBg, text='IDLE', fg=self.fg, bg=self.bg,
font=('courier', 24, 'bold'))
labelTitle.grid(row=0, column=0, sticky=W, padx=10, pady=10)
byline = "Python's Integrated DeveLopment Environment" + 5*'\n'
labelDesc = Label(frameBg, text=byline, justify=LEFT,
fg=self.fg, bg=self.bg)
labelDesc.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5)
labelEmail = Label(frameBg, text='email: idle-dev@python.org',
justify=LEFT, fg=self.fg, bg=self.bg)
labelEmail.grid(row=6, column=0, columnspan=2,
sticky=W, padx=10, pady=0)
labelWWW = Label(frameBg, text='www: http://www.python.org/idle/',
justify=LEFT, fg=self.fg, bg=self.bg)
labelWWW.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0)
Frame(frameBg, borderwidth=1, relief=SUNKEN,
height=2, bg=self.bg).grid(row=8, column=0, sticky=EW,
columnspan=3, padx=5, pady=5)
labelPythonVer = Label(frameBg, text='Python version: ' + \
sys.version.split()[0], fg=self.fg, bg=self.bg)
labelPythonVer.grid(row=9, column=0, sticky=W, padx=10, pady=0)
# handle weird tk version num in windoze python >= 1.6 (?!?)
tkVer = repr(TkVersion).split('.')
tkVer[len(tkVer)-1] = str('%.3g' % (float('.'+tkVer[len(tkVer)-1])))[2:]
if tkVer[len(tkVer)-1] == '':
tkVer[len(tkVer)-1] = '0'
tkVer = '.'.join(tkVer)
labelTkVer = Label(frameBg, text='Tk version: '+
tkVer, fg=self.fg, bg=self.bg)
labelTkVer.grid(row=9, column=1, sticky=W, padx=2, pady=0)
py_button_f = Frame(frameBg, bg=self.bg)
py_button_f.grid(row=10, column=0, columnspan=2, sticky=NSEW)
buttonLicense = Button(py_button_f, text='License', width=8,
highlightbackground=self.bg,
command=self.ShowLicense)
buttonLicense.pack(side=LEFT, padx=10, pady=10)
buttonCopyright = Button(py_button_f, text='Copyright', width=8,
highlightbackground=self.bg,
command=self.ShowCopyright)
buttonCopyright.pack(side=LEFT, padx=10, pady=10)
buttonCredits = Button(py_button_f, text='Credits', width=8,
highlightbackground=self.bg,
command=self.ShowPythonCredits)
buttonCredits.pack(side=LEFT, padx=10, pady=10)
Frame(frameBg, borderwidth=1, relief=SUNKEN,
height=2, bg=self.bg).grid(row=11, column=0, sticky=EW,
columnspan=3, padx=5, pady=5)
idle_v = Label(frameBg, text='IDLE version: ' + idlever.IDLE_VERSION,
fg=self.fg, bg=self.bg)
idle_v.grid(row=12, column=0, sticky=W, padx=10, pady=0)
idle_button_f = Frame(frameBg, bg=self.bg)
idle_button_f.grid(row=13, column=0, columnspan=3, sticky=NSEW)
idle_about_b = Button(idle_button_f, text='README', width=8,
highlightbackground=self.bg,
command=self.ShowIDLEAbout)
idle_about_b.pack(side=LEFT, padx=10, pady=10)
idle_news_b = Button(idle_button_f, text='NEWS', width=8,
highlightbackground=self.bg,
command=self.ShowIDLENEWS)
idle_news_b.pack(side=LEFT, padx=10, pady=10)
idle_credits_b = Button(idle_button_f, text='Credits', width=8,
highlightbackground=self.bg,
command=self.ShowIDLECredits)
idle_credits_b.pack(side=LEFT, padx=10, pady=10)
def ShowLicense(self):
self.display_printer_text('About - License', license)
def ShowCopyright(self):
self.display_printer_text('About - Copyright', copyright)
def ShowPythonCredits(self):
self.display_printer_text('About - Python Credits', credits)
def ShowIDLECredits(self):
self.display_file_text('About - Credits', 'CREDITS.txt', 'iso-8859-1')
def ShowIDLEAbout(self):
self.display_file_text('About - Readme', 'README.txt')
def ShowIDLENEWS(self):
self.display_file_text('About - NEWS', 'NEWS.txt')
def display_printer_text(self, title, printer):
printer._Printer__setup()
text = '\n'.join(printer._Printer__lines)
textView.view_text(self, title, text)
def display_file_text(self, title, filename, encoding=None):
fn = os.path.join(os.path.abspath(os.path.dirname(__file__)), filename)
textView.view_file(self, title, fn, encoding)
def Ok(self, event=None):
self.destroy()
if __name__ == '__main__':
# test the dialog
root = Tk()
def run():
from idlelib import aboutDialog
aboutDialog.AboutDialog(root, 'About')
Button(root, text='Dialog', command=run).pack()
root.mainloop()
| true | true |
f7fa6ea6dec634b73f485ce2b463bbe044f12872 | 816 | py | Python | src/srv_system.py | psby233/PiBot | 1ef5e3dd27e852dabe8651bbfc4cf49aa162873b | [
"MIT"
] | null | null | null | src/srv_system.py | psby233/PiBot | 1ef5e3dd27e852dabe8651bbfc4cf49aa162873b | [
"MIT"
] | null | null | null | src/srv_system.py | psby233/PiBot | 1ef5e3dd27e852dabe8651bbfc4cf49aa162873b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
import datetime
from pibot import srv
def get_cpu_temp(path="/sys/class/thermal/thermal_zone0/temp"):
    """Return the CPU temperature in degrees Celsius.

    Reads the kernel thermal-zone file (which reports millidegrees) and
    converts to degrees.  ``path`` is parameterized with the original
    hard-coded default, so existing callers are unaffected and tests can
    point it at a fixture file.
    """
    # 'with' guarantees the handle is closed even if read/parse fails
    # (the original leaked the handle on error).
    with open(path) as temp_file:
        return float(temp_file.read()) / 1000.0
def handle_system_server(request):
    """Service callback: report the current local date/time and CPU temp.

    Fills a srv.systemResponse with the second/minute/hour/day/month/year
    fields of datetime.now() plus the CPU temperature; *request* is unused.
    """
    response = srv.systemResponse()
    moment = datetime.datetime.now()
    for field in ('second', 'minute', 'hour', 'day', 'month', 'year'):
        setattr(response, field, getattr(moment, field))
    response.cpu_temp = get_cpu_temp()
    return response
def init_system_server():
    """Initialise the ROS node and advertise the 'srv_system' service."""
    rospy.init_node('system')
    # NOTE(review): the Service handle 's' is discarded when this function
    # returns -- rospy appears to keep the service registered internally,
    # but confirm.
    s = rospy.Service('srv_system', srv.system, handle_system_server)
    rospy.loginfo('srv_system initialized')
if __name__ == '__main__':
    # Start the service node and block until ROS shutdown.
    init_system_server()
    rospy.spin()
| 22.054054 | 69 | 0.693627 |
import rospy
import datetime
from pibot import srv
def get_cpu_temp():
file = open("/sys/class/thermal/thermal_zone0/temp")
temp = float(file.read()) / 1000.0
file.close()
return temp
def handle_system_server(request):
response = srv.systemResponse()
now = datetime.datetime.now()
response.second = now.second
response.minute = now.minute
response.hour = now.hour
response.day = now.day
response.month = now.month
response.year = now.year
response.cpu_temp = get_cpu_temp()
return response
def init_system_server():
rospy.init_node('system')
s = rospy.Service('srv_system', srv.system, handle_system_server)
rospy.loginfo('srv_system initialized')
if __name__ == '__main__':
init_system_server()
rospy.spin()
| true | true |
f7fa72b61a0fc514948f4fa888d4fa3dfce170f1 | 1,323 | py | Python | edit_profile.py | GabrielGonzalezDiaz/friends_finder | 4ee64c9feac2641a01856a889e03424b5fe52f36 | [
"MIT"
] | 2 | 2019-07-29T15:22:22.000Z | 2019-07-30T04:07:15.000Z | edit_profile.py | GabrielGonzalezDiaz/friends_finder | 4ee64c9feac2641a01856a889e03424b5fe52f36 | [
"MIT"
] | null | null | null | edit_profile.py | GabrielGonzalezDiaz/friends_finder | 4ee64c9feac2641a01856a889e03424b5fe52f36 | [
"MIT"
] | null | null | null | import webapp2
import helper
import renderer
class Handler(webapp2.RequestHandler):
    """Serves the profile-editing page (Python 2 / webapp2).

    GET renders edit_profile.html with the user's name and a checklist of
    interests; each entry records whether the current user already
    selected that interest.
    """
    def get(self):
        values = helper.get_template_parameters()
        email = helper.get_user_email()
        # Existing profile for this user, or a falsy value if none exists.
        p = helper.get_user_profile(email)
        interest_list = helper.original_interest_list
        user_interest_state = []
        if p:
            # Known user: mark the interests they have already picked.
            user_interest = helper.get_user_interest(helper.get_user_email())
            for interest in interest_list:
                user_interest_state.append({
                    'name': interest,
                    'value': interest in user_interest
                })
            # NOTE(review): leftover debug print (Python 2 statement) --
            # consider removing.
            print user_interest_state
            values['interest_list'] = user_interest_state
            values['first_name'] = helper.get_user_first_name(email)
            values['last_name'] = helper.get_user_last_name(email)
        else:
            # No profile yet: render every interest unchecked.
            for interest in interest_list:
                user_interest_state.append({
                    'name': interest,
                    'value': False
                })
            values['interest_list'] = user_interest_state
        renderer.render_template(self, "edit_profile.html", values)
| 38.911765 | 78 | 0.526077 | import webapp2
import helper
import renderer
class Handler(webapp2.RequestHandler):
def get(self):
values = helper.get_template_parameters()
email = helper.get_user_email()
p = helper.get_user_profile(email)
interest_list = helper.original_interest_list
user_interest_state = []
if p:
user_interest = helper.get_user_interest(helper.get_user_email())
for interest in interest_list:
user_interest_state.append({
'name': interest,
'value': interest in user_interest
})
print user_interest_state
values['interest_list'] = user_interest_state
values['first_name'] = helper.get_user_first_name(email)
values['last_name'] = helper.get_user_last_name(email)
else:
for interest in interest_list:
user_interest_state.append({
'name': interest,
'value': False
})
values['interest_list'] = user_interest_state
renderer.render_template(self, "edit_profile.html", values)
| false | true |
f7fa735f09431fcc0dd003629de09296e40fb2f9 | 421 | py | Python | libfuturize/fixes/fix_object.py | infoxchange/python-future | 132e6c57947fe384eeef4402a3b5487431b7d86a | [
"MIT"
] | null | null | null | libfuturize/fixes/fix_object.py | infoxchange/python-future | 132e6c57947fe384eeef4402a3b5487431b7d86a | [
"MIT"
] | null | null | null | libfuturize/fixes/fix_object.py | infoxchange/python-future | 132e6c57947fe384eeef4402a3b5487431b7d86a | [
"MIT"
] | null | null | null | """
Fixer that adds ``from future.builtins import object`` if there is a line
like this:
class Foo(object):
"""
from lib2to3 import fixer_base
from libfuturize.fixer_util import touch_import_top
class FixObject(fixer_base.BaseFix):
    """lib2to3 fixer: for any ``class Foo(object):`` definition, ensure
    ``from future.builtins import object`` appears at the top of the module."""
    # Matches a classdef whose single base is the bare name 'object'.
    PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >"
    def transform(self, node, results):
        # Idempotently insert the import at the top of the file.
        touch_import_top(u'future.builtins', 'object', node)
| 23.388889 | 77 | 0.700713 |
from lib2to3 import fixer_base
from libfuturize.fixer_util import touch_import_top
class FixObject(fixer_base.BaseFix):
PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >"
def transform(self, node, results):
touch_import_top(u'future.builtins', 'object', node)
| true | true |
f7fa73c9d4c81a932fe9b7e9d539cb883a11a7cd | 2,979 | py | Python | samples/client/petstore/python/petstore_api/models/dog.py | FantasyTeddy/openapi-generator | 866dc03f4fda48800ba52e428f2f1010de8540c0 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/python/petstore_api/models/dog.py | FantasyTeddy/openapi-generator | 866dc03f4fda48800ba52e428f2f1010de8540c0 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/python/petstore_api/models/dog.py | FantasyTeddy/openapi-generator | 866dc03f4fda48800ba52e428f2f1010de8540c0 | [
"Apache-2.0"
] | 1 | 2019-11-25T15:03:05.000Z | 2019-11-25T15:03:05.000Z | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from petstore_api.models.animal import Animal # noqa: F401,E501
class Dog(object):
    """Petstore ``Dog`` model (auto-generated by OpenAPI Generator).

    Holds a single optional string attribute, ``breed``.

    Class attributes:
        openapi_types: attribute name -> declared OpenAPI type.
        attribute_map: attribute name -> JSON field name.
    """

    openapi_types = {
        'breed': 'str'
    }

    attribute_map = {
        'breed': 'breed'
    }

    def __init__(self, breed=None):  # noqa: E501
        """Dog - a model defined in OpenAPI"""  # noqa: E501
        self._breed = None
        self.discriminator = None
        if breed is not None:
            self.breed = breed

    @property
    def breed(self):
        """Gets the breed of this Dog.  # noqa: E501

        :return: The breed of this Dog.  # noqa: E501
        :rtype: str
        """
        return self._breed

    @breed.setter
    def breed(self, breed):
        """Sets the breed of this Dog.

        :param breed: The breed of this Dog.  # noqa: E501
        :type: str
        """
        self._breed = breed

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(value):
            # Recursively serialise nested models inside lists and dicts.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                        for key, val in value.items()}
            return value

        return {attr: _convert(getattr(self, attr))
                for attr in self.openapi_types}

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, Dog) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 25.904348 | 174 | 0.548171 |
import pprint
import re
import six
from petstore_api.models.animal import Animal
class Dog(object):
openapi_types = {
'breed': 'str'
}
attribute_map = {
'breed': 'breed'
}
def __init__(self, breed=None):
self._breed = None
self.discriminator = None
if breed is not None:
self.breed = breed
@property
def breed(self):
return self._breed
@breed.setter
def breed(self, breed):
self._breed = breed
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, Dog):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f7fa74956ef3c21187c738cd9caaf86dccfaca24 | 15,318 | py | Python | experimental_generate.py | zhengxxn/adaptive-knn-mt | 338ec0ddf02ed80b5cd4cbae922ad0ebe93e8339 | [
"MIT"
] | 35 | 2021-05-08T09:23:31.000Z | 2022-03-25T06:19:48.000Z | experimental_generate.py | zhengxxn/adaptive-knn-mt | 338ec0ddf02ed80b5cd4cbae922ad0ebe93e8339 | [
"MIT"
] | 4 | 2021-06-12T05:02:03.000Z | 2021-12-19T08:53:46.000Z | experimental_generate.py | zhengxxn/adaptive-knn-mt | 338ec0ddf02ed80b5cd4cbae922ad0ebe93e8339 | [
"MIT"
] | 3 | 2021-05-20T08:14:59.000Z | 2022-03-01T04:49:50.000Z | #!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Translate pre-processed data with a trained model.
"""
import ast
import logging
import math
import os
import sys
from itertools import chain
import numpy as np
import torch
from fairseq import checkpoint_utils, options, scoring, tasks, utils
from fairseq.logging import progress_bar
from fairseq.logging.meters import StopwatchMeter, TimeMeter
def main(args):
    """Validate CLI arguments, pick the output stream and run generation.

    When --results-path is given, results go to generate-<subset>.txt in
    that directory (created if needed); otherwise they go to stdout.
    """
    assert args.path is not None, "--path required for generation!"
    assert (
        not args.sampling or args.nbest == args.beam
    ), "--sampling requires --nbest to be equal to --beam"
    assert (
        args.replace_unk is None or args.dataset_impl == "raw"
    ), "--replace-unk requires a raw text dataset (--dataset-impl=raw)"
    if args.results_path is None:
        return _main(args, sys.stdout)
    os.makedirs(args.results_path, exist_ok=True)
    output_path = os.path.join(
        args.results_path, "generate-{}.txt".format(args.gen_subset)
    )
    # Line-buffered so partial results are visible while generation runs.
    with open(output_path, "w", buffering=1, encoding="utf-8") as h:
        return _main(args, h)
def get_symbols_to_strip_from_output(generator):
    """Return the symbol ids to strip from generated output.

    Uses the generator's own ``symbols_to_strip_from_output`` when the
    attribute exists (even if empty); otherwise falls back to just the
    EOS symbol.
    """
    _missing = object()
    symbols = getattr(generator, "symbols_to_strip_from_output", _missing)
    return {generator.eos} if symbols is _missing else symbols
def _main(args, output_file):
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=os.environ.get("LOGLEVEL", "INFO").upper(),
stream=output_file,
)
logger = logging.getLogger("fairseq_cli.generate")
utils.import_user_module(args)
if args.max_tokens is None and args.batch_size is None:
args.max_tokens = 12000
logger.info(args)
# Fix seed for stochastic decoding
if args.seed is not None and not args.no_seed_provided:
np.random.seed(args.seed)
utils.set_torch_seed(args.seed)
use_cuda = torch.cuda.is_available() and not args.cpu
# Load dataset splits
task = tasks.setup_task(args)
task.load_dataset(args.gen_subset)
# Set dictionaries
try:
src_dict = getattr(task, "source_dictionary", None)
except NotImplementedError:
src_dict = None
tgt_dict = task.target_dictionary
print('---------')
overrides = ast.literal_eval(args.model_overrides)
print('---------', overrides)
# Load ensemble
logger.info("loading model(s) from {}".format(args.path))
models, _model_args = checkpoint_utils.load_model_ensemble(
utils.split_paths(args.path),
arg_overrides=overrides,
task=task,
suffix=getattr(args, "checkpoint_suffix", ""),
strict=(args.checkpoint_shard_count == 1),
num_shards=args.checkpoint_shard_count,
)
print(_model_args)
if args.lm_path is not None:
overrides["data"] = args.data
try:
lms, _ = checkpoint_utils.load_model_ensemble(
[args.lm_path],
arg_overrides=overrides,
task=None,
)
except:
logger.warning(
f"Failed to load language model! Please make sure that the language model dict is the same "
f"as target dict and is located in the data dir ({args.data})"
)
raise
assert len(lms) == 1
else:
lms = [None]
# Optimize ensemble for generation
for model in chain(models, lms):
if model is None:
continue
if args.fp16:
model.half()
if use_cuda and not args.pipeline_model_parallel:
model.cuda()
model.prepare_for_inference_(args)
# Load alignment dictionary for unknown word replacement
# (None if no unknown word replacement, empty if no path to align dictionary)
align_dict = utils.load_align_dict(args.replace_unk)
# Load dataset (possibly sharded)
itr = task.get_batch_iterator(
dataset=task.dataset(args.gen_subset),
max_tokens=args.max_tokens,
max_sentences=args.batch_size,
max_positions=utils.resolve_max_positions(
task.max_positions(), *[model.max_positions() for model in models]
),
ignore_invalid_inputs=args.skip_invalid_size_inputs_valid_test,
required_batch_size_multiple=args.required_batch_size_multiple,
num_shards=args.num_shards,
shard_id=args.shard_id,
num_workers=args.num_workers,
data_buffer_size=args.data_buffer_size,
).next_epoch_itr(shuffle=False)
progress = progress_bar.progress_bar(
itr,
log_format=args.log_format,
log_interval=args.log_interval,
default_log_format=("tqdm" if not args.no_progress_bar else "none"),
)
# Initialize generator
gen_timer = StopwatchMeter()
extra_gen_cls_kwargs = {"lm_model": lms[0], "lm_weight": args.lm_weight, "knn_record_distance": args.knn_record_distance,
"knn_record_index": args.knn_record_index, "knn_record_lambda": args.knn_record_lambda,
"knn_record_label_counts": args.knn_record_label_counts}
generator = task.build_generator(
models, args, extra_gen_cls_kwargs=extra_gen_cls_kwargs
)
# Handle tokenization and BPE
tokenizer = task.build_tokenizer(args)
bpe = task.build_bpe(args)
def decode_fn(x):
if bpe is not None:
x = bpe.decode(x)
if tokenizer is not None:
x = tokenizer.decode(x)
return x
scorer = scoring.build_scorer(args, tgt_dict)
num_sentences = 0
has_target = True
wps_meter = TimeMeter()
for sample in progress:
sample = utils.move_to_cuda(sample) if use_cuda else sample
if "net_input" not in sample:
continue
prefix_tokens = None
if args.prefix_size > 0:
prefix_tokens = sample["target"][:, : args.prefix_size]
constraints = None
if "constraints" in sample:
constraints = sample["constraints"]
gen_timer.start()
hypos = task.inference_step(
generator,
models,
sample,
prefix_tokens=prefix_tokens,
constraints=constraints,
)
num_generated_tokens = sum(len(h[0]["tokens"]) for h in hypos)
gen_timer.stop(num_generated_tokens)
for i, sample_id in enumerate(sample["id"].tolist()):
has_target = sample["target"] is not None
# Remove padding
if "src_tokens" in sample["net_input"]:
src_tokens = utils.strip_pad(
sample["net_input"]["src_tokens"][i, :], tgt_dict.pad()
)
else:
src_tokens = None
target_tokens = None
if has_target:
target_tokens = (
utils.strip_pad(sample["target"][i, :], tgt_dict.pad()).int().cpu()
)
# Either retrieve the original sentences or regenerate them from tokens.
if align_dict is not None:
src_str = task.dataset(args.gen_subset).src.get_original_text(sample_id)
target_str = task.dataset(args.gen_subset).tgt.get_original_text(
sample_id
)
else:
if src_dict is not None:
src_str = src_dict.string(src_tokens, args.remove_bpe)
else:
src_str = ""
if has_target:
target_str = tgt_dict.string(
target_tokens,
args.remove_bpe,
escape_unk=True,
extra_symbols_to_ignore=get_symbols_to_strip_from_output(
generator
),
)
src_str = decode_fn(src_str)
if has_target:
target_str = decode_fn(target_str)
if not args.quiet:
if src_dict is not None:
print("S-{}\t{}".format(sample_id, src_str), file=output_file)
if has_target:
print("T-{}\t{}".format(sample_id, target_str), file=output_file)
# Process top predictions
for j, hypo in enumerate(hypos[i][: args.nbest]):
hypo_tokens, hypo_str, alignment = utils.post_process_prediction(
hypo_tokens=hypo["tokens"].int().cpu(),
src_str=src_str,
alignment=hypo["alignment"],
align_dict=align_dict,
tgt_dict=tgt_dict,
remove_bpe=args.remove_bpe,
extra_symbols_to_ignore=get_symbols_to_strip_from_output(generator),
)
detok_hypo_str = decode_fn(hypo_str)
if not args.quiet:
score = hypo["score"] / math.log(2) # convert to base 2
# original hypothesis (after tokenization and BPE)
print(
"H-{}\t{}\t{}".format(sample_id, score, hypo_str),
file=output_file,
)
# detokenized hypothesis
print(
"D-{}\t{}\t{}".format(sample_id, score, detok_hypo_str),
file=output_file,
)
print(
"P-{}\t{}".format(
sample_id,
" ".join(
map(
lambda x: "{:.4f}".format(x),
# convert from base e to base 2
hypo["positional_scores"]
.div_(math.log(2))
.tolist(),
)
),
),
file=output_file,
)
if args.knn_record_distance:
min_distance = hypo["knn_distance"][0].tolist()
avg_distance = torch.mean(hypo["knn_distance"], dim=0).tolist()
print("M-{}\t{}".format(sample_id, " ".join([str(int(elem)) for elem in min_distance])), file=output_file)
print("A-{}\t{}".format(sample_id, " ".join([str(int(elem)) for elem in avg_distance])), file=output_file)
if args.knn_record_label_counts:
print("N-{}\t{}".format(sample_id, " ".
join([str(elem) for elem in hypo["knn_label_counts"].squeeze(0).tolist()])),
file=output_file)
if args.knn_record_lambda:
print("L-{}\t{}".format(sample_id, " ".
join([str(round(elem, 2)) for elem in hypo["knn_lambda"].squeeze(0).tolist()])),
file=output_file)
if args.print_alignment:
print(
"A-{}\t{}".format(
sample_id,
" ".join(
[
"{}-{}".format(src_idx, tgt_idx)
for src_idx, tgt_idx in alignment
]
),
),
file=output_file,
)
if args.print_step:
print(
"I-{}\t{}".format(sample_id, hypo["steps"]),
file=output_file,
)
if getattr(args, "retain_iter_history", False):
for step, h in enumerate(hypo["history"]):
_, h_str, _ = utils.post_process_prediction(
hypo_tokens=h["tokens"].int().cpu(),
src_str=src_str,
alignment=None,
align_dict=None,
tgt_dict=tgt_dict,
remove_bpe=None,
)
print(
"E-{}_{}\t{}".format(sample_id, step, h_str),
file=output_file,
)
# Score only the top hypothesis
if has_target and j == 0:
if align_dict is not None or args.remove_bpe is not None:
# Convert back to tokens for evaluation with unk replacement and/or without BPE
target_tokens = tgt_dict.encode_line(
target_str, add_if_not_exist=True
)
hypo_tokens = tgt_dict.encode_line(
detok_hypo_str, add_if_not_exist=True
)
if hasattr(scorer, "add_string"):
scorer.add_string(target_str, detok_hypo_str)
else:
scorer.add(target_tokens, hypo_tokens)
wps_meter.update(num_generated_tokens)
progress.log({"wps": round(wps_meter.avg)})
num_sentences += (
sample["nsentences"] if "nsentences" in sample else sample["id"].numel()
)
logger.info("NOTE: hypothesis and token scores are output in base 2")
logger.info(
"Translated {} sentences ({} tokens) in {:.1f}s ({:.2f} sentences/s, {:.2f} tokens/s)".format(
num_sentences,
gen_timer.n,
gen_timer.sum,
num_sentences / gen_timer.sum,
1.0 / gen_timer.avg,
)
)
if has_target:
if args.bpe and not args.sacrebleu:
if args.remove_bpe:
logger.warning(
"BLEU score is being computed by splitting detokenized string on spaces, this is probably not what you want. Use --sacrebleu for standard 13a BLEU tokenization"
)
else:
logger.warning(
"If you are using BPE on the target side, the BLEU score is computed on BPE tokens, not on proper words. Use --sacrebleu for standard 13a BLEU tokenization"
)
# use print to be consistent with other main outputs: S-, H-, T-, D- and so on
print(
"Generate {} with beam={}: {}".format(
args.gen_subset, args.beam, scorer.result_string()
),
file=output_file,
)
return scorer
def cli_main():
    """Console entry point: build the generation arg parser, parse, and run."""
    generation_parser = options.get_experimental_generation_parser()
    parsed = options.parse_args_and_arch(generation_parser)
    main(parsed)
# Standard script entry-point guard.
if __name__ == "__main__":
    cli_main()
| 37.822222 | 180 | 0.526505 |
import ast
import logging
import math
import os
import sys
from itertools import chain
import numpy as np
import torch
from fairseq import checkpoint_utils, options, scoring, tasks, utils
from fairseq.logging import progress_bar
from fairseq.logging.meters import StopwatchMeter, TimeMeter
def main(args):
    """Validate generation flags, then run `_main`, writing its report either to
    `<results_path>/generate-<subset>.txt` or to stdout."""
    assert args.path is not None, "--path required for generation!"
    assert (
        not args.sampling or args.nbest == args.beam
    ), "--sampling requires --nbest to be equal to --beam"
    assert (
        args.replace_unk is None or args.dataset_impl == "raw"
    ), "--replace-unk requires a raw text dataset (--dataset-impl=raw)"
    # No results directory requested: stream everything to stdout.
    if args.results_path is None:
        return _main(args, sys.stdout)
    os.makedirs(args.results_path, exist_ok=True)
    output_path = os.path.join(
        args.results_path, "generate-{}.txt".format(args.gen_subset)
    )
    # Line-buffered so partial output survives a crash mid-generation.
    with open(output_path, "w", buffering=1, encoding="utf-8") as h:
        return _main(args, h)
def get_symbols_to_strip_from_output(generator):
    """Return the symbol ids the generator wants removed from output strings.

    Falls back to just the generator's EOS symbol when no explicit
    `symbols_to_strip_from_output` attribute is declared.
    """
    _missing = object()
    strip_set = getattr(generator, "symbols_to_strip_from_output", _missing)
    return {generator.eos} if strip_set is _missing else strip_set
def _main(args, output_file):
    """Core generation loop.

    Loads the model ensemble (plus an optional fusion LM), decodes the chosen
    subset, prints S-/T-/H-/D-/P- records (and kNN M-/A-/N-/L- records) to
    `output_file`, and returns the populated scorer.
    """
    logging.basicConfig(
        format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        level=os.environ.get("LOGLEVEL", "INFO").upper(),
        stream=output_file,
    )
    logger = logging.getLogger("fairseq_cli.generate")
    utils.import_user_module(args)
    if args.max_tokens is None and args.batch_size is None:
        args.max_tokens = 12000
    logger.info(args)
    # Fix seed for stochastic decoding
    if args.seed is not None and not args.no_seed_provided:
        np.random.seed(args.seed)
        utils.set_torch_seed(args.seed)
    use_cuda = torch.cuda.is_available() and not args.cpu
    # Load dataset splits
    task = tasks.setup_task(args)
    task.load_dataset(args.gen_subset)
    # Set dictionaries
    try:
        src_dict = getattr(task, "source_dictionary", None)
    except NotImplementedError:
        src_dict = None
    tgt_dict = task.target_dictionary
    print('---------')
    overrides = ast.literal_eval(args.model_overrides)
    print('---------', overrides)
    # Load ensemble
    logger.info("loading model(s) from {}".format(args.path))
    models, _model_args = checkpoint_utils.load_model_ensemble(
        utils.split_paths(args.path),
        arg_overrides=overrides,
        task=task,
        suffix=getattr(args, "checkpoint_suffix", ""),
        strict=(args.checkpoint_shard_count == 1),
        num_shards=args.checkpoint_shard_count,
    )
    print(_model_args)
    if args.lm_path is not None:
        overrides["data"] = args.data
        try:
            lms, _ = checkpoint_utils.load_model_ensemble(
                [args.lm_path],
                arg_overrides=overrides,
                task=None,
            )
        except:
            logger.warning(
                f"Failed to load language model! Please make sure that the language model dict is the same "
                f"as target dict and is located in the data dir ({args.data})"
            )
            raise
        assert len(lms) == 1
    else:
        lms = [None]
    # Optimize ensemble for generation
    for model in chain(models, lms):
        if model is None:
            continue
        if args.fp16:
            model.half()
        if use_cuda and not args.pipeline_model_parallel:
            model.cuda()
        model.prepare_for_inference_(args)
    # Load alignment dictionary for unknown word replacement
    # (None if no unknown word replacement, empty if no path to align dictionary)
    align_dict = utils.load_align_dict(args.replace_unk)
    # Load dataset (possibly sharded)
    itr = task.get_batch_iterator(
        dataset=task.dataset(args.gen_subset),
        max_tokens=args.max_tokens,
        max_sentences=args.batch_size,
        max_positions=utils.resolve_max_positions(
            task.max_positions(), *[model.max_positions() for model in models]
        ),
        ignore_invalid_inputs=args.skip_invalid_size_inputs_valid_test,
        required_batch_size_multiple=args.required_batch_size_multiple,
        num_shards=args.num_shards,
        shard_id=args.shard_id,
        num_workers=args.num_workers,
        data_buffer_size=args.data_buffer_size,
    ).next_epoch_itr(shuffle=False)
    progress = progress_bar.progress_bar(
        itr,
        log_format=args.log_format,
        log_interval=args.log_interval,
        default_log_format=("tqdm" if not args.no_progress_bar else "none"),
    )
    # Initialize generator
    gen_timer = StopwatchMeter()
    extra_gen_cls_kwargs = {"lm_model": lms[0], "lm_weight": args.lm_weight, "knn_record_distance": args.knn_record_distance,
                            "knn_record_index": args.knn_record_index, "knn_record_lambda": args.knn_record_lambda,
                            "knn_record_label_counts": args.knn_record_label_counts}
    generator = task.build_generator(
        models, args, extra_gen_cls_kwargs=extra_gen_cls_kwargs
    )
    # Handle tokenization and BPE
    tokenizer = task.build_tokenizer(args)
    bpe = task.build_bpe(args)
    def decode_fn(x):
        # Undo BPE first, then detokenize; either step is a no-op when unset.
        if bpe is not None:
            x = bpe.decode(x)
        if tokenizer is not None:
            x = tokenizer.decode(x)
        return x
    scorer = scoring.build_scorer(args, tgt_dict)
    num_sentences = 0
    has_target = True
    wps_meter = TimeMeter()
    for sample in progress:
        sample = utils.move_to_cuda(sample) if use_cuda else sample
        if "net_input" not in sample:
            continue
        prefix_tokens = None
        if args.prefix_size > 0:
            prefix_tokens = sample["target"][:, : args.prefix_size]
        constraints = None
        if "constraints" in sample:
            constraints = sample["constraints"]
        gen_timer.start()
        hypos = task.inference_step(
            generator,
            models,
            sample,
            prefix_tokens=prefix_tokens,
            constraints=constraints,
        )
        num_generated_tokens = sum(len(h[0]["tokens"]) for h in hypos)
        gen_timer.stop(num_generated_tokens)
        for i, sample_id in enumerate(sample["id"].tolist()):
            has_target = sample["target"] is not None
            # Remove padding
            if "src_tokens" in sample["net_input"]:
                src_tokens = utils.strip_pad(
                    sample["net_input"]["src_tokens"][i, :], tgt_dict.pad()
                )
            else:
                src_tokens = None
            target_tokens = None
            if has_target:
                target_tokens = (
                    utils.strip_pad(sample["target"][i, :], tgt_dict.pad()).int().cpu()
                )
            # Either retrieve the original sentences or regenerate them from tokens.
            if align_dict is not None:
                src_str = task.dataset(args.gen_subset).src.get_original_text(sample_id)
                target_str = task.dataset(args.gen_subset).tgt.get_original_text(
                    sample_id
                )
            else:
                if src_dict is not None:
                    src_str = src_dict.string(src_tokens, args.remove_bpe)
                else:
                    src_str = ""
                if has_target:
                    target_str = tgt_dict.string(
                        target_tokens,
                        args.remove_bpe,
                        escape_unk=True,
                        extra_symbols_to_ignore=get_symbols_to_strip_from_output(
                            generator
                        ),
                    )
            src_str = decode_fn(src_str)
            if has_target:
                target_str = decode_fn(target_str)
            if not args.quiet:
                if src_dict is not None:
                    print("S-{}\t{}".format(sample_id, src_str), file=output_file)
                if has_target:
                    print("T-{}\t{}".format(sample_id, target_str), file=output_file)
            # Process top predictions
            for j, hypo in enumerate(hypos[i][: args.nbest]):
                hypo_tokens, hypo_str, alignment = utils.post_process_prediction(
                    hypo_tokens=hypo["tokens"].int().cpu(),
                    src_str=src_str,
                    alignment=hypo["alignment"],
                    align_dict=align_dict,
                    tgt_dict=tgt_dict,
                    remove_bpe=args.remove_bpe,
                    extra_symbols_to_ignore=get_symbols_to_strip_from_output(generator),
                )
                detok_hypo_str = decode_fn(hypo_str)
                if not args.quiet:
                    score = hypo["score"] / math.log(2)  # convert to base 2
                    # original hypothesis (after tokenization and BPE)
                    print(
                        "H-{}\t{}\t{}".format(sample_id, score, hypo_str),
                        file=output_file,
                    )
                    # detokenized hypothesis
                    print(
                        "D-{}\t{}\t{}".format(sample_id, score, detok_hypo_str),
                        file=output_file,
                    )
                    print(
                        "P-{}\t{}".format(
                            sample_id,
                            " ".join(
                                map(
                                    lambda x: "{:.4f}".format(x),
                                    # convert from base e to base 2
                                    hypo["positional_scores"]
                                    .div_(math.log(2))
                                    .tolist(),
                                )
                            ),
                        ),
                        file=output_file,
                    )
                    # kNN diagnostics: per-token retrieval distances (min / mean over neighbours)
                    if args.knn_record_distance:
                        min_distance = hypo["knn_distance"][0].tolist()
                        avg_distance = torch.mean(hypo["knn_distance"], dim=0).tolist()
                        print("M-{}\t{}".format(sample_id, " ".join([str(int(elem)) for elem in min_distance])), file=output_file)
                        print("A-{}\t{}".format(sample_id, " ".join([str(int(elem)) for elem in avg_distance])), file=output_file)
                    if args.knn_record_label_counts:
                        print("N-{}\t{}".format(sample_id, " ".
                              join([str(elem) for elem in hypo["knn_label_counts"].squeeze(0).tolist()])),
                              file=output_file)
                    if args.knn_record_lambda:
                        print("L-{}\t{}".format(sample_id, " ".
                              join([str(round(elem, 2)) for elem in hypo["knn_lambda"].squeeze(0).tolist()])),
                              file=output_file)
                    if args.print_alignment:
                        print(
                            "A-{}\t{}".format(
                                sample_id,
                                " ".join(
                                    [
                                        "{}-{}".format(src_idx, tgt_idx)
                                        for src_idx, tgt_idx in alignment
                                    ]
                                ),
                            ),
                            file=output_file,
                        )
                    if args.print_step:
                        print(
                            "I-{}\t{}".format(sample_id, hypo["steps"]),
                            file=output_file,
                        )
                    if getattr(args, "retain_iter_history", False):
                        for step, h in enumerate(hypo["history"]):
                            _, h_str, _ = utils.post_process_prediction(
                                hypo_tokens=h["tokens"].int().cpu(),
                                src_str=src_str,
                                alignment=None,
                                align_dict=None,
                                tgt_dict=tgt_dict,
                                remove_bpe=None,
                            )
                            print(
                                "E-{}_{}\t{}".format(sample_id, step, h_str),
                                file=output_file,
                            )
                # Score only the top hypothesis
                if has_target and j == 0:
                    if align_dict is not None or args.remove_bpe is not None:
                        # Convert back to tokens for evaluation with unk replacement and/or without BPE
                        target_tokens = tgt_dict.encode_line(
                            target_str, add_if_not_exist=True
                        )
                        hypo_tokens = tgt_dict.encode_line(
                            detok_hypo_str, add_if_not_exist=True
                        )
                    if hasattr(scorer, "add_string"):
                        scorer.add_string(target_str, detok_hypo_str)
                    else:
                        scorer.add(target_tokens, hypo_tokens)
        wps_meter.update(num_generated_tokens)
        progress.log({"wps": round(wps_meter.avg)})
        num_sentences += (
            sample["nsentences"] if "nsentences" in sample else sample["id"].numel()
        )
    logger.info("NOTE: hypothesis and token scores are output in base 2")
    logger.info(
        "Translated {} sentences ({} tokens) in {:.1f}s ({:.2f} sentences/s, {:.2f} tokens/s)".format(
            num_sentences,
            gen_timer.n,
            gen_timer.sum,
            num_sentences / gen_timer.sum,
            1.0 / gen_timer.avg,
        )
    )
    if has_target:
        if args.bpe and not args.sacrebleu:
            if args.remove_bpe:
                logger.warning(
                    "BLEU score is being computed by splitting detokenized string on spaces, this is probably not what you want. Use --sacrebleu for standard 13a BLEU tokenization"
                )
            else:
                logger.warning(
                    "If you are using BPE on the target side, the BLEU score is computed on BPE tokens, not on proper words. Use --sacrebleu for standard 13a BLEU tokenization"
                )
        # use print to be consistent with other main outputs: S-, H-, T-, D- and so on
        print(
            "Generate {} with beam={}: {}".format(
                args.gen_subset, args.beam, scorer.result_string()
            ),
            file=output_file,
        )
    return scorer
def cli_main():
    """Console entry point: build the generation arg parser, parse, and run."""
    generation_parser = options.get_experimental_generation_parser()
    parsed = options.parse_args_and_arch(generation_parser)
    main(parsed)
# Standard script entry-point guard.
if __name__ == "__main__":
    cli_main()
| true | true |
f7fa74b32aa6d4dfd5968b9e503c8c5f93e6078b | 21,581 | py | Python | tool/darknet2pytorch.py | ajsanjoaquin/pytorch-YOLOv4 | dbc10cdc43668f29647ea2019ec13c4109d590c1 | [
"Apache-2.0"
] | null | null | null | tool/darknet2pytorch.py | ajsanjoaquin/pytorch-YOLOv4 | dbc10cdc43668f29647ea2019ec13c4109d590c1 | [
"Apache-2.0"
] | null | null | null | tool/darknet2pytorch.py | ajsanjoaquin/pytorch-YOLOv4 | dbc10cdc43668f29647ea2019ec13c4109d590c1 | [
"Apache-2.0"
] | null | null | null | import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from tool.region_loss import RegionLoss
from tool.yolo_layer import YoloLayer
from tool.config import *
from tool.torch_utils import *
class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # softplus keeps the tanh argument smooth and non-negative.
        return x * torch.tanh(torch.nn.functional.softplus(x))
class MaxPoolDark(nn.Module):
    # Max-pool that reproduces Darknet's padding so output sizes match the
    # darknet formula instead of PyTorch's symmetric-padding formula.
    def __init__(self, size=2, stride=1):
        super(MaxPoolDark, self).__init__()
        self.size = size      # pooling window k
        self.stride = stride  # pooling stride s

    def forward(self, x):
        '''
        darknet output_size = (input_size + p - k) / s +1
        p : padding = k - 1
        k : size
        s : stride
        torch output_size = (input_size + 2*p -k) / s +1
        p : padding = k//2
        '''
        p = self.size // 2
        # When symmetric padding would yield a different output size than the
        # darknet formula, pad one extra row/column on the bottom/right.
        if ((x.shape[2] - 1) // self.stride) != ((x.shape[2] + 2 * p - self.size) // self.stride):
            padding1 = (self.size - 1) // 2
            padding2 = padding1 + 1
        else:
            padding1 = (self.size - 1) // 2
            padding2 = padding1
        if ((x.shape[3] - 1) // self.stride) != ((x.shape[3] + 2 * p - self.size) // self.stride):
            padding3 = (self.size - 1) // 2
            padding4 = padding3 + 1
        else:
            padding3 = (self.size - 1) // 2
            padding4 = padding3
        # replicate-padding avoids injecting artificial border values into the max
        # (presumably to mimic darknet's edge handling — confirm against darknet).
        x = F.max_pool2d(F.pad(x, (padding3, padding4, padding1, padding2), mode='replicate'),
                         self.size, stride=self.stride)
        return x
class Upsample_expand(nn.Module):
    """Nearest-neighbour upsampling by an integer factor via view/expand."""

    def __init__(self, stride=2):
        super(Upsample_expand, self).__init__()
        self.stride = stride

    def forward(self, x):
        assert (x.data.dim() == 4)
        n, c, h, w = x.size(0), x.size(1), x.size(2), x.size(3)
        s = self.stride
        # Insert singleton axes after H and W, broadcast each to `s`, fold back in.
        out = x.view(n, c, h, 1, w, 1).expand(n, c, h, s, w, s)
        return out.contiguous().view(n, c, h * s, w * s)
class Upsample_interpolate(nn.Module):
    """Nearest-neighbour upsampling by an integer factor via F.interpolate."""

    def __init__(self, stride):
        super(Upsample_interpolate, self).__init__()
        self.stride = stride

    def forward(self, x):
        assert (x.data.dim() == 4)
        target_size = (x.size(2) * self.stride, x.size(3) * self.stride)
        return F.interpolate(x, size=target_size, mode='nearest')
class Reorg(nn.Module):
    """Reorganize a (B, C, H, W) map into (B, C*stride^2, H//stride, W//stride).

    Each stride x stride spatial block is moved into the channel dimension
    (YOLOv2 "reorg" layer). H and W must be divisible by `stride`.
    """

    def __init__(self, stride=2):
        super(Reorg, self).__init__()
        self.stride = stride

    def forward(self, x):
        stride = self.stride
        assert (x.data.dim() == 4)
        B = x.data.size(0)
        C = x.data.size(1)
        H = x.data.size(2)
        W = x.data.size(3)
        assert (H % stride == 0)
        assert (W % stride == 0)
        ws = stride
        hs = stride
        # Bug fix: the original used true division (`/`), which produces floats;
        # Tensor.view requires integer sizes and raises a TypeError. Use `//`.
        x = x.view(B, C, H // hs, hs, W // ws, ws).transpose(3, 4).contiguous()
        x = x.view(B, C, (H // hs) * (W // ws), hs * ws).transpose(2, 3).contiguous()
        x = x.view(B, C, hs * ws, H // hs, W // ws).transpose(1, 2).contiguous()
        x = x.view(B, hs * ws * C, H // hs, W // ws)
        return x
class GlobalAvgPool2d(nn.Module):
    """Average over the full spatial extent, collapsing (N, C, H, W) to (N, C)."""

    def __init__(self):
        super(GlobalAvgPool2d, self).__init__()

    def forward(self, x):
        batch, channels = x.data.size(0), x.data.size(1)
        height, width = x.data.size(2), x.data.size(3)
        # A single pooling window spanning the whole feature map.
        pooled = F.avg_pool2d(x, (height, width))
        return pooled.view(batch, channels)
# for route, shortcut and sam
class EmptyModule(nn.Module):
    """Identity placeholder for layers (route/shortcut/sam) wired up in Darknet.forward."""

    def __init__(self):
        super(EmptyModule, self).__init__()

    def forward(self, x):
        # The real cross-layer logic lives in the network's forward pass.
        return x
# support route shortcut and reorg
class Darknet(nn.Module):
    """PyTorch port of a Darknet (.cfg) network.

    Parses the cfg into `self.blocks`, builds one nn.Module per block in
    `create_network`, and re-implements the cross-layer wiring (route,
    shortcut, sam, region/yolo heads) in `forward`. Pretrained .weights
    files are loaded with `load_weights`.
    """

    def __init__(self, cfgfile, inference=False):
        super(Darknet, self).__init__()
        self.inference = inference
        # NOTE: overwrites nn.Module's own `training` flag with the inverse of
        # `inference` — train/eval mode is driven by the constructor argument.
        self.training = not self.inference

        self.blocks = parse_cfg(cfgfile)
        self.width = int(self.blocks[0]['width'])
        self.height = int(self.blocks[0]['height'])

        self.models = self.create_network(self.blocks)  # merge conv, bn, leaky
        self.loss = self.models[len(self.models) - 1]

        if self.blocks[(len(self.blocks) - 1)]['type'] == 'region':
            self.anchors = self.loss.anchors
            self.num_anchors = self.loss.num_anchors
            self.anchor_step = self.loss.anchor_step
            self.num_classes = self.loss.num_classes

        # Darknet weight-file header (version info + images-seen counter).
        self.header = torch.IntTensor([0, 0, 0, 0])
        self.seen = 0

    def forward(self, x):
        """Run the network. Returns the list of per-head yolo outputs when
        training, otherwise post-processed region boxes."""
        # Block 0 is [net]; `ind` indexes into self.models for the others.
        ind = -2
        self.loss = None
        outputs = dict()   # per-layer activations, needed by route/shortcut/sam
        out_boxes = []
        for block in self.blocks:
            ind = ind + 1
            # if ind > 0:
            #    return x

            if block['type'] == 'net':
                continue
            elif block['type'] in ['convolutional', 'maxpool', 'reorg', 'upsample', 'avgpool', 'softmax', 'connected']:
                x = self.models[ind](x)
                outputs[ind] = x
            elif block['type'] == 'route':
                layers = block['layers'].split(',')
                # Negative layer ids are relative to the current block.
                layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers]
                if len(layers) == 1:
                    if 'groups' not in block.keys() or int(block['groups']) == 1:
                        x = outputs[layers[0]]
                        outputs[ind] = x
                    else:
                        # Grouped route: take one channel slice of the source layer.
                        groups = int(block['groups'])
                        group_id = int(block['group_id'])
                        _, b, _, _ = outputs[layers[0]].shape
                        x = outputs[layers[0]][:, b // groups * group_id:b // groups * (group_id + 1)]
                        outputs[ind] = x
                elif len(layers) == 2:
                    x1 = outputs[layers[0]]
                    x2 = outputs[layers[1]]
                    x = torch.cat((x1, x2), 1)
                    outputs[ind] = x
                elif len(layers) == 4:
                    x1 = outputs[layers[0]]
                    x2 = outputs[layers[1]]
                    x3 = outputs[layers[2]]
                    x4 = outputs[layers[3]]
                    x = torch.cat((x1, x2, x3, x4), 1)
                    outputs[ind] = x
                else:
                    print("rounte number > 2 ,is {}".format(len(layers)))
            elif block['type'] == 'shortcut':
                from_layer = int(block['from'])
                activation = block['activation']
                from_layer = from_layer if from_layer > 0 else from_layer + ind
                x1 = outputs[from_layer]
                x2 = outputs[ind - 1]
                x = x1 + x2
                if activation == 'leaky':
                    x = F.leaky_relu(x, 0.1, inplace=True)
                elif activation == 'relu':
                    x = F.relu(x, inplace=True)
                outputs[ind] = x
            elif block['type'] == 'sam':
                # Spatial attention: element-wise product with an earlier layer.
                from_layer = int(block['from'])
                from_layer = from_layer if from_layer > 0 else from_layer + ind
                x1 = outputs[from_layer]
                x2 = outputs[ind - 1]
                x = x1 * x2
                outputs[ind] = x
            elif block['type'] == 'region':
                continue
                # NOTE(review): unreachable — the `continue` above skips the loss
                # accumulation below. Confirm whether region loss is intended here.
                if self.loss:
                    self.loss = self.loss + self.models[ind](x)
                else:
                    self.loss = self.models[ind](x)
                outputs[ind] = None
            elif block['type'] == 'yolo':
                # if self.training:
                #     pass
                # else:
                #     boxes = self.models[ind](x)
                #     out_boxes.append(boxes)
                boxes = self.models[ind](x)
                out_boxes.append(boxes)
            elif block['type'] == 'cost':
                continue
            else:
                print('unknown type %s' % (block['type']))

        if self.training:
            return out_boxes
        else:
            return get_region_boxes(out_boxes)

    def print_network(self):
        """Pretty-print the parsed cfg blocks."""
        print_cfg(self.blocks)

    def create_network(self, blocks):
        """Build an nn.ModuleList with one module per cfg block, tracking the
        running channel count (`prev_filters`) and cumulative stride."""
        models = nn.ModuleList()

        prev_filters = 3
        out_filters = []
        prev_stride = 1
        out_strides = []
        conv_id = 0
        for block in blocks:
            if block['type'] == 'net':
                prev_filters = int(block['channels'])
                continue
            elif block['type'] == 'convolutional':
                conv_id = conv_id + 1
                batch_normalize = int(block['batch_normalize'])
                filters = int(block['filters'])
                kernel_size = int(block['size'])
                stride = int(block['stride'])
                is_pad = int(block['pad'])
                pad = (kernel_size - 1) // 2 if is_pad else 0
                activation = block['activation']
                model = nn.Sequential()
                if batch_normalize:
                    # Conv without bias: the BatchNorm affine term takes its place.
                    model.add_module('conv{0}'.format(conv_id),
                                     nn.Conv2d(prev_filters, filters, kernel_size, stride, pad, bias=False))
                    model.add_module('bn{0}'.format(conv_id), nn.BatchNorm2d(filters))
                    # model.add_module('bn{0}'.format(conv_id), BN2d(filters))
                else:
                    model.add_module('conv{0}'.format(conv_id),
                                     nn.Conv2d(prev_filters, filters, kernel_size, stride, pad))
                if activation == 'leaky':
                    model.add_module('leaky{0}'.format(conv_id), nn.LeakyReLU(0.1, inplace=True))
                elif activation == 'relu':
                    model.add_module('relu{0}'.format(conv_id), nn.ReLU(inplace=True))
                elif activation == 'mish':
                    model.add_module('mish{0}'.format(conv_id), Mish())
                elif activation == 'linear':
                    pass
                elif activation == 'logistic':
                    model.add_module('sigmoid{0}'.format(conv_id), nn.Sigmoid())
                else:
                    print("No convolutional activation named {}".format(activation))

                prev_filters = filters
                out_filters.append(prev_filters)
                prev_stride = stride * prev_stride
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'maxpool':
                pool_size = int(block['size'])
                stride = int(block['stride'])
                if stride == 1 and pool_size % 2:
                    # You can use Maxpooldark instead, here is convenient to convert onnx.
                    # Example: [maxpool] size=3 stride=1
                    model = nn.MaxPool2d(kernel_size=pool_size, stride=stride, padding=pool_size // 2)
                elif stride == pool_size:
                    # You can use Maxpooldark instead, here is convenient to convert onnx.
                    # Example: [maxpool] size=2 stride=2
                    model = nn.MaxPool2d(kernel_size=pool_size, stride=stride, padding=0)
                else:
                    model = MaxPoolDark(pool_size, stride)
                out_filters.append(prev_filters)
                prev_stride = stride * prev_stride
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'avgpool':
                model = GlobalAvgPool2d()
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'softmax':
                model = nn.Softmax()
                out_strides.append(prev_stride)
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'cost':
                if block['_type'] == 'sse':
                    model = nn.MSELoss(reduction='mean')
                elif block['_type'] == 'L1':
                    model = nn.L1Loss(reduction='mean')
                elif block['_type'] == 'smooth':
                    model = nn.SmoothL1Loss(reduction='mean')
                out_filters.append(1)
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'reorg':
                stride = int(block['stride'])
                # reorg trades spatial resolution for channels: C *= s^2.
                prev_filters = stride * stride * prev_filters
                out_filters.append(prev_filters)
                prev_stride = prev_stride * stride
                out_strides.append(prev_stride)
                models.append(Reorg(stride))
            elif block['type'] == 'upsample':
                stride = int(block['stride'])
                out_filters.append(prev_filters)
                prev_stride = prev_stride // stride
                out_strides.append(prev_stride)
                models.append(Upsample_expand(stride))
                # models.append(Upsample_interpolate(stride))
            elif block['type'] == 'route':
                layers = block['layers'].split(',')
                ind = len(models)
                layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers]
                if len(layers) == 1:
                    if 'groups' not in block.keys() or int(block['groups']) == 1:
                        prev_filters = out_filters[layers[0]]
                        prev_stride = out_strides[layers[0]]
                    else:
                        prev_filters = out_filters[layers[0]] // int(block['groups'])
                        prev_stride = out_strides[layers[0]] // int(block['groups'])
                elif len(layers) == 2:
                    assert (layers[0] == ind - 1 or layers[1] == ind - 1)
                    prev_filters = out_filters[layers[0]] + out_filters[layers[1]]
                    prev_stride = out_strides[layers[0]]
                elif len(layers) == 4:
                    assert (layers[0] == ind - 1)
                    prev_filters = out_filters[layers[0]] + out_filters[layers[1]] + out_filters[layers[2]] + \
                                   out_filters[layers[3]]
                    prev_stride = out_strides[layers[0]]
                else:
                    print("route error!!!")
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(EmptyModule())
            elif block['type'] == 'shortcut':
                ind = len(models)
                prev_filters = out_filters[ind - 1]
                out_filters.append(prev_filters)
                prev_stride = out_strides[ind - 1]
                out_strides.append(prev_stride)
                models.append(EmptyModule())
            elif block['type'] == 'sam':
                ind = len(models)
                prev_filters = out_filters[ind - 1]
                out_filters.append(prev_filters)
                prev_stride = out_strides[ind - 1]
                out_strides.append(prev_stride)
                models.append(EmptyModule())
            elif block['type'] == 'connected':
                filters = int(block['output'])
                if block['activation'] == 'linear':
                    model = nn.Linear(prev_filters, filters)
                elif block['activation'] == 'leaky':
                    model = nn.Sequential(
                        nn.Linear(prev_filters, filters),
                        nn.LeakyReLU(0.1, inplace=True))
                elif block['activation'] == 'relu':
                    model = nn.Sequential(
                        nn.Linear(prev_filters, filters),
                        nn.ReLU(inplace=True))
                prev_filters = filters
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'region':
                loss = RegionLoss()
                anchors = block['anchors'].split(',')
                loss.anchors = [float(i) for i in anchors]
                loss.num_classes = int(block['classes'])
                loss.num_anchors = int(block['num'])
                loss.anchor_step = len(loss.anchors) // loss.num_anchors
                loss.object_scale = float(block['object_scale'])
                loss.noobject_scale = float(block['noobject_scale'])
                loss.class_scale = float(block['class_scale'])
                loss.coord_scale = float(block['coord_scale'])
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(loss)
            elif block['type'] == 'yolo':
                yolo_layer = YoloLayer()
                anchors = block['anchors'].split(',')
                anchor_mask = block['mask'].split(',')
                yolo_layer.anchor_mask = [int(i) for i in anchor_mask]
                yolo_layer.anchors = [float(i) for i in anchors]
                yolo_layer.num_classes = int(block['classes'])
                self.num_classes = yolo_layer.num_classes
                yolo_layer.num_anchors = int(block['num'])
                yolo_layer.anchor_step = len(yolo_layer.anchors) // yolo_layer.num_anchors
                yolo_layer.stride = prev_stride
                yolo_layer.scale_x_y = float(block['scale_x_y'])
                # yolo_layer.object_scale = float(block['object_scale'])
                # yolo_layer.noobject_scale = float(block['noobject_scale'])
                # yolo_layer.class_scale = float(block['class_scale'])
                # yolo_layer.coord_scale = float(block['coord_scale'])
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(yolo_layer)
            else:
                print('unknown type %s' % (block['type']))

        return models

    def load_weights(self, weightfile):
        """Load a darknet .weights file: a 5-int32 header followed by flat
        float32 parameters consumed block-by-block in cfg order."""
        fp = open(weightfile, 'rb')
        header = np.fromfile(fp, count=5, dtype=np.int32)
        self.header = torch.from_numpy(header)
        self.seen = self.header[3]
        buf = np.fromfile(fp, dtype=np.float32)
        fp.close()

        start = 0
        ind = -2
        for block in self.blocks:
            if start >= buf.size:
                break
            ind = ind + 1
            if block['type'] == 'net':
                continue
            elif block['type'] == 'convolutional':
                model = self.models[ind]
                batch_normalize = int(block['batch_normalize'])
                if batch_normalize:
                    start = load_conv_bn(buf, start, model[0], model[1])
                else:
                    start = load_conv(buf, start, model[0])
            elif block['type'] == 'connected':
                model = self.models[ind]
                if block['activation'] != 'linear':
                    start = load_fc(buf, start, model[0])
                else:
                    start = load_fc(buf, start, model)
            elif block['type'] == 'maxpool':
                pass
            elif block['type'] == 'reorg':
                pass
            elif block['type'] == 'upsample':
                pass
            elif block['type'] == 'route':
                pass
            elif block['type'] == 'shortcut':
                pass
            elif block['type'] == 'sam':
                pass
            elif block['type'] == 'region':
                pass
            elif block['type'] == 'yolo':
                pass
            elif block['type'] == 'avgpool':
                pass
            elif block['type'] == 'softmax':
                pass
            elif block['type'] == 'cost':
                pass
            else:
                print('unknown type %s' % (block['type']))

    # def save_weights(self, outfile, cutoff=0):
    #     if cutoff <= 0:
    #         cutoff = len(self.blocks) - 1
    #
    #     fp = open(outfile, 'wb')
    #     self.header[3] = self.seen
    #     header = self.header
    #     header.numpy().tofile(fp)
    #
    #     ind = -1
    #     for blockId in range(1, cutoff + 1):
    #         ind = ind + 1
    #         block = self.blocks[blockId]
    #         if block['type'] == 'convolutional':
    #             model = self.models[ind]
    #             batch_normalize = int(block['batch_normalize'])
    #             if batch_normalize:
    #                 save_conv_bn(fp, model[0], model[1])
    #             else:
    #                 save_conv(fp, model[0])
    #         elif block['type'] == 'connected':
    #             model = self.models[ind]
    #             if block['activation'] != 'linear':
    #                 save_fc(fc, model)
    #             else:
    #                 save_fc(fc, model[0])
    #         elif block['type'] == 'maxpool':
    #             pass
    #         elif block['type'] == 'reorg':
    #             pass
    #         elif block['type'] == 'upsample':
    #             pass
    #         elif block['type'] == 'route':
    #             pass
    #         elif block['type'] == 'shortcut':
    #             pass
    #         elif block['type'] == 'sam':
    #             pass
    #         elif block['type'] == 'region':
    #             pass
    #         elif block['type'] == 'yolo':
    #             pass
    #         elif block['type'] == 'avgpool':
    #             pass
    #         elif block['type'] == 'softmax':
    #             pass
    #         elif block['type'] == 'cost':
    #             pass
    #         else:
    #             print('unknown type %s' % (block['type']))
    #     fp.close()
| 40.188082 | 119 | 0.481952 | import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from tool.region_loss import RegionLoss
from tool.yolo_layer import YoloLayer
from tool.config import *
from tool.torch_utils import *
class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # softplus keeps the tanh argument smooth and non-negative.
        return x * torch.tanh(torch.nn.functional.softplus(x))
class MaxPoolDark(nn.Module):
    # Max-pool reproducing Darknet's padding rule:
    #   darknet: out = (in + p - k) / s + 1 with p = k - 1
    #   torch:   out = (in + 2*p - k) / s + 1 with p = k // 2
    def __init__(self, size=2, stride=1):
        super(MaxPoolDark, self).__init__()
        self.size = size      # pooling window k
        self.stride = stride  # pooling stride s

    def forward(self, x):
        p = self.size // 2
        # When symmetric padding would yield a different output size than the
        # darknet formula, pad one extra row/column on the bottom/right.
        if ((x.shape[2] - 1) // self.stride) != ((x.shape[2] + 2 * p - self.size) // self.stride):
            padding1 = (self.size - 1) // 2
            padding2 = padding1 + 1
        else:
            padding1 = (self.size - 1) // 2
            padding2 = padding1
        if ((x.shape[3] - 1) // self.stride) != ((x.shape[3] + 2 * p - self.size) // self.stride):
            padding3 = (self.size - 1) // 2
            padding4 = padding3 + 1
        else:
            padding3 = (self.size - 1) // 2
            padding4 = padding3
        # replicate-padding avoids injecting artificial border values into the max
        # (presumably to mimic darknet's edge handling — confirm against darknet).
        x = F.max_pool2d(F.pad(x, (padding3, padding4, padding1, padding2), mode='replicate'),
                         self.size, stride=self.stride)
        return x
class Upsample_expand(nn.Module):
    """Nearest-neighbour upsampling by an integer factor via view/expand."""

    def __init__(self, stride=2):
        super(Upsample_expand, self).__init__()
        self.stride = stride

    def forward(self, x):
        assert (x.data.dim() == 4)
        n, c, h, w = x.size(0), x.size(1), x.size(2), x.size(3)
        s = self.stride
        # Insert singleton axes after H and W, broadcast each to `s`, fold back in.
        out = x.view(n, c, h, 1, w, 1).expand(n, c, h, s, w, s)
        return out.contiguous().view(n, c, h * s, w * s)
class Upsample_interpolate(nn.Module):
    """Nearest-neighbour upsample via ``F.interpolate``."""

    def __init__(self, stride):
        super(Upsample_interpolate, self).__init__()
        self.stride = stride

    def forward(self, x):
        assert (x.data.dim() == 4)
        target_size = (x.size(2) * self.stride, x.size(3) * self.stride)
        return F.interpolate(x, size=target_size, mode='nearest')
class Reorg(nn.Module):
    """Darknet ``reorg`` layer: folds spatial blocks into channels.

    Maps ``(B, C, H, W)`` to ``(B, C*stride*stride, H//stride, W//stride)``,
    where each output channel holds one position of the ``stride x stride``
    sub-grid. ``H`` and ``W`` must be divisible by ``stride``.
    """

    def __init__(self, stride=2):
        super(Reorg, self).__init__()
        self.stride = stride

    def forward(self, x):
        stride = self.stride
        assert (x.data.dim() == 4)
        B = x.data.size(0)
        C = x.data.size(1)
        H = x.data.size(2)
        W = x.data.size(3)
        assert (H % stride == 0)
        assert (W % stride == 0)
        ws = stride
        hs = stride
        # BUG FIX: the original used true division (`H / hs`), which yields
        # floats on Python 3; Tensor.view() requires integer sizes, so the
        # layer raised TypeError at runtime. Floor division is exact here
        # because divisibility is asserted above.
        x = x.view(B, C, H // hs, hs, W // ws, ws).transpose(3, 4).contiguous()
        x = x.view(B, C, H // hs * W // ws, hs * ws).transpose(2, 3).contiguous()
        x = x.view(B, C, hs * ws, H // hs, W // ws).transpose(1, 2).contiguous()
        x = x.view(B, hs * ws * C, H // hs, W // ws)
        return x
class GlobalAvgPool2d(nn.Module):
    """Average every feature map to a scalar: ``(N, C, H, W) -> (N, C)``."""

    def __init__(self):
        super(GlobalAvgPool2d, self).__init__()

    def forward(self, x):
        batch, channels, height, width = x.data.size()
        # Pooling with a kernel covering the full map equals a per-channel mean.
        pooled = F.avg_pool2d(x, (height, width))
        return pooled.view(batch, channels)
class EmptyModule(nn.Module):
    """Identity placeholder used for cfg blocks (route/shortcut/sam) whose
    actual computation happens inline in ``Darknet.forward``."""

    def __init__(self):
        super(EmptyModule, self).__init__()

    def forward(self, inputs):
        return inputs
class Darknet(nn.Module):
    """Darknet/YOLO network assembled from a darknet ``.cfg`` description."""

    def __init__(self, cfgfile, inference=False):
        """
        :param cfgfile: path to a darknet ``.cfg`` file describing the network.
        :param inference: build-for-inference flag; ``self.training`` is set
            to its negation.
        """
        super(Darknet, self).__init__()
        self.inference = inference
        # NOTE(review): this assignment shadows nn.Module's own `training`
        # attribute that train()/eval() normally manage -- confirm intended.
        self.training = not self.inference

        self.blocks = parse_cfg(cfgfile)  # one dict per [section] in the cfg
        self.width = int(self.blocks[0]['width'])
        self.height = int(self.blocks[0]['height'])

        # Translate cfg sections into a parallel nn.ModuleList.
        self.models = self.create_network(self.blocks)
        self.loss = self.models[len(self.models) - 1]

        # Cfgs whose final section is [region] expose their anchor metadata
        # directly on the network object.
        if self.blocks[(len(self.blocks) - 1)]['type'] == 'region':
            self.anchors = self.loss.anchors
            self.num_anchors = self.loss.num_anchors
            self.anchor_step = self.loss.anchor_step
            self.num_classes = self.loss.num_classes

        # Darknet weight-file bookkeeping; populated by load_weights().
        self.header = torch.IntTensor([0, 0, 0, 0])
        self.seen = 0
    def forward(self, x):
        """Run ``x`` through every cfg block in order.

        Returns the list of raw per-scale ``yolo`` outputs while
        ``self.training`` is true, otherwise the result of
        ``get_region_boxes`` over those outputs.
        """
        ind = -2  # model index: [net] maps to -1 so the first real layer is 0
        self.loss = None
        outputs = dict()  # layer index -> activation, read by route/shortcut/sam
        out_boxes = []
        for block in self.blocks:
            ind = ind + 1
            if block['type'] == 'net':
                continue
            elif block['type'] in ['convolutional', 'maxpool', 'reorg', 'upsample', 'avgpool', 'softmax', 'connected']:
                # Plain feed-forward layers: apply the pre-built module.
                x = self.models[ind](x)
                outputs[ind] = x
            elif block['type'] == 'route':
                # Re-use/concatenate earlier activations. Non-positive layer
                # indices are relative to the current position.
                layers = block['layers'].split(',')
                layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers]
                if len(layers) == 1:
                    if 'groups' not in block.keys() or int(block['groups']) == 1:
                        x = outputs[layers[0]]
                        outputs[ind] = x
                    else:
                        # Grouped route: forward only one channel slice of the
                        # referenced activation.
                        groups = int(block['groups'])
                        group_id = int(block['group_id'])
                        _, b, _, _ = outputs[layers[0]].shape
                        x = outputs[layers[0]][:, b // groups * group_id:b // groups * (group_id + 1)]
                        outputs[ind] = x
                elif len(layers) == 2:
                    x1 = outputs[layers[0]]
                    x2 = outputs[layers[1]]
                    x = torch.cat((x1, x2), 1)
                    outputs[ind] = x
                elif len(layers) == 4:
                    x1 = outputs[layers[0]]
                    x2 = outputs[layers[1]]
                    x3 = outputs[layers[2]]
                    x4 = outputs[layers[3]]
                    x = torch.cat((x1, x2, x3, x4), 1)
                    outputs[ind] = x
                else:
                    print("rounte number > 2 ,is {}".format(len(layers)))
            elif block['type'] == 'shortcut':
                # Residual addition with an earlier layer, then an optional
                # activation on the sum.
                from_layer = int(block['from'])
                activation = block['activation']
                from_layer = from_layer if from_layer > 0 else from_layer + ind
                x1 = outputs[from_layer]
                x2 = outputs[ind - 1]
                x = x1 + x2
                if activation == 'leaky':
                    x = F.leaky_relu(x, 0.1, inplace=True)
                elif activation == 'relu':
                    x = F.relu(x, inplace=True)
                outputs[ind] = x
            elif block['type'] == 'sam':
                # Elementwise product with an earlier layer's activation.
                from_layer = int(block['from'])
                from_layer = from_layer if from_layer > 0 else from_layer + ind
                x1 = outputs[from_layer]
                x2 = outputs[ind - 1]
                x = x1 * x2
                outputs[ind] = x
            elif block['type'] == 'region':
                continue
                # NOTE(review): everything below is unreachable -- the
                # `continue` above always fires, so region loss is never
                # accumulated in forward(). Confirm whether this is intended.
                if self.loss:
                    self.loss = self.loss + self.models[ind](x)
                else:
                    self.loss = self.models[ind](x)
                outputs[ind] = None
            elif block['type'] == 'yolo':
                # Detection heads: collect each scale's boxes instead of
                # threading them through `x`.
                boxes = self.models[ind](x)
                out_boxes.append(boxes)
            elif block['type'] == 'cost':
                continue
            else:
                print('unknown type %s' % (block['type']))

        if self.training:
            return out_boxes
        else:
            return get_region_boxes(out_boxes)
    def print_network(self):
        """Pretty-print the parsed cfg blocks (delegates to ``print_cfg``)."""
        print_cfg(self.blocks)
    def create_network(self, blocks):
        """Build an ``nn.ModuleList`` mirroring the parsed cfg blocks.

        Tracks, per layer, the output channel count (``out_filters``) and the
        cumulative spatial stride (``out_strides``) so route/shortcut/yolo
        blocks can reference them. Blocks whose computation happens inline in
        ``forward`` (route, shortcut, sam) get an ``EmptyModule`` placeholder
        so module indices stay aligned with block indices.
        """
        models = nn.ModuleList()

        prev_filters = 3  # default input channels; overridden by [net]
        out_filters = []
        prev_stride = 1
        out_strides = []
        conv_id = 0  # running counter used only to name conv/bn/act submodules
        for block in blocks:
            if block['type'] == 'net':
                prev_filters = int(block['channels'])
                continue
            elif block['type'] == 'convolutional':
                conv_id = conv_id + 1
                batch_normalize = int(block['batch_normalize'])
                filters = int(block['filters'])
                kernel_size = int(block['size'])
                stride = int(block['stride'])
                is_pad = int(block['pad'])
                # Darknet's `pad=1` means "same" padding for odd kernels.
                pad = (kernel_size - 1) // 2 if is_pad else 0
                activation = block['activation']
                model = nn.Sequential()
                if batch_normalize:
                    # Conv bias is redundant when followed by BatchNorm.
                    model.add_module('conv{0}'.format(conv_id),
                                     nn.Conv2d(prev_filters, filters, kernel_size, stride, pad, bias=False))
                    model.add_module('bn{0}'.format(conv_id), nn.BatchNorm2d(filters))
                else:
                    model.add_module('conv{0}'.format(conv_id),
                                     nn.Conv2d(prev_filters, filters, kernel_size, stride, pad))
                if activation == 'leaky':
                    model.add_module('leaky{0}'.format(conv_id), nn.LeakyReLU(0.1, inplace=True))
                elif activation == 'relu':
                    model.add_module('relu{0}'.format(conv_id), nn.ReLU(inplace=True))
                elif activation == 'mish':
                    model.add_module('mish{0}'.format(conv_id), Mish())
                elif activation == 'linear':
                    pass
                elif activation == 'logistic':
                    model.add_module('sigmoid{0}'.format(conv_id), nn.Sigmoid())
                else:
                    print("No convolutional activation named {}".format(activation))

                prev_filters = filters
                out_filters.append(prev_filters)
                prev_stride = stride * prev_stride
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'maxpool':
                pool_size = int(block['size'])
                stride = int(block['stride'])
                if stride == 1 and pool_size % 2:
                    # "same"-size pooling is expressible with plain MaxPool2d.
                    model = nn.MaxPool2d(kernel_size=pool_size, stride=stride, padding=pool_size // 2)
                elif stride == pool_size:
                    model = nn.MaxPool2d(kernel_size=pool_size, stride=stride, padding=0)
                else:
                    # Fall back to the darknet-compatible padding behaviour.
                    model = MaxPoolDark(pool_size, stride)
                out_filters.append(prev_filters)
                prev_stride = stride * prev_stride
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'avgpool':
                # NOTE(review): no out_strides entry is appended here, unlike
                # every other branch -- confirm whether that is intentional.
                model = GlobalAvgPool2d()
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'softmax':
                model = nn.Softmax()
                out_strides.append(prev_stride)
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'cost':
                if block['_type'] == 'sse':
                    model = nn.MSELoss(reduction='mean')
                elif block['_type'] == 'L1':
                    model = nn.L1Loss(reduction='mean')
                elif block['_type'] == 'smooth':
                    model = nn.SmoothL1Loss(reduction='mean')
                out_filters.append(1)
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'reorg':
                # Space-to-channel: channels grow by stride^2, spatial stride
                # grows by stride.
                stride = int(block['stride'])
                prev_filters = stride * stride * prev_filters
                out_filters.append(prev_filters)
                prev_stride = prev_stride * stride
                out_strides.append(prev_stride)
                models.append(Reorg(stride))
            elif block['type'] == 'upsample':
                stride = int(block['stride'])
                out_filters.append(prev_filters)
                prev_stride = prev_stride // stride
                out_strides.append(prev_stride)
                models.append(Upsample_expand(stride))
            elif block['type'] == 'route':
                # Placeholder module; only the filter/stride bookkeeping is
                # computed here (the concat/slice happens in forward()).
                layers = block['layers'].split(',')
                ind = len(models)
                layers = [int(i) if int(i) > 0 else int(i) + ind for i in layers]
                if len(layers) == 1:
                    if 'groups' not in block.keys() or int(block['groups']) == 1:
                        prev_filters = out_filters[layers[0]]
                        prev_stride = out_strides[layers[0]]
                    else:
                        prev_filters = out_filters[layers[0]] // int(block['groups'])
                        prev_stride = out_strides[layers[0]] // int(block['groups'])
                elif len(layers) == 2:
                    assert (layers[0] == ind - 1 or layers[1] == ind - 1)
                    prev_filters = out_filters[layers[0]] + out_filters[layers[1]]
                    prev_stride = out_strides[layers[0]]
                elif len(layers) == 4:
                    assert (layers[0] == ind - 1)
                    prev_filters = out_filters[layers[0]] + out_filters[layers[1]] + out_filters[layers[2]] + \
                                   out_filters[layers[3]]
                    prev_stride = out_strides[layers[0]]
                else:
                    print("route error!!!")
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(EmptyModule())
            elif block['type'] == 'shortcut':
                ind = len(models)
                prev_filters = out_filters[ind - 1]
                out_filters.append(prev_filters)
                prev_stride = out_strides[ind - 1]
                out_strides.append(prev_stride)
                models.append(EmptyModule())
            elif block['type'] == 'sam':
                ind = len(models)
                prev_filters = out_filters[ind - 1]
                out_filters.append(prev_filters)
                prev_stride = out_strides[ind - 1]
                out_strides.append(prev_stride)
                models.append(EmptyModule())
            elif block['type'] == 'connected':
                filters = int(block['output'])
                if block['activation'] == 'linear':
                    model = nn.Linear(prev_filters, filters)
                elif block['activation'] == 'leaky':
                    model = nn.Sequential(
                        nn.Linear(prev_filters, filters),
                        nn.LeakyReLU(0.1, inplace=True))
                elif block['activation'] == 'relu':
                    model = nn.Sequential(
                        nn.Linear(prev_filters, filters),
                        nn.ReLU(inplace=True))
                prev_filters = filters
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(model)
            elif block['type'] == 'region':
                # Configure the region loss from the cfg's scalar parameters.
                loss = RegionLoss()
                anchors = block['anchors'].split(',')
                loss.anchors = [float(i) for i in anchors]
                loss.num_classes = int(block['classes'])
                loss.num_anchors = int(block['num'])
                loss.anchor_step = len(loss.anchors) // loss.num_anchors
                loss.object_scale = float(block['object_scale'])
                loss.noobject_scale = float(block['noobject_scale'])
                loss.class_scale = float(block['class_scale'])
                loss.coord_scale = float(block['coord_scale'])
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(loss)
            elif block['type'] == 'yolo':
                # Detection head; inherits the cumulative stride computed so far.
                yolo_layer = YoloLayer()
                anchors = block['anchors'].split(',')
                anchor_mask = block['mask'].split(',')
                yolo_layer.anchor_mask = [int(i) for i in anchor_mask]
                yolo_layer.anchors = [float(i) for i in anchors]
                yolo_layer.num_classes = int(block['classes'])
                self.num_classes = yolo_layer.num_classes
                yolo_layer.num_anchors = int(block['num'])
                yolo_layer.anchor_step = len(yolo_layer.anchors) // yolo_layer.num_anchors
                yolo_layer.stride = prev_stride
                yolo_layer.scale_x_y = float(block['scale_x_y'])
                out_filters.append(prev_filters)
                out_strides.append(prev_stride)
                models.append(yolo_layer)
            else:
                print('unknown type %s' % (block['type']))

        return models
    def load_weights(self, weightfile):
        """Load darknet binary weights into the already-built modules.

        Only convolutional and connected layers carry weights; every other
        block type is skipped. ``start`` is the read cursor into the flat
        float32 buffer and is advanced by the ``load_*`` helpers.
        """
        fp = open(weightfile, 'rb')
        # Darknet file layout: 5 x int32 header, then raw float32 weights.
        # NOTE(review): newer darknet revisions store `seen` as int64 --
        # confirm this 5x int32 layout matches the weight files in use.
        header = np.fromfile(fp, count=5, dtype=np.int32)
        self.header = torch.from_numpy(header)
        self.seen = self.header[3]
        buf = np.fromfile(fp, dtype=np.float32)
        fp.close()

        start = 0
        ind = -2  # same indexing convention as forward(): [net] maps to -1
        for block in self.blocks:
            if start >= buf.size:
                break
            ind = ind + 1
            if block['type'] == 'net':
                continue
            elif block['type'] == 'convolutional':
                model = self.models[ind]
                batch_normalize = int(block['batch_normalize'])
                if batch_normalize:
                    # model[0] is the conv, model[1] the batch-norm.
                    start = load_conv_bn(buf, start, model[0], model[1])
                else:
                    start = load_conv(buf, start, model[0])
            elif block['type'] == 'connected':
                model = self.models[ind]
                if block['activation'] != 'linear':
                    # Non-linear connected layers are Sequential(Linear, act).
                    start = load_fc(buf, start, model[0])
                else:
                    start = load_fc(buf, start, model)
            elif block['type'] == 'maxpool':
                pass
            elif block['type'] == 'reorg':
                pass
            elif block['type'] == 'upsample':
                pass
            elif block['type'] == 'route':
                pass
            elif block['type'] == 'shortcut':
                pass
            elif block['type'] == 'sam':
                pass
            elif block['type'] == 'region':
                pass
            elif block['type'] == 'yolo':
                pass
            elif block['type'] == 'avgpool':
                pass
            elif block['type'] == 'softmax':
                pass
            elif block['type'] == 'cost':
                pass
            else:
                print('unknown type %s' % (block['type']))
| true | true |
f7fa751aa4d051e7d640125056ad7b4a6cbbf91c | 2,354 | py | Python | migrate_dir.py | open-data/pd_warehouse | 996251143702d1516f48f421aa9a8b933d86c04c | [
"MIT"
] | null | null | null | migrate_dir.py | open-data/pd_warehouse | 996251143702d1516f48f421aa9a8b933d86c04c | [
"MIT"
] | null | null | null | migrate_dir.py | open-data/pd_warehouse | 996251143702d1516f48f421aa9a8b933d86c04c | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
import argparse
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
def run_migrations(source_dir, target_dir):
    """Run the migration script over every file found in *source_dir*.

    Empty files are copied to *target_dir* verbatim; non-empty files are
    handed to ``migrate_all.py`` as a subprocess. The whole process aborts
    with exit status 1 as soon as one migration fails.
    """
    for csvfile in os.listdir(source_dir):
        print("Migrating {0} from directory {1} to {2}".format(csvfile, source_dir, target_dir))
        source_path = os.path.join(source_dir, csvfile)
        target_path = os.path.join(target_dir, csvfile)
        # Nothing to migrate in an empty file -- just copy it across.
        if os.path.getsize(source_path) == 0:
            shutil.copyfile(source_path, target_path)
            print("{0} is empty. Not migrating".format(csvfile))
            continue
        proc = subprocess.Popen(
            [sys.executable, 'migrate_all.py', source_path, target_path])
        if proc.wait():
            sys.exit(1)
def main():
    """Migrate every ``.tar.gz`` archive found in ``--dir``.

    Each archive is extracted to a temp directory, migrated via
    :func:`run_migrations` into a second temp directory, and repacked under
    the same name in ``--target``. Exits with status 1 when either directory
    argument does not exist.
    """
    parser = argparse.ArgumentParser(description='Run the CKAN PD Migrations scripts against all the archived files in a directory')
    parser.add_argument('-d', '--dir', help='Directory to migrate', required=True)
    parser.add_argument('-t', '--target', help='Target directory', required=True)
    args = parser.parse_args()

    if not os.path.isdir(args.dir):
        # BUG FIX: the original format string was 'Directory {0) ...' -- the
        # malformed replacement field made .format() raise ValueError instead
        # of printing the error message.
        print('Directory {0} does not exist'.format(args.dir))
        sys.exit(1)
    elif not os.path.isdir(args.target):
        print('Target directory {0} does not exist'.format(args.target))
        sys.exit(1)

    for tar_file in os.listdir(args.dir):
        if not tar_file.endswith('.tar.gz'):
            continue
        print('Migrating {0}'.format(tar_file))
        tmp_dir1 = tempfile.mkdtemp()
        tmp_dir2 = tempfile.mkdtemp()
        # try/finally guarantees the temp directories are removed even when a
        # migration subprocess aborts the run.
        try:
            print('Extracting {0} to {1}'.format(tar_file, tmp_dir1))
            tar = tarfile.open(os.path.join(args.dir, tar_file))
            try:
                tar.extractall(tmp_dir1)
            finally:
                tar.close()
            run_migrations(tmp_dir1, tmp_dir2)
            tar2 = tarfile.open(os.path.join(args.target, tar_file), 'w:gz')
            print('Creating {0}'.format(tar_file))
            try:
                # Archive each migrated file flat (arcname without directories).
                for root, dirs, files in os.walk(tmp_dir2):
                    for file in files:
                        tar2.add(os.path.join(root, file), file)
            finally:
                tar2.close()
        finally:
            shutil.rmtree(tmp_dir1)
            shutil.rmtree(tmp_dir2)
# Script entry point. Note there is no `if __name__ == '__main__':` guard,
# so importing this module runs the migration immediately.
main()
exit(0)
| 34.617647 | 132 | 0.636364 |
import argparse
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
def run_migrations(source_dir, target_dir):
for csvfile in os.listdir(source_dir):
print("Migrating {0} from directory {1} to {2}".format(csvfile, source_dir, target_dir))
file_size = os.path.getsize(os.path.join(source_dir, csvfile))
if file_size == 0:
shutil.copyfile( os.path.join(source_dir, csvfile), os.path.join(target_dir, csvfile))
print("{0} is empty. Not migrating".format(csvfile))
continue
proc = subprocess.Popen([sys.executable, 'migrate_all.py', os.path.join(source_dir, csvfile),
os.path.join(target_dir, csvfile)])
if proc.wait():
sys.exit(1)
def main():
parser = argparse.ArgumentParser(description='Run the CKAN PD Migrations scripts against all the archived files in a directory')
parser.add_argument('-d', '--dir', help='Directory to migrate', required=True)
parser.add_argument('-t', '--target', help='Target directory', required=True)
args = parser.parse_args()
if not os.path.isdir(args.dir):
print('Directory {0) does not exist'.format(args.dir))
exit(1)
elif not os.path.isdir(args.target):
print('Target directory {0} does not exist'.format(args.target))
exit(1)
for tar_file in os.listdir(args.dir):
if not tar_file.endswith('.tar.gz'):
continue
print('Migrating {0}'.format(tar_file))
tmp_dir1 = tempfile.mkdtemp()
tmp_dir2 = tempfile.mkdtemp()
print('Extracting {0} to {1}'.format(tar_file, tmp_dir1))
tar = tarfile.open(os.path.join(args.dir, tar_file))
tar.extractall(tmp_dir1)
tar.close()
run_migrations(tmp_dir1, tmp_dir2)
tar2 = tarfile.open(os.path.join(args.target, tar_file), 'w:gz')
print('Creating {0}'.format(tar_file))
for root, dirs, files in os.walk(tmp_dir2):
for file in files:
tar2.add(os.path.join(root, file), file)
tar2.close()
shutil.rmtree(tmp_dir1)
shutil.rmtree(tmp_dir2)
main()
exit(0)
| false | true |
f7fa75aaa36b1502ddaa4d220d6445bce41c54f4 | 15,398 | py | Python | tasks.py | nicoddemus/qmxgraph | 87bc4ef121dd40ba82b6588ae43325225cd9a931 | [
"MIT"
] | null | null | null | tasks.py | nicoddemus/qmxgraph | 87bc4ef121dd40ba82b6588ae43325225cd9a931 | [
"MIT"
] | null | null | null | tasks.py | nicoddemus/qmxgraph | 87bc4ef121dd40ba82b6588ae43325225cd9a931 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, print_function
import os
import sys
import invoke
from colorama import Fore, Style
@invoke.task
def qrc(
        ctx,
):
    """
    Compiles mxGraph and QmxGraph own static web files in Qt resource files.

    This simplifies a lot embedding these contents on Qt web views. It also
    helps freezing an executable, as dependency with static web files becomes
    explicit in Python because of imported resource file.

    Generates 2 resource files located in `qmxgraph` package:

    * `resource_qmxgraph`: static files found in `page/` of QmxGraph;
    * `resource_mxgraph`: static files in mxGraph library, specifically
        all files located in `javascript/src`.

    These resources are imported by QmxGraph widget and must be generated
    before its use.
    """
    import qmxgraph
    from qmxgraph import deploy

    # File extensions collected into the resource bundles.
    WEB_EXTENSIONS = (
        '.js',
        '.gif',
        '.png',
        '.html',
        '.css',
        '.txt',  # used by mxGraph resources
        '.xml',  # used by mxGraph resources
    )

    indent = ' ' * 4
    print_message('qrc', color=Fore.BLUE, bright=True)

    def create_web_resource(resource_name, src_dir):
        # Generate resource_<name>.qrc/.py next to the qmxgraph package code.
        print_message(
            '{}- resource: {}'.format(indent, resource_name),
            color=Fore.BLUE, bright=True)
        target_dir = os.path.dirname(qmxgraph.__file__)
        qrc_file, py_file = generate_qrc_from_folder(
            basename='resource_{}'.format(resource_name),
            alias=resource_name,
            source_dir=src_dir,
            target_dir=target_dir,
            include=WEB_EXTENSIONS,
        )
        print_message('{}* generated {}'.format(indent * 2, qrc_file))
        print_message('{}* generated {}'.format(indent * 2, py_file))

    # Locate mxGraph: explicit MXGRAPHPATH wins, otherwise look for a
    # `mxgraph` folder inside the active conda environment.
    mxgraph = os.environ.get('MXGRAPHPATH', None)
    if mxgraph is None:
        env_dir = deploy.get_conda_env_path()
        if env_dir is None:
            raise IOError("Unable to determine MxGraph mxgraph in "
                          "environment")
        mxgraph = '{env_dir}/mxgraph'.format(env_dir=env_dir)

    create_web_resource(
        resource_name='mxgraph',
        src_dir='{folder}/javascript/src'.format(folder=mxgraph))

    qgraph_root = os.path.dirname(qmxgraph.__file__)
    create_web_resource(
        resource_name='qmxgraph',
        src_dir=os.path.join(qgraph_root, 'page'),
    )
@invoke.task(help={
    'python-version': (
        'Can be used to define the python version used when creating the'
        ' work environment'
    ),
})
def docs(ctx, python_version=None):
    """
    Create the documentation html locally.

    Builds (or updates) a dedicated `<env>-docs` conda environment from
    `docs_environment.devenv.yml` and runs sphinx inside it via a generated
    shell/batch script.
    """
    import json
    import subprocess
    import tempfile
    from pathlib import Path

    # Require an activated project environment; its name seeds the docs env.
    conda_info_json = subprocess.check_output(['conda', 'info', '--json'])
    conda_info = json.loads(conda_info_json)
    current_env_name = conda_info["active_prefix_name"]
    if current_env_name in (None, 'base'):
        raise invoke.Exit("Activate the project's conda environment first")
    else:
        docs_env_name = f'{current_env_name}-docs'

    new_environ = os.environ.copy()
    new_environ['TEST_QMXGRAPH'] = '0'
    if python_version is not None:
        new_environ['PYTHON_VERSION'] = python_version

    script = [
        '',  # To have a new line at the start (see windows new line).
        f'conda devenv --name {docs_env_name} --file docs_environment.devenv.yml',
        f'conda activate {docs_env_name}',
        'cd docs',
        'sphinx-build . _build -W',
    ]
    # `conda activate` only works from a shell script, so the commands are
    # written to a temp .bat/.bash file and executed via cmd/bash.
    if sys.platform == 'win32':
        suffix = '.bat'
        new_line = '\n@echo on\ncall '
        command = ['cmd', '/C']
    else:
        suffix = '.bash'
        new_line = '\n'
        command = ['bash', '-x']

    # delete=False so the file can be closed before bash/cmd reads it
    # (required on Windows); removed manually in the finally block.
    script_file = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        script_file.close()
        script_file = Path(script_file.name)
        script_file.write_text(new_line.join(script))
        command.append(str(script_file))
        subprocess.check_call(command, env=new_environ)
    finally:
        script_file.unlink()
@invoke.task
def test(ctx):
    """Run the pytest suite with coverage and per-test timeout enabled."""
    print_message('test', color=Fore.BLUE, bright=True)
    import subprocess
    status = subprocess.call('pytest --cov=qmxgraph --timeout=10', shell=True)
    raise invoke.Exit(status)
@invoke.task
def lint(ctx):
    """Run flake8 static checks over the qmxgraph package."""
    print_message('lint', color=Fore.BLUE, bright=True)
    import subprocess
    status = subprocess.call('flake8 -v qmxgraph', shell=True)
    raise invoke.Exit(status)
@invoke.task(help={
    'svg_path': 'A SVG file',
})
def svgtostencil(ctx, svg_path):
    """
    Converts a SVG file to a stencil file compatible with mxGraph, output is printed in standard
    output.
    """
    import subprocess
    # The converter lives in the repository's `scripts/` folder.
    converter = os.path.join(os.getcwd(), 'scripts', 'svg_to_stencil.py')
    raise invoke.Exit(subprocess.call(['python', converter, svg_path]))
def generate_qrc(target_filename, file_map):
    """
    Generates a Qt resource collection file (.qrc): an XML document listing
    the files to embed, each under an alias.

    For example::

        generate_qrc('resource.qrc', [('foo/bar.txt', '/home/dent/bar.txt')])

    produces::

        <!DOCTYPE RCC>
        <RCC version="1.0">
        <qresource>
            <file alias="foo/bar.txt">/home/dent/bar.txt</file>
        </qresource>
        </RCC>

    After compiling to a Python module (see `generate_qrc_py`) the resource
    can be accessed as ``QFile(':/foo/bar.txt')``.

    References:

    * http://doc.qt.io/qt-5/resources.html
    * http://pyqt.sourceforge.net/Docs/PyQt5/resources.html

    :param str target_filename: Path of generated resource collection file.
    :param iterable[tuple[str, str]] file_map: Pairs of (alias, file path)
        to include in the resource collection.
    """
    import io

    qrc_dir = os.path.dirname(target_filename)
    xml = generate_qrc_contents(file_map, qrc_dir)

    # UTF-8 is the encoding adopted by Qt (and subsequently PyQt) resource
    # collection tools. It isn't officially documented, but modules generated
    # by `pyrcc5` use it, and a Qt forum moderator stated it is Qt's
    # preference (https://forum.qt.io/topic/42641/the-qt-resource-system-compile-error/4).
    with io.open(target_filename, 'w', encoding='utf8') as out_stream:
        out_stream.write(xml)
def generate_qrc_contents(file_map, target_dir):
    """
    Generates just the XML contents of a Qt resource collection file. See
    `generate_qrc` for details.

    :param iterable[tuple[str, str]] file_map: See `generate_qrc`.
    :param str target_dir: The tool that compiles QRC to a Python module
        requires files in QRC to be relative to its execution.
    :rtype: str
    :return: Contents of a resource collection file.
    """
    # Relative paths on Windows misbehave when virtual drives (`subst`) are
    # involved; resolve every path to its *actual* absolute location first.
    base_dir = follow_subst(target_dir)

    entry_lines = []
    for alias, path in file_map:
        real_path = follow_subst(path)
        rel_path = os.path.relpath(real_path, base_dir)
        entry_lines.append(
            ' ' + QRC_ENTRY_TEMPLATE.format(alias=alias, path=rel_path))

    return QRC_FILE_TEMPLATE.format(entries='\n'.join(entry_lines))
def generate_qrc_py(qrc_filename, target_filename):
    """
    Generates a Python module that only needs to be imported by a Qt
    application for the bundled resources to become available, just as if
    they were the original files.

    References:

    * http://doc.qt.io/qt-5/resources.html
    * http://pyqt.sourceforge.net/Docs/PyQt5/resources.html

    :param str qrc_filename: A .qrc resource collection file.
    :param str target_filename: Path of generated Python module.
    """
    import subprocess

    work_dir, local_name = os.path.split(qrc_filename)
    # pyrcc5 must run from the same *actual* absolute path used when the
    # .qrc contents were generated, so virtual drives are resolved here too.
    work_dir = follow_subst(work_dir)
    subprocess.check_call(
        ['pyrcc5', local_name, '-o', target_filename], cwd=work_dir)
def generate_qrc_from_folder(
        basename, alias, source_dir, target_dir, include=None):
    """
    Collect files from a folder, include them in a resource collection file and
    then compiles it to a Python module.

    All collected files are aliased in resource collection with relative path
    in source dir prefixed by `alias`.

    For instance, consider folder below:

    ```
    - /home/dent/foo/
        * file1.txt
        * file2.txt
        - bar/
            * file3.txt
    ```

    With a call like:

    ```python
    generate_qrc_from_folder(
        'resource_foo', 'rsc_foo', '/home/dent/foo/', '/home/dent/foo/')
    ```

    It would result in a .qrc like:

    ```
    <!DOCTYPE RCC>
    <RCC version="1.0">
    <qresource>
        <file alias="rsc_foo/file1.txt">/home/dent/foo/file1.txt</file>
        <file alias="rsc_foo/file2.txt">/home/dent/foo/file2.txt</file>
        <file alias="rsc_foo/bar/file3.txt">/home/dent/foo/bar/file3.txt</file>
    </qresource>
    </RCC>
    ```

    :param str basename: Basename used for .qrc and .py files generated
        for resource collection.
    :param str alias: Basename used for aliases in .qrc file.
    :param str source_dir: Folder that will have its files included in
        resource collection.
    :param str target_dir: Folder where generated .qrc and .py files are
        going to be written.
    :param iterable|None include: Allowed extensions to be collected, if None
        all are allowed.
    """
    if not os.path.isdir(source_dir):
        raise IOError("Invalid source directory: {}".format(source_dir))
    if not os.path.isdir(target_dir):
        raise IOError("Invalid target directory: {}".format(target_dir))

    # Qt aliases always use forward slashes, so Windows path separators in
    # the computed relative paths must be normalized.
    if sys.platform.startswith('win'):
        def fix_alias(a):
            return a.replace('\\', '/')
    else:
        def fix_alias(a):
            return a

    # Pair each collected file with its `<alias>/<relative path>` alias.
    files = [
        (
            fix_alias('{alias}/{rel_file}'.format(
                alias=alias,
                rel_file=os.path.relpath(f, source_dir))),
            f
        )
        for f in collect_files_in_folder(source_dir, include=include)
    ]
    if not files:
        raise RuntimeError(
            "Unable to collect anything for "
            ".qrc file in folder {}".format(source_dir))

    # Write the .qrc, then compile it to an importable Python module.
    qrc_filename = os.path.join(
        target_dir, '{basename}{ext}'.format(basename=basename, ext='.qrc'))
    generate_qrc(qrc_filename, files)

    py_filename = os.path.join(target_dir, '{basename}{ext}'.format(
        basename=basename, ext='.py'))
    generate_qrc_py(qrc_filename, py_filename)

    return qrc_filename, py_filename
def collect_files_in_folder(folder, include=None):
    """Recursively gather normalized file paths under *folder*.

    :param str folder: Root folder to walk.
    :param iterable|None include: When given, only files whose extension is
        in this collection are returned; when ``None`` everything matches.
    :rtype: list[str]
    """
    matches = []
    for root, _dirs, names in os.walk(folder):
        for name in names:
            if include is not None and os.path.splitext(name)[1] not in include:
                continue
            matches.append(os.path.normpath(os.path.join(root, name)))
    return matches
def print_message(message, color=None, bright=True, endline='\n'):
    """
    Print a message to the standard output.

    :param unicode message: The message to print.
    :param unicode|None color: The ANSI color used to colorize the message
        (see `colorama.Fore`). When `None` the message is printed as is.
        Defaults to `None`.
    :param bool bright: Control if the output message is bright or dim. This
        value is ignored if `color is None`. Default to `True`.
    :param unicode endline: The character printed after `message`. Default to
        "new line character".
    """
    import sys

    if color is not None:
        intensity = Style.BRIGHT if bright else Style.DIM
        message = color + intensity + message + Style.RESET_ALL

    # Subprocesses spawned by the tasks write directly to stdout/stderr, so
    # flush around the print to keep the output ordered.
    for stream in (sys.stdout, sys.stderr):
        stream.flush()
    print(message, end=endline)
    for stream in (sys.stdout, sys.stderr):
        stream.flush()
# `follow_subst` is defined per-platform: on Windows it resolves `subst`
# virtual drives to real paths; elsewhere it is a no-op.
if sys.platform.startswith('win'):

    def follow_subst(path, deep=True):
        """
        Windows has support for virtual drives through `subst` command (
        https://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/subst.mspx?mfr=true)

        Unfortunately Python doesn't acknowledge that and functions like
        `os.path.relpath` may fail if files in valid relative paths are
        mounted in different virtual drives.

        This function detects all virtual drives on system and replaces one or
        all virtual drives in path (depending on `deep` argument), returning
        actual absolute path.

        :param str path: A path.
        :param bool deep: If should follow all virtual drives on just the
            first one.
        :rtype: str
        :return: Absolute path with virtual drives replaced by actual driver.
        """
        import os
        path = os.path.abspath(path)
        # Repeatedly replace the leading drive letter while it maps to a
        # subst entry; a single pass when `deep` is false.
        while True:
            drive = path[0] + ':'
            universal_drive = drive.lower()
            subst = parse_subst()
            if universal_drive in subst:
                path = path.replace(drive, subst[universal_drive], 1)
            else:
                break

            if not deep:
                break

        return path

    def parse_subst():
        # Parse `subst` output into a {drive-letter: real-path} mapping.
        import re
        import subprocess
        output = subprocess.check_output('subst')

        def parse_subst_line(line):
            import locale
            if not isinstance(line, str):
                # `subst` output is bytes; decode with the console encoding.
                line = line.decode(locale.getpreferredencoding(False))

            # Lines look like `X:\: => C:\real\path`.
            match = re.match(r'^(\w:)\\: => (.+)$', line)
            drive = match.group(1)
            replace = match.group(2)
            return drive.lower(), replace

        return dict([parse_subst_line(line) for line in output.splitlines()])

else:
    def follow_subst(path, deep=True):
        """
        Noop, only Windows has virtual drives.

        :param str path: A path.
        :param bool deep: If should follow all virtual drives on just the
            first one.
        :rtype: str
        :return: Path as it is.
        """
        return path
# Template for a single <file> entry inside a .qrc resource collection.
QRC_ENTRY_TEMPLATE = '<file alias="{alias}">{path}</file>'

# Skeleton of a complete .qrc document; `entries` receives the joined
# QRC_ENTRY_TEMPLATE lines.
QRC_FILE_TEMPLATE = '''\
<!DOCTYPE RCC>
<RCC version="1.0">
<qresource>
{entries}
</qresource>
</RCC>'''
# Only tasks registered in this global collection are detected by invoke.
ns = invoke.Collection()
ns.add_task(qrc)
ns.add_task(docs)
ns.add_task(test)
ns.add_task(lint)
ns.add_task(svgtostencil)
| 30.796 | 108 | 0.641707 | from __future__ import absolute_import, print_function
import os
import sys
import invoke
from colorama import Fore, Style
@invoke.task
def qrc(
ctx,
):
import qmxgraph
from qmxgraph import deploy
WEB_EXTENSIONS = (
'.js',
'.gif',
'.png',
'.html',
'.css',
'.txt',
'.xml',
)
indent = ' '
print_message('qrc', color=Fore.BLUE, bright=True)
def create_web_resource(resource_name, src_dir):
print_message(
'{}- resource: {}'.format(indent, resource_name),
color=Fore.BLUE, bright=True)
target_dir = os.path.dirname(qmxgraph.__file__)
qrc_file, py_file = generate_qrc_from_folder(
basename='resource_{}'.format(resource_name),
alias=resource_name,
source_dir=src_dir,
target_dir=target_dir,
include=WEB_EXTENSIONS,
)
print_message('{}* generated {}'.format(indent * 2, qrc_file))
print_message('{}* generated {}'.format(indent * 2, py_file))
mxgraph = os.environ.get('MXGRAPHPATH', None)
if mxgraph is None:
env_dir = deploy.get_conda_env_path()
if env_dir is None:
raise IOError("Unable to determine MxGraph mxgraph in "
"environment")
mxgraph = '{env_dir}/mxgraph'.format(env_dir=env_dir)
create_web_resource(
resource_name='mxgraph',
src_dir='{folder}/javascript/src'.format(folder=mxgraph))
qgraph_root = os.path.dirname(qmxgraph.__file__)
create_web_resource(
resource_name='qmxgraph',
src_dir=os.path.join(qgraph_root, 'page'),
)
@invoke.task(help={
'python-version': (
'Can be used to define the python version used when creating the'
' work environment'
),
})
def docs(ctx, python_version=None):
import json
import subprocess
import tempfile
from pathlib import Path
conda_info_json = subprocess.check_output(['conda', 'info', '--json'])
conda_info = json.loads(conda_info_json)
current_env_name = conda_info["active_prefix_name"]
if current_env_name in (None, 'base'):
raise invoke.Exit("Activate the project's conda environment first")
else:
docs_env_name = f'{current_env_name}-docs'
new_environ = os.environ.copy()
new_environ['TEST_QMXGRAPH'] = '0'
if python_version is not None:
new_environ['PYTHON_VERSION'] = python_version
script = [
'', # To have a new line at the start (see windows new line).
f'conda devenv --name {docs_env_name} --file docs_environment.devenv.yml',
f'conda activate {docs_env_name}',
'cd docs',
'sphinx-build . _build -W',
]
if sys.platform == 'win32':
suffix = '.bat'
new_line = '\n@echo on\ncall '
command = ['cmd', '/C']
else:
suffix = '.bash'
new_line = '\n'
command = ['bash', '-x']
script_file = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
try:
script_file.close()
script_file = Path(script_file.name)
script_file.write_text(new_line.join(script))
command.append(str(script_file))
subprocess.check_call(command, env=new_environ)
finally:
script_file.unlink()
@invoke.task
def test(ctx):
    """
    Run the project's test suite with coverage and a per-test timeout.

    :param ctx: The invoke context (unused).
    :raises invoke.Exit: Always, propagating pytest's exit code so CI fails
        when tests fail.
    """
    import subprocess
    # Dropped the pointless `'test'.format()` -- a plain literal suffices.
    print_message('test', color=Fore.BLUE, bright=True)
    cmd = 'pytest --cov=qmxgraph --timeout=10'
    raise invoke.Exit(subprocess.call(cmd, shell=True))
@invoke.task
def lint(ctx):
    """
    Run flake8 over the qmxgraph package.

    :param ctx: The invoke context (unused).
    :raises invoke.Exit: Always, propagating flake8's exit code so CI fails
        on lint violations.
    """
    import subprocess
    # Dropped the pointless `'lint'.format()` -- a plain literal suffices.
    print_message('lint', color=Fore.BLUE, bright=True)
    cmd = 'flake8 -v qmxgraph'
    raise invoke.Exit(subprocess.call(cmd, shell=True))
@invoke.task(help={
    'svg_path': 'A SVG file',
})
def svgtostencil(ctx, svg_path):
    """
    Convert a SVG file to mxGraph stencil form using the helper script in
    the repository's `scripts` folder.

    :param ctx: The invoke context (unused).
    :param str svg_path: Path of the SVG file to convert.
    :raises invoke.Exit: Always, propagating the conversion script's exit
        code.
    """
    import subprocess
    converter = os.path.join(os.getcwd(), 'scripts', 'svg_to_stencil.py')
    raise invoke.Exit(subprocess.call(['python', converter, svg_path]))
def generate_qrc(target_filename, file_map):
    """
    Write a Qt resource collection (.qrc) file.

    :param str target_filename: Destination .qrc file; entries are made
        relative to its directory.
    :param iterable file_map: Pairs of (alias, absolute path) to list in the
        resource file.
    """
    import io
    qrc_dir = os.path.dirname(target_filename)
    qrc_xml = generate_qrc_contents(file_map, qrc_dir)
    # UTF-8 is the encoding adopted by Qt (and subsequently PyQt) resource
    # collection tools: modules generated by `pyrcc5` use it, and a Qt forum
    # moderator confirmed it as Qt's preference (
    # https://forum.qt.io/topic/42641/the-qt-resource-system-compile-error/4).
    with io.open(target_filename, 'w', encoding='utf8') as f:
        f.write(qrc_xml)
def generate_qrc_contents(file_map, target_dir):
    """
    Render the XML body of a .qrc file for the given (alias, path) pairs.

    :param iterable file_map: Pairs of (alias, path) to list.
    :param str target_dir: Directory the .qrc file will live in; each path
        is rewritten relative to it.
    :rtype: str
    :return: Complete .qrc document contents.
    """
    # Relative paths on Windows misbehave when virtual drives (created with
    # the `subst` command) are involved, so resolve both sides to their
    # *actual* absolute locations before computing relative paths.
    real_target_dir = follow_subst(target_dir)
    entries = []
    for alias, path in file_map:
        real_path = follow_subst(path)
        rel_path = os.path.relpath(real_path, real_target_dir)
        entries.append(' ' + QRC_ENTRY_TEMPLATE.format(alias=alias, path=rel_path))
    return QRC_FILE_TEMPLATE.format(entries='\n'.join(entries))
def generate_qrc_py(qrc_filename, target_filename):
    """
    Compile a .qrc file into a Python resource module using `pyrcc5`.

    :param str qrc_filename: Source .qrc file.
    :param str target_filename: Destination .py module.
    """
    import subprocess
    qrc_dir, qrc_name = os.path.split(qrc_filename)
    # pyrcc5 must run from the same *actual* absolute directory the .qrc
    # contents were generated against, so resolve `subst` virtual drives.
    real_dir = follow_subst(qrc_dir)
    cmd = ['pyrcc5', qrc_name, '-o', target_filename]
    subprocess.check_call(cmd, cwd=real_dir)
def generate_qrc_from_folder(
        basename, alias, source_dir, target_dir, include=None):
    """
    Generate `<basename>.qrc` plus its compiled `<basename>.py` module from
    every matching file under a folder.

    :param str basename: Base name (no extension) of the generated files.
    :param str alias: Alias prefix used for every resource entry.
    :param str source_dir: Folder scanned recursively for files.
    :param str target_dir: Folder where the .qrc and .py files are written.
    :param include: Optional collection of file extensions (with dot) used
        to filter collected files; None collects everything.
    :rtype: tuple[str, str]
    :return: Paths of the generated (.qrc, .py) files.
    :raises IOError: If either directory is invalid.
    :raises RuntimeError: If nothing was collected.
    """
    for label, folder in (('source', source_dir), ('target', target_dir)):
        if not os.path.isdir(folder):
            raise IOError("Invalid {} directory: {}".format(label, folder))
    # Qt resource aliases always use forward slashes, even on Windows.
    if sys.platform.startswith('win'):
        def fix_alias(a):
            return a.replace('\\', '/')
    else:
        def fix_alias(a):
            return a
    files = []
    for collected in collect_files_in_folder(source_dir, include=include):
        rel_file = os.path.relpath(collected, source_dir)
        files.append(
            (fix_alias('{alias}/{rel_file}'.format(
                alias=alias, rel_file=rel_file)), collected))
    if not files:
        raise RuntimeError(
            "Unable to collect anything for "
            ".qrc file in folder {}".format(source_dir))
    qrc_filename = os.path.join(target_dir, basename + '.qrc')
    generate_qrc(qrc_filename, files)
    py_filename = os.path.join(target_dir, basename + '.py')
    generate_qrc_py(qrc_filename, py_filename)
    return qrc_filename, py_filename
def collect_files_in_folder(folder, include=None):
    """
    Recursively list all files below a folder, optionally filtered by
    extension.

    :param str folder: Root folder to walk.
    :param include: Optional collection of file extensions (with dot, e.g.
        '.svg'); when given, only files with one of those extensions are
        returned.
    :rtype: list[str]
    :return: Normalized paths of the collected files, in os.walk order.
    """
    return [
        os.path.normpath(os.path.join(dirpath, name))
        for dirpath, _dirnames, filenames in os.walk(folder)
        for name in filenames
        if include is None or os.path.splitext(name)[1] in include
    ]
def print_message(message, color=None, bright=True, endline='\n'):
    """
    Print a message to stdout, optionally wrapped in colorama color codes.

    :param str message: Text to print.
    :param color: Optional colorama foreground code (e.g. Fore.BLUE); when
        given, the message is wrapped with a style and a reset code.
    :param bool bright: Use Style.BRIGHT (True) or Style.DIM (False) when a
        color is given.
    :param str endline: Line terminator passed to print().
    """
    import sys
    if color is not None:
        emphasis = Style.BRIGHT if bright else Style.DIM
        message = '{color}{style}{msg}{reset}'.format(
            color=color, style=emphasis, msg=message, reset=Style.RESET_ALL)

    def _flush_std_streams():
        # Subprocesses write directly to stdout/stderr, so flushing on both
        # sides of the print keeps the interleaved output ordered.
        sys.stdout.flush()
        sys.stderr.flush()

    _flush_std_streams()
    print(message, end=endline)
    _flush_std_streams()
# Windows virtual drives (created with the `subst` command) break relative
# path computations, so on Windows paths are resolved back to their real
# locations; on other platforms follow_subst is a no-op.
if sys.platform.startswith('win'):
    def follow_subst(path, deep=True):
        """
        Resolve a path possibly located on a `subst` virtual drive back to
        the real absolute path it points to.

        :param str path: A path.
        :param bool deep: If True, keep resolving while the result still
            starts on a substituted drive (substitutions can chain);
            otherwise stop after the first replacement.
        :rtype: str
        :return: The resolved absolute path.
        """
        import os
        path = os.path.abspath(path)
        while True:
            # Drive prefix of the current path, e.g. 'X:'.
            drive = path[0] + ':'
            universal_drive = drive.lower()
            subst = parse_subst()
            if universal_drive in subst:
                path = path.replace(drive, subst[universal_drive], 1)
            else:
                break
            if not deep:
                break
        return path

    def parse_subst():
        """
        Parse the output of the `subst` command into a mapping from
        lowercase drive prefix (e.g. 'x:') to the substituted target path.

        :rtype: dict[str, str]
        """
        import re
        import subprocess
        output = subprocess.check_output('subst')

        def parse_subst_line(line):
            import locale
            if not isinstance(line, str):
                # check_output returns bytes; decode using the system's
                # preferred encoding.
                line = line.decode(locale.getpreferredencoding(False))
            # Expected shape per the regex: `X:\: => C:\some\path`.
            # NOTE(review): assumes every output line matches; a
            # non-matching line would raise AttributeError on `match.group`
            # below -- confirm against real `subst` output.
            match = re.match(r'^(\w:)\\: => (.+)$', line)
            drive = match.group(1)
            replace = match.group(2)
            return drive.lower(), replace

        return dict([parse_subst_line(line) for line in output.splitlines()])
else:
    def follow_subst(path, deep=True):
        """
        Noop, only Windows has virtual drives.

        :param str path: A path.
        :param bool deep: If should follow all virtual drives on just the
            first one.
        :rtype: str
        :return: Path as it is.
        """
        return path
# Template for one <file> entry of a .qrc document: `alias` is the resource
# name exposed to Qt, `path` the file location relative to the .qrc file.
QRC_ENTRY_TEMPLATE = '<file alias="{alias}">{path}</file>'
# Skeleton of a complete .qrc document; `entries` receives the newline-joined
# QRC_ENTRY_TEMPLATE lines.
QRC_FILE_TEMPLATE = '''\
<!DOCTYPE RCC>
<RCC version="1.0">
<qresource>
{entries}
</qresource>
</RCC>'''
# Only tasks registered in this global collection will be detected by invoke.
ns = invoke.Collection()
ns.add_task(qrc)
ns.add_task(docs)
ns.add_task(test)
ns.add_task(lint)
ns.add_task(svgtostencil)
| true | true |
f7fa7668ebbc49f146a4704ab24e3003bea753bf | 1,984 | py | Python | test/test_with_latest_from.py | dbrattli/aioreactive | e057264a5905964c68d443b98b3e602279b3b9ed | [
"MIT"
] | 280 | 2016-10-08T11:01:11.000Z | 2022-03-31T02:36:30.000Z | test/test_with_latest_from.py | webclinic017/aioreactive | e057264a5905964c68d443b98b3e602279b3b9ed | [
"MIT"
] | 26 | 2016-10-22T09:14:47.000Z | 2021-06-15T12:04:04.000Z | test/test_with_latest_from.py | webclinic017/aioreactive | e057264a5905964c68d443b98b3e602279b3b9ed | [
"MIT"
] | 18 | 2017-01-15T15:29:15.000Z | 2021-09-14T15:35:13.000Z | import asyncio
import logging
from asyncio.exceptions import CancelledError
from typing import Tuple
import aioreactive as rx
import pytest
from aioreactive.notification import OnCompleted, OnNext
from aioreactive.testing import AsyncTestSubject, AsyncTestObserver, VirtualTimeEventLoop
from aioreactive.types import AsyncObservable, AsyncObserver
from expression.core import pipe
# Module-level logger; DEBUG-level basicConfig makes the library's internal
# logging visible while the tests run.
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
@pytest.fixture()  # type: ignore
def event_loop():
    """Yield a virtual-time event loop so time-based tests finish instantly.

    Replaces the event loop used by the async tests in this module.
    """
    # `pytest.yield_fixture` is deprecated and was removed in pytest 6.2;
    # plain `pytest.fixture` supports yield-style fixtures.
    loop = VirtualTimeEventLoop()
    yield loop
    loop.close()
@pytest.mark.asyncio
async def test_withlatestfrom_never_never():
    """Combining two never-emitting streams produces no notifications."""
    xs: AsyncObservable[int] = rx.never()
    ys: AsyncObservable[int] = rx.never()

    zs = pipe(xs, rx.with_latest_from(ys))

    obv: AsyncTestObserver[Tuple[int, int]] = AsyncTestObserver()
    await zs.subscribe_async(obv)
    await asyncio.sleep(1)

    # Previously this asserted on a local `result` list that nothing ever
    # appended to, which was vacuously true; assert on the observer's
    # recorded notifications instead.
    assert obv.values == []
@pytest.mark.asyncio
async def test_withlatestfrom_never_empty():
    """An empty source combined with a silent one completes with no values."""
    source: AsyncObservable[int] = rx.empty()
    other: AsyncObservable[int] = rx.never()

    combined = pipe(source, rx.with_latest_from(other))

    observer: AsyncTestObserver[Tuple[int, int]] = AsyncTestObserver()
    # rx.run is expected to end in CancelledError since the stream closes
    # without ever producing a value.
    with pytest.raises(CancelledError):
        await rx.run(combined, observer)
    assert observer.values == [(0, OnCompleted)]
@pytest.mark.asyncio
async def test_withlatestfrom_done():
    """Each value pairs with the latest from the other stream and sums."""
    main: AsyncTestSubject[int] = AsyncTestSubject()
    side: AsyncTestSubject[int] = AsyncTestSubject()

    summed = pipe(
        main, rx.with_latest_from(side), rx.starmap(lambda x, y: x + y))

    observer: AsyncTestObserver[int] = AsyncTestObserver()
    async with await summed.subscribe_async(observer):
        await main.asend(1)  # `side` has no value yet
        await side.asend(2)
        await main.asend(3)  # pairs with latest side value: 3 + 2 == 5
        await main.aclose()
        await observer
    assert observer.values == [(0, OnNext(5)), (0, OnCompleted)]
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(test_withlatestfrom_done())
loop.close()
| 26.453333 | 89 | 0.714718 | import asyncio
import logging
from asyncio.exceptions import CancelledError
from typing import Tuple
import aioreactive as rx
import pytest
from aioreactive.notification import OnCompleted, OnNext
from aioreactive.testing import AsyncTestSubject, AsyncTestObserver, VirtualTimeEventLoop
from aioreactive.types import AsyncObservable, AsyncObserver
from expression.core import pipe
# Module-level logger; DEBUG-level basicConfig makes the library's internal
# logging visible while the tests run.
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
@pytest.fixture()
def event_loop():
    """Yield a virtual-time event loop so time-based tests finish instantly."""
    # `pytest.yield_fixture` is deprecated and was removed in pytest 6.2;
    # plain `pytest.fixture` supports yield-style fixtures.
    loop = VirtualTimeEventLoop()
    yield loop
    loop.close()
@pytest.mark.asyncio
async def test_withlatestfrom_never_never():
    """Combining two never-emitting streams produces no notifications."""
    xs: AsyncObservable[int] = rx.never()
    ys: AsyncObservable[int] = rx.never()

    zs = pipe(xs, rx.with_latest_from(ys))

    obv: AsyncTestObserver[Tuple[int, int]] = AsyncTestObserver()
    await zs.subscribe_async(obv)
    await asyncio.sleep(1)

    # Previously this asserted on a local `result` list that nothing ever
    # appended to, which was vacuously true; assert on the observer's
    # recorded notifications instead.
    assert obv.values == []
@pytest.mark.asyncio
async def test_withlatestfrom_never_empty():
    """An empty source combined with a silent one completes with no values."""
    empty_src: AsyncObservable[int] = rx.empty()
    silent_src: AsyncObservable[int] = rx.never()
    stream = pipe(empty_src, rx.with_latest_from(silent_src))
    recorder: AsyncTestObserver[Tuple[int, int]] = AsyncTestObserver()
    # The stream closes without emitting, so rx.run ends in CancelledError.
    with pytest.raises(CancelledError):
        await rx.run(stream, recorder)
    assert recorder.values == [(0, OnCompleted)]
@pytest.mark.asyncio
async def test_withlatestfrom_done():
    """Each value pairs with the latest from the other stream and sums."""
    primary: AsyncTestSubject[int] = AsyncTestSubject()
    secondary: AsyncTestSubject[int] = AsyncTestSubject()
    stream = pipe(
        primary, rx.with_latest_from(secondary),
        rx.starmap(lambda x, y: x + y))
    recorder: AsyncTestObserver[int] = AsyncTestObserver()
    async with await stream.subscribe_async(recorder):
        await primary.asend(1)  # dropped: `secondary` has no value yet
        await secondary.asend(2)
        await primary.asend(3)  # pairs with latest value 2: 3 + 2 == 5
        await primary.aclose()
        await recorder
    assert recorder.values == [(0, OnNext(5)), (0, OnCompleted)]
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(test_withlatestfrom_done())
loop.close()
| true | true |
f7fa76fd1d4d983c072dd23016dc26b2ab036730 | 913 | py | Python | test/unit/channel/channel_test.py | jstg004/fabric-sdk-py | d90faf65605e4bfffa7f2f49db415dae3ecc4438 | [
"Apache-2.0"
] | 389 | 2016-09-18T11:50:10.000Z | 2022-03-29T21:45:40.000Z | test/unit/channel/channel_test.py | jstg004/fabric-sdk-py | d90faf65605e4bfffa7f2f49db415dae3ecc4438 | [
"Apache-2.0"
] | 112 | 2017-08-18T00:32:21.000Z | 2022-02-25T18:55:57.000Z | test/unit/channel/channel_test.py | jstg004/fabric-sdk-py | d90faf65605e4bfffa7f2f49db415dae3ecc4438 | [
"Apache-2.0"
] | 268 | 2016-10-12T02:56:58.000Z | 2022-03-30T09:50:54.000Z | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from hfc.fabric.channel.channel import Channel
from hfc.fabric.orderer import Orderer
class ChannelTest(unittest.TestCase):
    """Unit tests for hfc.fabric channel creation."""

    # @unittest.expectedFailure
    def test_create_channel(self):
        """A channel can be constructed and an orderer attached to it."""
        # TODO impl -- currently only exercises construction; no assertions
        # on the resulting channel state yet.
        new_channel = Channel('test-channel', None)
        new_channel.add_orderer(Orderer())
if __name__ == '__main__':
    # Run the test case directly when executed as a script.
    unittest.main()
| 30.433333 | 74 | 0.740416 |
import unittest
from hfc.fabric.channel.channel import Channel
from hfc.fabric.orderer import Orderer
class ChannelTest(unittest.TestCase):
    """Unit tests for hfc.fabric channel creation."""

    def test_create_channel(self):
        """A channel can be constructed and an orderer attached to it."""
        # No assertions yet -- only exercises construction without errors.
        channel = Channel('test-channel', None)
        channel.add_orderer(Orderer())
if __name__ == '__main__':
    # Run the test case directly when executed as a script.
    unittest.main()
| true | true |
f7fa777df5b464d3756faac74427e83bdedd37dd | 101,615 | py | Python | sdk/python/pulumi_kubernetes/apiextensions/v1/_inputs.py | axis-edge/pulumi-kubernetes | cb2803c54ec2131c04564f863dd3577284fa1650 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_kubernetes/apiextensions/v1/_inputs.py | axis-edge/pulumi-kubernetes | cb2803c54ec2131c04564f863dd3577284fa1650 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_kubernetes/apiextensions/v1/_inputs.py | axis-edge/pulumi-kubernetes | cb2803c54ec2131c04564f863dd3577284fa1650 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import meta as _meta
# Public input types exported by this generated module.
__all__ = [
    'CustomResourceColumnDefinitionArgs',
    'CustomResourceConversionArgs',
    'CustomResourceDefinitionConditionArgs',
    'CustomResourceDefinitionNamesArgs',
    'CustomResourceDefinitionSpecArgs',
    'CustomResourceDefinitionStatusArgs',
    'CustomResourceDefinitionVersionArgs',
    'CustomResourceDefinitionArgs',
    'CustomResourceSubresourceScaleArgs',
    'CustomResourceSubresourcesArgs',
    'CustomResourceValidationArgs',
    'ExternalDocumentationArgs',
    'JSONSchemaPropsArgs',
    'ServiceReferenceArgs',
    'ValidationRuleArgs',
    'WebhookClientConfigArgs',
    'WebhookConversionArgs',
]
@pulumi.input_type
class CustomResourceColumnDefinitionArgs:
    # Generated code (see file header): values are stored with pulumi.set and
    # read back with pulumi.get; the property/setter pairs below are
    # mechanical accessors over those stored fields.
    def __init__(__self__, *,
                 json_path: pulumi.Input[str],
                 name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 format: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None):
        """
        CustomResourceColumnDefinition specifies a column for server side printing.
        :param pulumi.Input[str] json_path: jsonPath is a simple JSON path (i.e. with array notation) which is evaluated against each custom resource to produce the value for this column.
        :param pulumi.Input[str] name: name is a human readable name for the column.
        :param pulumi.Input[str] type: type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for details.
        :param pulumi.Input[str] description: description is a human readable description of this column.
        :param pulumi.Input[str] format: format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for details.
        :param pulumi.Input[int] priority: priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a priority greater than 0.
        """
        pulumi.set(__self__, "json_path", json_path)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "type", type)
        # Optional fields are only stored when provided.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if format is not None:
            pulumi.set(__self__, "format", format)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)

    @property
    @pulumi.getter(name="jsonPath")
    def json_path(self) -> pulumi.Input[str]:
        """
        jsonPath is a simple JSON path (i.e. with array notation) which is evaluated against each custom resource to produce the value for this column.
        """
        return pulumi.get(self, "json_path")

    @json_path.setter
    def json_path(self, value: pulumi.Input[str]):
        pulumi.set(self, "json_path", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        name is a human readable name for the column.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for details.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        description is a human readable description of this column.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def format(self) -> Optional[pulumi.Input[str]]:
        """
        format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for details.
        """
        return pulumi.get(self, "format")

    @format.setter
    def format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "format", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a priority greater than 0.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
@pulumi.input_type
class CustomResourceConversionArgs:
    # Generated code (see file header): values are stored with pulumi.set and
    # read back with pulumi.get; the property/setter pairs below are
    # mechanical accessors over those stored fields.
    def __init__(__self__, *,
                 strategy: pulumi.Input[str],
                 webhook: Optional[pulumi.Input['WebhookConversionArgs']] = None):
        """
        CustomResourceConversion describes how to convert different versions of a CR.
        :param pulumi.Input[str] strategy: strategy specifies how custom resources are converted between versions. Allowed values are: - `None`: The converter only change the apiVersion and would not touch any other field in the custom resource. - `Webhook`: API Server will call to an external webhook to do the conversion. Additional information
                 is needed for this option. This requires spec.preserveUnknownFields to be false, and spec.conversion.webhook to be set.
        :param pulumi.Input['WebhookConversionArgs'] webhook: webhook describes how to call the conversion webhook. Required when `strategy` is set to `Webhook`.
        """
        pulumi.set(__self__, "strategy", strategy)
        # Optional field, only stored when provided.
        if webhook is not None:
            pulumi.set(__self__, "webhook", webhook)

    @property
    @pulumi.getter
    def strategy(self) -> pulumi.Input[str]:
        """
        strategy specifies how custom resources are converted between versions. Allowed values are: - `None`: The converter only change the apiVersion and would not touch any other field in the custom resource. - `Webhook`: API Server will call to an external webhook to do the conversion. Additional information
          is needed for this option. This requires spec.preserveUnknownFields to be false, and spec.conversion.webhook to be set.
        """
        return pulumi.get(self, "strategy")

    @strategy.setter
    def strategy(self, value: pulumi.Input[str]):
        pulumi.set(self, "strategy", value)

    @property
    @pulumi.getter
    def webhook(self) -> Optional[pulumi.Input['WebhookConversionArgs']]:
        """
        webhook describes how to call the conversion webhook. Required when `strategy` is set to `Webhook`.
        """
        return pulumi.get(self, "webhook")

    @webhook.setter
    def webhook(self, value: Optional[pulumi.Input['WebhookConversionArgs']]):
        pulumi.set(self, "webhook", value)
@pulumi.input_type
class CustomResourceDefinitionConditionArgs:
    # Generated code (see file header): values are stored with pulumi.set and
    # read back with pulumi.get; the property/setter pairs below are
    # mechanical accessors over those stored fields.
    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        CustomResourceDefinitionCondition contains details for the current condition of this pod.
        :param pulumi.Input[str] status: status is the status of the condition. Can be True, False, Unknown.
        :param pulumi.Input[str] type: type is the type of the condition. Types include Established, NamesAccepted and Terminating.
        :param pulumi.Input[str] last_transition_time: lastTransitionTime last time the condition transitioned from one status to another.
        :param pulumi.Input[str] message: message is a human-readable message indicating details about last transition.
        :param pulumi.Input[str] reason: reason is a unique, one-word, CamelCase reason for the condition's last transition.
        """
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        # Optional fields are only stored when provided.
        if last_transition_time is not None:
            pulumi.set(__self__, "last_transition_time", last_transition_time)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if reason is not None:
            pulumi.set(__self__, "reason", reason)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        """
        status is the status of the condition. Can be True, False, Unknown.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: pulumi.Input[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        type is the type of the condition. Types include Established, NamesAccepted and Terminating.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        """
        lastTransitionTime last time the condition transitioned from one status to another.
        """
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """
        message is a human-readable message indicating details about last transition.
        """
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        """
        reason is a unique, one-word, CamelCase reason for the condition's last transition.
        """
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
@pulumi.input_type
class CustomResourceDefinitionNamesArgs:
    # Generated code (see file header): values are stored with pulumi.set and
    # read back with pulumi.get; the property/setter pairs below are
    # mechanical accessors over those stored fields.
    def __init__(__self__, *,
                 kind: pulumi.Input[str],
                 plural: pulumi.Input[str],
                 categories: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 list_kind: Optional[pulumi.Input[str]] = None,
                 short_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 singular: Optional[pulumi.Input[str]] = None):
        """
        CustomResourceDefinitionNames indicates the names to serve this CustomResourceDefinition
        :param pulumi.Input[str] kind: kind is the serialized kind of the resource. It is normally CamelCase and singular. Custom resource instances will use this value as the `kind` attribute in API calls.
        :param pulumi.Input[str] plural: plural is the plural name of the resource to serve. The custom resources are served under `/apis/<group>/<version>/.../<plural>`. Must match the name of the CustomResourceDefinition (in the form `<names.plural>.<group>`). Must be all lowercase.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] categories: categories is a list of grouped resources this custom resource belongs to (e.g. 'all'). This is published in API discovery documents, and used by clients to support invocations like `kubectl get all`.
        :param pulumi.Input[str] list_kind: listKind is the serialized kind of the list for this resource. Defaults to "`kind`List".
        :param pulumi.Input[Sequence[pulumi.Input[str]]] short_names: shortNames are short names for the resource, exposed in API discovery documents, and used by clients to support invocations like `kubectl get <shortname>`. It must be all lowercase.
        :param pulumi.Input[str] singular: singular is the singular name of the resource. It must be all lowercase. Defaults to lowercased `kind`.
        """
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "plural", plural)
        # Optional fields are only stored when provided.
        if categories is not None:
            pulumi.set(__self__, "categories", categories)
        if list_kind is not None:
            pulumi.set(__self__, "list_kind", list_kind)
        if short_names is not None:
            pulumi.set(__self__, "short_names", short_names)
        if singular is not None:
            pulumi.set(__self__, "singular", singular)

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Input[str]:
        """
        kind is the serialized kind of the resource. It is normally CamelCase and singular. Custom resource instances will use this value as the `kind` attribute in API calls.
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: pulumi.Input[str]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def plural(self) -> pulumi.Input[str]:
        """
        plural is the plural name of the resource to serve. The custom resources are served under `/apis/<group>/<version>/.../<plural>`. Must match the name of the CustomResourceDefinition (in the form `<names.plural>.<group>`). Must be all lowercase.
        """
        return pulumi.get(self, "plural")

    @plural.setter
    def plural(self, value: pulumi.Input[str]):
        pulumi.set(self, "plural", value)

    @property
    @pulumi.getter
    def categories(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        categories is a list of grouped resources this custom resource belongs to (e.g. 'all'). This is published in API discovery documents, and used by clients to support invocations like `kubectl get all`.
        """
        return pulumi.get(self, "categories")

    @categories.setter
    def categories(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "categories", value)

    @property
    @pulumi.getter(name="listKind")
    def list_kind(self) -> Optional[pulumi.Input[str]]:
        """
        listKind is the serialized kind of the list for this resource. Defaults to "`kind`List".
        """
        return pulumi.get(self, "list_kind")

    @list_kind.setter
    def list_kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "list_kind", value)

    @property
    @pulumi.getter(name="shortNames")
    def short_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        shortNames are short names for the resource, exposed in API discovery documents, and used by clients to support invocations like `kubectl get <shortname>`. It must be all lowercase.
        """
        return pulumi.get(self, "short_names")

    @short_names.setter
    def short_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "short_names", value)

    @property
    @pulumi.getter
    def singular(self) -> Optional[pulumi.Input[str]]:
        """
        singular is the singular name of the resource. It must be all lowercase. Defaults to lowercased `kind`.
        """
        return pulumi.get(self, "singular")

    @singular.setter
    def singular(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "singular", value)
@pulumi.input_type
class CustomResourceDefinitionSpecArgs:
    # Generated code (see file header): values are stored with pulumi.set and
    # read back with pulumi.get; the property/setter pairs below are
    # mechanical accessors over those stored fields.
    def __init__(__self__, *,
                 group: pulumi.Input[str],
                 names: pulumi.Input['CustomResourceDefinitionNamesArgs'],
                 scope: pulumi.Input[str],
                 versions: pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]],
                 conversion: Optional[pulumi.Input['CustomResourceConversionArgs']] = None,
                 preserve_unknown_fields: Optional[pulumi.Input[bool]] = None):
        """
        CustomResourceDefinitionSpec describes how a user wants their resource to appear
        :param pulumi.Input[str] group: group is the API group of the defined custom resource. The custom resources are served under `/apis/<group>/...`. Must match the name of the CustomResourceDefinition (in the form `<names.plural>.<group>`).
        :param pulumi.Input['CustomResourceDefinitionNamesArgs'] names: names specify the resource and kind names for the custom resource.
        :param pulumi.Input[str] scope: scope indicates whether the defined custom resource is cluster- or namespace-scoped. Allowed values are `Cluster` and `Namespaced`.
        :param pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]] versions: versions is the list of all API versions of the defined custom resource. Version names are used to compute the order in which served versions are listed in API discovery. If the version string is "kube-like", it will sort above non "kube-like" version strings, which are ordered lexicographically. "Kube-like" versions start with a "v", then are followed by a number (the major version), then optionally the string "alpha" or "beta" and another number (the minor version). These are sorted first by GA > beta > alpha (where GA is a version with no suffix such as beta or alpha), and then by comparing major version, then minor version. An example sorted list of versions: v10, v2, v1, v11beta2, v10beta3, v3beta1, v12alpha1, v11alpha2, foo1, foo10.
        :param pulumi.Input['CustomResourceConversionArgs'] conversion: conversion defines conversion settings for the CRD.
        :param pulumi.Input[bool] preserve_unknown_fields: preserveUnknownFields indicates that object fields which are not specified in the OpenAPI schema should be preserved when persisting to storage. apiVersion, kind, metadata and known fields inside metadata are always preserved. This field is deprecated in favor of setting `x-preserve-unknown-fields` to true in `spec.versions[*].schema.openAPIV3Schema`. See https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/#pruning-versus-preserving-unknown-fields for details.
        """
        pulumi.set(__self__, "group", group)
        pulumi.set(__self__, "names", names)
        pulumi.set(__self__, "scope", scope)
        pulumi.set(__self__, "versions", versions)
        # Optional fields are only stored when provided.
        if conversion is not None:
            pulumi.set(__self__, "conversion", conversion)
        if preserve_unknown_fields is not None:
            pulumi.set(__self__, "preserve_unknown_fields", preserve_unknown_fields)

    @property
    @pulumi.getter
    def group(self) -> pulumi.Input[str]:
        """
        group is the API group of the defined custom resource. The custom resources are served under `/apis/<group>/...`. Must match the name of the CustomResourceDefinition (in the form `<names.plural>.<group>`).
        """
        return pulumi.get(self, "group")

    @group.setter
    def group(self, value: pulumi.Input[str]):
        pulumi.set(self, "group", value)

    @property
    @pulumi.getter
    def names(self) -> pulumi.Input['CustomResourceDefinitionNamesArgs']:
        """
        names specify the resource and kind names for the custom resource.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: pulumi.Input['CustomResourceDefinitionNamesArgs']):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def scope(self) -> pulumi.Input[str]:
        """
        scope indicates whether the defined custom resource is cluster- or namespace-scoped. Allowed values are `Cluster` and `Namespaced`.
        """
        return pulumi.get(self, "scope")

    @scope.setter
    def scope(self, value: pulumi.Input[str]):
        pulumi.set(self, "scope", value)

    @property
    @pulumi.getter
    def versions(self) -> pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]]:
        """
        versions is the list of all API versions of the defined custom resource. Version names are used to compute the order in which served versions are listed in API discovery. If the version string is "kube-like", it will sort above non "kube-like" version strings, which are ordered lexicographically. "Kube-like" versions start with a "v", then are followed by a number (the major version), then optionally the string "alpha" or "beta" and another number (the minor version). These are sorted first by GA > beta > alpha (where GA is a version with no suffix such as beta or alpha), and then by comparing major version, then minor version. An example sorted list of versions: v10, v2, v1, v11beta2, v10beta3, v3beta1, v12alpha1, v11alpha2, foo1, foo10.
        """
        return pulumi.get(self, "versions")

    @versions.setter
    def versions(self, value: pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]]):
        pulumi.set(self, "versions", value)

    @property
    @pulumi.getter
    def conversion(self) -> Optional[pulumi.Input['CustomResourceConversionArgs']]:
        """
        conversion defines conversion settings for the CRD.
        """
        return pulumi.get(self, "conversion")

    @conversion.setter
    def conversion(self, value: Optional[pulumi.Input['CustomResourceConversionArgs']]):
        pulumi.set(self, "conversion", value)

    @property
    @pulumi.getter(name="preserveUnknownFields")
    def preserve_unknown_fields(self) -> Optional[pulumi.Input[bool]]:
        """
        preserveUnknownFields indicates that object fields which are not specified in the OpenAPI schema should be preserved when persisting to storage. apiVersion, kind, metadata and known fields inside metadata are always preserved. This field is deprecated in favor of setting `x-preserve-unknown-fields` to true in `spec.versions[*].schema.openAPIV3Schema`. See https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/#pruning-versus-preserving-unknown-fields for details.
        """
        return pulumi.get(self, "preserve_unknown_fields")

    @preserve_unknown_fields.setter
    def preserve_unknown_fields(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "preserve_unknown_fields", value)
@pulumi.input_type
class CustomResourceDefinitionStatusArgs:
    def __init__(__self__, *,
                 accepted_names: pulumi.Input['CustomResourceDefinitionNamesArgs'],
                 stored_versions: pulumi.Input[Sequence[pulumi.Input[str]]],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]]] = None):
        """
        CustomResourceDefinitionStatus indicates the state of the CustomResourceDefinition
        :param pulumi.Input['CustomResourceDefinitionNamesArgs'] accepted_names: acceptedNames are the names that are actually being used to serve discovery. They may be different than the names in spec.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] stored_versions: storedVersions lists all versions of CustomResources that were ever persisted. Tracking these versions allows a migration path for stored versions in etcd. The field is mutable so a migration controller can finish a migration to another version (ensuring no old objects are left in storage), and then remove the rest of the versions from this list. Versions may not be removed from `spec.versions` while they exist in this list.
        :param pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]] conditions: conditions indicate state for particular aspects of a CustomResourceDefinition
        """
        # Required fields are always recorded in the pulumi args store.
        for key, val in (("accepted_names", accepted_names), ("stored_versions", stored_versions)):
            pulumi.set(__self__, key, val)
        # The optional field is only recorded when the caller supplied it.
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)
    @property
    @pulumi.getter(name="acceptedNames")
    def accepted_names(self) -> pulumi.Input['CustomResourceDefinitionNamesArgs']:
        """
        acceptedNames are the names that are actually being used to serve discovery. They may be different than the names in spec.
        """
        stored = pulumi.get(self, "accepted_names")
        return stored
    @accepted_names.setter
    def accepted_names(self, value: pulumi.Input['CustomResourceDefinitionNamesArgs']):
        pulumi.set(self, "accepted_names", value)
    @property
    @pulumi.getter(name="storedVersions")
    def stored_versions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        storedVersions lists all versions of CustomResources that were ever persisted. Tracking these versions allows a migration path for stored versions in etcd. The field is mutable so a migration controller can finish a migration to another version (ensuring no old objects are left in storage), and then remove the rest of the versions from this list. Versions may not be removed from `spec.versions` while they exist in this list.
        """
        stored = pulumi.get(self, "stored_versions")
        return stored
    @stored_versions.setter
    def stored_versions(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "stored_versions", value)
    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]]]:
        """
        conditions indicate state for particular aspects of a CustomResourceDefinition
        """
        stored = pulumi.get(self, "conditions")
        return stored
    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class CustomResourceDefinitionVersionArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 served: pulumi.Input[bool],
                 storage: pulumi.Input[bool],
                 additional_printer_columns: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]]] = None,
                 deprecated: Optional[pulumi.Input[bool]] = None,
                 deprecation_warning: Optional[pulumi.Input[str]] = None,
                 schema: Optional[pulumi.Input['CustomResourceValidationArgs']] = None,
                 subresources: Optional[pulumi.Input['CustomResourceSubresourcesArgs']] = None):
        """
        CustomResourceDefinitionVersion describes a version for CRD.
        :param pulumi.Input[str] name: name is the version name, e.g. “v1”, “v2beta1”, etc. The custom resources are served under this version at `/apis/<group>/<version>/...` if `served` is true.
        :param pulumi.Input[bool] served: served is a flag enabling/disabling this version from being served via REST APIs
        :param pulumi.Input[bool] storage: storage indicates this version should be used when persisting custom resources to storage. There must be exactly one version with storage=true.
        :param pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]] additional_printer_columns: additionalPrinterColumns specifies additional columns returned in Table output. See https://kubernetes.io/docs/reference/using-api/api-concepts/#receiving-resources-as-tables for details. If no columns are specified, a single column displaying the age of the custom resource is used.
        :param pulumi.Input[bool] deprecated: deprecated indicates this version of the custom resource API is deprecated. When set to true, API requests to this version receive a warning header in the server response. Defaults to false.
        :param pulumi.Input[str] deprecation_warning: deprecationWarning overrides the default warning returned to API clients. May only be set when `deprecated` is true. The default warning indicates this version is deprecated and recommends use of the newest served version of equal or greater stability, if one exists.
        :param pulumi.Input['CustomResourceValidationArgs'] schema: schema describes the schema used for validation, pruning, and defaulting of this version of the custom resource.
        :param pulumi.Input['CustomResourceSubresourcesArgs'] subresources: subresources specify what subresources this version of the defined custom resource have.
        """
        # Required fields are always recorded in the pulumi args store.
        for key, val in (("name", name), ("served", served), ("storage", storage)):
            pulumi.set(__self__, key, val)
        # Optional fields are only recorded when the caller supplied them.
        maybe = {
            "additional_printer_columns": additional_printer_columns,
            "deprecated": deprecated,
            "deprecation_warning": deprecation_warning,
            "schema": schema,
            "subresources": subresources,
        }
        for key, val in maybe.items():
            if val is not None:
                pulumi.set(__self__, key, val)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        name is the version name, e.g. “v1”, “v2beta1”, etc. The custom resources are served under this version at `/apis/<group>/<version>/...` if `served` is true.
        """
        stored = pulumi.get(self, "name")
        return stored
    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def served(self) -> pulumi.Input[bool]:
        """
        served is a flag enabling/disabling this version from being served via REST APIs
        """
        stored = pulumi.get(self, "served")
        return stored
    @served.setter
    def served(self, value: pulumi.Input[bool]):
        pulumi.set(self, "served", value)
    @property
    @pulumi.getter
    def storage(self) -> pulumi.Input[bool]:
        """
        storage indicates this version should be used when persisting custom resources to storage. There must be exactly one version with storage=true.
        """
        stored = pulumi.get(self, "storage")
        return stored
    @storage.setter
    def storage(self, value: pulumi.Input[bool]):
        pulumi.set(self, "storage", value)
    @property
    @pulumi.getter(name="additionalPrinterColumns")
    def additional_printer_columns(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]]]:
        """
        additionalPrinterColumns specifies additional columns returned in Table output. See https://kubernetes.io/docs/reference/using-api/api-concepts/#receiving-resources-as-tables for details. If no columns are specified, a single column displaying the age of the custom resource is used.
        """
        stored = pulumi.get(self, "additional_printer_columns")
        return stored
    @additional_printer_columns.setter
    def additional_printer_columns(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]]]):
        pulumi.set(self, "additional_printer_columns", value)
    @property
    @pulumi.getter
    def deprecated(self) -> Optional[pulumi.Input[bool]]:
        """
        deprecated indicates this version of the custom resource API is deprecated. When set to true, API requests to this version receive a warning header in the server response. Defaults to false.
        """
        stored = pulumi.get(self, "deprecated")
        return stored
    @deprecated.setter
    def deprecated(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "deprecated", value)
    @property
    @pulumi.getter(name="deprecationWarning")
    def deprecation_warning(self) -> Optional[pulumi.Input[str]]:
        """
        deprecationWarning overrides the default warning returned to API clients. May only be set when `deprecated` is true. The default warning indicates this version is deprecated and recommends use of the newest served version of equal or greater stability, if one exists.
        """
        stored = pulumi.get(self, "deprecation_warning")
        return stored
    @deprecation_warning.setter
    def deprecation_warning(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deprecation_warning", value)
    @property
    @pulumi.getter
    def schema(self) -> Optional[pulumi.Input['CustomResourceValidationArgs']]:
        """
        schema describes the schema used for validation, pruning, and defaulting of this version of the custom resource.
        """
        stored = pulumi.get(self, "schema")
        return stored
    @schema.setter
    def schema(self, value: Optional[pulumi.Input['CustomResourceValidationArgs']]):
        pulumi.set(self, "schema", value)
    @property
    @pulumi.getter
    def subresources(self) -> Optional[pulumi.Input['CustomResourceSubresourcesArgs']]:
        """
        subresources specify what subresources this version of the defined custom resource have.
        """
        stored = pulumi.get(self, "subresources")
        return stored
    @subresources.setter
    def subresources(self, value: Optional[pulumi.Input['CustomResourceSubresourcesArgs']]):
        pulumi.set(self, "subresources", value)
@pulumi.input_type
class CustomResourceDefinitionArgs:
    def __init__(__self__, *,
                 spec: pulumi.Input['CustomResourceDefinitionSpecArgs'],
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 status: Optional[pulumi.Input['CustomResourceDefinitionStatusArgs']] = None):
        """
        CustomResourceDefinition represents a resource that should be exposed on the API server. Its name MUST be in the format <.spec.name>.<.spec.group>.
        :param pulumi.Input['CustomResourceDefinitionSpecArgs'] spec: spec describes how the user wants the resources to appear
        :param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        :param pulumi.Input['CustomResourceDefinitionStatusArgs'] status: status indicates the actual state of the CustomResourceDefinition
        """
        pulumi.set(__self__, "spec", spec)
        # apiVersion and kind are fixed for this resource type: when a caller passes
        # any value, it is discarded and the canonical constant is stored instead.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'apiextensions.k8s.io/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'CustomResourceDefinition')
        # Remaining optional fields are only recorded when supplied.
        for key, val in (("metadata", metadata), ("status", status)):
            if val is not None:
                pulumi.set(__self__, key, val)
    @property
    @pulumi.getter
    def spec(self) -> pulumi.Input['CustomResourceDefinitionSpecArgs']:
        """
        spec describes how the user wants the resources to appear
        """
        stored = pulumi.get(self, "spec")
        return stored
    @spec.setter
    def spec(self, value: pulumi.Input['CustomResourceDefinitionSpecArgs']):
        pulumi.set(self, "spec", value)
    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        """
        stored = pulumi.get(self, "api_version")
        return stored
    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)
    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        """
        stored = pulumi.get(self, "kind")
        return stored
    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)
    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """
        Standard object's metadata More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        """
        stored = pulumi.get(self, "metadata")
        return stored
    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['CustomResourceDefinitionStatusArgs']]:
        """
        status indicates the actual state of the CustomResourceDefinition
        """
        stored = pulumi.get(self, "status")
        return stored
    @status.setter
    def status(self, value: Optional[pulumi.Input['CustomResourceDefinitionStatusArgs']]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class CustomResourceSubresourceScaleArgs:
    def __init__(__self__, *,
                 spec_replicas_path: pulumi.Input[str],
                 status_replicas_path: pulumi.Input[str],
                 label_selector_path: Optional[pulumi.Input[str]] = None):
        """
        CustomResourceSubresourceScale defines how to serve the scale subresource for CustomResources.
        :param pulumi.Input[str] spec_replicas_path: specReplicasPath defines the JSON path inside of a custom resource that corresponds to Scale `spec.replicas`. Only JSON paths without the array notation are allowed. Must be a JSON Path under `.spec`. If there is no value under the given path in the custom resource, the `/scale` subresource will return an error on GET.
        :param pulumi.Input[str] status_replicas_path: statusReplicasPath defines the JSON path inside of a custom resource that corresponds to Scale `status.replicas`. Only JSON paths without the array notation are allowed. Must be a JSON Path under `.status`. If there is no value under the given path in the custom resource, the `status.replicas` value in the `/scale` subresource will default to 0.
        :param pulumi.Input[str] label_selector_path: labelSelectorPath defines the JSON path inside of a custom resource that corresponds to Scale `status.selector`. Only JSON paths without the array notation are allowed. Must be a JSON Path under `.status` or `.spec`. Must be set to work with HorizontalPodAutoscaler. The field pointed by this JSON path must be a string field (not a complex selector struct) which contains a serialized label selector in string form. More info: https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions#scale-subresource If there is no value under the given path in the custom resource, the `status.selector` value in the `/scale` subresource will default to the empty string.
        """
        # Required fields are always recorded in the pulumi args store.
        for key, val in (("spec_replicas_path", spec_replicas_path),
                         ("status_replicas_path", status_replicas_path)):
            pulumi.set(__self__, key, val)
        # The optional selector path is only recorded when supplied.
        if label_selector_path is not None:
            pulumi.set(__self__, "label_selector_path", label_selector_path)
    @property
    @pulumi.getter(name="specReplicasPath")
    def spec_replicas_path(self) -> pulumi.Input[str]:
        """
        specReplicasPath defines the JSON path inside of a custom resource that corresponds to Scale `spec.replicas`. Only JSON paths without the array notation are allowed. Must be a JSON Path under `.spec`. If there is no value under the given path in the custom resource, the `/scale` subresource will return an error on GET.
        """
        stored = pulumi.get(self, "spec_replicas_path")
        return stored
    @spec_replicas_path.setter
    def spec_replicas_path(self, value: pulumi.Input[str]):
        pulumi.set(self, "spec_replicas_path", value)
    @property
    @pulumi.getter(name="statusReplicasPath")
    def status_replicas_path(self) -> pulumi.Input[str]:
        """
        statusReplicasPath defines the JSON path inside of a custom resource that corresponds to Scale `status.replicas`. Only JSON paths without the array notation are allowed. Must be a JSON Path under `.status`. If there is no value under the given path in the custom resource, the `status.replicas` value in the `/scale` subresource will default to 0.
        """
        stored = pulumi.get(self, "status_replicas_path")
        return stored
    @status_replicas_path.setter
    def status_replicas_path(self, value: pulumi.Input[str]):
        pulumi.set(self, "status_replicas_path", value)
    @property
    @pulumi.getter(name="labelSelectorPath")
    def label_selector_path(self) -> Optional[pulumi.Input[str]]:
        """
        labelSelectorPath defines the JSON path inside of a custom resource that corresponds to Scale `status.selector`. Only JSON paths without the array notation are allowed. Must be a JSON Path under `.status` or `.spec`. Must be set to work with HorizontalPodAutoscaler. The field pointed by this JSON path must be a string field (not a complex selector struct) which contains a serialized label selector in string form. More info: https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions#scale-subresource If there is no value under the given path in the custom resource, the `status.selector` value in the `/scale` subresource will default to the empty string.
        """
        stored = pulumi.get(self, "label_selector_path")
        return stored
    @label_selector_path.setter
    def label_selector_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "label_selector_path", value)
@pulumi.input_type
class CustomResourceSubresourcesArgs:
    def __init__(__self__, *,
                 scale: Optional[pulumi.Input['CustomResourceSubresourceScaleArgs']] = None,
                 status: Optional[Any] = None):
        """
        CustomResourceSubresources defines the status and scale subresources for CustomResources.
        :param pulumi.Input['CustomResourceSubresourceScaleArgs'] scale: scale indicates the custom resource should serve a `/scale` subresource that returns an `autoscaling/v1` Scale object.
        :param Any status: status indicates the custom resource should serve a `/status` subresource. When enabled: 1. requests to the custom resource primary endpoint ignore changes to the `status` stanza of the object. 2. requests to the custom resource `/status` subresource ignore changes to anything other than the `status` stanza of the object.
        """
        # Both fields are optional; only record the ones the caller supplied.
        for key, val in (("scale", scale), ("status", status)):
            if val is not None:
                pulumi.set(__self__, key, val)
    @property
    @pulumi.getter
    def scale(self) -> Optional[pulumi.Input['CustomResourceSubresourceScaleArgs']]:
        """
        scale indicates the custom resource should serve a `/scale` subresource that returns an `autoscaling/v1` Scale object.
        """
        stored = pulumi.get(self, "scale")
        return stored
    @scale.setter
    def scale(self, value: Optional[pulumi.Input['CustomResourceSubresourceScaleArgs']]):
        pulumi.set(self, "scale", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[Any]:
        """
        status indicates the custom resource should serve a `/status` subresource. When enabled: 1. requests to the custom resource primary endpoint ignore changes to the `status` stanza of the object. 2. requests to the custom resource `/status` subresource ignore changes to anything other than the `status` stanza of the object.
        """
        stored = pulumi.get(self, "status")
        return stored
    @status.setter
    def status(self, value: Optional[Any]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class CustomResourceValidationArgs:
    def __init__(__self__, *,
                 open_apiv3_schema: Optional[pulumi.Input['JSONSchemaPropsArgs']] = None):
        """
        CustomResourceValidation is a list of validation methods for CustomResources.
        :param pulumi.Input['JSONSchemaPropsArgs'] open_apiv3_schema: openAPIV3Schema is the OpenAPI v3 schema to use for validation and pruning.
        """
        # The schema is optional; only record it when the caller supplied one.
        schema = open_apiv3_schema
        if schema is not None:
            pulumi.set(__self__, "open_apiv3_schema", schema)
    @property
    @pulumi.getter(name="openAPIV3Schema")
    def open_apiv3_schema(self) -> Optional[pulumi.Input['JSONSchemaPropsArgs']]:
        """
        openAPIV3Schema is the OpenAPI v3 schema to use for validation and pruning.
        """
        stored = pulumi.get(self, "open_apiv3_schema")
        return stored
    @open_apiv3_schema.setter
    def open_apiv3_schema(self, value: Optional[pulumi.Input['JSONSchemaPropsArgs']]):
        pulumi.set(self, "open_apiv3_schema", value)
@pulumi.input_type
class ExternalDocumentationArgs:
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        ExternalDocumentation allows referencing an external resource for extended documentation.
        """
        # Both fields are optional; only record the ones the caller supplied.
        for key, val in (("description", description), ("url", url)):
            if val is not None:
                pulumi.set(__self__, key, val)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        description of the external documentation resource.
        """
        stored = pulumi.get(self, "description")
        return stored
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        url of the external documentation resource.
        """
        stored = pulumi.get(self, "url")
        return stored
    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class JSONSchemaPropsArgs:
def __init__(__self__, *,
_ref: Optional[pulumi.Input[str]] = None,
_schema: Optional[pulumi.Input[str]] = None,
additional_items: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]] = None,
additional_properties: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]] = None,
all_of: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]] = None,
any_of: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]] = None,
default: Optional[Any] = None,
definitions: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]] = None,
dependencies: Optional[pulumi.Input[Mapping[str, pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[pulumi.Input[str]]]]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enum: Optional[pulumi.Input[Sequence[Any]]] = None,
example: Optional[Any] = None,
exclusive_maximum: Optional[pulumi.Input[bool]] = None,
exclusive_minimum: Optional[pulumi.Input[bool]] = None,
external_docs: Optional[pulumi.Input['ExternalDocumentationArgs']] = None,
format: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
items: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[Any]]]] = None,
max_items: Optional[pulumi.Input[int]] = None,
max_length: Optional[pulumi.Input[int]] = None,
max_properties: Optional[pulumi.Input[int]] = None,
maximum: Optional[pulumi.Input[float]] = None,
min_items: Optional[pulumi.Input[int]] = None,
min_length: Optional[pulumi.Input[int]] = None,
min_properties: Optional[pulumi.Input[int]] = None,
minimum: Optional[pulumi.Input[float]] = None,
multiple_of: Optional[pulumi.Input[float]] = None,
not_: Optional[pulumi.Input['JSONSchemaPropsArgs']] = None,
nullable: Optional[pulumi.Input[bool]] = None,
one_of: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]] = None,
pattern: Optional[pulumi.Input[str]] = None,
pattern_properties: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]] = None,
properties: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]] = None,
required: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
title: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
unique_items: Optional[pulumi.Input[bool]] = None,
x_kubernetes_embedded_resource: Optional[pulumi.Input[bool]] = None,
x_kubernetes_int_or_string: Optional[pulumi.Input[bool]] = None,
x_kubernetes_list_map_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
x_kubernetes_list_type: Optional[pulumi.Input[str]] = None,
x_kubernetes_map_type: Optional[pulumi.Input[str]] = None,
x_kubernetes_preserve_unknown_fields: Optional[pulumi.Input[bool]] = None,
x_kubernetes_validations: Optional[pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]]] = None):
"""
JSONSchemaProps is a JSON-Schema following Specification Draft 4 (http://json-schema.org/).
:param Any default: default is a default value for undefined object fields. Defaulting is a beta feature under the CustomResourceDefaulting feature gate. Defaulting requires spec.preserveUnknownFields to be false.
:param pulumi.Input[str] format: format is an OpenAPI v3 format string. Unknown formats are ignored. The following formats are validated:
- bsonobjectid: a bson object ID, i.e. a 24 characters hex string - uri: an URI as parsed by Golang net/url.ParseRequestURI - email: an email address as parsed by Golang net/mail.ParseAddress - hostname: a valid representation for an Internet host name, as defined by RFC 1034, section 3.1 [RFC1034]. - ipv4: an IPv4 IP as parsed by Golang net.ParseIP - ipv6: an IPv6 IP as parsed by Golang net.ParseIP - cidr: a CIDR as parsed by Golang net.ParseCIDR - mac: a MAC address as parsed by Golang net.ParseMAC - uuid: an UUID that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{12}$ - uuid3: an UUID3 that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?3[0-9a-f]{3}-?[0-9a-f]{4}-?[0-9a-f]{12}$ - uuid4: an UUID4 that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?4[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$ - uuid5: an UUID5 that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?5[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$ - isbn: an ISBN10 or ISBN13 number string like "0321751043" or "978-0321751041" - isbn10: an ISBN10 number string like "0321751043" - isbn13: an ISBN13 number string like "978-0321751041" - creditcard: a credit card number defined by the regex ^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11})$ with any non digit characters mixed in - ssn: a U.S. 
social security number following the regex ^\d{3}[- ]?\d{2}[- ]?\d{4}$ - hexcolor: an hexadecimal color code like "#FFFFFF: following the regex ^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$ - rgbcolor: an RGB color code like rgb like "rgb(255,255,2559" - byte: base64 encoded binary data - password: any kind of string - date: a date string like "2006-01-02" as defined by full-date in RFC3339 - duration: a duration string like "22 ns" as parsed by Golang time.ParseDuration or compatible with Scala duration format - datetime: a date time string like "2014-12-15T19:30:20.000Z" as defined by date-time in RFC3339.
:param pulumi.Input[bool] x_kubernetes_embedded_resource: x-kubernetes-embedded-resource defines that the value is an embedded Kubernetes runtime.Object, with TypeMeta and ObjectMeta. The type must be object. It is allowed to further restrict the embedded object. kind, apiVersion and metadata are validated automatically. x-kubernetes-preserve-unknown-fields is allowed to be true, but does not have to be if the object is fully specified (up to kind, apiVersion, metadata).
:param pulumi.Input[bool] x_kubernetes_int_or_string: x-kubernetes-int-or-string specifies that this value is either an integer or a string. If this is true, an empty type is allowed and type as child of anyOf is permitted if following one of the following patterns:
1) anyOf:
- type: integer
- type: string
2) allOf:
- anyOf:
- type: integer
- type: string
- ... zero or more
:param pulumi.Input[Sequence[pulumi.Input[str]]] x_kubernetes_list_map_keys: x-kubernetes-list-map-keys annotates an array with the x-kubernetes-list-type `map` by specifying the keys used as the index of the map.
This tag MUST only be used on lists that have the "x-kubernetes-list-type" extension set to "map". Also, the values specified for this attribute must be a scalar typed field of the child structure (no nesting is supported).
The properties specified must either be required or have a default value, to ensure those properties are present for all list items.
:param pulumi.Input[str] x_kubernetes_list_type: x-kubernetes-list-type annotates an array to further describe its topology. This extension must only be used on lists and may have 3 possible values:
1) `atomic`: the list is treated as a single entity, like a scalar.
Atomic lists will be entirely replaced when updated. This extension
may be used on any type of list (struct, scalar, ...).
2) `set`:
Sets are lists that must not have multiple items with the same value. Each
value must be a scalar, an object with x-kubernetes-map-type `atomic` or an
array with x-kubernetes-list-type `atomic`.
3) `map`:
These lists are like maps in that their elements have a non-index key
used to identify them. Order is preserved upon merge. The map tag
must only be used on a list with elements of type object.
Defaults to atomic for arrays.
:param pulumi.Input[str] x_kubernetes_map_type: x-kubernetes-map-type annotates an object to further describe its topology. This extension must only be used when type is object and may have 2 possible values:
1) `granular`:
These maps are actual maps (key-value pairs) and each fields are independent
from each other (they can each be manipulated by separate actors). This is
the default behaviour for all maps.
2) `atomic`: the list is treated as a single entity, like a scalar.
Atomic maps will be entirely replaced when updated.
:param pulumi.Input[bool] x_kubernetes_preserve_unknown_fields: x-kubernetes-preserve-unknown-fields stops the API server decoding step from pruning fields which are not specified in the validation schema. This affects fields recursively, but switches back to normal pruning behaviour if nested properties or additionalProperties are specified in the schema. This can either be true or undefined. False is forbidden.
:param pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]] x_kubernetes_validations: x-kubernetes-validations describes a list of validation rules written in the CEL expression language. This field is an alpha-level. Using this field requires the feature gate `CustomResourceValidationExpressions` to be enabled.
"""
if _ref is not None:
pulumi.set(__self__, "_ref", _ref)
if _schema is not None:
pulumi.set(__self__, "_schema", _schema)
if additional_items is not None:
pulumi.set(__self__, "additional_items", additional_items)
if additional_properties is not None:
pulumi.set(__self__, "additional_properties", additional_properties)
if all_of is not None:
pulumi.set(__self__, "all_of", all_of)
if any_of is not None:
pulumi.set(__self__, "any_of", any_of)
if default is not None:
pulumi.set(__self__, "default", default)
if definitions is not None:
pulumi.set(__self__, "definitions", definitions)
if dependencies is not None:
pulumi.set(__self__, "dependencies", dependencies)
if description is not None:
pulumi.set(__self__, "description", description)
if enum is not None:
pulumi.set(__self__, "enum", enum)
if example is not None:
pulumi.set(__self__, "example", example)
if exclusive_maximum is not None:
pulumi.set(__self__, "exclusive_maximum", exclusive_maximum)
if exclusive_minimum is not None:
pulumi.set(__self__, "exclusive_minimum", exclusive_minimum)
if external_docs is not None:
pulumi.set(__self__, "external_docs", external_docs)
if format is not None:
pulumi.set(__self__, "format", format)
if id is not None:
pulumi.set(__self__, "id", id)
if items is not None:
pulumi.set(__self__, "items", items)
if max_items is not None:
pulumi.set(__self__, "max_items", max_items)
if max_length is not None:
pulumi.set(__self__, "max_length", max_length)
if max_properties is not None:
pulumi.set(__self__, "max_properties", max_properties)
if maximum is not None:
pulumi.set(__self__, "maximum", maximum)
if min_items is not None:
pulumi.set(__self__, "min_items", min_items)
if min_length is not None:
pulumi.set(__self__, "min_length", min_length)
if min_properties is not None:
pulumi.set(__self__, "min_properties", min_properties)
if minimum is not None:
pulumi.set(__self__, "minimum", minimum)
if multiple_of is not None:
pulumi.set(__self__, "multiple_of", multiple_of)
if not_ is not None:
pulumi.set(__self__, "not_", not_)
if nullable is not None:
pulumi.set(__self__, "nullable", nullable)
if one_of is not None:
pulumi.set(__self__, "one_of", one_of)
if pattern is not None:
pulumi.set(__self__, "pattern", pattern)
if pattern_properties is not None:
pulumi.set(__self__, "pattern_properties", pattern_properties)
if properties is not None:
pulumi.set(__self__, "properties", properties)
if required is not None:
pulumi.set(__self__, "required", required)
if title is not None:
pulumi.set(__self__, "title", title)
if type is not None:
pulumi.set(__self__, "type", type)
if unique_items is not None:
pulumi.set(__self__, "unique_items", unique_items)
if x_kubernetes_embedded_resource is not None:
pulumi.set(__self__, "x_kubernetes_embedded_resource", x_kubernetes_embedded_resource)
if x_kubernetes_int_or_string is not None:
pulumi.set(__self__, "x_kubernetes_int_or_string", x_kubernetes_int_or_string)
if x_kubernetes_list_map_keys is not None:
pulumi.set(__self__, "x_kubernetes_list_map_keys", x_kubernetes_list_map_keys)
if x_kubernetes_list_type is not None:
pulumi.set(__self__, "x_kubernetes_list_type", x_kubernetes_list_type)
if x_kubernetes_map_type is not None:
pulumi.set(__self__, "x_kubernetes_map_type", x_kubernetes_map_type)
if x_kubernetes_preserve_unknown_fields is not None:
pulumi.set(__self__, "x_kubernetes_preserve_unknown_fields", x_kubernetes_preserve_unknown_fields)
if x_kubernetes_validations is not None:
pulumi.set(__self__, "x_kubernetes_validations", x_kubernetes_validations)
@property
@pulumi.getter(name="$ref")
def _ref(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "_ref")
@_ref.setter
def _ref(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "_ref", value)
@property
@pulumi.getter(name="$schema")
def _schema(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "_schema")
@_schema.setter
def _schema(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "_schema", value)
@property
@pulumi.getter(name="additionalItems")
def additional_items(self) -> Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]:
return pulumi.get(self, "additional_items")
@additional_items.setter
def additional_items(self, value: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]):
pulumi.set(self, "additional_items", value)
@property
@pulumi.getter(name="additionalProperties")
def additional_properties(self) -> Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]:
return pulumi.get(self, "additional_properties")
@additional_properties.setter
def additional_properties(self, value: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]):
pulumi.set(self, "additional_properties", value)
@property
@pulumi.getter(name="allOf")
def all_of(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]:
return pulumi.get(self, "all_of")
@all_of.setter
def all_of(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]):
pulumi.set(self, "all_of", value)
@property
@pulumi.getter(name="anyOf")
def any_of(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]:
return pulumi.get(self, "any_of")
@any_of.setter
def any_of(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]):
pulumi.set(self, "any_of", value)
@property
@pulumi.getter
def default(self) -> Optional[Any]:
"""
default is a default value for undefined object fields. Defaulting is a beta feature under the CustomResourceDefaulting feature gate. Defaulting requires spec.preserveUnknownFields to be false.
"""
return pulumi.get(self, "default")
@default.setter
def default(self, value: Optional[Any]):
pulumi.set(self, "default", value)
@property
@pulumi.getter
def definitions(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]:
return pulumi.get(self, "definitions")
@definitions.setter
def definitions(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]):
pulumi.set(self, "definitions", value)
@property
@pulumi.getter
def dependencies(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[pulumi.Input[str]]]]]]]:
return pulumi.get(self, "dependencies")
@dependencies.setter
def dependencies(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[pulumi.Input[str]]]]]]]):
pulumi.set(self, "dependencies", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def enum(self) -> Optional[pulumi.Input[Sequence[Any]]]:
return pulumi.get(self, "enum")
@enum.setter
def enum(self, value: Optional[pulumi.Input[Sequence[Any]]]):
pulumi.set(self, "enum", value)
@property
@pulumi.getter
def example(self) -> Optional[Any]:
return pulumi.get(self, "example")
@example.setter
def example(self, value: Optional[Any]):
pulumi.set(self, "example", value)
@property
@pulumi.getter(name="exclusiveMaximum")
def exclusive_maximum(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "exclusive_maximum")
@exclusive_maximum.setter
def exclusive_maximum(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "exclusive_maximum", value)
@property
@pulumi.getter(name="exclusiveMinimum")
def exclusive_minimum(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "exclusive_minimum")
@exclusive_minimum.setter
def exclusive_minimum(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "exclusive_minimum", value)
@property
@pulumi.getter(name="externalDocs")
def external_docs(self) -> Optional[pulumi.Input['ExternalDocumentationArgs']]:
return pulumi.get(self, "external_docs")
@external_docs.setter
def external_docs(self, value: Optional[pulumi.Input['ExternalDocumentationArgs']]):
pulumi.set(self, "external_docs", value)
@property
@pulumi.getter
def format(self) -> Optional[pulumi.Input[str]]:
"""
format is an OpenAPI v3 format string. Unknown formats are ignored. The following formats are validated:
- bsonobjectid: a bson object ID, i.e. a 24 characters hex string - uri: an URI as parsed by Golang net/url.ParseRequestURI - email: an email address as parsed by Golang net/mail.ParseAddress - hostname: a valid representation for an Internet host name, as defined by RFC 1034, section 3.1 [RFC1034]. - ipv4: an IPv4 IP as parsed by Golang net.ParseIP - ipv6: an IPv6 IP as parsed by Golang net.ParseIP - cidr: a CIDR as parsed by Golang net.ParseCIDR - mac: a MAC address as parsed by Golang net.ParseMAC - uuid: an UUID that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{12}$ - uuid3: an UUID3 that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?3[0-9a-f]{3}-?[0-9a-f]{4}-?[0-9a-f]{12}$ - uuid4: an UUID4 that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?4[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$ - uuid5: an UUID5 that allows uppercase defined by the regex (?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?5[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$ - isbn: an ISBN10 or ISBN13 number string like "0321751043" or "978-0321751041" - isbn10: an ISBN10 number string like "0321751043" - isbn13: an ISBN13 number string like "978-0321751041" - creditcard: a credit card number defined by the regex ^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11})$ with any non digit characters mixed in - ssn: a U.S. 
social security number following the regex ^\d{3}[- ]?\d{2}[- ]?\d{4}$ - hexcolor: an hexadecimal color code like "#FFFFFF: following the regex ^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$ - rgbcolor: an RGB color code like rgb like "rgb(255,255,2559" - byte: base64 encoded binary data - password: any kind of string - date: a date string like "2006-01-02" as defined by full-date in RFC3339 - duration: a duration string like "22 ns" as parsed by Golang time.ParseDuration or compatible with Scala duration format - datetime: a date time string like "2014-12-15T19:30:20.000Z" as defined by date-time in RFC3339.
"""
return pulumi.get(self, "format")
@format.setter
def format(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "format", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def items(self) -> Optional[pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[Any]]]]:
return pulumi.get(self, "items")
@items.setter
def items(self, value: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[Any]]]]):
pulumi.set(self, "items", value)
@property
@pulumi.getter(name="maxItems")
def max_items(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_items")
@max_items.setter
def max_items(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_items", value)
@property
@pulumi.getter(name="maxLength")
def max_length(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_length")
@max_length.setter
def max_length(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_length", value)
@property
@pulumi.getter(name="maxProperties")
def max_properties(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_properties")
@max_properties.setter
def max_properties(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_properties", value)
@property
@pulumi.getter
def maximum(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "maximum")
@maximum.setter
def maximum(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "maximum", value)
@property
@pulumi.getter(name="minItems")
def min_items(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_items")
@min_items.setter
def min_items(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_items", value)
@property
@pulumi.getter(name="minLength")
def min_length(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_length")
@min_length.setter
def min_length(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_length", value)
@property
@pulumi.getter(name="minProperties")
def min_properties(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_properties")
@min_properties.setter
def min_properties(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_properties", value)
@property
@pulumi.getter
def minimum(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "minimum")
@minimum.setter
def minimum(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "minimum", value)
@property
@pulumi.getter(name="multipleOf")
def multiple_of(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "multiple_of")
@multiple_of.setter
def multiple_of(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "multiple_of", value)
@property
@pulumi.getter(name="not")
def not_(self) -> Optional[pulumi.Input['JSONSchemaPropsArgs']]:
return pulumi.get(self, "not_")
@not_.setter
def not_(self, value: Optional[pulumi.Input['JSONSchemaPropsArgs']]):
pulumi.set(self, "not_", value)
@property
@pulumi.getter
def nullable(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "nullable")
@nullable.setter
def nullable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "nullable", value)
@property
@pulumi.getter(name="oneOf")
def one_of(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]:
return pulumi.get(self, "one_of")
@one_of.setter
def one_of(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]):
pulumi.set(self, "one_of", value)
@property
@pulumi.getter
def pattern(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "pattern")
@pattern.setter
def pattern(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pattern", value)
@property
@pulumi.getter(name="patternProperties")
def pattern_properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]:
return pulumi.get(self, "pattern_properties")
@pattern_properties.setter
def pattern_properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]):
pulumi.set(self, "pattern_properties", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]:
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]):
pulumi.set(self, "properties", value)
@property
@pulumi.getter
def required(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "required")
@required.setter
def required(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "required", value)
@property
@pulumi.getter
def title(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "title")
@title.setter
def title(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "title", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="uniqueItems")
def unique_items(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "unique_items")
@unique_items.setter
def unique_items(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "unique_items", value)
@property
@pulumi.getter
def x_kubernetes_embedded_resource(self) -> Optional[pulumi.Input[bool]]:
"""
x-kubernetes-embedded-resource defines that the value is an embedded Kubernetes runtime.Object, with TypeMeta and ObjectMeta. The type must be object. It is allowed to further restrict the embedded object. kind, apiVersion and metadata are validated automatically. x-kubernetes-preserve-unknown-fields is allowed to be true, but does not have to be if the object is fully specified (up to kind, apiVersion, metadata).
"""
return pulumi.get(self, "x_kubernetes_embedded_resource")
@x_kubernetes_embedded_resource.setter
def x_kubernetes_embedded_resource(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "x_kubernetes_embedded_resource", value)
@property
@pulumi.getter
def x_kubernetes_int_or_string(self) -> Optional[pulumi.Input[bool]]:
"""
x-kubernetes-int-or-string specifies that this value is either an integer or a string. If this is true, an empty type is allowed and type as child of anyOf is permitted if following one of the following patterns:
1) anyOf:
- type: integer
- type: string
2) allOf:
- anyOf:
- type: integer
- type: string
- ... zero or more
"""
return pulumi.get(self, "x_kubernetes_int_or_string")
@x_kubernetes_int_or_string.setter
def x_kubernetes_int_or_string(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "x_kubernetes_int_or_string", value)
@property
@pulumi.getter
def x_kubernetes_list_map_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
x-kubernetes-list-map-keys annotates an array with the x-kubernetes-list-type `map` by specifying the keys used as the index of the map.
This tag MUST only be used on lists that have the "x-kubernetes-list-type" extension set to "map". Also, the values specified for this attribute must be a scalar typed field of the child structure (no nesting is supported).
The properties specified must either be required or have a default value, to ensure those properties are present for all list items.
"""
return pulumi.get(self, "x_kubernetes_list_map_keys")
@x_kubernetes_list_map_keys.setter
def x_kubernetes_list_map_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "x_kubernetes_list_map_keys", value)
@property
@pulumi.getter
def x_kubernetes_list_type(self) -> Optional[pulumi.Input[str]]:
"""
x-kubernetes-list-type annotates an array to further describe its topology. This extension must only be used on lists and may have 3 possible values:
1) `atomic`: the list is treated as a single entity, like a scalar.
Atomic lists will be entirely replaced when updated. This extension
may be used on any type of list (struct, scalar, ...).
2) `set`:
Sets are lists that must not have multiple items with the same value. Each
value must be a scalar, an object with x-kubernetes-map-type `atomic` or an
array with x-kubernetes-list-type `atomic`.
3) `map`:
These lists are like maps in that their elements have a non-index key
used to identify them. Order is preserved upon merge. The map tag
must only be used on a list with elements of type object.
Defaults to atomic for arrays.
"""
return pulumi.get(self, "x_kubernetes_list_type")
@x_kubernetes_list_type.setter
def x_kubernetes_list_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "x_kubernetes_list_type", value)
@property
@pulumi.getter
def x_kubernetes_map_type(self) -> Optional[pulumi.Input[str]]:
"""
x-kubernetes-map-type annotates an object to further describe its topology. This extension must only be used when type is object and may have 2 possible values:
1) `granular`:
These maps are actual maps (key-value pairs) and each fields are independent
from each other (they can each be manipulated by separate actors). This is
the default behaviour for all maps.
2) `atomic`: the list is treated as a single entity, like a scalar.
Atomic maps will be entirely replaced when updated.
"""
return pulumi.get(self, "x_kubernetes_map_type")
@x_kubernetes_map_type.setter
def x_kubernetes_map_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "x_kubernetes_map_type", value)
@property
@pulumi.getter
def x_kubernetes_preserve_unknown_fields(self) -> Optional[pulumi.Input[bool]]:
"""
x-kubernetes-preserve-unknown-fields stops the API server decoding step from pruning fields which are not specified in the validation schema. This affects fields recursively, but switches back to normal pruning behaviour if nested properties or additionalProperties are specified in the schema. This can either be true or undefined. False is forbidden.
"""
return pulumi.get(self, "x_kubernetes_preserve_unknown_fields")
@x_kubernetes_preserve_unknown_fields.setter
def x_kubernetes_preserve_unknown_fields(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "x_kubernetes_preserve_unknown_fields", value)
@property
@pulumi.getter
def x_kubernetes_validations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]]]:
"""
x-kubernetes-validations describes a list of validation rules written in the CEL expression language. This field is an alpha-level. Using this field requires the feature gate `CustomResourceValidationExpressions` to be enabled.
"""
return pulumi.get(self, "x_kubernetes_validations")
@x_kubernetes_validations.setter
def x_kubernetes_validations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]]]):
pulumi.set(self, "x_kubernetes_validations", value)
@pulumi.input_type
class ServiceReferenceArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 namespace: pulumi.Input[str],
                 path: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None):
        """
        ServiceReference holds a reference to Service.legacy.k8s.io

        :param pulumi.Input[str] name: name is the name of the service. Required
        :param pulumi.Input[str] namespace: namespace is the namespace of the service. Required
        :param pulumi.Input[str] path: path is an optional URL path at which the webhook will be contacted.
        :param pulumi.Input[int] port: port is an optional service port at which the webhook will be
            contacted. `port` should be a valid port number (1-65535, inclusive). Defaults to 443 for
            backward compatibility.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "namespace", namespace)
        # Optional attributes are only recorded when explicitly provided.
        for attr, val in (("path", path), ("port", port)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the referenced service. Required."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def namespace(self) -> pulumi.Input[str]:
        """Namespace of the referenced service. Required."""
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, value: pulumi.Input[str]):
        pulumi.set(self, "namespace", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """Optional URL path at which the webhook will be contacted."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """Optional service port (1-65535, inclusive); defaults to 443 for backward compatibility."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)
@pulumi.input_type
class ValidationRuleArgs:
def __init__(__self__, *,
rule: pulumi.Input[str],
message: Optional[pulumi.Input[str]] = None):
"""
ValidationRule describes a validation rule written in the CEL expression language.
:param pulumi.Input[str] rule: Rule represents the expression which will be evaluated by CEL. ref: https://github.com/google/cel-spec The Rule is scoped to the location of the x-kubernetes-validations extension in the schema. The `self` variable in the CEL expression is bound to the scoped value. Example: - Rule scoped to the root of a resource with a status subresource: {"rule": "self.status.actual <= self.spec.maxDesired"}
If the Rule is scoped to an object with properties, the accessible properties of the object are field selectable via `self.field` and field presence can be checked via `has(self.field)`. Null valued fields are treated as absent fields in CEL expressions. If the Rule is scoped to an object with additionalProperties (i.e. a map) the value of the map are accessible via `self[mapKey]`, map containment can be checked via `mapKey in self` and all entries of the map are accessible via CEL macros and functions such as `self.all(...)`. If the Rule is scoped to an array, the elements of the array are accessible via `self[i]` and also by macros and functions. If the Rule is scoped to a scalar, `self` is bound to the scalar value. Examples: - Rule scoped to a map of objects: {"rule": "self.components['Widget'].priority < 10"} - Rule scoped to a list of integers: {"rule": "self.values.all(value, value >= 0 && value < 100)"} - Rule scoped to a string value: {"rule": "self.startsWith('kube')"}
The `apiVersion`, `kind`, `metadata.name` and `metadata.generateName` are always accessible from the root of the object and from any x-kubernetes-embedded-resource annotated objects. No other metadata properties are accessible.
Unknown data preserved in custom resources via x-kubernetes-preserve-unknown-fields is not accessible in CEL expressions. This includes: - Unknown field values that are preserved by object schemas with x-kubernetes-preserve-unknown-fields. - Object properties where the property schema is of an "unknown type". An "unknown type" is recursively defined as:
- A schema with no type and x-kubernetes-preserve-unknown-fields set to true
- An array where the items schema is of an "unknown type"
- An object where the additionalProperties schema is of an "unknown type"
Only property names of the form `[a-zA-Z_.-/][a-zA-Z0-9_.-/]*` are accessible. Accessible property names are escaped according to the following rules when accessed in the expression: - '__' escapes to '__underscores__' - '.' escapes to '__dot__' - '-' escapes to '__dash__' - '/' escapes to '__slash__' - Property names that exactly match a CEL RESERVED keyword escape to '__{keyword}__'. The keywords are:
"true", "false", "null", "in", "as", "break", "const", "continue", "else", "for", "function", "if",
"import", "let", "loop", "package", "namespace", "return".
Examples:
- Rule accessing a property named "namespace": {"rule": "self.__namespace__ > 0"}
- Rule accessing a property named "x-prop": {"rule": "self.x__dash__prop > 0"}
- Rule accessing a property named "redact__d": {"rule": "self.redact__underscores__d > 0"}
Equality on arrays with x-kubernetes-list-type of 'set' or 'map' ignores element order, i.e. [1, 2] == [2, 1]. Concatenation on arrays with x-kubernetes-list-type use the semantics of the list type:
- 'set': `X + Y` performs a union where the array positions of all elements in `X` are preserved and
non-intersecting elements in `Y` are appended, retaining their partial order.
- 'map': `X + Y` performs a merge where the array positions of all keys in `X` are preserved but the values
are overwritten by values in `Y` when the key sets of `X` and `Y` intersect. Elements in `Y` with
non-intersecting keys are appended, retaining their partial order.
:param pulumi.Input[str] message: Message represents the message displayed when validation fails. The message is required if the Rule contains line breaks. The message must not contain line breaks. If unset, the message is "failed rule: {Rule}". e.g. "must be a URL with the host matching spec.host"
"""
pulumi.set(__self__, "rule", rule)
if message is not None:
pulumi.set(__self__, "message", message)
@property
@pulumi.getter
def rule(self) -> pulumi.Input[str]:
"""
Rule represents the expression which will be evaluated by CEL. ref: https://github.com/google/cel-spec The Rule is scoped to the location of the x-kubernetes-validations extension in the schema. The `self` variable in the CEL expression is bound to the scoped value. Example: - Rule scoped to the root of a resource with a status subresource: {"rule": "self.status.actual <= self.spec.maxDesired"}
If the Rule is scoped to an object with properties, the accessible properties of the object are field selectable via `self.field` and field presence can be checked via `has(self.field)`. Null valued fields are treated as absent fields in CEL expressions. If the Rule is scoped to an object with additionalProperties (i.e. a map) the value of the map are accessible via `self[mapKey]`, map containment can be checked via `mapKey in self` and all entries of the map are accessible via CEL macros and functions such as `self.all(...)`. If the Rule is scoped to an array, the elements of the array are accessible via `self[i]` and also by macros and functions. If the Rule is scoped to a scalar, `self` is bound to the scalar value. Examples: - Rule scoped to a map of objects: {"rule": "self.components['Widget'].priority < 10"} - Rule scoped to a list of integers: {"rule": "self.values.all(value, value >= 0 && value < 100)"} - Rule scoped to a string value: {"rule": "self.startsWith('kube')"}
The `apiVersion`, `kind`, `metadata.name` and `metadata.generateName` are always accessible from the root of the object and from any x-kubernetes-embedded-resource annotated objects. No other metadata properties are accessible.
Unknown data preserved in custom resources via x-kubernetes-preserve-unknown-fields is not accessible in CEL expressions. This includes: - Unknown field values that are preserved by object schemas with x-kubernetes-preserve-unknown-fields. - Object properties where the property schema is of an "unknown type". An "unknown type" is recursively defined as:
- A schema with no type and x-kubernetes-preserve-unknown-fields set to true
- An array where the items schema is of an "unknown type"
- An object where the additionalProperties schema is of an "unknown type"
Only property names of the form `[a-zA-Z_.-/][a-zA-Z0-9_.-/]*` are accessible. Accessible property names are escaped according to the following rules when accessed in the expression: - '__' escapes to '__underscores__' - '.' escapes to '__dot__' - '-' escapes to '__dash__' - '/' escapes to '__slash__' - Property names that exactly match a CEL RESERVED keyword escape to '__{keyword}__'. The keywords are:
"true", "false", "null", "in", "as", "break", "const", "continue", "else", "for", "function", "if",
"import", "let", "loop", "package", "namespace", "return".
Examples:
- Rule accessing a property named "namespace": {"rule": "self.__namespace__ > 0"}
- Rule accessing a property named "x-prop": {"rule": "self.x__dash__prop > 0"}
- Rule accessing a property named "redact__d": {"rule": "self.redact__underscores__d > 0"}
Equality on arrays with x-kubernetes-list-type of 'set' or 'map' ignores element order, i.e. [1, 2] == [2, 1]. Concatenation on arrays with x-kubernetes-list-type use the semantics of the list type:
- 'set': `X + Y` performs a union where the array positions of all elements in `X` are preserved and
non-intersecting elements in `Y` are appended, retaining their partial order.
- 'map': `X + Y` performs a merge where the array positions of all keys in `X` are preserved but the values
are overwritten by values in `Y` when the key sets of `X` and `Y` intersect. Elements in `Y` with
non-intersecting keys are appended, retaining their partial order.
"""
return pulumi.get(self, "rule")
@rule.setter
def rule(self, value: pulumi.Input[str]):
    # Store the CEL validation expression under the "rule" key of the
    # underlying Pulumi args bag.
    pulumi.set(self, "rule", value)
@property
@pulumi.getter
def message(self) -> Optional[pulumi.Input[str]]:
    """
    Message represents the message displayed when validation fails. The message is required if the Rule contains line breaks. The message must not contain line breaks. If unset, the message is "failed rule: {Rule}". e.g. "must be a URL with the host matching spec.host"
    """
    return pulumi.get(self, "message")

@message.setter
def message(self, value: Optional[pulumi.Input[str]]):
    # Store the failure message under the "message" key of the args bag.
    pulumi.set(self, "message", value)
@pulumi.input_type
class WebhookClientConfigArgs:
    def __init__(__self__, *,
                 ca_bundle: Optional[pulumi.Input[str]] = None,
                 service: Optional[pulumi.Input['ServiceReferenceArgs']] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        WebhookClientConfig contains the information to make a TLS connection with the webhook.

        :param pulumi.Input[str] ca_bundle: caBundle is a PEM encoded CA bundle which will be used to validate the webhook's server certificate. If unspecified, system trust roots on the apiserver are used.
        :param pulumi.Input['ServiceReferenceArgs'] service: service is a reference to the service for this webhook. Either service or url must be specified.
               If the webhook is running within the cluster, then you should use `service`.
        :param pulumi.Input[str] url: url gives the location of the webhook, in standard URL form (`scheme://host:port/path`). Exactly one of `url` or `service` must be specified.
               The `host` should not refer to a service running in the cluster; use the `service` field instead. The host might be resolved via external DNS in some apiservers (e.g., `kube-apiserver` cannot resolve in-cluster DNS as that would be a layering violation). `host` may also be an IP address.
               Please note that using `localhost` or `127.0.0.1` as a `host` is risky unless you take great care to run this webhook on all hosts which run an apiserver which might need to make calls to this webhook. Such installs are likely to be non-portable, i.e., not easy to turn up in a new cluster.
               The scheme must be "https"; the URL must begin with "https://".
               A path is optional, and if present may be any string permissible in a URL. You may use the path to pass an arbitrary string to the webhook, for example, a cluster identifier.
               Attempting to use a user or basic auth e.g. "user:password@" is not allowed. Fragments ("#...") and query parameters ("?...") are not allowed, either.
        """
        # Only record the fields the caller actually supplied, so unset
        # optional values are omitted from the rendered manifest.
        if ca_bundle is not None:
            pulumi.set(__self__, "ca_bundle", ca_bundle)
        if service is not None:
            pulumi.set(__self__, "service", service)
        if url is not None:
            pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter(name="caBundle")
    def ca_bundle(self) -> Optional[pulumi.Input[str]]:
        """
        caBundle is a PEM encoded CA bundle which will be used to validate the webhook's server certificate. If unspecified, system trust roots on the apiserver are used.
        """
        return pulumi.get(self, "ca_bundle")

    @ca_bundle.setter
    def ca_bundle(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ca_bundle", value)

    @property
    @pulumi.getter
    def service(self) -> Optional[pulumi.Input['ServiceReferenceArgs']]:
        """
        service is a reference to the service for this webhook. Either service or url must be specified.
        If the webhook is running within the cluster, then you should use `service`.
        """
        return pulumi.get(self, "service")

    @service.setter
    def service(self, value: Optional[pulumi.Input['ServiceReferenceArgs']]):
        pulumi.set(self, "service", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        url gives the location of the webhook, in standard URL form (`scheme://host:port/path`). Exactly one of `url` or `service` must be specified.
        The `host` should not refer to a service running in the cluster; use the `service` field instead. The host might be resolved via external DNS in some apiservers (e.g., `kube-apiserver` cannot resolve in-cluster DNS as that would be a layering violation). `host` may also be an IP address.
        Please note that using `localhost` or `127.0.0.1` as a `host` is risky unless you take great care to run this webhook on all hosts which run an apiserver which might need to make calls to this webhook. Such installs are likely to be non-portable, i.e., not easy to turn up in a new cluster.
        The scheme must be "https"; the URL must begin with "https://".
        A path is optional, and if present may be any string permissible in a URL. You may use the path to pass an arbitrary string to the webhook, for example, a cluster identifier.
        Attempting to use a user or basic auth e.g. "user:password@" is not allowed. Fragments ("#...") and query parameters ("?...") are not allowed, either.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class WebhookConversionArgs:
    def __init__(__self__, *,
                 conversion_review_versions: pulumi.Input[Sequence[pulumi.Input[str]]],
                 client_config: Optional[pulumi.Input['WebhookClientConfigArgs']] = None):
        """
        WebhookConversion describes how to call a conversion webhook

        :param pulumi.Input[Sequence[pulumi.Input[str]]] conversion_review_versions: conversionReviewVersions is an ordered list of preferred `ConversionReview` versions the Webhook expects. The API server will use the first version in the list which it supports. If none of the versions specified in this list are supported by API server, conversion will fail for the custom resource. If a persisted Webhook configuration specifies allowed versions and does not include any versions known to the API Server, calls to the webhook will fail.
        :param pulumi.Input['WebhookClientConfigArgs'] client_config: clientConfig is the instructions for how to call the webhook if strategy is `Webhook`.
        """
        pulumi.set(__self__, "conversion_review_versions", conversion_review_versions)
        # clientConfig is optional; omit it entirely when not supplied.
        if client_config is not None:
            pulumi.set(__self__, "client_config", client_config)

    @property
    @pulumi.getter(name="conversionReviewVersions")
    def conversion_review_versions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        conversionReviewVersions is an ordered list of preferred `ConversionReview` versions the Webhook expects. The API server will use the first version in the list which it supports. If none of the versions specified in this list are supported by API server, conversion will fail for the custom resource. If a persisted Webhook configuration specifies allowed versions and does not include any versions known to the API Server, calls to the webhook will fail.
        """
        return pulumi.get(self, "conversion_review_versions")

    @conversion_review_versions.setter
    def conversion_review_versions(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "conversion_review_versions", value)

    @property
    @pulumi.getter(name="clientConfig")
    def client_config(self) -> Optional[pulumi.Input['WebhookClientConfigArgs']]:
        """
        clientConfig is the instructions for how to call the webhook if strategy is `Webhook`.
        """
        return pulumi.get(self, "client_config")

    @client_config.setter
    def client_config(self, value: Optional[pulumi.Input['WebhookClientConfigArgs']]):
        pulumi.set(self, "client_config", value)
| 56.327605 | 2,114 | 0.683236 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import meta as _meta
# Public API of this module: the Pulumi input-arg types for the
# apiextensions.k8s.io/v1 CustomResourceDefinition API group.
__all__ = [
    'CustomResourceColumnDefinitionArgs',
    'CustomResourceConversionArgs',
    'CustomResourceDefinitionConditionArgs',
    'CustomResourceDefinitionNamesArgs',
    'CustomResourceDefinitionSpecArgs',
    'CustomResourceDefinitionStatusArgs',
    'CustomResourceDefinitionVersionArgs',
    'CustomResourceDefinitionArgs',
    'CustomResourceSubresourceScaleArgs',
    'CustomResourceSubresourcesArgs',
    'CustomResourceValidationArgs',
    'ExternalDocumentationArgs',
    'JSONSchemaPropsArgs',
    'ServiceReferenceArgs',
    'ValidationRuleArgs',
    'WebhookClientConfigArgs',
    'WebhookConversionArgs',
]
@pulumi.input_type
class CustomResourceColumnDefinitionArgs:
    """Input args for one extra printer column of a custom resource
    (consumed via a version's ``additionalPrinterColumns``)."""

    def __init__(__self__, *,
                 json_path: pulumi.Input[str],
                 name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 format: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None):
        """
        :param json_path: JSONPath selecting the value shown in this column.
        :param name: Human-readable column name.
        :param type: OpenAPI type of the column value.
        :param description: Optional human-readable description of the column.
        :param format: Optional OpenAPI format modifier for ``type``.
        :param priority: Optional integer priority for column visibility.
        """
        pulumi.set(__self__, "json_path", json_path)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "type", type)
        # Optional fields are only recorded when supplied so unset values
        # are omitted from the rendered manifest.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if format is not None:
            pulumi.set(__self__, "format", format)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)

    @property
    @pulumi.getter(name="jsonPath")
    def json_path(self) -> pulumi.Input[str]:
        return pulumi.get(self, "json_path")

    @json_path.setter
    def json_path(self, value: pulumi.Input[str]):
        pulumi.set(self, "json_path", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def format(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "format")

    @format.setter
    def format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "format", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
@pulumi.input_type
class CustomResourceConversionArgs:
    """Input args describing how custom resources are converted between
    versions (used as ``spec.conversion`` of a CRD)."""

    def __init__(__self__, *,
                 strategy: pulumi.Input[str],
                 webhook: Optional[pulumi.Input['WebhookConversionArgs']] = None):
        """
        :param strategy: Conversion strategy name.
        :param webhook: Optional webhook configuration; presumably required
               when ``strategy`` selects webhook conversion — confirm against
               the Kubernetes CRD API reference.
        """
        pulumi.set(__self__, "strategy", strategy)
        if webhook is not None:
            pulumi.set(__self__, "webhook", webhook)

    @property
    @pulumi.getter
    def strategy(self) -> pulumi.Input[str]:
        return pulumi.get(self, "strategy")

    @strategy.setter
    def strategy(self, value: pulumi.Input[str]):
        pulumi.set(self, "strategy", value)

    @property
    @pulumi.getter
    def webhook(self) -> Optional[pulumi.Input['WebhookConversionArgs']]:
        return pulumi.get(self, "webhook")

    @webhook.setter
    def webhook(self, value: Optional[pulumi.Input['WebhookConversionArgs']]):
        pulumi.set(self, "webhook", value)
@pulumi.input_type
class CustomResourceDefinitionConditionArgs:
    """Input args for one status condition of a CustomResourceDefinition
    (used in ``status.conditions``)."""

    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        :param status: Status of the condition.
        :param type: Type (name) of the condition.
        :param last_transition_time: Optional timestamp of the last transition.
        :param message: Optional human-readable detail message.
        :param reason: Optional machine-readable reason.
        """
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        # Only record optional fields that were supplied.
        if last_transition_time is not None:
            pulumi.set(__self__, "last_transition_time", last_transition_time)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if reason is not None:
            pulumi.set(__self__, "reason", reason)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: pulumi.Input[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
@pulumi.input_type
class CustomResourceDefinitionNamesArgs:
    """Input args for the names of a custom resource
    (used as ``spec.names`` and ``status.acceptedNames``)."""

    def __init__(__self__, *,
                 kind: pulumi.Input[str],
                 plural: pulumi.Input[str],
                 categories: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 list_kind: Optional[pulumi.Input[str]] = None,
                 short_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 singular: Optional[pulumi.Input[str]] = None):
        """
        :param kind: Kind name of the custom resource.
        :param plural: Plural resource name.
        :param categories: Optional list of grouped category names.
        :param list_kind: Optional kind name for list objects.
        :param short_names: Optional list of short aliases.
        :param singular: Optional singular resource name.
        """
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "plural", plural)
        # Only record optional fields that were supplied.
        if categories is not None:
            pulumi.set(__self__, "categories", categories)
        if list_kind is not None:
            pulumi.set(__self__, "list_kind", list_kind)
        if short_names is not None:
            pulumi.set(__self__, "short_names", short_names)
        if singular is not None:
            pulumi.set(__self__, "singular", singular)

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Input[str]:
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: pulumi.Input[str]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def plural(self) -> pulumi.Input[str]:
        return pulumi.get(self, "plural")

    @plural.setter
    def plural(self, value: pulumi.Input[str]):
        pulumi.set(self, "plural", value)

    @property
    @pulumi.getter
    def categories(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "categories")

    @categories.setter
    def categories(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "categories", value)

    @property
    @pulumi.getter(name="listKind")
    def list_kind(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "list_kind")

    @list_kind.setter
    def list_kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "list_kind", value)

    @property
    @pulumi.getter(name="shortNames")
    def short_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "short_names")

    @short_names.setter
    def short_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "short_names", value)

    @property
    @pulumi.getter
    def singular(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "singular")

    @singular.setter
    def singular(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "singular", value)
@pulumi.input_type
class CustomResourceDefinitionSpecArgs:
    """Input args for the ``spec`` of a CustomResourceDefinition."""

    def __init__(__self__, *,
                 group: pulumi.Input[str],
                 names: pulumi.Input['CustomResourceDefinitionNamesArgs'],
                 scope: pulumi.Input[str],
                 versions: pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]],
                 conversion: Optional[pulumi.Input['CustomResourceConversionArgs']] = None,
                 preserve_unknown_fields: Optional[pulumi.Input[bool]] = None):
        """
        :param group: API group of the defined custom resource.
        :param names: Resource names (kind, plural, etc.).
        :param scope: Scope of the resource (e.g. namespaced vs. cluster —
               exact allowed values per the Kubernetes CRD API reference).
        :param versions: List of served/stored versions.
        :param conversion: Optional version-conversion settings.
        :param preserve_unknown_fields: Optional flag controlling pruning of
               unknown fields.
        """
        pulumi.set(__self__, "group", group)
        pulumi.set(__self__, "names", names)
        pulumi.set(__self__, "scope", scope)
        pulumi.set(__self__, "versions", versions)
        # Only record optional fields that were supplied.
        if conversion is not None:
            pulumi.set(__self__, "conversion", conversion)
        if preserve_unknown_fields is not None:
            pulumi.set(__self__, "preserve_unknown_fields", preserve_unknown_fields)

    @property
    @pulumi.getter
    def group(self) -> pulumi.Input[str]:
        return pulumi.get(self, "group")

    @group.setter
    def group(self, value: pulumi.Input[str]):
        pulumi.set(self, "group", value)

    @property
    @pulumi.getter
    def names(self) -> pulumi.Input['CustomResourceDefinitionNamesArgs']:
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: pulumi.Input['CustomResourceDefinitionNamesArgs']):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def scope(self) -> pulumi.Input[str]:
        return pulumi.get(self, "scope")

    @scope.setter
    def scope(self, value: pulumi.Input[str]):
        pulumi.set(self, "scope", value)

    @property
    @pulumi.getter
    def versions(self) -> pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]]:
        return pulumi.get(self, "versions")

    @versions.setter
    def versions(self, value: pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionVersionArgs']]]):
        pulumi.set(self, "versions", value)

    @property
    @pulumi.getter
    def conversion(self) -> Optional[pulumi.Input['CustomResourceConversionArgs']]:
        return pulumi.get(self, "conversion")

    @conversion.setter
    def conversion(self, value: Optional[pulumi.Input['CustomResourceConversionArgs']]):
        pulumi.set(self, "conversion", value)

    @property
    @pulumi.getter(name="preserveUnknownFields")
    def preserve_unknown_fields(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "preserve_unknown_fields")

    @preserve_unknown_fields.setter
    def preserve_unknown_fields(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "preserve_unknown_fields", value)
@pulumi.input_type
class CustomResourceDefinitionStatusArgs:
    """Input args for the ``status`` of a CustomResourceDefinition."""

    def __init__(__self__, *,
                 accepted_names: pulumi.Input['CustomResourceDefinitionNamesArgs'],
                 stored_versions: pulumi.Input[Sequence[pulumi.Input[str]]],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]]] = None):
        """
        :param accepted_names: Names actually accepted by the API server.
        :param stored_versions: Versions ever persisted for this resource.
        :param conditions: Optional list of status conditions.
        """
        pulumi.set(__self__, "accepted_names", accepted_names)
        pulumi.set(__self__, "stored_versions", stored_versions)
        # conditions is optional; omit when not supplied.
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter(name="acceptedNames")
    def accepted_names(self) -> pulumi.Input['CustomResourceDefinitionNamesArgs']:
        return pulumi.get(self, "accepted_names")

    @accepted_names.setter
    def accepted_names(self, value: pulumi.Input['CustomResourceDefinitionNamesArgs']):
        pulumi.set(self, "accepted_names", value)

    @property
    @pulumi.getter(name="storedVersions")
    def stored_versions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        return pulumi.get(self, "stored_versions")

    @stored_versions.setter
    def stored_versions(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "stored_versions", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]]]:
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceDefinitionConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class CustomResourceDefinitionVersionArgs:
    """Input args for one version entry of a CRD (``spec.versions``)."""

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 served: pulumi.Input[bool],
                 storage: pulumi.Input[bool],
                 additional_printer_columns: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]]] = None,
                 deprecated: Optional[pulumi.Input[bool]] = None,
                 deprecation_warning: Optional[pulumi.Input[str]] = None,
                 schema: Optional[pulumi.Input['CustomResourceValidationArgs']] = None,
                 subresources: Optional[pulumi.Input['CustomResourceSubresourcesArgs']] = None):
        """
        :param name: Version name (e.g. the API version string).
        :param served: Whether this version is served by the API.
        :param storage: Whether this version is the storage version.
        :param additional_printer_columns: Optional extra printer columns.
        :param deprecated: Optional flag marking the version deprecated.
        :param deprecation_warning: Optional warning text for deprecated use.
        :param schema: Optional OpenAPI validation schema wrapper.
        :param subresources: Optional subresource configuration.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "served", served)
        pulumi.set(__self__, "storage", storage)
        # Only record optional fields that were supplied.
        if additional_printer_columns is not None:
            pulumi.set(__self__, "additional_printer_columns", additional_printer_columns)
        if deprecated is not None:
            pulumi.set(__self__, "deprecated", deprecated)
        if deprecation_warning is not None:
            pulumi.set(__self__, "deprecation_warning", deprecation_warning)
        if schema is not None:
            pulumi.set(__self__, "schema", schema)
        if subresources is not None:
            pulumi.set(__self__, "subresources", subresources)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def served(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "served")

    @served.setter
    def served(self, value: pulumi.Input[bool]):
        pulumi.set(self, "served", value)

    @property
    @pulumi.getter
    def storage(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "storage")

    @storage.setter
    def storage(self, value: pulumi.Input[bool]):
        pulumi.set(self, "storage", value)

    @property
    @pulumi.getter(name="additionalPrinterColumns")
    def additional_printer_columns(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]]]:
        return pulumi.get(self, "additional_printer_columns")

    @additional_printer_columns.setter
    def additional_printer_columns(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CustomResourceColumnDefinitionArgs']]]]):
        pulumi.set(self, "additional_printer_columns", value)

    @property
    @pulumi.getter
    def deprecated(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "deprecated")

    @deprecated.setter
    def deprecated(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "deprecated", value)

    @property
    @pulumi.getter(name="deprecationWarning")
    def deprecation_warning(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "deprecation_warning")

    @deprecation_warning.setter
    def deprecation_warning(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deprecation_warning", value)

    @property
    @pulumi.getter
    def schema(self) -> Optional[pulumi.Input['CustomResourceValidationArgs']]:
        return pulumi.get(self, "schema")

    @schema.setter
    def schema(self, value: Optional[pulumi.Input['CustomResourceValidationArgs']]):
        pulumi.set(self, "schema", value)

    @property
    @pulumi.getter
    def subresources(self) -> Optional[pulumi.Input['CustomResourceSubresourcesArgs']]:
        return pulumi.get(self, "subresources")

    @subresources.setter
    def subresources(self, value: Optional[pulumi.Input['CustomResourceSubresourcesArgs']]):
        pulumi.set(self, "subresources", value)
@pulumi.input_type
class CustomResourceDefinitionArgs:
    """Top-level input args for a CustomResourceDefinition resource."""

    def __init__(__self__, *,
                 spec: pulumi.Input['CustomResourceDefinitionSpecArgs'],
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 status: Optional[pulumi.Input['CustomResourceDefinitionStatusArgs']] = None):
        """
        :param spec: Desired state of the CRD (required).
        :param api_version: If provided, pinned to 'apiextensions.k8s.io/v1'.
        :param kind: If provided, pinned to 'CustomResourceDefinition'.
        :param metadata: Optional standard object metadata.
        :param status: Optional observed status.
        """
        pulumi.set(__self__, "spec", spec)
        # NOTE: when api_version/kind are supplied, the caller's value is
        # deliberately ignored and the canonical constant for this API group
        # is stored instead (standard generated-SDK behavior).
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'apiextensions.k8s.io/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'CustomResourceDefinition')
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter
    def spec(self) -> pulumi.Input['CustomResourceDefinitionSpecArgs']:
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, value: pulumi.Input['CustomResourceDefinitionSpecArgs']):
        pulumi.set(self, "spec", value)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['CustomResourceDefinitionStatusArgs']]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input['CustomResourceDefinitionStatusArgs']]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class CustomResourceSubresourceScaleArgs:
    """Input args for the ``scale`` subresource of a custom resource."""

    def __init__(__self__, *,
                 spec_replicas_path: pulumi.Input[str],
                 status_replicas_path: pulumi.Input[str],
                 label_selector_path: Optional[pulumi.Input[str]] = None):
        """
        :param spec_replicas_path: JSONPath to the spec replica count.
        :param status_replicas_path: JSONPath to the status replica count.
        :param label_selector_path: Optional JSONPath to the label selector.
        """
        pulumi.set(__self__, "spec_replicas_path", spec_replicas_path)
        pulumi.set(__self__, "status_replicas_path", status_replicas_path)
        # label_selector_path is optional; omit when not supplied.
        if label_selector_path is not None:
            pulumi.set(__self__, "label_selector_path", label_selector_path)

    @property
    @pulumi.getter(name="specReplicasPath")
    def spec_replicas_path(self) -> pulumi.Input[str]:
        return pulumi.get(self, "spec_replicas_path")

    @spec_replicas_path.setter
    def spec_replicas_path(self, value: pulumi.Input[str]):
        pulumi.set(self, "spec_replicas_path", value)

    @property
    @pulumi.getter(name="statusReplicasPath")
    def status_replicas_path(self) -> pulumi.Input[str]:
        return pulumi.get(self, "status_replicas_path")

    @status_replicas_path.setter
    def status_replicas_path(self, value: pulumi.Input[str]):
        pulumi.set(self, "status_replicas_path", value)

    @property
    @pulumi.getter(name="labelSelectorPath")
    def label_selector_path(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "label_selector_path")

    @label_selector_path.setter
    def label_selector_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "label_selector_path", value)
@pulumi.input_type
class CustomResourceSubresourcesArgs:
    """Input args enabling subresources of a custom resource version."""

    def __init__(__self__, *,
                 scale: Optional[pulumi.Input['CustomResourceSubresourceScaleArgs']] = None,
                 status: Optional[Any] = None):
        """
        :param scale: Optional scale-subresource configuration.
        :param status: Optional status-subresource marker; typed ``Any``
               because the API accepts an arbitrary (usually empty) object.
        """
        if scale is not None:
            pulumi.set(__self__, "scale", scale)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter
    def scale(self) -> Optional[pulumi.Input['CustomResourceSubresourceScaleArgs']]:
        return pulumi.get(self, "scale")

    @scale.setter
    def scale(self, value: Optional[pulumi.Input['CustomResourceSubresourceScaleArgs']]):
        pulumi.set(self, "scale", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[Any]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[Any]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class CustomResourceValidationArgs:
    """Input args wrapping the OpenAPI v3 validation schema of a CRD
    version (used as a version's ``schema``)."""

    def __init__(__self__, *,
                 open_apiv3_schema: Optional[pulumi.Input['JSONSchemaPropsArgs']] = None):
        """
        :param open_apiv3_schema: Optional OpenAPI v3 schema describing the
               custom resource; stored only when supplied.
        """
        if open_apiv3_schema is not None:
            pulumi.set(__self__, "open_apiv3_schema", open_apiv3_schema)

    @property
    @pulumi.getter(name="openAPIV3Schema")
    def open_apiv3_schema(self) -> Optional[pulumi.Input['JSONSchemaPropsArgs']]:
        return pulumi.get(self, "open_apiv3_schema")

    @open_apiv3_schema.setter
    def open_apiv3_schema(self, value: Optional[pulumi.Input['JSONSchemaPropsArgs']]):
        pulumi.set(self, "open_apiv3_schema", value)
@pulumi.input_type
class ExternalDocumentationArgs:
    """Input args for an external-documentation reference in a JSON schema
    (``externalDocs``)."""

    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        :param description: Optional description of the external docs.
        :param url: Optional URL of the external documentation.
        """
        # Both fields are optional; record only what the caller supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if url is not None:
            pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class JSONSchemaPropsArgs:
def __init__(__self__, *,
_ref: Optional[pulumi.Input[str]] = None,
_schema: Optional[pulumi.Input[str]] = None,
additional_items: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]] = None,
additional_properties: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]] = None,
all_of: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]] = None,
any_of: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]] = None,
default: Optional[Any] = None,
definitions: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]] = None,
dependencies: Optional[pulumi.Input[Mapping[str, pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[pulumi.Input[str]]]]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enum: Optional[pulumi.Input[Sequence[Any]]] = None,
example: Optional[Any] = None,
exclusive_maximum: Optional[pulumi.Input[bool]] = None,
exclusive_minimum: Optional[pulumi.Input[bool]] = None,
external_docs: Optional[pulumi.Input['ExternalDocumentationArgs']] = None,
format: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
items: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[Any]]]] = None,
max_items: Optional[pulumi.Input[int]] = None,
max_length: Optional[pulumi.Input[int]] = None,
max_properties: Optional[pulumi.Input[int]] = None,
maximum: Optional[pulumi.Input[float]] = None,
min_items: Optional[pulumi.Input[int]] = None,
min_length: Optional[pulumi.Input[int]] = None,
min_properties: Optional[pulumi.Input[int]] = None,
minimum: Optional[pulumi.Input[float]] = None,
multiple_of: Optional[pulumi.Input[float]] = None,
not_: Optional[pulumi.Input['JSONSchemaPropsArgs']] = None,
nullable: Optional[pulumi.Input[bool]] = None,
one_of: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]] = None,
pattern: Optional[pulumi.Input[str]] = None,
pattern_properties: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]] = None,
properties: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]] = None,
required: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
title: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
unique_items: Optional[pulumi.Input[bool]] = None,
x_kubernetes_embedded_resource: Optional[pulumi.Input[bool]] = None,
x_kubernetes_int_or_string: Optional[pulumi.Input[bool]] = None,
x_kubernetes_list_map_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
x_kubernetes_list_type: Optional[pulumi.Input[str]] = None,
x_kubernetes_map_type: Optional[pulumi.Input[str]] = None,
x_kubernetes_preserve_unknown_fields: Optional[pulumi.Input[bool]] = None,
x_kubernetes_validations: Optional[pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]]] = None):
if _ref is not None:
pulumi.set(__self__, "_ref", _ref)
if _schema is not None:
pulumi.set(__self__, "_schema", _schema)
if additional_items is not None:
pulumi.set(__self__, "additional_items", additional_items)
if additional_properties is not None:
pulumi.set(__self__, "additional_properties", additional_properties)
if all_of is not None:
pulumi.set(__self__, "all_of", all_of)
if any_of is not None:
pulumi.set(__self__, "any_of", any_of)
if default is not None:
pulumi.set(__self__, "default", default)
if definitions is not None:
pulumi.set(__self__, "definitions", definitions)
if dependencies is not None:
pulumi.set(__self__, "dependencies", dependencies)
if description is not None:
pulumi.set(__self__, "description", description)
if enum is not None:
pulumi.set(__self__, "enum", enum)
if example is not None:
pulumi.set(__self__, "example", example)
if exclusive_maximum is not None:
pulumi.set(__self__, "exclusive_maximum", exclusive_maximum)
if exclusive_minimum is not None:
pulumi.set(__self__, "exclusive_minimum", exclusive_minimum)
if external_docs is not None:
pulumi.set(__self__, "external_docs", external_docs)
if format is not None:
pulumi.set(__self__, "format", format)
if id is not None:
pulumi.set(__self__, "id", id)
if items is not None:
pulumi.set(__self__, "items", items)
if max_items is not None:
pulumi.set(__self__, "max_items", max_items)
if max_length is not None:
pulumi.set(__self__, "max_length", max_length)
if max_properties is not None:
pulumi.set(__self__, "max_properties", max_properties)
if maximum is not None:
pulumi.set(__self__, "maximum", maximum)
if min_items is not None:
pulumi.set(__self__, "min_items", min_items)
if min_length is not None:
pulumi.set(__self__, "min_length", min_length)
if min_properties is not None:
pulumi.set(__self__, "min_properties", min_properties)
if minimum is not None:
pulumi.set(__self__, "minimum", minimum)
if multiple_of is not None:
pulumi.set(__self__, "multiple_of", multiple_of)
if not_ is not None:
pulumi.set(__self__, "not_", not_)
if nullable is not None:
pulumi.set(__self__, "nullable", nullable)
if one_of is not None:
pulumi.set(__self__, "one_of", one_of)
if pattern is not None:
pulumi.set(__self__, "pattern", pattern)
if pattern_properties is not None:
pulumi.set(__self__, "pattern_properties", pattern_properties)
if properties is not None:
pulumi.set(__self__, "properties", properties)
if required is not None:
pulumi.set(__self__, "required", required)
if title is not None:
pulumi.set(__self__, "title", title)
if type is not None:
pulumi.set(__self__, "type", type)
if unique_items is not None:
pulumi.set(__self__, "unique_items", unique_items)
if x_kubernetes_embedded_resource is not None:
pulumi.set(__self__, "x_kubernetes_embedded_resource", x_kubernetes_embedded_resource)
if x_kubernetes_int_or_string is not None:
pulumi.set(__self__, "x_kubernetes_int_or_string", x_kubernetes_int_or_string)
if x_kubernetes_list_map_keys is not None:
pulumi.set(__self__, "x_kubernetes_list_map_keys", x_kubernetes_list_map_keys)
if x_kubernetes_list_type is not None:
pulumi.set(__self__, "x_kubernetes_list_type", x_kubernetes_list_type)
if x_kubernetes_map_type is not None:
pulumi.set(__self__, "x_kubernetes_map_type", x_kubernetes_map_type)
if x_kubernetes_preserve_unknown_fields is not None:
pulumi.set(__self__, "x_kubernetes_preserve_unknown_fields", x_kubernetes_preserve_unknown_fields)
if x_kubernetes_validations is not None:
pulumi.set(__self__, "x_kubernetes_validations", x_kubernetes_validations)
    # Generated accessors for JSON Schema reference/combinator keywords.
    # Each @property pair exposes one schema field: the wire name ("$"-prefixed
    # or camelCase) is declared via @pulumi.getter, while storage goes through
    # pulumi.get/pulumi.set under the snake_case key.
    @property
    @pulumi.getter(name="$ref")
    def _ref(self) -> Optional[pulumi.Input[str]]:
        """JSON Schema ``$ref`` keyword."""
        return pulumi.get(self, "_ref")
    @_ref.setter
    def _ref(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "_ref", value)
    @property
    @pulumi.getter(name="$schema")
    def _schema(self) -> Optional[pulumi.Input[str]]:
        """JSON Schema ``$schema`` keyword."""
        return pulumi.get(self, "_schema")
    @_schema.setter
    def _schema(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "_schema", value)
    @property
    @pulumi.getter(name="additionalItems")
    def additional_items(self) -> Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]:
        """``additionalItems``: schema or boolean for array items beyond ``items``."""
        return pulumi.get(self, "additional_items")
    @additional_items.setter
    def additional_items(self, value: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]):
        pulumi.set(self, "additional_items", value)
    @property
    @pulumi.getter(name="additionalProperties")
    def additional_properties(self) -> Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]:
        """``additionalProperties``: schema or boolean for unlisted object properties."""
        return pulumi.get(self, "additional_properties")
    @additional_properties.setter
    def additional_properties(self, value: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', bool]]]):
        pulumi.set(self, "additional_properties", value)
    @property
    @pulumi.getter(name="allOf")
    def all_of(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]:
        """``allOf`` combinator: the value must match every subschema."""
        return pulumi.get(self, "all_of")
    @all_of.setter
    def all_of(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]):
        pulumi.set(self, "all_of", value)
    @property
    @pulumi.getter(name="anyOf")
    def any_of(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]:
        """``anyOf`` combinator: the value must match at least one subschema."""
        return pulumi.get(self, "any_of")
    @any_of.setter
    def any_of(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]):
        pulumi.set(self, "any_of", value)
    @property
    @pulumi.getter
    def default(self) -> Optional[Any]:
        """``default`` value for this schema node."""
        return pulumi.get(self, "default")
    @default.setter
    def default(self, value: Optional[Any]):
        pulumi.set(self, "default", value)
    # Generated accessors for JSON Schema metadata/structural keywords
    # (wire name declared via @pulumi.getter where it differs from snake_case).
    @property
    @pulumi.getter
    def definitions(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]:
        """``definitions``: named subschemas, typically referenced via ``$ref``."""
        return pulumi.get(self, "definitions")
    @definitions.setter
    def definitions(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]):
        pulumi.set(self, "definitions", value)
    @property
    @pulumi.getter
    def dependencies(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[pulumi.Input[str]]]]]]]:
        """``dependencies``: per-property schema or list of required property names."""
        return pulumi.get(self, "dependencies")
    @dependencies.setter
    def dependencies(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[pulumi.Input[str]]]]]]]):
        pulumi.set(self, "dependencies", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """``description``: human-readable description of this schema node."""
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def enum(self) -> Optional[pulumi.Input[Sequence[Any]]]:
        """``enum``: the closed set of allowed values."""
        return pulumi.get(self, "enum")
    @enum.setter
    def enum(self, value: Optional[pulumi.Input[Sequence[Any]]]):
        pulumi.set(self, "enum", value)
    @property
    @pulumi.getter
    def example(self) -> Optional[Any]:
        """``example``: an example value (documentation only)."""
        return pulumi.get(self, "example")
    @example.setter
    def example(self, value: Optional[Any]):
        pulumi.set(self, "example", value)
    @property
    @pulumi.getter(name="exclusiveMaximum")
    def exclusive_maximum(self) -> Optional[pulumi.Input[bool]]:
        """``exclusiveMaximum``: whether ``maximum`` is an exclusive bound."""
        return pulumi.get(self, "exclusive_maximum")
    @exclusive_maximum.setter
    def exclusive_maximum(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "exclusive_maximum", value)
    @property
    @pulumi.getter(name="exclusiveMinimum")
    def exclusive_minimum(self) -> Optional[pulumi.Input[bool]]:
        """``exclusiveMinimum``: whether ``minimum`` is an exclusive bound."""
        return pulumi.get(self, "exclusive_minimum")
    @exclusive_minimum.setter
    def exclusive_minimum(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "exclusive_minimum", value)
    @property
    @pulumi.getter(name="externalDocs")
    def external_docs(self) -> Optional[pulumi.Input['ExternalDocumentationArgs']]:
        """``externalDocs``: link to external documentation for this schema."""
        return pulumi.get(self, "external_docs")
    @external_docs.setter
    def external_docs(self, value: Optional[pulumi.Input['ExternalDocumentationArgs']]):
        pulumi.set(self, "external_docs", value)
    @property
    @pulumi.getter
    def format(self) -> Optional[pulumi.Input[str]]:
        """``format``: semantic format hint (e.g. ``date-time``)."""
        return pulumi.get(self, "format")
    @format.setter
    def format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "format", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """``id``: schema identifier."""
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def items(self) -> Optional[pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[Any]]]]:
        """``items``: schema (or positional schemas) for array elements."""
        return pulumi.get(self, "items")
    @items.setter
    def items(self, value: Optional[pulumi.Input[Union['JSONSchemaPropsArgs', Sequence[Any]]]]):
        pulumi.set(self, "items", value)
    # Generated accessors for JSON Schema size/numeric-constraint keywords.
    @property
    @pulumi.getter(name="maxItems")
    def max_items(self) -> Optional[pulumi.Input[int]]:
        """``maxItems``: maximum number of array elements."""
        return pulumi.get(self, "max_items")
    @max_items.setter
    def max_items(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_items", value)
    @property
    @pulumi.getter(name="maxLength")
    def max_length(self) -> Optional[pulumi.Input[int]]:
        """``maxLength``: maximum string length."""
        return pulumi.get(self, "max_length")
    @max_length.setter
    def max_length(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_length", value)
    @property
    @pulumi.getter(name="maxProperties")
    def max_properties(self) -> Optional[pulumi.Input[int]]:
        """``maxProperties``: maximum number of object properties."""
        return pulumi.get(self, "max_properties")
    @max_properties.setter
    def max_properties(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_properties", value)
    @property
    @pulumi.getter
    def maximum(self) -> Optional[pulumi.Input[float]]:
        """``maximum``: upper bound for numeric values."""
        return pulumi.get(self, "maximum")
    @maximum.setter
    def maximum(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "maximum", value)
    @property
    @pulumi.getter(name="minItems")
    def min_items(self) -> Optional[pulumi.Input[int]]:
        """``minItems``: minimum number of array elements."""
        return pulumi.get(self, "min_items")
    @min_items.setter
    def min_items(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_items", value)
    @property
    @pulumi.getter(name="minLength")
    def min_length(self) -> Optional[pulumi.Input[int]]:
        """``minLength``: minimum string length."""
        return pulumi.get(self, "min_length")
    @min_length.setter
    def min_length(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_length", value)
    @property
    @pulumi.getter(name="minProperties")
    def min_properties(self) -> Optional[pulumi.Input[int]]:
        """``minProperties``: minimum number of object properties."""
        return pulumi.get(self, "min_properties")
    @min_properties.setter
    def min_properties(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_properties", value)
    @property
    @pulumi.getter
    def minimum(self) -> Optional[pulumi.Input[float]]:
        """``minimum``: lower bound for numeric values."""
        return pulumi.get(self, "minimum")
    @minimum.setter
    def minimum(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "minimum", value)
    @property
    @pulumi.getter(name="multipleOf")
    def multiple_of(self) -> Optional[pulumi.Input[float]]:
        """``multipleOf``: numeric values must be a multiple of this."""
        return pulumi.get(self, "multiple_of")
    @multiple_of.setter
    def multiple_of(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "multiple_of", value)
    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input['JSONSchemaPropsArgs']]:
        """``not`` combinator: the value must NOT match this subschema."""
        return pulumi.get(self, "not_")
    @not_.setter
    def not_(self, value: Optional[pulumi.Input['JSONSchemaPropsArgs']]):
        pulumi.set(self, "not_", value)
    # Generated accessors for JSON Schema object/string/array keywords.
    @property
    @pulumi.getter
    def nullable(self) -> Optional[pulumi.Input[bool]]:
        """``nullable``: whether ``null`` is an accepted value."""
        return pulumi.get(self, "nullable")
    @nullable.setter
    def nullable(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "nullable", value)
    @property
    @pulumi.getter(name="oneOf")
    def one_of(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]:
        """``oneOf`` combinator: the value must match exactly one subschema."""
        return pulumi.get(self, "one_of")
    @one_of.setter
    def one_of(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JSONSchemaPropsArgs']]]]):
        pulumi.set(self, "one_of", value)
    @property
    @pulumi.getter
    def pattern(self) -> Optional[pulumi.Input[str]]:
        """``pattern``: regular expression that string values must match."""
        return pulumi.get(self, "pattern")
    @pattern.setter
    def pattern(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pattern", value)
    @property
    @pulumi.getter(name="patternProperties")
    def pattern_properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]:
        """``patternProperties``: schemas keyed by property-name regex."""
        return pulumi.get(self, "pattern_properties")
    @pattern_properties.setter
    def pattern_properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]):
        pulumi.set(self, "pattern_properties", value)
    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]:
        """``properties``: schema for each named object property."""
        return pulumi.get(self, "properties")
    @properties.setter
    def properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['JSONSchemaPropsArgs']]]]):
        pulumi.set(self, "properties", value)
    @property
    @pulumi.getter
    def required(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """``required``: names of mandatory object properties."""
        return pulumi.get(self, "required")
    @required.setter
    def required(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "required", value)
    @property
    @pulumi.getter
    def title(self) -> Optional[pulumi.Input[str]]:
        """``title``: short title for this schema node."""
        return pulumi.get(self, "title")
    @title.setter
    def title(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "title", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """``type``: JSON type name (``object``, ``array``, ``string``, ...)."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="uniqueItems")
    def unique_items(self) -> Optional[pulumi.Input[bool]]:
        """``uniqueItems``: whether array elements must be distinct."""
        return pulumi.get(self, "unique_items")
    @unique_items.setter
    def unique_items(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "unique_items", value)
    # Generated accessors for Kubernetes-specific CRD schema extensions
    # (``x-kubernetes-*`` fields; wire-name mapping handled by @pulumi.getter).
    @property
    @pulumi.getter
    def x_kubernetes_embedded_resource(self) -> Optional[pulumi.Input[bool]]:
        """``x_kubernetes_embedded_resource`` CRD extension flag."""
        return pulumi.get(self, "x_kubernetes_embedded_resource")
    @x_kubernetes_embedded_resource.setter
    def x_kubernetes_embedded_resource(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "x_kubernetes_embedded_resource", value)
    @property
    @pulumi.getter
    def x_kubernetes_int_or_string(self) -> Optional[pulumi.Input[bool]]:
        """``x_kubernetes_int_or_string`` CRD extension flag."""
        return pulumi.get(self, "x_kubernetes_int_or_string")
    @x_kubernetes_int_or_string.setter
    def x_kubernetes_int_or_string(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "x_kubernetes_int_or_string", value)
    @property
    @pulumi.getter
    def x_kubernetes_list_map_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """``x_kubernetes_list_map_keys`` CRD extension (key fields for map lists)."""
        return pulumi.get(self, "x_kubernetes_list_map_keys")
    @x_kubernetes_list_map_keys.setter
    def x_kubernetes_list_map_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "x_kubernetes_list_map_keys", value)
    @property
    @pulumi.getter
    def x_kubernetes_list_type(self) -> Optional[pulumi.Input[str]]:
        """``x_kubernetes_list_type`` CRD extension."""
        return pulumi.get(self, "x_kubernetes_list_type")
    @x_kubernetes_list_type.setter
    def x_kubernetes_list_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "x_kubernetes_list_type", value)
    @property
    @pulumi.getter
    def x_kubernetes_map_type(self) -> Optional[pulumi.Input[str]]:
        """``x_kubernetes_map_type`` CRD extension."""
        return pulumi.get(self, "x_kubernetes_map_type")
    @x_kubernetes_map_type.setter
    def x_kubernetes_map_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "x_kubernetes_map_type", value)
    @property
    @pulumi.getter
    def x_kubernetes_preserve_unknown_fields(self) -> Optional[pulumi.Input[bool]]:
        """``x_kubernetes_preserve_unknown_fields`` CRD extension flag."""
        return pulumi.get(self, "x_kubernetes_preserve_unknown_fields")
    @x_kubernetes_preserve_unknown_fields.setter
    def x_kubernetes_preserve_unknown_fields(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "x_kubernetes_preserve_unknown_fields", value)
    @property
    @pulumi.getter
    def x_kubernetes_validations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]]]:
        """``x_kubernetes_validations``: validation rules attached to this node."""
        return pulumi.get(self, "x_kubernetes_validations")
    @x_kubernetes_validations.setter
    def x_kubernetes_validations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ValidationRuleArgs']]]]):
        pulumi.set(self, "x_kubernetes_validations", value)
@pulumi.input_type
class ServiceReferenceArgs:
    """Reference to a service by namespace/name, with an optional path and
    port (used as the webhook backend in ``WebhookClientConfigArgs``)."""
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 namespace: pulumi.Input[str],
                 path: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None):
        # Required fields are stored unconditionally; optional ones only
        # when a value was actually supplied.
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "namespace", namespace)
        for attr, supplied in (("path", path), ("port", port)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the referenced service."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def namespace(self) -> pulumi.Input[str]:
        """Namespace of the referenced service."""
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, value: pulumi.Input[str]):
        pulumi.set(self, "namespace", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """Optional URL path on the service."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """Optional service port."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)
@pulumi.input_type
class ValidationRuleArgs:
    """A validation rule (expression plus optional message), as used by
    ``JSONSchemaPropsArgs.x_kubernetes_validations``."""
    def __init__(__self__, *,
                 rule: pulumi.Input[str],
                 message: Optional[pulumi.Input[str]] = None):
        # `rule` is required; `message` is only stored when supplied.
        pulumi.set(__self__, "rule", rule)
        if message is not None:
            pulumi.set(__self__, "message", message)
    @property
    @pulumi.getter
    def rule(self) -> pulumi.Input[str]:
        """The validation expression itself."""
        return pulumi.get(self, "rule")
    @rule.setter
    def rule(self, value: pulumi.Input[str]):
        pulumi.set(self, "rule", value)
    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """Optional human-readable message accompanying the rule."""
        return pulumi.get(self, "message")
    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)
@pulumi.input_type
class WebhookClientConfigArgs:
    """Connection settings for a webhook: an in-cluster ``service``
    reference and/or a direct ``url``, plus an optional CA bundle."""
    def __init__(__self__, *,
                 ca_bundle: Optional[pulumi.Input[str]] = None,
                 service: Optional[pulumi.Input['ServiceReferenceArgs']] = None,
                 url: Optional[pulumi.Input[str]] = None):
        # Every field is optional: store only the ones that were supplied.
        for attr, supplied in (("ca_bundle", ca_bundle),
                               ("service", service),
                               ("url", url)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="caBundle")
    def ca_bundle(self) -> Optional[pulumi.Input[str]]:
        """CA bundle for the webhook endpoint (wire name ``caBundle``)."""
        return pulumi.get(self, "ca_bundle")

    @ca_bundle.setter
    def ca_bundle(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ca_bundle", value)

    @property
    @pulumi.getter
    def service(self) -> Optional[pulumi.Input['ServiceReferenceArgs']]:
        """Reference to the in-cluster service backing the webhook."""
        return pulumi.get(self, "service")

    @service.setter
    def service(self, value: Optional[pulumi.Input['ServiceReferenceArgs']]):
        pulumi.set(self, "service", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """Direct URL of the webhook endpoint."""
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class WebhookConversionArgs:
    """Webhook conversion settings: the ``ConversionReview`` versions the
    webhook understands, plus optional client connection configuration."""
    def __init__(__self__, *,
                 conversion_review_versions: pulumi.Input[Sequence[pulumi.Input[str]]],
                 client_config: Optional[pulumi.Input['WebhookClientConfigArgs']] = None):
        # The version list is required; client_config is stored only if given.
        pulumi.set(__self__, "conversion_review_versions", conversion_review_versions)
        if client_config is not None:
            pulumi.set(__self__, "client_config", client_config)
    @property
    @pulumi.getter(name="conversionReviewVersions")
    def conversion_review_versions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """``ConversionReview`` versions supported by the webhook (wire name
        ``conversionReviewVersions``)."""
        return pulumi.get(self, "conversion_review_versions")
    @conversion_review_versions.setter
    def conversion_review_versions(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "conversion_review_versions", value)
    @property
    @pulumi.getter(name="clientConfig")
    def client_config(self) -> Optional[pulumi.Input['WebhookClientConfigArgs']]:
        """Optional connection settings for reaching the webhook."""
        return pulumi.get(self, "client_config")
    @client_config.setter
    def client_config(self, value: Optional[pulumi.Input['WebhookClientConfigArgs']]):
        pulumi.set(self, "client_config", value)
| true | true |
f7fa799f62c1cca171c6dbc0987d4bfd13654500 | 790 | py | Python | setup.py | chrisemezue/joeynmt | 9472d57b2956466db7dcf1eb184cfa7a0864ab4c | [
"Apache-2.0"
] | null | null | null | setup.py | chrisemezue/joeynmt | 9472d57b2956466db7dcf1eb184cfa7a0864ab4c | [
"Apache-2.0"
] | null | null | null | setup.py | chrisemezue/joeynmt | 9472d57b2956466db7dcf1eb184cfa7a0864ab4c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from setuptools import setup, find_packages
with open("requirements.txt", encoding="utf-8") as req_fp:
install_requires = req_fp.readlines()
setup(
name='joeynmt',
version='1.4',
description='Minimalist NMT for educational purposes',
author='Jasmijn Bastings and Julia Kreutzer',
url='https://github.com/joeynmt/joeynmt',
license='Apache License',
install_requires=install_requires,
packages=find_packages(exclude=[]),
python_requires='>=3.7',
project_urls={
'Documentation': 'http://joeynmt.readthedocs.io/en/latest/',
'Source': 'https://github.com/joeynmt/joeynmt',
'Tracker': 'https://github.com/joeynmt/joeynmt/issues',
},
entry_points={
'console_scripts': [
],
}
)
| 29.259259 | 68 | 0.660759 |
from setuptools import setup, find_packages
with open("requirements.txt", encoding="utf-8") as req_fp:
install_requires = req_fp.readlines()
setup(
name='joeynmt',
version='1.4',
description='Minimalist NMT for educational purposes',
author='Jasmijn Bastings and Julia Kreutzer',
url='https://github.com/joeynmt/joeynmt',
license='Apache License',
install_requires=install_requires,
packages=find_packages(exclude=[]),
python_requires='>=3.7',
project_urls={
'Documentation': 'http://joeynmt.readthedocs.io/en/latest/',
'Source': 'https://github.com/joeynmt/joeynmt',
'Tracker': 'https://github.com/joeynmt/joeynmt/issues',
},
entry_points={
'console_scripts': [
],
}
)
| true | true |
f7fa79a3091015a40aa9623cfce3fb7d9ba1b04a | 6,181 | py | Python | src/audio_library/views.py | Speccy-Rom/Soundclick | 9cf51faf0d4721836663c0d66875785107729779 | [
"MIT"
] | 1 | 2021-12-15T16:49:51.000Z | 2021-12-15T16:49:51.000Z | src/audio_library/views.py | Speccy-Rom/Soundclick | 9cf51faf0d4721836663c0d66875785107729779 | [
"MIT"
] | null | null | null | src/audio_library/views.py | Speccy-Rom/Soundclick | 9cf51faf0d4721836663c0d66875785107729779 | [
"MIT"
] | null | null | null | import os
from django.http import FileResponse, Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import generics, parsers, views, viewsets
from ..base.classes import MixedSerializer, Pagination
from ..base.permissions import IsAuthor
from ..base.services import delete_old_file
from . import models, serializer
class GenreView(generics.ListAPIView):
    """Read-only list of all music genres."""
    queryset = models.Genre.objects.all()
    serializer_class = serializer.GenreSerializer
class LicenseView(viewsets.ModelViewSet):
    """CRUD over the requesting author's licenses.

    Only licenses owned by the authenticated user are visible, and new
    licenses are stamped with that user on creation.
    """

    permission_classes = [IsAuthor]
    serializer_class = serializer.LicenseSerializer

    def get_queryset(self):
        # Scope every operation to the current user's own licenses.
        owner = self.request.user
        return models.License.objects.filter(user=owner)

    def perform_create(self, serializer):
        # Stamp the authenticated user as the owner server-side.
        serializer.save(user=self.request.user)
class AlbumView(viewsets.ModelViewSet):
    """CRUD over the requesting author's albums (multipart upload supported).

    On deletion the album's cover file is removed from disk as well.
    """

    parser_classes = (parsers.MultiPartParser,)
    permission_classes = [IsAuthor]
    serializer_class = serializer.AlbumSerializer

    def get_queryset(self):
        owner = self.request.user
        return models.Album.objects.filter(user=owner)

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)

    def perform_destroy(self, instance):
        # Drop the cover image from disk before deleting the DB row.
        delete_old_file(instance.cover.path)
        instance.delete()
class PublicAlbumView(generics.ListAPIView):
    """List the non-private albums of the author given by URL ``pk``."""

    serializer_class = serializer.AlbumSerializer

    def get_queryset(self):
        author_id = self.kwargs.get("pk")
        return models.Album.objects.filter(user__id=author_id, private=False)
class TrackView(MixedSerializer, viewsets.ModelViewSet):
    """CRUD over the requesting author's tracks.

    ``list`` uses a lighter serializer (via ``serializer_classes_by_action``);
    deleting a track also removes its cover image and audio file from disk.
    """

    parser_classes = (parsers.MultiPartParser,)
    permission_classes = [IsAuthor]
    serializer_class = serializer.CreateAuthorTrackSerializer
    serializer_classes_by_action = {"list": serializer.AuthorTrackSerializer}

    def get_queryset(self):
        owner = self.request.user
        return models.Track.objects.filter(user=owner)

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)

    def perform_destroy(self, instance):
        # Remove both stored files before the DB row goes away.
        for stored_path in (instance.cover.path, instance.file.path):
            delete_old_file(stored_path)
        instance.delete()
class PlayListView(MixedSerializer, viewsets.ModelViewSet):
    """CRUD over the requesting user's playlists."""

    parser_classes = (parsers.MultiPartParser,)
    permission_classes = [IsAuthor]
    serializer_class = serializer.CreatePlayListSerializer
    serializer_classes_by_action = {"list": serializer.PlayListSerializer}

    def get_queryset(self):
        owner = self.request.user
        return models.PlayList.objects.filter(user=owner)

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)

    def perform_destroy(self, instance):
        # The playlist cover lives on disk; remove it alongside the row.
        delete_old_file(instance.cover.path)
        instance.delete()
class TrackListView(generics.ListAPIView):
    """Paginated, filterable list of every track that is publicly visible
    (neither the track nor its album is marked private)."""
    queryset = models.Track.objects.filter(album__private=False, private=False)
    serializer_class = serializer.AuthorTrackSerializer
    pagination_class = Pagination
    filter_backends = [DjangoFilterBackend]
    # Query-string filters handled by django-filter.
    filterset_fields = ["title", "user__display_name", "album__name", "genre__name"]
class AuthorTrackListView(generics.ListAPIView):
    """Paginated, filterable list of one author's publicly visible tracks.

    The author is identified by the URL ``pk``; private tracks and tracks
    on private albums are excluded.
    """

    serializer_class = serializer.AuthorTrackSerializer
    pagination_class = Pagination
    filter_backends = [DjangoFilterBackend]
    filterset_fields = ["title", "album__name", "genre__name"]

    def get_queryset(self):
        author_id = self.kwargs.get("pk")
        return models.Track.objects.filter(
            user__id=author_id, album__private=False, private=False
        )
class CommentAuthorView(viewsets.ModelViewSet):
    """CRUD over comments written by the requesting author."""

    permission_classes = [IsAuthor]
    serializer_class = serializer.CommentAuthorSerializer

    def get_queryset(self):
        author = self.request.user
        return models.Comment.objects.filter(user=author)

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)
class CommentView(viewsets.ModelViewSet):
    """Comments attached to the track given by URL ``pk``."""

    serializer_class = serializer.CommentSerializer

    def get_queryset(self):
        track_pk = self.kwargs.get("pk")
        return models.Comment.objects.filter(track_id=track_pk)
class StreamingFileView(views.APIView):
    """Stream a public track via nginx ``X-Accel-Redirect``.

    This view only authorizes the request, bumps the play counter and emits
    the redirect header; the bytes themselves are served by nginx from the
    ``/mp3/`` location.
    """
    def set_play(self):
        # Count one play per successful request.
        self.track.plays_count += 1
        self.track.save()

    def get(self, request, pk):
        self.track = get_object_or_404(models.Track, id=pk, private=False)
        if not os.path.exists(self.track.file.path):
            # Bug fix: the original *returned* the Http404 class instead of
            # raising it; Django only converts a raised Http404 into a 404
            # response.
            raise Http404()
        self.set_play()
        response = HttpResponse("", content_type="audio/mpeg", status=206)
        response["X-Accel-Redirect"] = f"/mp3/{self.track.file.name}"
        return response
class DownloadTrackView(views.APIView):
    """Serve a public track as a file download via nginx ``X-Accel-Redirect``.

    Bumps the download counter and sets ``Content-Disposition: attachment``
    so the browser saves the file; nginx serves the bytes from ``/media/``.
    """
    def set_download(self):
        # Count one download per successful request.
        self.track.download += 1
        self.track.save()

    def get(self, request, pk):
        self.track = get_object_or_404(models.Track, id=pk, private=False)
        if not os.path.exists(self.track.file.path):
            # Bug fix: was `return Http404`, which returned the exception
            # class itself; it must be raised to produce a 404 response.
            raise Http404()
        self.set_download()
        response = HttpResponse("", content_type="audio/mpeg", status=206)
        response["Content-Disposition"] = f"attachment; filename={self.track.file.name}"
        response["X-Accel-Redirect"] = f"/media/{self.track.file.name}"
        return response
class StreamingFileAuthorView(views.APIView):
    """Stream one of the requesting author's own tracks (private included).

    Unlike ``StreamingFileView`` this does not bump the play counter, and it
    matches the track by owner instead of by the ``private`` flag.
    """
    permission_classes = [IsAuthor]

    def get(self, request, pk):
        self.track = get_object_or_404(models.Track, id=pk, user=request.user)
        if not os.path.exists(self.track.file.path):
            # Bug fix: was `return Http404` (returned the class, never a 404).
            raise Http404()
        response = HttpResponse("", content_type="audio/mpeg", status=206)
        response["X-Accel-Redirect"] = f"/mp3/{self.track.file.name}"
        return response
| 30.905 | 84 | 0.700857 | import os
from django.http import FileResponse, Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import generics, parsers, views, viewsets
from ..base.classes import MixedSerializer, Pagination
from ..base.permissions import IsAuthor
from ..base.services import delete_old_file
from . import models, serializer
class GenreView(generics.ListAPIView):
queryset = models.Genre.objects.all()
serializer_class = serializer.GenreSerializer
class LicenseView(viewsets.ModelViewSet):
serializer_class = serializer.LicenseSerializer
permission_classes = [IsAuthor]
def get_queryset(self):
return models.License.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
class AlbumView(viewsets.ModelViewSet):
parser_classes = (parsers.MultiPartParser,)
serializer_class = serializer.AlbumSerializer
permission_classes = [IsAuthor]
def get_queryset(self):
return models.Album.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def perform_destroy(self, instance):
delete_old_file(instance.cover.path)
instance.delete()
class PublicAlbumView(generics.ListAPIView):
serializer_class = serializer.AlbumSerializer
def get_queryset(self):
return models.Album.objects.filter(
user__id=self.kwargs.get("pk"), private=False
)
class TrackView(MixedSerializer, viewsets.ModelViewSet):
parser_classes = (parsers.MultiPartParser,)
permission_classes = [IsAuthor]
serializer_class = serializer.CreateAuthorTrackSerializer
serializer_classes_by_action = {"list": serializer.AuthorTrackSerializer}
def get_queryset(self):
return models.Track.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def perform_destroy(self, instance):
delete_old_file(instance.cover.path)
delete_old_file(instance.file.path)
instance.delete()
class PlayListView(MixedSerializer, viewsets.ModelViewSet):
parser_classes = (parsers.MultiPartParser,)
permission_classes = [IsAuthor]
serializer_class = serializer.CreatePlayListSerializer
serializer_classes_by_action = {"list": serializer.PlayListSerializer}
def get_queryset(self):
return models.PlayList.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def perform_destroy(self, instance):
delete_old_file(instance.cover.path)
instance.delete()
class TrackListView(generics.ListAPIView):
queryset = models.Track.objects.filter(album__private=False, private=False)
serializer_class = serializer.AuthorTrackSerializer
pagination_class = Pagination
filter_backends = [DjangoFilterBackend]
filterset_fields = ["title", "user__display_name", "album__name", "genre__name"]
class AuthorTrackListView(generics.ListAPIView):
serializer_class = serializer.AuthorTrackSerializer
pagination_class = Pagination
filter_backends = [DjangoFilterBackend]
filterset_fields = ["title", "album__name", "genre__name"]
def get_queryset(self):
return models.Track.objects.filter(
user__id=self.kwargs.get("pk"), album__private=False, private=False
)
class CommentAuthorView(viewsets.ModelViewSet):
serializer_class = serializer.CommentAuthorSerializer
permission_classes = [IsAuthor]
def get_queryset(self):
return models.Comment.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
class CommentView(viewsets.ModelViewSet):
serializer_class = serializer.CommentSerializer
def get_queryset(self):
return models.Comment.objects.filter(track_id=self.kwargs.get("pk"))
class StreamingFileView(views.APIView):
def set_play(self):
self.track.plays_count += 1
self.track.save()
def get(self, request, pk):
self.track = get_object_or_404(models.Track, id=pk, private=False)
if os.path.exists(self.track.file.path):
self.set_play()
response = HttpResponse("", content_type="audio/mpeg", status=206)
response["X-Accel-Redirect"] = f"/mp3/{self.track.file.name}"
return response
else:
return Http404
class DownloadTrackView(views.APIView):
def set_download(self):
self.track.download += 1
self.track.save()
def get(self, request, pk):
self.track = get_object_or_404(models.Track, id=pk, private=False)
if os.path.exists(self.track.file.path):
self.set_download()
response = HttpResponse("", content_type="audio/mpeg", status=206)
response[
"Content-Disposition"
] = f"attachment; filename={self.track.file.name}"
response["X-Accel-Redirect"] = f"/media/{self.track.file.name}"
return response
else:
return Http404
class StreamingFileAuthorView(views.APIView):
permission_classes = [IsAuthor]
def get(self, request, pk):
self.track = get_object_or_404(models.Track, id=pk, user=request.user)
if os.path.exists(self.track.file.path):
response = HttpResponse("", content_type="audio/mpeg", status=206)
response["X-Accel-Redirect"] = f"/mp3/{self.track.file.name}"
return response
else:
return Http404
| true | true |
f7fa79b877b13f10e1a1cb1711228985ba3c34cd | 11,298 | py | Python | modules/weather.py | AlexanderBaransky/PilotTelegramBot-telebot | 009a99334e56c7129230106dc717358bcd894a85 | [
"Apache-2.0"
] | null | null | null | modules/weather.py | AlexanderBaransky/PilotTelegramBot-telebot | 009a99334e56c7129230106dc717358bcd894a85 | [
"Apache-2.0"
] | null | null | null | modules/weather.py | AlexanderBaransky/PilotTelegramBot-telebot | 009a99334e56c7129230106dc717358bcd894a85 | [
"Apache-2.0"
] | null | null | null | import telebot
import config
from translation import tw
from telebot import types
import requests
import json
import time
from geopy.geocoders import Nominatim
# Shared bot client and geocoder used by every handler in this module.
bot = telebot.TeleBot(config.token)
geolocator = Nominatim(user_agent="pilot_telegram_bot")
# Per-reply bookkeeping, keyed by the bot's reply message_id:
#   weathers[id]  -> requesting user's id (set in weather())
#   forecasts[id] -> list filled while building a multi-day forecast
#                    (set in forecast()).
# NOTE(review): no pruning is visible in this window, so these dicts appear
# to grow for the process lifetime — confirm against the callback handlers.
forecasts = {}
weathers = {}
def weather(message):
    """Handle the ``/weather <city>`` command: reply with current conditions.

    The city argument is geocoded with Nominatim and current conditions are
    fetched from the OpenWeatherMap One Call API.  Any failure (missing
    argument, network error, unexpected payload) is reported to the user as
    the localized default error.
    """
    trans = tw.get_translation(message)
    if trans == 1:
        # get_translation() signals "no translation available" with 1.
        return
    try:
        # words[1] raises IndexError when the city argument is missing;
        # that is caught below and reported as a generic error.
        words = message.text.split()
        city_name = words[1]
        loc = geolocator.geocode(city_name)
        if loc is None:
            bot.reply_to(message, trans['weather']['city_not_found_err'])
            return
        weather_message = bot.send_message(chat_id=message.chat.id,
                                           text=trans['weather']['making_forecast'],
                                           reply_to_message_id=message.message_id)
        # Remember who requested this reply so the "close" callback can be
        # restricted to the requester.
        weathers[weather_message.message_id] = message.from_user.id
        # NOTE(review): the API key is hard-coded; it should live in config.
        response = requests.get(
            'https://api.openweathermap.org/data/2.5/onecall'
            f'?lat={loc.latitude}&lon={loc.longitude}'
            '&appid=c1c0032b6ff3be83e44ab641e780fc3d&lang=RU&units=metric'
        )
        data = json.loads(response.content)
        # Bug fix: the original re-joined loc.address.split(',') element by
        # element but compared each element against the *first* one by value,
        # dropping the comma before any later duplicate component.  Splitting
        # and re-joining on ',' is just the address itself.
        dest = loc.address
        current = data['current']
        divider = '━━━━━━━━━━━━━━━━━━━━'
        lines = [
            trans['weather']['weather_in'].format(city=dest),
            divider,
            trans['weather']['weather']['current_weather'],
            divider,
            '<b>' + str(current['temp']) + ' °C <i>'
            + current['weather'][0]['description'].capitalize() + '</i></b>',
            trans['weather']['weather']['feels_like'].format(
                feels_like=str(current['feels_like'])),
            trans['weather']['humidity'].format(humidity=str(current['humidity'])),
            trans['weather']['pressure'].format(pressure=str(current['pressure'])),
            trans['weather']['wind_speed'].format(wind_speed=str(current['wind_speed'])),
            trans['weather']['cloudiness'].format(cloudiness=str(current['clouds'])),
            trans['weather']['uvi'].format(uvi=str(current['uvi'])),
        ]
        keyboard = types.InlineKeyboardMarkup()
        key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
                                               callback_data='weather_close')
        keyboard.add(key_close)
        bot.edit_message_text(chat_id=message.chat.id,
                              message_id=weather_message.message_id,
                              text='\n'.join(lines),
                              parse_mode='HTML',
                              reply_markup=keyboard)
    except Exception:
        # Catch-all so a malformed command or API hiccup never crashes the
        # bot thread; the user just gets the localized default error.
        bot.reply_to(message, trans['global']['errors']['default'])
def forecast(message):
trans = tw.get_translation(message)
if trans == 1:
return
try:
words = message.text.split()
city_name = words[1]
loc = geolocator.geocode(city_name)
if loc is None:
bot.reply_to(message, trans['weather']['city_not_found_err'])
else:
forecast_message = bot.send_message(chat_id=message.chat.id,
text=trans['weather']['making_forecast'],
reply_to_message_id=message.message_id)
global forecasts
forecasts[forecast_message.message_id] = []
response = requests.get('https://api.openweathermap.org/data/2.5/onecall?lat=' + str(loc.latitude) +
'&lon=' + str(loc.longitude) + '&appid=c1c0032b6ff3be83e44ab641e780fc3d&lang=RU' +
'&units=metric')
data = json.loads(response.content)
destination = loc.address.split(',')
for i in range(8):
dest = ''
for j in destination:
if j == destination[0]:
dest += j
else:
dest += ',' + j
text = trans['weather']['weather_in'].format(city=dest) + '\n'
text += '━━━━━━━━━━━━━━━━━━━━\n'
text += trans['weather']['forecast']['forecast_for'].format(time=time.strftime("%d/%m", time.gmtime(data['daily'][i]['dt']))) + '\n'
text += '━━━━━━━━━━━━━━━━━━━━\n'
text += '<b>' + str(data['daily'][i]['temp']['day']) + ' °C <i>' + data['daily'][i]['weather'][0][
'description'].capitalize() + '</i></b>\n'
text += trans['weather']['forecast']['min_temp'].format(
min_temp=str(data['daily'][i]['temp']['min']))+'\n'
text += trans['weather']['forecast']['max_temp'].format(
max_temp=str(data['daily'][i]['temp']['max']))+'\n'
text += trans['weather']['forecast']['morn_temp'].format(
morn_temp=str(data['daily'][i]['temp']['morn']))+'\n'
text += trans['weather']['forecast']['eve_temp'].format(
eve_temp=str(data['daily'][i]['temp']['eve']))+'\n'
text += trans['weather']['forecast']['night_temp'].format(
night_temp=str(data['daily'][i]['temp']['night']))+'\n'
text += trans['weather']['humidity'].format(humidity=str(data['current']['humidity'])) + '\n'
text += trans['weather']['pressure'].format(pressure=str(data['current']['pressure'])) + '\n'
text += trans['weather']['wind_speed'].format(wind_speed=str(data['current']['wind_speed'])) + '\n'
text += trans['weather']['cloudiness'].format(cloudiness=str(data['current']['clouds'])) + '\n'
text += trans['weather']['uvi'].format(uvi=str(data['current']['uvi']))
forecasts[forecast_message.message_id].append(text)
forecasts[forecast_message.message_id].append(0)
forecasts[forecast_message.message_id].append(message.from_user.id)
keyboard = types.InlineKeyboardMarkup(row_width=2)
key_prev = types.InlineKeyboardButton(text='<<', callback_data='forecast_prev')
key_next = types.InlineKeyboardButton(text='>>', callback_data='forecast_next')
keyboard.add(key_prev, key_next)
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
callback_data='forecast_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=message.chat.id,
message_id=forecast_message.message_id,
text=forecasts[forecast_message.message_id][0],
parse_mode='HTML',
reply_markup=keyboard)
except Exception:
bot.reply_to(message, trans['global']['errors']['default'])
def call_handler(call):
trans = tw.get_translation(call)
if trans == 1:
return
if call.data == 'forecast_prev':
if call.from_user.id == forecasts[call.message.message_id][9]:
if not forecasts[call.message.message_id][8] <= 0:
forecasts[call.message.message_id][8] -= 1
keyboard = types.InlineKeyboardMarkup(row_width=2)
key_prev = types.InlineKeyboardButton(text='<<', callback_data='forecast_prev')
key_next = types.InlineKeyboardButton(text='>>', callback_data='forecast_next')
keyboard.add(key_prev, key_next)
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
callback_data='forecast_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=call.message.chat.id,
message_id=call.message.message_id,
text=forecasts[call.message.message_id][forecasts[call.message.message_id][8]],
parse_mode='HTML',
reply_markup=keyboard)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['start_of_list'])
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
elif call.data == 'forecast_next':
if call.from_user.id == forecasts[call.message.message_id][9]:
if not forecasts[call.message.message_id][8] >= 7:
forecasts[call.message.message_id][8] += 1
keyboard = types.InlineKeyboardMarkup(row_width=2)
key_prev = types.InlineKeyboardButton(text='<<', callback_data='forecast_prev')
key_next = types.InlineKeyboardButton(text='>>', callback_data='forecast_next')
keyboard.add(key_prev, key_next)
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
callback_data='forecast_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=call.message.chat.id,
message_id=call.message.message_id,
text=forecasts[call.message.message_id][forecasts[call.message.message_id][8]],
parse_mode='HTML',
reply_markup=keyboard)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['end_of_list'])
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
elif call.data == 'forecast_close':
if call.from_user.id == forecasts[call.message.message_id][9]:
forecasts.pop(call.message.message_id)
bot.delete_message(chat_id=call.message.chat.id, message_id=call.message.message_id)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
elif call.data == 'weather_close':
if call.from_user.id == weathers[call.message.message_id]:
weathers.pop(call.message.message_id)
bot.delete_message(chat_id=call.message.chat.id, message_id=call.message.message_id)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
| 50.891892 | 148 | 0.531333 | import telebot
import config
from translation import tw
from telebot import types
import requests
import json
import time
from geopy.geocoders import Nominatim
bot = telebot.TeleBot(config.token)
geolocator = Nominatim(user_agent="pilot_telegram_bot")
forecasts = {}
weathers = {}
def weather(message):
trans = tw.get_translation(message)
if trans == 1:
return
try:
words = message.text.split()
city_name = words[1]
loc = geolocator.geocode(city_name)
if loc is None:
bot.reply_to(message, trans['weather']['city_not_found_err'])
else:
weather_message = bot.send_message(chat_id=message.chat.id,
text=trans['weather']['making_forecast'],
reply_to_message_id=message.message_id)
global weathers
weathers[weather_message.message_id] = message.from_user.id
response = requests.get('https://api.openweathermap.org/data/2.5/onecall?lat=' + str(loc.latitude) +
'&lon=' + str(loc.longitude) + '&appid=c1c0032b6ff3be83e44ab641e780fc3d&lang=RU' +
'&units=metric')
data = json.loads(response.content)
destination = loc.address.split(',')
dest = ''
for i in destination:
if i == destination[0]:
dest += i
else:
dest += ',' + i
text = trans['weather']['weather_in'].format(city=dest) + '\n'
text += '━━━━━━━━━━━━━━━━━━━━\n'
text += trans['weather']['weather']['current_weather'] + '\n'
text += '━━━━━━━━━━━━━━━━━━━━\n'
text += '<b>' + str(data['current']['temp']) + ' °C <i>' + data['current']['weather'][0][
'description'].capitalize() + '</i></b>\n'
text += trans['weather']['weather']['feels_like'].format(
feels_like=str(data['current']['feels_like'])) + '\n'
text += trans['weather']['humidity'].format(humidity=str(data['current']['humidity'])) + '\n'
text += trans['weather']['pressure'].format(pressure=str(data['current']['pressure'])) + '\n'
text += trans['weather']['wind_speed'].format(wind_speed=str(data['current']['wind_speed'])) + '\n'
text += trans['weather']['cloudiness'].format(cloudiness=str(data['current']['clouds'])) + '\n'
text += trans['weather']['uvi'].format(uvi=str(data['current']['uvi']))
keyboard = types.InlineKeyboardMarkup()
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'], callback_data='weather_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=message.chat.id,
message_id=weather_message.message_id,
text=text,
parse_mode='HTML',
reply_markup=keyboard)
except Exception:
bot.reply_to(message, trans['global']['errors']['default'])
def forecast(message):
trans = tw.get_translation(message)
if trans == 1:
return
try:
words = message.text.split()
city_name = words[1]
loc = geolocator.geocode(city_name)
if loc is None:
bot.reply_to(message, trans['weather']['city_not_found_err'])
else:
forecast_message = bot.send_message(chat_id=message.chat.id,
text=trans['weather']['making_forecast'],
reply_to_message_id=message.message_id)
global forecasts
forecasts[forecast_message.message_id] = []
response = requests.get('https://api.openweathermap.org/data/2.5/onecall?lat=' + str(loc.latitude) +
'&lon=' + str(loc.longitude) + '&appid=c1c0032b6ff3be83e44ab641e780fc3d&lang=RU' +
'&units=metric')
data = json.loads(response.content)
destination = loc.address.split(',')
for i in range(8):
dest = ''
for j in destination:
if j == destination[0]:
dest += j
else:
dest += ',' + j
text = trans['weather']['weather_in'].format(city=dest) + '\n'
text += '━━━━━━━━━━━━━━━━━━━━\n'
text += trans['weather']['forecast']['forecast_for'].format(time=time.strftime("%d/%m", time.gmtime(data['daily'][i]['dt']))) + '\n'
text += '━━━━━━━━━━━━━━━━━━━━\n'
text += '<b>' + str(data['daily'][i]['temp']['day']) + ' °C <i>' + data['daily'][i]['weather'][0][
'description'].capitalize() + '</i></b>\n'
text += trans['weather']['forecast']['min_temp'].format(
min_temp=str(data['daily'][i]['temp']['min']))+'\n'
text += trans['weather']['forecast']['max_temp'].format(
max_temp=str(data['daily'][i]['temp']['max']))+'\n'
text += trans['weather']['forecast']['morn_temp'].format(
morn_temp=str(data['daily'][i]['temp']['morn']))+'\n'
text += trans['weather']['forecast']['eve_temp'].format(
eve_temp=str(data['daily'][i]['temp']['eve']))+'\n'
text += trans['weather']['forecast']['night_temp'].format(
night_temp=str(data['daily'][i]['temp']['night']))+'\n'
text += trans['weather']['humidity'].format(humidity=str(data['current']['humidity'])) + '\n'
text += trans['weather']['pressure'].format(pressure=str(data['current']['pressure'])) + '\n'
text += trans['weather']['wind_speed'].format(wind_speed=str(data['current']['wind_speed'])) + '\n'
text += trans['weather']['cloudiness'].format(cloudiness=str(data['current']['clouds'])) + '\n'
text += trans['weather']['uvi'].format(uvi=str(data['current']['uvi']))
forecasts[forecast_message.message_id].append(text)
forecasts[forecast_message.message_id].append(0)
forecasts[forecast_message.message_id].append(message.from_user.id)
keyboard = types.InlineKeyboardMarkup(row_width=2)
key_prev = types.InlineKeyboardButton(text='<<', callback_data='forecast_prev')
key_next = types.InlineKeyboardButton(text='>>', callback_data='forecast_next')
keyboard.add(key_prev, key_next)
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
callback_data='forecast_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=message.chat.id,
message_id=forecast_message.message_id,
text=forecasts[forecast_message.message_id][0],
parse_mode='HTML',
reply_markup=keyboard)
except Exception:
bot.reply_to(message, trans['global']['errors']['default'])
def call_handler(call):
trans = tw.get_translation(call)
if trans == 1:
return
if call.data == 'forecast_prev':
if call.from_user.id == forecasts[call.message.message_id][9]:
if not forecasts[call.message.message_id][8] <= 0:
forecasts[call.message.message_id][8] -= 1
keyboard = types.InlineKeyboardMarkup(row_width=2)
key_prev = types.InlineKeyboardButton(text='<<', callback_data='forecast_prev')
key_next = types.InlineKeyboardButton(text='>>', callback_data='forecast_next')
keyboard.add(key_prev, key_next)
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
callback_data='forecast_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=call.message.chat.id,
message_id=call.message.message_id,
text=forecasts[call.message.message_id][forecasts[call.message.message_id][8]],
parse_mode='HTML',
reply_markup=keyboard)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['start_of_list'])
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
elif call.data == 'forecast_next':
if call.from_user.id == forecasts[call.message.message_id][9]:
if not forecasts[call.message.message_id][8] >= 7:
forecasts[call.message.message_id][8] += 1
keyboard = types.InlineKeyboardMarkup(row_width=2)
key_prev = types.InlineKeyboardButton(text='<<', callback_data='forecast_prev')
key_next = types.InlineKeyboardButton(text='>>', callback_data='forecast_next')
keyboard.add(key_prev, key_next)
key_close = types.InlineKeyboardButton(text=trans['weather']['close_button'],
callback_data='forecast_close')
keyboard.add(key_close)
bot.edit_message_text(chat_id=call.message.chat.id,
message_id=call.message.message_id,
text=forecasts[call.message.message_id][forecasts[call.message.message_id][8]],
parse_mode='HTML',
reply_markup=keyboard)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['end_of_list'])
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
elif call.data == 'forecast_close':
if call.from_user.id == forecasts[call.message.message_id][9]:
forecasts.pop(call.message.message_id)
bot.delete_message(chat_id=call.message.chat.id, message_id=call.message.message_id)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
elif call.data == 'weather_close':
if call.from_user.id == weathers[call.message.message_id]:
weathers.pop(call.message.message_id)
bot.delete_message(chat_id=call.message.chat.id, message_id=call.message.message_id)
else:
bot.answer_callback_query(callback_query_id=call.id,
text=trans['weather']['forecast']['other_user_err'])
| true | true |
f7fa79c2247902720dbec44baa642388affb4dfa | 783 | py | Python | handlers/users/echo.py | RSk141/exchange_bot | e84ca22c9421e984acd0f88df544ad4c7b025edc | [
"MIT"
] | 2 | 2021-08-21T10:42:10.000Z | 2021-08-23T09:55:55.000Z | handlers/users/echo.py | RSk141/exchange_bot | e84ca22c9421e984acd0f88df544ad4c7b025edc | [
"MIT"
] | 1 | 2021-09-18T11:30:27.000Z | 2021-09-18T18:05:44.000Z | handlers/users/echo.py | RSk141/exchange_bot | e84ca22c9421e984acd0f88df544ad4c7b025edc | [
"MIT"
] | 1 | 2022-03-28T19:08:54.000Z | 2022-03-28T19:08:54.000Z | from aiogram import types
from aiogram.dispatcher import FSMContext
from loader import dp
# Эхо хендлер, куда летят текстовые сообщения без указанного состояния
@dp.message_handler(state=None)
async def bot_echo(message: types.Message):
await message.answer(f"Эхо без состояния."
f"Сообщение:\n"
f"{message.text}")
# Эхо хендлер, куда летят ВСЕ сообщения с указанным состоянием
@dp.message_handler(state="*", content_types=types.ContentTypes.ANY)
async def bot_echo_all(message: types.Message, state: FSMContext):
state = await state.get_state()
await message.answer(f"Эхо в состоянии <code>{state}</code>.\n"
f"\nСодержание сообщения:\n"
f"<code>{message}</code>")
| 35.590909 | 70 | 0.666667 | from aiogram import types
from aiogram.dispatcher import FSMContext
from loader import dp
@dp.message_handler(state=None)
async def bot_echo(message: types.Message):
await message.answer(f"Эхо без состояния."
f"Сообщение:\n"
f"{message.text}")
@dp.message_handler(state="*", content_types=types.ContentTypes.ANY)
async def bot_echo_all(message: types.Message, state: FSMContext):
state = await state.get_state()
await message.answer(f"Эхо в состоянии <code>{state}</code>.\n"
f"\nСодержание сообщения:\n"
f"<code>{message}</code>")
| true | true |
f7fa7a0e5238c7cc2555a16bf8a46974bafc591a | 1,958 | py | Python | ocbind/components/component/power_supply/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 1 | 2019-08-01T17:42:57.000Z | 2019-08-01T17:42:57.000Z | ocbind/components/component/power_supply/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 1 | 2021-05-26T16:38:04.000Z | 2021-05-26T16:38:04.000Z | ocbind/components/component/power_supply/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 4 | 2019-07-24T16:52:39.000Z | 2021-12-03T02:08:13.000Z | # -*- coding: utf-8 -*-
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class power_supply(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform - based on the path /components/component/power-supply. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Data for power supply components
"""
_pyangbind_elements = {}
class power_supply(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform - based on the path /components/component/power-supply. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Data for power supply components
"""
_pyangbind_elements = {}
class power_supply(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform - based on the path /components/component/power-supply. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Data for power supply components
"""
_pyangbind_elements = {}
| 31.079365 | 117 | 0.788049 |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class power_supply(PybindBase):
_pyangbind_elements = {}
class power_supply(PybindBase):
_pyangbind_elements = {}
class power_supply(PybindBase):
_pyangbind_elements = {}
| true | true |
f7fa7a5e8e3d92a05ebc6088b99cf4fa2718432f | 27,146 | py | Python | tools/odrive/utils.py | OstapFerensovych/ODrive | 918240fe3323d7040d5b1f71f899afb78da1fe6a | [
"MIT"
] | null | null | null | tools/odrive/utils.py | OstapFerensovych/ODrive | 918240fe3323d7040d5b1f71f899afb78da1fe6a | [
"MIT"
] | null | null | null | tools/odrive/utils.py | OstapFerensovych/ODrive | 918240fe3323d7040d5b1f71f899afb78da1fe6a | [
"MIT"
] | null | null | null | from __future__ import print_function
import sys
import time
import threading
import platform
import subprocess
import os
import numpy as np
from fibre.utils import Event
import odrive.enums
from odrive.enums import *
try:
if platform.system() == 'Windows':
import win32console
import colorama
colorama.init()
except ImportError:
print("Could not init terminal features.")
print("Refer to install instructions at http://docs.odriverobotics.com/#downloading-and-installing-tools")
sys.stdout.flush()
pass
if sys.version_info < (3, 0):
input = raw_input
_VT100Colors = {
'green': '\x1b[92;1m',
'cyan': '\x1b[96;1m',
'yellow': '\x1b[93;1m',
'red': '\x1b[91;1m',
'default': '\x1b[0m'
}
def calculate_thermistor_coeffs(degree, Rload, R_25, Beta, Tmin, Tmax, plot = False):
T_25 = 25 + 273.15 #Kelvin
temps = np.linspace(Tmin, Tmax, 1000)
tempsK = temps + 273.15
# https://en.wikipedia.org/wiki/Thermistor#B_or_%CE%B2_parameter_equation
r_inf = R_25 * np.exp(-Beta/T_25)
R_temps = r_inf * np.exp(Beta/tempsK)
V = Rload / (Rload + R_temps)
fit = np.polyfit(V, temps, degree)
p1 = np.poly1d(fit)
fit_temps = p1(V)
if plot:
import matplotlib.pyplot as plt
print(fit)
plt.plot(V, temps, label='actual')
plt.plot(V, fit_temps, label='fit')
plt.xlabel('normalized voltage')
plt.ylabel('Temp [C]')
plt.legend(loc=0)
plt.show()
return p1
class OperationAbortedException(Exception):
pass
def set_motor_thermistor_coeffs(axis, Rload, R_25, Beta, Tmin, TMax):
coeffs = calculate_thermistor_coeffs(3, Rload, R_25, Beta, Tmin, TMax)
axis.motor.motor_thermistor.config.poly_coefficient_0 = float(coeffs[3])
axis.motor.motor_thermistor.config.poly_coefficient_1 = float(coeffs[2])
axis.motor.motor_thermistor.config.poly_coefficient_2 = float(coeffs[1])
axis.motor.motor_thermistor.config.poly_coefficient_3 = float(coeffs[0])
def dump_errors(odrv, clear=False, printfunc = print):
axes = [(name, axis) for name, axis in odrv._remote_attributes.items() if 'axis' in name]
axes.sort()
def dump_errors_for_module(indent, name, obj, path, errorcodes):
prefix = indent + name.strip('0123456789') + ": "
for elem in path.split('.'):
if not hasattr(obj, elem):
printfunc(prefix + _VT100Colors['yellow'] + "not found" + _VT100Colors['default'])
return
parent = obj
obj = getattr(obj, elem)
if obj != 0:
printfunc(indent + name + ": " + _VT100Colors['red'] + "Error(s):" + _VT100Colors['default'])
for bit in range(64):
if obj & (1 << bit) != 0:
printfunc(indent + " " + errorcodes.get((1 << bit), 'UNKNOWN ERROR: 0x{:08X}'.format(1 << bit)))
if clear:
setattr(parent, elem, 0)
else:
printfunc(indent + name + ": " + _VT100Colors['green'] + "no error" + _VT100Colors['default'])
system_error_codes = {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("ODRIVE_ERROR_")}
dump_errors_for_module("", "system", odrv, 'error', system_error_codes)
for name, axis in axes:
printfunc(name)
# Flatten axis and submodules
# (name, obj, path, errorcode)
module_decode_map = [
('axis', axis, 'error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("AXIS_ERROR_")}),
('motor', axis, 'motor.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("MOTOR_ERROR_")}),
('sensorless_estimator', axis, 'sensorless_estimator.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("SENSORLESS_ESTIMATOR_ERROR")}),
('encoder', axis, 'encoder.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("ENCODER_ERROR_")}),
('controller', axis, 'controller.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("CONTROLLER_ERROR_")}),
]
for name, obj, path, errorcodes in module_decode_map:
dump_errors_for_module(" ", name, obj, path, errorcodes)
def oscilloscope_dump(odrv, num_vals, filename='oscilloscope.csv'):
with open(filename, 'w') as f:
for x in range(num_vals):
f.write(str(odrv.oscilloscope.get_val(x)))
f.write('\n')
data_rate = 200
plot_rate = 10
num_samples = 500
def start_liveplotter(get_var_callback):
"""
Starts a liveplotter.
The variable that is plotted is retrieved from get_var_callback.
This function returns immediately and the liveplotter quits when
the user closes it.
"""
import matplotlib.pyplot as plt
cancellation_token = Event()
global vals
vals = []
def fetch_data():
global vals
while not cancellation_token.is_set():
try:
data = get_var_callback()
except Exception as ex:
print(str(ex))
time.sleep(1)
continue
vals.append(data)
if len(vals) > num_samples:
vals = vals[-num_samples:]
time.sleep(1/data_rate)
# TODO: use animation for better UI performance, see:
# https://matplotlib.org/examples/animation/simple_anim.html
def plot_data():
global vals
plt.ion()
# Make sure the script terminates when the user closes the plotter
def closed(evt):
cancellation_token.set()
fig = plt.figure()
fig.canvas.mpl_connect('close_event', closed)
while not cancellation_token.is_set():
plt.clf()
plt.plot(vals)
plt.legend(list(range(len(vals))))
fig.canvas.draw()
fig.canvas.start_event_loop(1/plot_rate)
fetch_t = threading.Thread(target=fetch_data)
fetch_t.daemon = True
fetch_t.start()
plot_t = threading.Thread(target=plot_data)
plot_t.daemon = True
plot_t.start()
return cancellation_token;
#plot_data()
class BulkCapture:
'''
Asynchronously captures a bulk set of data when instance is created.
get_var_callback: a function that returns the data you want to collect (see the example below)
data_rate: Rate in hz
length: Length of time to capture in seconds
Example Usage:
capture = BulkCapture(lambda :[odrv0.axis0.encoder.pos_estimate, odrv0.axis0.controller.pos_setpoint])
# Do stuff while capturing (like sending position commands)
capture.event.wait() # When you're done doing stuff, wait for the capture to be completed.
print(capture.data) # Do stuff with the data
capture.plot_data() # Helper method to plot the data
'''
def __init__(self,
get_var_callback,
data_rate=500.0,
duration=2.0):
from threading import Event, Thread
import numpy as np
self.get_var_callback = get_var_callback
self.event = Event()
def loop():
vals = []
start_time = time.monotonic()
period = 1.0 / data_rate
while time.monotonic() - start_time < duration:
try:
data = get_var_callback()
except Exception as ex:
print(str(ex))
print("Waiting 1 second before next data point")
time.sleep(1)
continue
relative_time = time.monotonic() - start_time
vals.append([relative_time] + data)
time.sleep(period - (relative_time % period)) # this ensures consistently timed samples
self.data = np.array(vals) # A lock is not really necessary due to the event
print("Capture complete")
achieved_data_rate = len(self.data) / self.data[-1, 0]
if achieved_data_rate < (data_rate * 0.9):
print("Achieved average data rate: {}Hz".format(achieved_data_rate))
print("If this rate is significantly lower than what you specified, consider lowering it below the achieved value for more consistent sampling.")
self.event.set() # tell the main thread that the bulk capture is complete
Thread(target=loop, daemon=True).start()
def plot(self):
import matplotlib.pyplot as plt
import inspect
from textwrap import wrap
plt.plot(self.data[:,0], self.data[:,1:])
plt.xlabel("Time (seconds)")
title = (str(inspect.getsource(self.get_var_callback))
.strip("['\\n']")
.split(" = ")[1])
plt.title("\n".join(wrap(title, 60)))
plt.legend(range(self.data.shape[1]-1))
plt.show()
def step_and_plot( axis,
step_size=100.0,
settle_time=0.5,
data_rate=500.0,
ctrl_mode=CONTROL_MODE_POSITION_CONTROL):
if ctrl_mode is CONTROL_MODE_POSITION_CONTROL:
get_var_callback = lambda :[axis.encoder.pos_estimate, axis.controller.pos_setpoint]
initial_setpoint = axis.encoder.pos_estimate
def set_setpoint(setpoint):
axis.controller.pos_setpoint = setpoint
elif ctrl_mode is CONTROL_MODE_VELOCITY_CONTROL:
get_var_callback = lambda :[axis.encoder.vel_estimate, axis.controller.vel_setpoint]
initial_setpoint = 0
def set_setpoint(setpoint):
axis.controller.vel_setpoint = setpoint
else:
print("Invalid control mode")
return
initial_settle_time = 0.5
initial_control_mode = axis.controller.config.control_mode # Set it back afterwards
print(initial_control_mode)
axis.controller.config.control_mode = ctrl_mode
axis.requested_state = AXIS_STATE_CLOSED_LOOP_CONTROL
capture = BulkCapture(get_var_callback,
data_rate=data_rate,
duration=initial_settle_time + settle_time)
set_setpoint(initial_setpoint)
time.sleep(initial_settle_time)
set_setpoint(initial_setpoint + step_size) # relative/incremental movement
capture.event.wait() # wait for Bulk Capture to be complete
axis.requested_state = AXIS_STATE_IDLE
axis.controller.config.control_mode = initial_control_mode
capture.plot()
def print_drv_regs(name, motor):
"""
Dumps the current gate driver regisers for the specified motor
"""
fault = motor.gate_driver.drv_fault
status_reg_1 = motor.gate_driver.status_reg_1
status_reg_2 = motor.gate_driver.status_reg_2
ctrl_reg_1 = motor.gate_driver.ctrl_reg_1
ctrl_reg_2 = motor.gate_driver.ctrl_reg_2
print(name + ": " + str(fault))
print("DRV Fault Code: " + str(fault))
print("Status Reg 1: " + str(status_reg_1) + " (" + format(status_reg_1, '#010b') + ")")
print("Status Reg 2: " + str(status_reg_2) + " (" + format(status_reg_2, '#010b') + ")")
print("Control Reg 1: " + str(ctrl_reg_1) + " (" + format(ctrl_reg_1, '#013b') + ")")
print("Control Reg 2: " + str(ctrl_reg_2) + " (" + format(ctrl_reg_2, '#09b') + ")")
def show_oscilloscope(odrv):
size = 18000
values = []
for i in range(size):
values.append(odrv.oscilloscope.get_val(i))
import matplotlib.pyplot as plt
plt.plot(values)
plt.show()
def rate_test(device):
"""
Tests how many integers per second can be transmitted
"""
# import matplotlib.pyplot as plt
# plt.ion()
print("reading 10000 values...")
numFrames = 10000
vals = []
for _ in range(numFrames):
vals.append(device.axis0.loop_counter)
loopsPerFrame = (vals[-1] - vals[0])/numFrames
loopsPerSec = (168000000/(6*3500))
FramePerSec = loopsPerSec/loopsPerFrame
print("Frames per second: " + str(FramePerSec))
# plt.plot(vals)
# plt.show(block=True)
def usb_burn_in_test(get_var_callback, cancellation_token):
"""
Starts background threads that read a values form the USB device in a spin-loop
"""
def fetch_data():
global vals
i = 0
while not cancellation_token.is_set():
try:
get_var_callback()
i += 1
except Exception as ex:
print(str(ex))
time.sleep(1)
i = 0
continue
if i % 1000 == 0:
print("read {} values".format(i))
threading.Thread(target=fetch_data, daemon=True).start()
def yes_no_prompt(question, default=None):
if default is None:
question += " [y/n] "
elif default == True:
question += " [Y/n] "
elif default == False:
question += " [y/N] "
while True:
print(question, end='')
choice = input().lower()
if choice in {'yes', 'y'}:
return True
elif choice in {'no', 'n'}:
return False
elif choice == '' and default is not None:
return default
def dump_interrupts(odrv):
interrupts = [
(-12, "MemoryManagement_IRQn"),
(-11, "BusFault_IRQn"),
(-10, "UsageFault_IRQn"),
(-5, "SVCall_IRQn"),
(-4, "DebugMonitor_IRQn"),
(-2, "PendSV_IRQn"),
(-1, "SysTick_IRQn"),
(0, "WWDG_IRQn"),
(1, "PVD_IRQn"),
(2, "TAMP_STAMP_IRQn"),
(3, "RTC_WKUP_IRQn"),
(4, "FLASH_IRQn"),
(5, "RCC_IRQn"),
(6, "EXTI0_IRQn"),
(7, "EXTI1_IRQn"),
(8, "EXTI2_IRQn"),
(9, "EXTI3_IRQn"),
(10, "EXTI4_IRQn"),
(11, "DMA1_Stream0_IRQn"),
(12, "DMA1_Stream1_IRQn"),
(13, "DMA1_Stream2_IRQn"),
(14, "DMA1_Stream3_IRQn"),
(15, "DMA1_Stream4_IRQn"),
(16, "DMA1_Stream5_IRQn"),
(17, "DMA1_Stream6_IRQn"),
(18, "ADC_IRQn"),
(19, "CAN1_TX_IRQn"),
(20, "CAN1_RX0_IRQn"),
(21, "CAN1_RX1_IRQn"),
(22, "CAN1_SCE_IRQn"),
(23, "EXTI9_5_IRQn"),
(24, "TIM1_BRK_TIM9_IRQn"),
(25, "TIM1_UP_TIM10_IRQn"),
(26, "TIM1_TRG_COM_TIM11_IRQn"),
(27, "TIM1_CC_IRQn"),
(28, "TIM2_IRQn"),
(29, "TIM3_IRQn"),
(30, "TIM4_IRQn"),
(31, "I2C1_EV_IRQn"),
(32, "I2C1_ER_IRQn"),
(33, "I2C2_EV_IRQn"),
(34, "I2C2_ER_IRQn"),
(35, "SPI1_IRQn"),
(36, "SPI2_IRQn"),
(37, "USART1_IRQn"),
(38, "USART2_IRQn"),
(39, "USART3_IRQn"),
(40, "EXTI15_10_IRQn"),
(41, "RTC_Alarm_IRQn"),
(42, "OTG_FS_WKUP_IRQn"),
(43, "TIM8_BRK_TIM12_IRQn"),
(44, "TIM8_UP_TIM13_IRQn"),
(45, "TIM8_TRG_COM_TIM14_IRQn"),
(46, "TIM8_CC_IRQn"),
(47, "DMA1_Stream7_IRQn"),
(48, "FMC_IRQn"),
(49, "SDMMC1_IRQn"),
(50, "TIM5_IRQn"),
(51, "SPI3_IRQn"),
(52, "UART4_IRQn"),
(53, "UART5_IRQn"),
(54, "TIM6_DAC_IRQn"),
(55, "TIM7_IRQn"),
(56, "DMA2_Stream0_IRQn"),
(57, "DMA2_Stream1_IRQn"),
(58, "DMA2_Stream2_IRQn"),
(59, "DMA2_Stream3_IRQn"),
(60, "DMA2_Stream4_IRQn"),
(61, "ETH_IRQn"),
(62, "ETH_WKUP_IRQn"),
(63, "CAN2_TX_IRQn"),
(64, "CAN2_RX0_IRQn"),
(65, "CAN2_RX1_IRQn"),
(66, "CAN2_SCE_IRQn"),
(67, "OTG_FS_IRQn"),
(68, "DMA2_Stream5_IRQn"),
(69, "DMA2_Stream6_IRQn"),
(70, "DMA2_Stream7_IRQn"),
(71, "USART6_IRQn"),
(72, "I2C3_EV_IRQn"),
(73, "I2C3_ER_IRQn"),
(74, "OTG_HS_EP1_OUT_IRQn"),
(75, "OTG_HS_EP1_IN_IRQn"),
(76, "OTG_HS_WKUP_IRQn"),
(77, "OTG_HS_IRQn"),
# gap
(80, "RNG_IRQn"),
(81, "FPU_IRQn"),
(82, "UART7_IRQn"),
(83, "UART8_IRQn"),
(84, "SPI4_IRQn"),
(85, "SPI5_IRQn"),
# gap
(87, "SAI1_IRQn"),
# gap
(91, "SAI2_IRQn"),
(92, "QUADSPI_IRQn"),
(93, "LPTIM1_IRQn"),
# gap
(103, "SDMMC2_IRQn")
]
print("| # | Name | Prio | En | Count |")
print("|-----|-------------------------|------|----|---------|")
for irqn, irq_name in interrupts:
status = odrv.get_interrupt_status(irqn)
if (status != 0):
print("| {} | {} | {} | {} | {} |".format(
str(irqn).rjust(3),
irq_name.ljust(23),
str(status & 0xff).rjust(4),
" *" if (status & 0x80000000) else " ",
str((status >> 8) & 0x7fffff).rjust(7)))
def dump_threads(odrv):
    """Print a table of the RTOS threads reported by the device.

    ``odrv.system_stats`` is expected to expose, for each thread <name>, the
    attributes ``max_stack_usage_<name>``, ``stack_size_<name>`` and
    ``prio_<name>``. Threads missing any of the three are skipped with a
    warning.
    """
    prefixes = ["max_stack_usage_", "stack_size_", "prio_"]
    # Thread names found behind any of the three prefixes (with duplicates).
    keys = [k[len(prefix):] for k in dir(odrv.system_stats) for prefix in prefixes if k.startswith(prefix)]
    # A thread is complete if all three stats are present for it.
    good_keys = set([k for k in set(keys) if keys.count(k) == len(prefixes)])
    # Bug fix: good_keys is always a subset of set(keys), so the original `>`
    # comparison could never be true; warn when some threads are incomplete.
    if len(good_keys) < len(set(keys)):
        print("Warning: incomplete thread information for threads {}".format(set(keys) - good_keys))
    print("| Name | Stack Size [B] | Max Ever Stack Usage [B] | Prio |")
    print("|---------|----------------|--------------------------|------|")
    for k in sorted(good_keys):
        sz = getattr(odrv.system_stats, "stack_size_" + k)
        use = getattr(odrv.system_stats, "max_stack_usage_" + k)
        print("| {} | {} | {} | {} |".format(
            k.ljust(7),
            str(sz).rjust(14),
            "{} ({:.1f}%)".format(use, use / sz * 100).rjust(24),
            str(getattr(odrv.system_stats, "prio_" + k)).rjust(4)
        ))
def dump_dma(odrv):
    """Print the configuration of all 16 DMA streams of the device's MCU.

    Selects the stream/channel function table matching the hardware
    generation (v3.x vs v4.x, per the ST reference manuals linked below),
    then prints, for every stream whose status word is non-zero, its
    priority, selected channel function and "configured" flag.
    """
    # dma_functions[controller][row][stream & 7]: controller 0 = DMA1,
    # controller 1 = DMA2; row index is the channel number.
    if odrv.hw_version_major == 3:
        dma_functions = [[
            # https://www.st.com/content/ccc/resource/technical/document/reference_manual/3d/6d/5a/66/b4/99/40/d4/DM00031020.pdf/files/DM00031020.pdf/jcr:content/translations/en.DM00031020.pdf Table 42
            ["SPI3_RX", "-", "SPI3_RX", "SPI2_RX", "SPI2_TX", "SPI3_TX", "-", "SPI3_TX"],
            ["I2C1_RX", "-", "TIM7_UP", "-", "TIM7_UP", "I2C1_RX", "I2C1_TX", "I2C1_TX"],
            ["TIM4_CH1", "-", "I2S3_EXT_RX", "TIM4_CH2", "I2S2_EXT_TX", "I2S3_EXT_TX", "TIM4_UP", "TIM4_CH3"],
            ["I2S3_EXT_RX", "TIM2_UP/TIM2_CH3", "I2C3_RX", "I2S2_EXT_RX", "I2C3_TX", "TIM2_CH1", "TIM2_CH2/TIM2_CH4", "TIM2_UP/TIM2_CH4"],
            ["UART5_RX", "USART3_RX", "UART4_RX", "USART3_TX", "UART4_TX", "USART2_RX", "USART2_TX", "UART5_TX"],
            ["UART8_TX", "UART7_TX", "TIM3_CH4/TIM3_UP", "UART7_RX", "TIM3_CH1/TIM3_TRIG", "TIM3_CH2", "UART8_RX", "TIM3_CH3"],
            ["TIM5_CH3/TIM5_UP", "TIM5_CH4/TIM5_TRIG", "TIM5_CH1", "TIM5_CH4/TIM5_TRIG", "TIM5_CH2", "-", "TIM5_UP", "-"],
            ["-", "TIM6_UP", "I2C2_RX", "I2C2_RX", "USART3_TX", "DAC1", "DAC2", "I2C2_TX"],
        ], [
            # https://www.st.com/content/ccc/resource/technical/document/reference_manual/3d/6d/5a/66/b4/99/40/d4/DM00031020.pdf/files/DM00031020.pdf/jcr:content/translations/en.DM00031020.pdf Table 43
            ["ADC1", "SAI1_A", "TIM8_CH1/TIM8_CH2/TIM8_CH3", "SAI1_A", "ADC1", "SAI1_B", "TIM1_CH1/TIM1_CH2/TIM1_CH3", "-"],
            ["-", "DCMI", "ADC2", "ADC2", "SAI1_B", "SPI6_TX", "SPI6_RX", "DCMI"],
            ["ADC3", "ADC3", "-", "SPI5_RX", "SPI5_TX", "CRYP_OUT", "CRYP_IN", "HASH_IN"],
            ["SPI1_RX", "-", "SPI1_RX", "SPI1_TX", "-", "SPI1_TX", "-", "-"],
            ["SPI4_RX", "SPI4_TX", "USART1_RX", "SDIO", "-", "USART1_RX", "SDIO", "USART1_TX"],
            ["-", "USART6_RX", "USART6_RX", "SPI4_RX", "SPI4_TX", "-", "USART6_TX", "USART6_TX"],
            ["TIM1_TRIG", "TIM1_CH1", "TIM1_CH2", "TIM1_CH1", "TIM1_CH4/TIM1_TRIG/TIM1_COM", "TIM1_UP", "TIM1_CH3", "-"],
            ["-", "TIM8_UP", "TIM8_CH1", "TIM8_CH2", "TIM8_CH3", "SPI5_RX", "SPI5_TX", "TIM8_CH4/TIM8_TRIG/TIM8_COM"],
        ]]
    elif odrv.hw_version_major == 4:
        dma_functions = [[
            # https://www.st.com/resource/en/reference_manual/dm00305990-stm32f72xxx-and-stm32f73xxx-advanced-armbased-32bit-mcus-stmicroelectronics.pdf Table 26
            ["SPI3_RX", "-", "SPI3_RX", "SPI2_RX", "SPI2_TX", "SPI3_TX", "-", "SPI3_TX"],
            ["I2C1_RX", "I2C3_RX", "TIM7_UP", "-", "TIM7_UP", "I2C1_RX", "I2C1_TX", "I2C1_TX"],
            ["TIM4_CH1", "-", "-", "TIM4_CH2", "-", "-", "TIM4_UP", "TIM4_CH3"],
            ["-", "TIM2_UP/TIM2_CH3", "I2C3_RX", "-", "I2C3_TX", "TIM2_CH1", "TIM2_CH2/TIM2_CH4", "TIM2_UP/TIM2_CH4"],
            ["UART5_RX", "USART3_RX", "UART4_RX", "USART3_TX", "UART4_TX", "USART2_RX", "USART2_TX", "UART5_TX"],
            ["UART8_TX", "UART7_TX", "TIM3_CH4/TIM3_UP", "UART7_RX", "TIM3_CH1/TIM3_TRIG", "TIM3_CH2", "UART8_RX", "TIM3_CH3"],
            ["TIM5_CH3/TIM5_UP", "TIM5_CH4/TIM5_TRIG", "TIM5_CH1", "TIM5_CH4/TIM5_TRIG", "TIM5_CH2", "-", "TIM5_UP", "-"],
            ["-", "TIM6_UP", "I2C2_RX", "I2C2_RX", "USART3_TX", "DAC1", "DAC2", "I2C2_TX"],
        ], [
            # https://www.st.com/resource/en/reference_manual/dm00305990-stm32f72xxx-and-stm32f73xxx-advanced-armbased-32bit-mcus-stmicroelectronics.pdf Table 27
            ["ADC1", "SAI1_A", "TIM8_CH1/TIM8_CH2/TIM8_CH3", "SAI1_A", "ADC1", "SAI1_B", "TIM1_CH1/TIM1_CH2/TIM1_CH3", "SAI2_B"],
            ["-", "-", "ADC2", "ADC2", "SAI1_B", "-", "-", "-"],
            ["ADC3", "ADC3", "-", "SPI5_RX", "SPI5_TX", "AES_OUT", "AES_IN", "-"],
            ["SPI1_RX", "-", "SPI1_RX", "SPI1_TX", "SAI2_A", "SPI1_TX", "SAI2_B", "QUADSPI"],
            ["SPI4_RX", "SPI4_TX", "USART1_RX", "SDMMC1", "-", "USART1_RX", "SDMMC1", "USART1_TX"],
            ["-", "USART6_RX", "USART6_RX", "SPI4_RX", "SPI4_TX", "-", "USART6_TX", "USART6_TX"],
            ["TIM1_TRIG", "TIM1_CH1", "TIM1_CH2", "TIM1_CH1", "TIM1_CH4/TIM1_TRIG/TIM1_COM", "TIM1_UP", "TIM1_CH3", "-"],
            ["-", "TIM8_UP", "TIM8_CH1", "TIM8_CH2", "TIM8_CH3", "SPI5_RX", "SPI5_TX", "TIM8_CH4/TIM8_TRIG/TIM8_COM"],
            None,
            None,
            None,
            ["SDMMC2", "-", "-", "-", "-", "SDMMC2", "-", "-"],
        ]]
    # NOTE(review): any other hw_version_major leaves dma_functions unbound
    # and would raise NameError below if a stream is active.
    print("| Name | Prio | Channel | Configured |")
    print("|--------------|------|----------------------------------|------------|")
    # Streams 0-7 belong to DMA1, 8-15 to DMA2. Status word layout (per the
    # columns printed below): bits 0-1 priority, bits 2-4 channel selection,
    # bit 31 "configured" flag; an all-zero word means the stream is unused.
    for stream_num in range(16):
        status = odrv.get_dma_status(stream_num)
        if (status != 0):
            channel = (status >> 2) & 0x7
            ch_name = dma_functions[stream_num >> 3][channel][stream_num & 0x7]
            print("| DMA{}_Stream{} | {} | {} {} | {} |".format(
                (stream_num >> 3) + 1,
                (stream_num & 0x7),
                (status & 0x3),
                channel,
                ("(" + ch_name + ")").ljust(30),
                "*" if (status & 0x80000000) else " "))
def dump_timing(odrv, n_samples=100, path='/tmp/timings.png'):
    """Sample the device's task timers and save a timing chart to *path*.

    Collects (start_time, length) samples for every timer exposed under
    ``odrv.task_times`` and each ``axisN.task_times``, then renders a
    horizontal bar chart (mean start, mean length, std / max error bars).
    """
    import matplotlib.pyplot as plt
    import re
    timings = []
    # Device-global task timers.
    for attr in dir(odrv.task_times):
        if not attr.startswith('_'):
            timings.append((attr, getattr(odrv.task_times, attr), [], [])) # (name, obj, start_times, lengths)
    # Per-axis task timers (axis0, axis1, ...).
    for k in dir(odrv):
        if re.match(r'axis[0-9]+', k):
            for attr in dir(getattr(odrv, k).task_times):
                if not attr.startswith('_'):
                    timings.append((k + '.' + attr, getattr(getattr(odrv, k).task_times, attr), [], [])) # (name, obj, start_times, lengths)
    # Take a couple of samples
    print("sampling...")
    for i in range(n_samples):
        odrv.task_timers_armed = True # Trigger sample and wait for it to finish
        # Busy-wait: the firmware clears the flag once the sample is captured.
        while odrv.task_timers_armed: pass
        for name, obj, start_times, lengths in timings:
            start_times.append(obj.start_time)
            lengths.append(obj.length)
    print("done")
    # sort by start time
    timings = sorted(timings, key = lambda x: np.mean(x[2]))
    plt.rcParams['figure.figsize'] = 21, 9
    plt.figure()
    plt.grid(True)
    plt.barh(
        [-i for i in range(len(timings))], # y positions
        [np.mean(lengths) for name, obj, start_times, lengths in timings], # lengths
        left = [np.mean(start_times) for name, obj, start_times, lengths in timings], # starts
        xerr = (
            [np.std(lengths) for name, obj, start_times, lengths in timings], # error bars to the left side
            [(min(obj.max_length, 20100) - np.mean(lengths)) for name, obj, start_times, lengths in timings], # error bars to the right side - TODO: remove artificial min()
        ),
        tick_label = [name for name, obj, start_times, lengths in timings], # labels
    )
    plt.savefig(path, bbox_inches='tight')
| 43.996759 | 201 | 0.521366 | from __future__ import print_function
import sys
import time
import threading
import platform
import subprocess
import os
import numpy as np
from fibre.utils import Event
import odrive.enums
from odrive.enums import *
# Optional terminal setup: on Windows, colorama translates the VT100 escape
# codes defined below into Win32 console calls. Failure is non-fatal.
try:
    if platform.system() == 'Windows':
        import win32console
        import colorama
        colorama.init()
except ImportError:
    print("Could not init terminal features.")
    print("Refer to install instructions at http://docs.odriverobotics.com/#downloading-and-installing-tools")
    sys.stdout.flush()
    pass
# Python 2 compatibility: make input() behave like Python 3's input().
if sys.version_info < (3, 0):
    input = raw_input
# ANSI escape sequences used for colored terminal output (see dump_errors).
_VT100Colors = {
    'green': '\x1b[92;1m',
    'cyan': '\x1b[96;1m',
    'yellow': '\x1b[93;1m',
    'red': '\x1b[91;1m',
    'default': '\x1b[0m'
}
def calculate_thermistor_coeffs(degree, Rload, R_25, Beta, Tmin, Tmax, plot = False):
    """Fit a polynomial mapping normalized divider voltage to temperature [C].

    Models an NTC thermistor with the B-parameter equation
    R(T) = r_inf * exp(Beta / T) with r_inf = R_25 * exp(-Beta / T_25),
    in a voltage divider with *Rload*, over [Tmin, Tmax] (degrees C).
    Returns a numpy.poly1d of the given *degree*; if *plot* is set, also
    shows actual-vs-fit curves.
    """
    T_25 = 25 + 273.15
    temps = np.linspace(Tmin, Tmax, 1000)
    tempsK = temps + 273.15
    # Restored: this line was corrupted to "a/T_25)" in this copy; the
    # B-parameter model requires r_inf = R_25 * exp(-Beta / T_25) so that
    # R(T_25) == R_25 (consistent with the R_temps line below).
    r_inf = R_25 * np.exp(-Beta/T_25)
    R_temps = r_inf * np.exp(Beta/tempsK)
    # Normalized voltage of the divider: V = Rload / (Rload + R_thermistor).
    V = Rload / (Rload + R_temps)
    fit = np.polyfit(V, temps, degree)
    p1 = np.poly1d(fit)
    fit_temps = p1(V)
    if plot:
        import matplotlib.pyplot as plt
        print(fit)
        plt.plot(V, temps, label='actual')
        plt.plot(V, fit_temps, label='fit')
        plt.xlabel('normalized voltage')
        plt.ylabel('Temp [C]')
        plt.legend(loc=0)
        plt.show()
    return p1
class OperationAbortedException(Exception):
    """Raised to signal that an operation was aborted before completion."""
    pass
def set_motor_thermistor_coeffs(axis, Rload, R_25, Beta, Tmin, TMax):
    """Fit a cubic thermistor curve and store it in the axis' thermistor config.

    Parameters mirror calculate_thermistor_coeffs: *Rload* is the divider
    resistor, *R_25* the thermistor resistance at 25 C, *Beta* its
    B-parameter, and [Tmin, TMax] the fit range in degrees C.
    """
    coeffs = calculate_thermistor_coeffs(3, Rload, R_25, Beta, Tmin, TMax)
    # numpy.poly1d index access: coeffs[k] is the x**k coefficient, so
    # poly_coefficient_0 receives the cubic term and _3 the constant.
    # NOTE(review): assumes the firmware evaluates highest-order term first
    # — confirm against the firmware's polynomial evaluation order.
    axis.motor.motor_thermistor.config.poly_coefficient_0 = float(coeffs[3])
    axis.motor.motor_thermistor.config.poly_coefficient_1 = float(coeffs[2])
    axis.motor.motor_thermistor.config.poly_coefficient_2 = float(coeffs[1])
    axis.motor.motor_thermistor.config.poly_coefficient_3 = float(coeffs[0])
def dump_errors(odrv, clear=False, printfunc = print):
    """Print (and optionally clear) the error flags of the device.

    Walks the system-level error field plus, per axis, the error fields of
    axis/motor/sensorless_estimator/encoder/controller, decoding each bit
    via the ODRIVE_*/AXIS_*/... constants from odrive.enums. With *clear*
    set, every inspected error field is reset to 0 after printing.
    """
    # Axis sub-objects are discovered by name from the remote attribute map.
    axes = [(name, axis) for name, axis in odrv._remote_attributes.items() if 'axis' in name]
    axes.sort()
    def dump_errors_for_module(indent, name, obj, path, errorcodes):
        prefix = indent + name.strip('0123456789') + ": "
        # Descend along the dotted *path*; bail out with "not found" if any
        # link is missing (e.g. firmware without that module).
        for elem in path.split('.'):
            if not hasattr(obj, elem):
                printfunc(prefix + _VT100Colors['yellow'] + "not found" + _VT100Colors['default'])
                return
            # Keep the parent so the leaf error field can be zeroed below.
            parent = obj
            obj = getattr(obj, elem)
        if obj != 0:
            printfunc(indent + name + ": " + _VT100Colors['red'] + "Error(s):" + _VT100Colors['default'])
            # Error fields are bitmasks; print the name of every set bit.
            for bit in range(64):
                if obj & (1 << bit) != 0:
                    printfunc(indent + " " + errorcodes.get((1 << bit), 'UNKNOWN ERROR: 0x{:08X}'.format(1 << bit)))
            if clear:
                setattr(parent, elem, 0)
        else:
            printfunc(indent + name + ": " + _VT100Colors['green'] + "no error" + _VT100Colors['default'])
    # Reverse lookup tables: error value -> constant name.
    system_error_codes = {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("ODRIVE_ERROR_")}
    dump_errors_for_module("", "system", odrv, 'error', system_error_codes)
    for name, axis in axes:
        printfunc(name)
        module_decode_map = [
            ('axis', axis, 'error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("AXIS_ERROR_")}),
            ('motor', axis, 'motor.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("MOTOR_ERROR_")}),
            ('sensorless_estimator', axis, 'sensorless_estimator.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("SENSORLESS_ESTIMATOR_ERROR")}),
            ('encoder', axis, 'encoder.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("ENCODER_ERROR_")}),
            ('controller', axis, 'controller.error', {v: k for k, v in odrive.enums.__dict__ .items() if k.startswith("CONTROLLER_ERROR_")}),
        ]
        for name, obj, path, errorcodes in module_decode_map:
            dump_errors_for_module(" ", name, obj, path, errorcodes)
def oscilloscope_dump(odrv, num_vals, filename='oscilloscope.csv'):
    """Write the first *num_vals* oscilloscope samples to *filename*, one per line."""
    samples = (odrv.oscilloscope.get_val(i) for i in range(num_vals))
    with open(filename, 'w') as out:
        out.writelines(str(sample) + '\n' for sample in samples)
# Live-plotter tuning knobs (used by start_liveplotter below):
data_rate = 200  # samples fetched from the device per second
plot_rate = 10  # plot redraws per second
num_samples = 500  # sliding-window length kept in memory
def start_liveplotter(get_var_callback):
    """Start a live plot of the values returned by *get_var_callback*.

    Spawns two daemon threads: one polling the callback at ``data_rate`` Hz
    into a sliding window of ``num_samples`` values, one redrawing a
    matplotlib figure at ``plot_rate`` Hz. Returns a cancellation token
    that stops both threads; closing the plot window sets it too.
    """
    import matplotlib.pyplot as plt
    cancellation_token = Event()
    global vals
    vals = []
    def fetch_data():
        global vals
        while not cancellation_token.is_set():
            try:
                data = get_var_callback()
            except Exception as ex:
                # Transient read errors (e.g. USB hiccup): report and retry.
                print(str(ex))
                time.sleep(1)
                continue
            vals.append(data)
            # Keep only the most recent num_samples entries.
            if len(vals) > num_samples:
                vals = vals[-num_samples:]
            time.sleep(1/data_rate)
    def plot_data():
        global vals
        plt.ion()
        def closed(evt):
            cancellation_token.set()
        fig = plt.figure()
        fig.canvas.mpl_connect('close_event', closed)
        while not cancellation_token.is_set():
            plt.clf()
            plt.plot(vals)
            plt.legend(list(range(len(vals))))
            fig.canvas.draw()
            # Also services GUI events while waiting for the next redraw.
            fig.canvas.start_event_loop(1/plot_rate)
    fetch_t = threading.Thread(target=fetch_data)
    fetch_t.daemon = True
    fetch_t.start()
    plot_t = threading.Thread(target=plot_data)
    plot_t.daemon = True
    plot_t.start()
    return cancellation_token;
class BulkCapture:
    """Capture *get_var_callback* samples on a background thread.

    Samples at approximately *data_rate* Hz for *duration* seconds; the
    callback must return a list of values. When done, ``self.data`` holds a
    numpy array with time in column 0 and the sampled values in the
    remaining columns, and ``self.event`` is set.
    """
    def __init__(self,
                 get_var_callback,
                 data_rate=500.0,
                 duration=2.0):
        from threading import Event, Thread
        import numpy as np
        self.get_var_callback = get_var_callback
        # Set once the capture is complete (wait on this before plotting).
        self.event = Event()
        def loop():
            vals = []
            start_time = time.monotonic()
            period = 1.0 / data_rate
            while time.monotonic() - start_time < duration:
                try:
                    data = get_var_callback()
                except Exception as ex:
                    print(str(ex))
                    print("Waiting 1 second before next data point")
                    time.sleep(1)
                    continue
                relative_time = time.monotonic() - start_time
                vals.append([relative_time] + data)
                # Sleep to the next period boundary to keep a steady rate.
                time.sleep(period - (relative_time % period))
            self.data = np.array(vals)
            print("Capture complete")
            achieved_data_rate = len(self.data) / self.data[-1, 0]
            if achieved_data_rate < (data_rate * 0.9):
                print("Achieved average data rate: {}Hz".format(achieved_data_rate))
                print("If this rate is significantly lower than what you specified, consider lowering it below the achieved value for more consistent sampling.")
            self.event.set()
        Thread(target=loop, daemon=True).start()
    def plot(self):
        """Plot the captured data; the legend title is derived from the
        callback's source code (assumes it was assigned via ``name = ...``)."""
        import matplotlib.pyplot as plt
        import inspect
        from textwrap import wrap
        plt.plot(self.data[:,0], self.data[:,1:])
        plt.xlabel("Time (seconds)")
        title = (str(inspect.getsource(self.get_var_callback))
                .strip("['\\n']")
                .split(" = ")[1])
        plt.title("\n".join(wrap(title, 60)))
        plt.legend(range(self.data.shape[1]-1))
        plt.show()
def step_and_plot( axis,
                   step_size=100.0,
                   settle_time=0.5,
                   data_rate=500.0,
                   ctrl_mode=CONTROL_MODE_POSITION_CONTROL):
    """Apply a setpoint step to *axis* and plot the captured response.

    Temporarily switches the axis to *ctrl_mode* and closed-loop control,
    records estimate + setpoint around the step, then restores the previous
    control mode and idles the axis before plotting.
    """
    # NOTE(review): comparing enum constants with `is` relies on them being
    # singletons / small interned ints; `==` would be more robust.
    if ctrl_mode is CONTROL_MODE_POSITION_CONTROL:
        get_var_callback = lambda :[axis.encoder.pos_estimate, axis.controller.pos_setpoint]
        initial_setpoint = axis.encoder.pos_estimate
        def set_setpoint(setpoint):
            axis.controller.pos_setpoint = setpoint
    elif ctrl_mode is CONTROL_MODE_VELOCITY_CONTROL:
        get_var_callback = lambda :[axis.encoder.vel_estimate, axis.controller.vel_setpoint]
        initial_setpoint = 0
        def set_setpoint(setpoint):
            axis.controller.vel_setpoint = setpoint
    else:
        print("Invalid control mode")
        return
    # Capture some pre-step data too, so the step edge is visible.
    initial_settle_time = 0.5
    initial_control_mode = axis.controller.config.control_mode
    print(initial_control_mode)
    axis.controller.config.control_mode = ctrl_mode
    axis.requested_state = AXIS_STATE_CLOSED_LOOP_CONTROL
    capture = BulkCapture(get_var_callback,
                          data_rate=data_rate,
                          duration=initial_settle_time + settle_time)
    set_setpoint(initial_setpoint)
    time.sleep(initial_settle_time)
    set_setpoint(initial_setpoint + step_size)
    capture.event.wait() # wait for the capture thread to finish
    axis.requested_state = AXIS_STATE_IDLE
    axis.controller.config.control_mode = initial_control_mode
    capture.plot()
def print_drv_regs(name, motor):
    """Print the gate-driver fault/status/control registers of *motor*,
    each in decimal and zero-padded binary form."""
    gate_driver = motor.gate_driver
    fault = gate_driver.drv_fault
    sr1 = gate_driver.status_reg_1
    sr2 = gate_driver.status_reg_2
    cr1 = gate_driver.ctrl_reg_1
    cr2 = gate_driver.ctrl_reg_2
    print(f"{name}: {fault}")
    print(f"DRV Fault Code: {fault}")
    print(f"Status Reg 1: {sr1} ({sr1:#010b})")
    print(f"Status Reg 2: {sr2} ({sr2:#010b})")
    print(f"Control Reg 1: {cr1} ({cr1:#013b})")
    print(f"Control Reg 2: {cr2} ({cr2:#09b})")
def show_oscilloscope(odrv):
    """Fetch 18000 oscilloscope samples from the device and display them."""
    size = 18000
    values = []
    for i in range(size):
        values.append(odrv.oscilloscope.get_val(i))
    import matplotlib.pyplot as plt
    plt.plot(values)
    plt.show()
def rate_test(device):
    """Estimate the communication frame rate by sampling axis0.loop_counter
    10000 times and comparing counter progress to the control-loop rate."""
    print("reading 10000 values...")
    num_frames = 10000
    samples = [device.axis0.loop_counter for _ in range(num_frames)]
    # Control loops elapsed per read, on average.
    loops_per_frame = (samples[-1] - samples[0]) / num_frames
    # Device control-loop frequency: 168 MHz / (6 * 3500) = 8000 loops/s.
    loops_per_sec = 168000000 / (6 * 3500)
    print("Frames per second: " + str(loops_per_sec / loops_per_frame))
def usb_burn_in_test(get_var_callback, cancellation_token):
    """Continuously poll *get_var_callback* on a daemon thread to stress the
    communication link; prints progress every 1000 successful reads and
    stops when *cancellation_token* is set."""
    def fetch_data():
        # NOTE(review): `global vals` is unused in this function (looks like
        # a leftover from start_liveplotter).
        global vals
        i = 0
        while not cancellation_token.is_set():
            try:
                get_var_callback()
                i += 1
            except Exception as ex:
                # On error: report, back off, and restart the success counter.
                print(str(ex))
                time.sleep(1)
                i = 0
                continue
            if i % 1000 == 0:
                print("read {} values".format(i))
    threading.Thread(target=fetch_data, daemon=True).start()
def yes_no_prompt(question, default=None):
    """Ask a yes/no question on stdin and return the answer as a bool.

    Re-prompts until 'y'/'yes' or 'n'/'no' (case-insensitive) is entered;
    an empty answer returns *default* when one was given. The suffix shown
    after the question reflects the default ([Y/n], [y/N] or [y/n]).
    """
    if default is None:
        question += " [y/n] "
    elif default == True:
        question += " [Y/n] "
    elif default == False:
        question += " [y/N] "
    while True:
        print(question, end='')
        answer = input().lower()
        if answer in ('yes', 'y'):
            return True
        if answer in ('no', 'n'):
            return False
        if answer == '' and default is not None:
            return default
def dump_interrupts(odrv):
    """Print all NVIC interrupts with a non-zero status word.

    Per the columns printed below, the device's packed status word decodes
    as: bits 0-7 priority, bits 8-30 fire count, bit 31 enabled flag.
    Negative IRQ numbers are the Cortex-M core exceptions (faults, SVCall,
    PendSV, SysTick); non-negative ones are peripheral interrupts.
    """
    interrupts = [
        (-12, "MemoryManagement_IRQn"),
        (-11, "BusFault_IRQn"),
        (-10, "UsageFault_IRQn"),
        (-5, "SVCall_IRQn"),
        (-4, "DebugMonitor_IRQn"),
        (-2, "PendSV_IRQn"),
        (-1, "SysTick_IRQn"),
        (0, "WWDG_IRQn"),
        (1, "PVD_IRQn"),
        (2, "TAMP_STAMP_IRQn"),
        (3, "RTC_WKUP_IRQn"),
        (4, "FLASH_IRQn"),
        (5, "RCC_IRQn"),
        (6, "EXTI0_IRQn"),
        (7, "EXTI1_IRQn"),
        (8, "EXTI2_IRQn"),
        (9, "EXTI3_IRQn"),
        (10, "EXTI4_IRQn"),
        (11, "DMA1_Stream0_IRQn"),
        (12, "DMA1_Stream1_IRQn"),
        (13, "DMA1_Stream2_IRQn"),
        (14, "DMA1_Stream3_IRQn"),
        (15, "DMA1_Stream4_IRQn"),
        (16, "DMA1_Stream5_IRQn"),
        (17, "DMA1_Stream6_IRQn"),
        (18, "ADC_IRQn"),
        (19, "CAN1_TX_IRQn"),
        (20, "CAN1_RX0_IRQn"),
        (21, "CAN1_RX1_IRQn"),
        (22, "CAN1_SCE_IRQn"),
        (23, "EXTI9_5_IRQn"),
        (24, "TIM1_BRK_TIM9_IRQn"),
        (25, "TIM1_UP_TIM10_IRQn"),
        (26, "TIM1_TRG_COM_TIM11_IRQn"),
        (27, "TIM1_CC_IRQn"),
        (28, "TIM2_IRQn"),
        (29, "TIM3_IRQn"),
        (30, "TIM4_IRQn"),
        (31, "I2C1_EV_IRQn"),
        (32, "I2C1_ER_IRQn"),
        (33, "I2C2_EV_IRQn"),
        (34, "I2C2_ER_IRQn"),
        (35, "SPI1_IRQn"),
        (36, "SPI2_IRQn"),
        (37, "USART1_IRQn"),
        (38, "USART2_IRQn"),
        (39, "USART3_IRQn"),
        (40, "EXTI15_10_IRQn"),
        (41, "RTC_Alarm_IRQn"),
        (42, "OTG_FS_WKUP_IRQn"),
        (43, "TIM8_BRK_TIM12_IRQn"),
        (44, "TIM8_UP_TIM13_IRQn"),
        (45, "TIM8_TRG_COM_TIM14_IRQn"),
        (46, "TIM8_CC_IRQn"),
        (47, "DMA1_Stream7_IRQn"),
        (48, "FMC_IRQn"),
        (49, "SDMMC1_IRQn"),
        (50, "TIM5_IRQn"),
        (51, "SPI3_IRQn"),
        (52, "UART4_IRQn"),
        (53, "UART5_IRQn"),
        (54, "TIM6_DAC_IRQn"),
        (55, "TIM7_IRQn"),
        (56, "DMA2_Stream0_IRQn"),
        (57, "DMA2_Stream1_IRQn"),
        (58, "DMA2_Stream2_IRQn"),
        (59, "DMA2_Stream3_IRQn"),
        (60, "DMA2_Stream4_IRQn"),
        (61, "ETH_IRQn"),
        (62, "ETH_WKUP_IRQn"),
        (63, "CAN2_TX_IRQn"),
        (64, "CAN2_RX0_IRQn"),
        (65, "CAN2_RX1_IRQn"),
        (66, "CAN2_SCE_IRQn"),
        (67, "OTG_FS_IRQn"),
        (68, "DMA2_Stream5_IRQn"),
        (69, "DMA2_Stream6_IRQn"),
        (70, "DMA2_Stream7_IRQn"),
        (71, "USART6_IRQn"),
        (72, "I2C3_EV_IRQn"),
        (73, "I2C3_ER_IRQn"),
        (74, "OTG_HS_EP1_OUT_IRQn"),
        (75, "OTG_HS_EP1_IN_IRQn"),
        (76, "OTG_HS_WKUP_IRQn"),
        (77, "OTG_HS_IRQn"),
        # gap
        (80, "RNG_IRQn"),
        (81, "FPU_IRQn"),
        (82, "UART7_IRQn"),
        (83, "UART8_IRQn"),
        (84, "SPI4_IRQn"),
        (85, "SPI5_IRQn"),
        # gap
        (87, "SAI1_IRQn"),
        # gap
        (91, "SAI2_IRQn"),
        (92, "QUADSPI_IRQn"),
        (93, "LPTIM1_IRQn"),
        # gap
        (103, "SDMMC2_IRQn")
    ]
    print("| # | Name | Prio | En | Count |")
    print("|-----|-------------------------|------|----|---------|")
    for irqn, irq_name in interrupts:
        status = odrv.get_interrupt_status(irqn)
        if (status != 0):
            print("| {} | {} | {} | {} | {} |".format(
                str(irqn).rjust(3),
                irq_name.ljust(23),
                str(status & 0xff).rjust(4),
                " *" if (status & 0x80000000) else " ",
                str((status >> 8) & 0x7fffff).rjust(7)))
def dump_threads(odrv):
    """Print a table of the RTOS threads reported by the device.

    ``odrv.system_stats`` is expected to expose, for each thread <name>, the
    attributes ``max_stack_usage_<name>``, ``stack_size_<name>`` and
    ``prio_<name>``. Threads missing any of the three are skipped with a
    warning.
    """
    prefixes = ["max_stack_usage_", "stack_size_", "prio_"]
    # Thread names found behind any of the three prefixes (with duplicates).
    keys = [k[len(prefix):] for k in dir(odrv.system_stats) for prefix in prefixes if k.startswith(prefix)]
    # A thread is complete if all three stats are present for it.
    good_keys = set([k for k in set(keys) if keys.count(k) == len(prefixes)])
    # Bug fix: good_keys is always a subset of set(keys), so the original `>`
    # comparison could never be true; warn when some threads are incomplete.
    if len(good_keys) < len(set(keys)):
        print("Warning: incomplete thread information for threads {}".format(set(keys) - good_keys))
    print("| Name | Stack Size [B] | Max Ever Stack Usage [B] | Prio |")
    print("|---------|----------------|--------------------------|------|")
    for k in sorted(good_keys):
        sz = getattr(odrv.system_stats, "stack_size_" + k)
        use = getattr(odrv.system_stats, "max_stack_usage_" + k)
        print("| {} | {} | {} | {} |".format(
            k.ljust(7),
            str(sz).rjust(14),
            "{} ({:.1f}%)".format(use, use / sz * 100).rjust(24),
            str(getattr(odrv.system_stats, "prio_" + k)).rjust(4)
        ))
def dump_dma(odrv):
    """Print the configuration of all 16 DMA streams of the device's MCU.

    Selects the stream/channel function table matching the hardware
    generation (v3.x vs v4.x, per the ST reference manuals cited below),
    then prints, for every stream whose status word is non-zero, its
    priority, selected channel function and "configured" flag.
    """
    # dma_functions[controller][row][stream & 7]: controller 0 = DMA1,
    # controller 1 = DMA2; row index is the channel number.
    if odrv.hw_version_major == 3:
        dma_functions = [[
            # Reference manual DM00031020 (STM32F405/407), Table 42
            ["SPI3_RX", "-", "SPI3_RX", "SPI2_RX", "SPI2_TX", "SPI3_TX", "-", "SPI3_TX"],
            ["I2C1_RX", "-", "TIM7_UP", "-", "TIM7_UP", "I2C1_RX", "I2C1_TX", "I2C1_TX"],
            ["TIM4_CH1", "-", "I2S3_EXT_RX", "TIM4_CH2", "I2S2_EXT_TX", "I2S3_EXT_TX", "TIM4_UP", "TIM4_CH3"],
            ["I2S3_EXT_RX", "TIM2_UP/TIM2_CH3", "I2C3_RX", "I2S2_EXT_RX", "I2C3_TX", "TIM2_CH1", "TIM2_CH2/TIM2_CH4", "TIM2_UP/TIM2_CH4"],
            ["UART5_RX", "USART3_RX", "UART4_RX", "USART3_TX", "UART4_TX", "USART2_RX", "USART2_TX", "UART5_TX"],
            ["UART8_TX", "UART7_TX", "TIM3_CH4/TIM3_UP", "UART7_RX", "TIM3_CH1/TIM3_TRIG", "TIM3_CH2", "UART8_RX", "TIM3_CH3"],
            ["TIM5_CH3/TIM5_UP", "TIM5_CH4/TIM5_TRIG", "TIM5_CH1", "TIM5_CH4/TIM5_TRIG", "TIM5_CH2", "-", "TIM5_UP", "-"],
            ["-", "TIM6_UP", "I2C2_RX", "I2C2_RX", "USART3_TX", "DAC1", "DAC2", "I2C2_TX"],
        ], [
            # Reference manual DM00031020 (STM32F405/407), Table 43
            ["ADC1", "SAI1_A", "TIM8_CH1/TIM8_CH2/TIM8_CH3", "SAI1_A", "ADC1", "SAI1_B", "TIM1_CH1/TIM1_CH2/TIM1_CH3", "-"],
            ["-", "DCMI", "ADC2", "ADC2", "SAI1_B", "SPI6_TX", "SPI6_RX", "DCMI"],
            ["ADC3", "ADC3", "-", "SPI5_RX", "SPI5_TX", "CRYP_OUT", "CRYP_IN", "HASH_IN"],
            ["SPI1_RX", "-", "SPI1_RX", "SPI1_TX", "-", "SPI1_TX", "-", "-"],
            ["SPI4_RX", "SPI4_TX", "USART1_RX", "SDIO", "-", "USART1_RX", "SDIO", "USART1_TX"],
            ["-", "USART6_RX", "USART6_RX", "SPI4_RX", "SPI4_TX", "-", "USART6_TX", "USART6_TX"],
            ["TIM1_TRIG", "TIM1_CH1", "TIM1_CH2", "TIM1_CH1", "TIM1_CH4/TIM1_TRIG/TIM1_COM", "TIM1_UP", "TIM1_CH3", "-"],
            ["-", "TIM8_UP", "TIM8_CH1", "TIM8_CH2", "TIM8_CH3", "SPI5_RX", "SPI5_TX", "TIM8_CH4/TIM8_TRIG/TIM8_COM"],
        ]]
    elif odrv.hw_version_major == 4:
        dma_functions = [[
            # Reference manual dm00305990 (STM32F72x/F73x), Table 26
            ["SPI3_RX", "-", "SPI3_RX", "SPI2_RX", "SPI2_TX", "SPI3_TX", "-", "SPI3_TX"],
            ["I2C1_RX", "I2C3_RX", "TIM7_UP", "-", "TIM7_UP", "I2C1_RX", "I2C1_TX", "I2C1_TX"],
            ["TIM4_CH1", "-", "-", "TIM4_CH2", "-", "-", "TIM4_UP", "TIM4_CH3"],
            ["-", "TIM2_UP/TIM2_CH3", "I2C3_RX", "-", "I2C3_TX", "TIM2_CH1", "TIM2_CH2/TIM2_CH4", "TIM2_UP/TIM2_CH4"],
            ["UART5_RX", "USART3_RX", "UART4_RX", "USART3_TX", "UART4_TX", "USART2_RX", "USART2_TX", "UART5_TX"],
            ["UART8_TX", "UART7_TX", "TIM3_CH4/TIM3_UP", "UART7_RX", "TIM3_CH1/TIM3_TRIG", "TIM3_CH2", "UART8_RX", "TIM3_CH3"],
            ["TIM5_CH3/TIM5_UP", "TIM5_CH4/TIM5_TRIG", "TIM5_CH1", "TIM5_CH4/TIM5_TRIG", "TIM5_CH2", "-", "TIM5_UP", "-"],
            ["-", "TIM6_UP", "I2C2_RX", "I2C2_RX", "USART3_TX", "DAC1", "DAC2", "I2C2_TX"],
        ], [
            # Reference manual dm00305990 (STM32F72x/F73x), Table 27
            ["ADC1", "SAI1_A", "TIM8_CH1/TIM8_CH2/TIM8_CH3", "SAI1_A", "ADC1", "SAI1_B", "TIM1_CH1/TIM1_CH2/TIM1_CH3", "SAI2_B"],
            ["-", "-", "ADC2", "ADC2", "SAI1_B", "-", "-", "-"],
            ["ADC3", "ADC3", "-", "SPI5_RX", "SPI5_TX", "AES_OUT", "AES_IN", "-"],
            ["SPI1_RX", "-", "SPI1_RX", "SPI1_TX", "SAI2_A", "SPI1_TX", "SAI2_B", "QUADSPI"],
            ["SPI4_RX", "SPI4_TX", "USART1_RX", "SDMMC1", "-", "USART1_RX", "SDMMC1", "USART1_TX"],
            ["-", "USART6_RX", "USART6_RX", "SPI4_RX", "SPI4_TX", "-", "USART6_TX", "USART6_TX"],
            ["TIM1_TRIG", "TIM1_CH1", "TIM1_CH2", "TIM1_CH1", "TIM1_CH4/TIM1_TRIG/TIM1_COM", "TIM1_UP", "TIM1_CH3", "-"],
            ["-", "TIM8_UP", "TIM8_CH1", "TIM8_CH2", "TIM8_CH3", "SPI5_RX", "SPI5_TX", "TIM8_CH4/TIM8_TRIG/TIM8_COM"],
            None,
            None,
            None,
            ["SDMMC2", "-", "-", "-", "-", "SDMMC2", "-", "-"],
        ]]
    # NOTE(review): any other hw_version_major leaves dma_functions unbound
    # and would raise NameError below if a stream is active.
    print("| Name | Prio | Channel | Configured |")
    print("|--------------|------|----------------------------------|------------|")
    # Streams 0-7 belong to DMA1, 8-15 to DMA2. Status word layout (per the
    # columns printed below): bits 0-1 priority, bits 2-4 channel selection,
    # bit 31 "configured" flag; an all-zero word means the stream is unused.
    for stream_num in range(16):
        status = odrv.get_dma_status(stream_num)
        if (status != 0):
            channel = (status >> 2) & 0x7
            ch_name = dma_functions[stream_num >> 3][channel][stream_num & 0x7]
            print("| DMA{}_Stream{} | {} | {} {} | {} |".format(
                (stream_num >> 3) + 1,
                (stream_num & 0x7),
                (status & 0x3),
                channel,
                ("(" + ch_name + ")").ljust(30),
                "*" if (status & 0x80000000) else " "))
def dump_timing(odrv, n_samples=100, path='/tmp/timings.png'):
    """Sample the device's task timers and save a timing chart to *path*.

    Collects (start_time, length) samples for every timer exposed under
    ``odrv.task_times`` and each ``axisN.task_times``, then renders a
    horizontal bar chart (mean start, mean length, std / max error bars).
    """
    import matplotlib.pyplot as plt
    import re
    timings = []
    # Each entry: (name, timer object, start_times, lengths).
    for attr in dir(odrv.task_times):
        if not attr.startswith('_'):
            timings.append((attr, getattr(odrv.task_times, attr), [], []))
    # Per-axis task timers (axis0, axis1, ...).
    for k in dir(odrv):
        if re.match(r'axis[0-9]+', k):
            for attr in dir(getattr(odrv, k).task_times):
                if not attr.startswith('_'):
                    timings.append((k + '.' + attr, getattr(getattr(odrv, k).task_times, attr), [], []))
    print("sampling...")
    for i in range(n_samples):
        # Trigger one sample; the firmware clears the flag when it is done.
        odrv.task_timers_armed = True
        while odrv.task_timers_armed: pass
        for name, obj, start_times, lengths in timings:
            start_times.append(obj.start_time)
            lengths.append(obj.length)
    print("done")
    # Sort tasks by mean start time so the chart reads chronologically.
    timings = sorted(timings, key = lambda x: np.mean(x[2]))
    plt.rcParams['figure.figsize'] = 21, 9
    plt.figure()
    plt.grid(True)
    plt.barh(
        [-i for i in range(len(timings))],  # y positions (top-down)
        [np.mean(lengths) for name, obj, start_times, lengths in timings],  # bar lengths
        left = [np.mean(start_times) for name, obj, start_times, lengths in timings],  # bar starts
        xerr = (
            [np.std(lengths) for name, obj, start_times, lengths in timings],  # left error bars
            # right error bars - TODO: remove artificial min()
            [(min(obj.max_length, 20100) - np.mean(lengths)) for name, obj, start_times, lengths in timings],
        ),
        tick_label = [name for name, obj, start_times, lengths in timings],
    )
    plt.savefig(path, bbox_inches='tight')
| true | true |
f7fa7aa48881472632355dbf397494aeb3465e23 | 825 | py | Python | tests/integers.py | fuzziqersoftware/nemesys | ee6cc8e1fd804b5cfaa94a79e295770cadec8204 | [
"MIT"
] | 9 | 2018-02-05T06:52:02.000Z | 2022-03-27T18:26:14.000Z | tests/integers.py | fuzziqersoftware/nemesys | ee6cc8e1fd804b5cfaa94a79e295770cadec8204 | [
"MIT"
] | 1 | 2021-05-17T16:53:12.000Z | 2021-05-31T11:19:28.000Z | tests/integers.py | fuzziqersoftware/nemesys | ee6cc8e1fd804b5cfaa94a79e295770cadec8204 | [
"MIT"
] | 3 | 2018-02-06T17:24:34.000Z | 2021-11-16T17:20:28.000Z | a = 0x204
b = 0x404
# Exercises integer bitwise/arithmetic operators and base-conversion
# formatting; expected values assume a == 0x204 (516, assigned above)
# and b == 0x404 (1028).
print("a | b should be 1540: %d" % (a | b))
print("a & b should be 4: %d" % (a & b))
print("a ^ b should be 1536: %d" % (a ^ b))
print("a << 2 should be 2064: %d" % (a << 2))
print("a >> 2 should be 129: %d" % (a >> 2))
print("a + b should be 1544: %d" % (a + b))
print("a - b should be -512: %d" % (a - b))
print("a * b should be 530448: %d" % (a * b))
print("b // a should be 1: %d" % (b // a))
print("a // b should be 0: %d" % (a // b))
print("bin(a) should be 0b1000000100: " + bin(a))
print("bin(b) should be 0b10000000100: " + bin(b))
print("oct(a) should be 0o1004: " + oct(a))
print("oct(b) should be 0o2004: " + oct(b))
print("hex(a) should be 0x204: " + hex(a))
print("hex(b) should be 0x404: " + hex(b))
# Note: %o prints without the 0o prefix (1004, not 0o1004).
print("'%%o' %% a should be 0o1004: %o" % a)
print("'%%o' %% b should be 0o2004: %o" % b)
| 39.285714 | 50 | 0.535758 | a = 0x204
b = 0x404
# Exercises integer bitwise/arithmetic operators and base-conversion
# formatting; expected values assume a == 0x204 (516, assigned above)
# and b == 0x404 (1028).
print("a | b should be 1540: %d" % (a | b))
print("a & b should be 4: %d" % (a & b))
print("a ^ b should be 1536: %d" % (a ^ b))
print("a << 2 should be 2064: %d" % (a << 2))
print("a >> 2 should be 129: %d" % (a >> 2))
print("a + b should be 1544: %d" % (a + b))
print("a - b should be -512: %d" % (a - b))
print("a * b should be 530448: %d" % (a * b))
print("b // a should be 1: %d" % (b // a))
print("a // b should be 0: %d" % (a // b))
print("bin(a) should be 0b1000000100: " + bin(a))
print("bin(b) should be 0b10000000100: " + bin(b))
print("oct(a) should be 0o1004: " + oct(a))
print("oct(b) should be 0o2004: " + oct(b))
print("hex(a) should be 0x204: " + hex(a))
print("hex(b) should be 0x404: " + hex(b))
# Note: %o prints without the 0o prefix (1004, not 0o1004).
print("'%%o' %% a should be 0o1004: %o" % a)
print("'%%o' %% b should be 0o2004: %o" % b)
| true | true |
f7fa7c4134dc320730c862ccfeb0df0102a79c1b | 2,164 | py | Python | gpMgmt/bin/gppylib/test/unit/test_unit_userinput.py | bradfordb-vmware/gpdb | 5cc23bd1df4133aaa7a80174f5b0950933a83cc2 | [
"PostgreSQL",
"Apache-2.0"
] | 5,535 | 2015-10-28T01:05:40.000Z | 2022-03-30T13:46:53.000Z | gpMgmt/bin/gppylib/test/unit/test_unit_userinput.py | bradfordb-vmware/gpdb | 5cc23bd1df4133aaa7a80174f5b0950933a83cc2 | [
"PostgreSQL",
"Apache-2.0"
] | 9,369 | 2015-10-28T07:48:01.000Z | 2022-03-31T23:56:42.000Z | gpMgmt/bin/gppylib/test/unit/test_unit_userinput.py | bradfordb-vmware/gpdb | 5cc23bd1df4133aaa7a80174f5b0950933a83cc2 | [
"PostgreSQL",
"Apache-2.0"
] | 1,800 | 2015-10-28T01:08:25.000Z | 2022-03-29T13:29:36.000Z | #!/usr/bin/env python3
import sys, os, getpass
import unittest
from gppylib.userinput import ask_create_password
class GppylibUserInputTestCase(unittest.TestCase):
    """Tests for ask_create_password, driven by a fake getpass.getpass.

    The test values are consumed one per prompt; the data suggests
    ask_create_password prompts twice per attempt (password + confirmation),
    so values come in pairs.
    """

    @staticmethod
    def mock_get_pass_start(values):
        # Replace getpass.getpass with a stub that returns the entries of
        # *values* sequentially, one per call.
        def mock_get_pass(prompt):
            mock_get_pass.counter += 1
            return values[mock_get_pass.counter]
        mock_get_pass.counter = -1
        # Remember the real implementation so mock_get_pass_stop can restore it.
        GppylibUserInputTestCase.old_std_method = getpass.getpass
        getpass.getpass = mock_get_pass

    @staticmethod
    def mock_get_pass_stop():
        # NOTE(review): raises AttributeError if called before
        # mock_get_pass_start; acceptable within these tests.
        getpass.getpass = GppylibUserInputTestCase.old_std_method

    def test00_verify_too_short_password(self):
        """Verify too short password is rejected"""
        stdinvals = ['abc', 'abc', 'foobar', 'foobar']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password()
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertEqual(result, 'foobar')

    def test01_verify_too_short_password(self):
        """Verify non-matching password is rejected"""
        # NOTE(review): method name says "too_short" but this case tests
        # non-matching passwords; consider renaming.
        stdinvals = ['abcdef', 'ABCDEF', 'coolio', 'coolio']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password()
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertEqual(result, 'coolio')

    def test02_verify_max_attempts(self):
        """Verify max number of attempts to get password results in failure"""
        stdinvals = ['a', 'a', 'b', 'b', 'c', 'c', 'd', 'd']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password(max_attempts=3)
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertIsNone(result)

    def test03_verify_min_length(self):
        """Verify minimum password length"""
        stdinvals = ['a', 'a', 'bb', 'bb', 'ccc', 'ccc', 'dddd', 'dddd']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password(max_attempts=10, min_length=3)
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertEqual(result, 'ccc')
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 29.243243 | 78 | 0.682994 |
import sys, os, getpass
import unittest
from gppylib.userinput import ask_create_password
class GppylibUserInputTestCase(unittest.TestCase):
    """Tests for ask_create_password, driven by a fake getpass.getpass.

    The test values are consumed one per prompt; the data suggests
    ask_create_password prompts twice per attempt (password + confirmation),
    so values come in pairs.
    """
    @staticmethod
    def mock_get_pass_start(values):
        # Replace getpass.getpass with a stub that returns the entries of
        # *values* sequentially, one per call.
        def mock_get_pass(prompt):
            mock_get_pass.counter += 1
            return values[mock_get_pass.counter]
        mock_get_pass.counter = -1
        # Remember the real implementation so mock_get_pass_stop can restore it.
        GppylibUserInputTestCase.old_std_method = getpass.getpass
        getpass.getpass = mock_get_pass
    @staticmethod
    def mock_get_pass_stop():
        getpass.getpass = GppylibUserInputTestCase.old_std_method
    def test00_verify_too_short_password(self):
        """Verify too short password is rejected."""
        stdinvals = ['abc', 'abc', 'foobar', 'foobar']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password()
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertEqual(result, 'foobar')
    def test01_verify_too_short_password(self):
        """Verify non-matching password is rejected."""
        # NOTE(review): method name says "too_short" but this case tests
        # non-matching passwords; consider renaming.
        stdinvals = ['abcdef', 'ABCDEF', 'coolio', 'coolio']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password()
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertEqual(result, 'coolio')
    def test02_verify_max_attempts(self):
        """Verify that exhausting max_attempts yields None."""
        stdinvals = ['a', 'a', 'b', 'b', 'c', 'c', 'd', 'd']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password(max_attempts=3)
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertIsNone(result)
    def test03_verify_min_length(self):
        """Verify the minimum password length is enforced."""
        stdinvals = ['a', 'a', 'bb', 'bb', 'ccc', 'ccc', 'dddd', 'dddd']
        GppylibUserInputTestCase.mock_get_pass_start(stdinvals)
        result = ask_create_password(max_attempts=10, min_length=3)
        GppylibUserInputTestCase.mock_get_pass_stop()
        self.assertEqual(result, 'ccc')
if __name__ == "__main__":
unittest.main()
| true | true |
f7fa7d359a9605f6e48fd57f2610659241c2446f | 6,763 | py | Python | nmftranscription/inst_dict.py | arity-r/nmf-transcription | 2dcf80ba3ca0169130858b4203a780c421ab1acd | [
"MIT"
] | 1 | 2021-03-25T10:35:38.000Z | 2021-03-25T10:35:38.000Z | nmftranscription/inst_dict.py | arity-r/nmf-transcription | 2dcf80ba3ca0169130858b4203a780c421ab1acd | [
"MIT"
] | null | null | null | nmftranscription/inst_dict.py | arity-r/nmf-transcription | 2dcf80ba3ca0169130858b4203a780c421ab1acd | [
"MIT"
] | null | null | null |
melodic_dict = {
'Acoustic Piano': (1, 'Piano'),
'Bright Piano': (2, 'Piano'),
'Electric Grand Piano': (3, 'Piano'),
'Honky-tonk Piano': (4, 'Piano'),
'Electric Piano': (5, 'Piano'),
'Electric Piano 2': (6, 'Piano'),
'Harpsichord': (7, 'Piano'),
'Clavi': (8, 'Piano'),
'Celesta': (9, 'Chromatic Percussion'),
'Glockenspiel': (10, 'Chromatic Percussion'),
'Musical box': (11, 'Chromatic Percussion'),
'Vibraphone': (12, 'Chromatic Percussion'),
'Marimba': (13, 'Chromatic Percussion'),
'Xylophone': (14, 'Chromatic Percussion'),
'Tubular Bell': (15, 'Chromatic Percussion'),
'Dulcimer': (16, 'Chromatic Percussion'),
'Drawbar Organ': (17, 'Organ'),
'Percussive Organ': (18, 'Organ'),
'Rock Organ': (19, 'Organ'),
'Church organ': (20, 'Organ'),
'Reed organ': (21, 'Organ'),
'Accordion': (22, 'Organ'),
'Harmonica': (23, 'Organ'),
'Tango Accordion': (24, 'Organ'),
'Acoustic Guitar (nylon)': (25, 'Guitar'),
'Acoustic Guitar (steel)': (26, 'Guitar'),
'Electric Guitar (jazz)': (27, 'Guitar'),
'Electric Guitar (clean)': (28, 'Guitar'),
'Electric Guitar (muted)': (29, 'Guitar'),
'Overdriven Guitar': (30, 'Guitar'),
'Distortion Guitar': (31, 'Guitar'),
'Guitar harmonics': (32, 'Guitar'),
'Acoustic Bass': (33, 'Bass'),
'Electric Bass (finger)': (34, 'Bass'),
'Electric Bass (pick)': (35, 'Bass'),
'Fretless Bass': (36, 'Bass'),
'Slap Bass 1': (37, 'Bass'),
'Slap Bass 2': (38, 'Bass'),
'Synth Bass 1': (39, 'Bass'),
'Synth Bass 2': (40, 'Bass'),
'Violin': (41, 'Strings'),
'Viola': (42, 'Strings'),
'Cello': (43, 'Strings'),
'Double bass': (44, 'Strings'),
'Tremolo Strings': (45, 'Strings'),
'Pizzicato Strings': (46, 'Strings'),
'Orchestral Harp': (47, 'Strings'),
'Timpani': (48, 'Strings'),
'String Ensemble 1': (49, 'Ensemble'),
'String Ensemble 2': (50, 'Ensemble'),
'Synth Strings 1': (51, 'Ensemble'),
'Synth Strings 2': (52, 'Ensemble'),
'Voice Aahs': (53, 'Ensemble'),
'Voice Oohs': (54, 'Ensemble'),
'Synth Voice': (55, 'Ensemble'),
'Orchestra Hit': (56, 'Ensemble'),
'Trumpet': (57, 'Brass'),
'Trombone': (58, 'Brass'),
'Tuba': (59, 'Brass'),
'Muted Trumpet': (60, 'Brass'),
'French horn': (61, 'Brass'),
'Brass Section': (62, 'Brass'),
'Synth Brass 1': (63, 'Brass'),
'Synth Brass 2': (64, 'Brass'),
'Soprano Sax': (65, 'Reed'),
'Alto Sax': (66, 'Reed'),
'Tenor Sax': (67, 'Reed'),
'Baritone Sax': (68, 'Reed'),
'Oboe': (69, 'Reed'),
'English Horn': (70, 'Reed'),
'Bassoon': (71, 'Reed'),
'Clarinet': (72, 'Reed'),
'Piccolo': (73, 'Pipe'),
'Flute': (74, 'Pipe'),
'Recorder': (75, 'Pipe'),
'Pan Flute': (76, 'Pipe'),
'Blown Bottle': (77, 'Pipe'),
'Shakuhachi': (78, 'Pipe'),
'Whistle': (79, 'Pipe'),
'Ocarina': (80, 'Pipe'),
'Lead 1 (square)': (81, 'Synth Lead'),
'Lead 2 (sawtooth)': (82, 'Synth Lead'),
'Lead 3 (calliope)': (83, 'Synth Lead'),
'Lead 4 (chiff)': (84, 'Synth Lead'),
'Lead 5 (charang)': (85, 'Synth Lead'),
'Lead 6 (voice)': (86, 'Synth Lead'),
'Lead 7 (fifths)': (87, 'Synth Lead'),
'Lead 8 (bass + lead)': (88, 'Synth Lead'),
'Pad 1 (Fantasia)': (89, 'Synth Pad'),
'Pad 2 (warm)': (90, 'Synth Pad'),
'Pad 3 (polysynth)': (91, 'Synth Pad'),
'Pad 4 (choir)': (92, 'Synth Pad'),
'Pad 5 (bowed)': (93, 'Synth Pad'),
'Pad 6 (metallic)': (94, 'Synth Pad'),
'Pad 7 (halo)': (95, 'Synth Pad'),
'Pad 8 (sweep)': (96, 'Synth Pad'),
'FX 1 (rain)': (97, 'Synth Effects'),
'FX 2 (soundtrack)': (98, 'Synth Effects'),
'FX 3 (crystal)': (99, 'Synth Effects'),
'FX 4 (atmosphere)': (100, 'Synth Effects'),
'FX 5 (brightness)': (101, 'Synth Effects'),
'FX 6 (goblins)': (102, 'Synth Effects'),
'FX 7 (echoes)': (103, 'Synth Effects'),
'FX 8 (sci-fi)': (104, 'Synth Effects'),
'Sitar': (105, 'Ethnic'),
'Banjo': (106, 'Ethnic'),
'Shamisen': (107, 'Ethnic'),
'Koto': (108, 'Ethnic'),
'Kalimba': (109, 'Ethnic'),
'Bagpipe': (110, 'Ethnic'),
'Fiddle': (111, 'Ethnic'),
'Shanai': (112, 'Ethnic'),
'Tinkle Bell': (113, 'Percussive'),
'Agogo': (114, 'Percussive'),
'Steel Drums': (115, 'Percussive'),
'Woodblock': (116, 'Percussive'),
'Taiko Drum': (117, 'Percussive'),
'Melodic Tom': (118, 'Percussive'),
'Synth Drum': (119, 'Percussive'),
'Reverse Cymbal': (120, 'Percussive'),
'Guitar Fret Noise': (121, 'Sound effects'),
'Breath Noise': (122, 'Sound effects'),
'Seashore': (123, 'Sound effects'),
'Bird Tweet': (124, 'Sound effects'),
'Telephone Ring': (125, 'Sound effects'),
'Helicopter': (126, 'Sound effects'),
'Applause': (127, 'Sound effects'),
'Gunshot': (128, 'Sound effects'),
}
percussion_dict = {
'Bass Drum 2': (35, 'Percussion'),
'Bass Drum 1': (36, 'Percussion'),
'Side Stick': (37, 'Percussion'),
'Snare Drum 1': (38, 'Percussion'),
'Hand Clap': (39, 'Percussion'),
'Snare Drum 2': (40, 'Percussion'),
'Low Tom 2': (41, 'Percussion'),
'Closed Hi-hat': (42, 'Percussion'),
'Low Tom 1': (43, 'Percussion'),
'Pedal Hi-hat': (44, 'Percussion'),
'Mid Tom 2': (45, 'Percussion'),
'Open Hi-hat': (46, 'Percussion'),
'Mid Tom 1': (47, 'Percussion'),
'High Tom 2': (48, 'Percussion'),
'Crash Cymbal 1': (49, 'Percussion'),
'High Tom 1': (50, 'Percussion'),
'Ride Cymbal 1': (51, 'Percussion'),
'Chinese Cymbal': (52, 'Percussion'),
'Ride Bell': (53, 'Percussion'),
'Tambourine': (54, 'Percussion'),
'Splash Cymbal': (55, 'Percussion'),
'Cowbell': (56, 'Percussion'),
'Crash Cymbal 2': (57, 'Percussion'),
'Vibra Slap': (58, 'Percussion'),
'Ride Cymbal 2': (59, 'Percussion'),
'High Bongo': (60, 'Percussion'),
'Low Bongo': (61, 'Percussion'),
'Mute High Conga': (62, 'Percussion'),
'Open High Conga': (63, 'Percussion'),
'Low Conga': (64, 'Percussion'),
'High Timbale': (65, 'Percussion'),
'Low Timbale': (66, 'Percussion'),
'High Agogo': (67, 'Percussion'),
'Low Agogo': (68, 'Percussion'),
'Cabasa': (69, 'Percussion'),
'Maracas': (70, 'Percussion'),
'Short Whistle': (71, 'Percussion'),
'Long Whistle': (72, 'Percussion'),
'Short Guiro': (73, 'Percussion'),
'Long Guiro': (74, 'Percussion'),
'Claves': (75, 'Percussion'),
'High Wood Block': (76, 'Percussion'),
'Low Wood Block': (77, 'Percussion'),
'Mute Cuica': (78, 'Percussion'),
'Open Cuica': (79, 'Percussion'),
'Mute Triangle': (80, 'Percussion'),
'Open Triangle': (81, 'Percussion'),
}
| 37.159341 | 49 | 0.538962 |
melodic_dict = {
'Acoustic Piano': (1, 'Piano'),
'Bright Piano': (2, 'Piano'),
'Electric Grand Piano': (3, 'Piano'),
'Honky-tonk Piano': (4, 'Piano'),
'Electric Piano': (5, 'Piano'),
'Electric Piano 2': (6, 'Piano'),
'Harpsichord': (7, 'Piano'),
'Clavi': (8, 'Piano'),
'Celesta': (9, 'Chromatic Percussion'),
'Glockenspiel': (10, 'Chromatic Percussion'),
'Musical box': (11, 'Chromatic Percussion'),
'Vibraphone': (12, 'Chromatic Percussion'),
'Marimba': (13, 'Chromatic Percussion'),
'Xylophone': (14, 'Chromatic Percussion'),
'Tubular Bell': (15, 'Chromatic Percussion'),
'Dulcimer': (16, 'Chromatic Percussion'),
'Drawbar Organ': (17, 'Organ'),
'Percussive Organ': (18, 'Organ'),
'Rock Organ': (19, 'Organ'),
'Church organ': (20, 'Organ'),
'Reed organ': (21, 'Organ'),
'Accordion': (22, 'Organ'),
'Harmonica': (23, 'Organ'),
'Tango Accordion': (24, 'Organ'),
'Acoustic Guitar (nylon)': (25, 'Guitar'),
'Acoustic Guitar (steel)': (26, 'Guitar'),
'Electric Guitar (jazz)': (27, 'Guitar'),
'Electric Guitar (clean)': (28, 'Guitar'),
'Electric Guitar (muted)': (29, 'Guitar'),
'Overdriven Guitar': (30, 'Guitar'),
'Distortion Guitar': (31, 'Guitar'),
'Guitar harmonics': (32, 'Guitar'),
'Acoustic Bass': (33, 'Bass'),
'Electric Bass (finger)': (34, 'Bass'),
'Electric Bass (pick)': (35, 'Bass'),
'Fretless Bass': (36, 'Bass'),
'Slap Bass 1': (37, 'Bass'),
'Slap Bass 2': (38, 'Bass'),
'Synth Bass 1': (39, 'Bass'),
'Synth Bass 2': (40, 'Bass'),
'Violin': (41, 'Strings'),
'Viola': (42, 'Strings'),
'Cello': (43, 'Strings'),
'Double bass': (44, 'Strings'),
'Tremolo Strings': (45, 'Strings'),
'Pizzicato Strings': (46, 'Strings'),
'Orchestral Harp': (47, 'Strings'),
'Timpani': (48, 'Strings'),
'String Ensemble 1': (49, 'Ensemble'),
'String Ensemble 2': (50, 'Ensemble'),
'Synth Strings 1': (51, 'Ensemble'),
'Synth Strings 2': (52, 'Ensemble'),
'Voice Aahs': (53, 'Ensemble'),
'Voice Oohs': (54, 'Ensemble'),
'Synth Voice': (55, 'Ensemble'),
'Orchestra Hit': (56, 'Ensemble'),
'Trumpet': (57, 'Brass'),
'Trombone': (58, 'Brass'),
'Tuba': (59, 'Brass'),
'Muted Trumpet': (60, 'Brass'),
'French horn': (61, 'Brass'),
'Brass Section': (62, 'Brass'),
'Synth Brass 1': (63, 'Brass'),
'Synth Brass 2': (64, 'Brass'),
'Soprano Sax': (65, 'Reed'),
'Alto Sax': (66, 'Reed'),
'Tenor Sax': (67, 'Reed'),
'Baritone Sax': (68, 'Reed'),
'Oboe': (69, 'Reed'),
'English Horn': (70, 'Reed'),
'Bassoon': (71, 'Reed'),
'Clarinet': (72, 'Reed'),
'Piccolo': (73, 'Pipe'),
'Flute': (74, 'Pipe'),
'Recorder': (75, 'Pipe'),
'Pan Flute': (76, 'Pipe'),
'Blown Bottle': (77, 'Pipe'),
'Shakuhachi': (78, 'Pipe'),
'Whistle': (79, 'Pipe'),
'Ocarina': (80, 'Pipe'),
'Lead 1 (square)': (81, 'Synth Lead'),
'Lead 2 (sawtooth)': (82, 'Synth Lead'),
'Lead 3 (calliope)': (83, 'Synth Lead'),
'Lead 4 (chiff)': (84, 'Synth Lead'),
'Lead 5 (charang)': (85, 'Synth Lead'),
'Lead 6 (voice)': (86, 'Synth Lead'),
'Lead 7 (fifths)': (87, 'Synth Lead'),
'Lead 8 (bass + lead)': (88, 'Synth Lead'),
'Pad 1 (Fantasia)': (89, 'Synth Pad'),
'Pad 2 (warm)': (90, 'Synth Pad'),
'Pad 3 (polysynth)': (91, 'Synth Pad'),
'Pad 4 (choir)': (92, 'Synth Pad'),
'Pad 5 (bowed)': (93, 'Synth Pad'),
'Pad 6 (metallic)': (94, 'Synth Pad'),
'Pad 7 (halo)': (95, 'Synth Pad'),
'Pad 8 (sweep)': (96, 'Synth Pad'),
'FX 1 (rain)': (97, 'Synth Effects'),
'FX 2 (soundtrack)': (98, 'Synth Effects'),
'FX 3 (crystal)': (99, 'Synth Effects'),
'FX 4 (atmosphere)': (100, 'Synth Effects'),
'FX 5 (brightness)': (101, 'Synth Effects'),
'FX 6 (goblins)': (102, 'Synth Effects'),
'FX 7 (echoes)': (103, 'Synth Effects'),
'FX 8 (sci-fi)': (104, 'Synth Effects'),
'Sitar': (105, 'Ethnic'),
'Banjo': (106, 'Ethnic'),
'Shamisen': (107, 'Ethnic'),
'Koto': (108, 'Ethnic'),
'Kalimba': (109, 'Ethnic'),
'Bagpipe': (110, 'Ethnic'),
'Fiddle': (111, 'Ethnic'),
'Shanai': (112, 'Ethnic'),
'Tinkle Bell': (113, 'Percussive'),
'Agogo': (114, 'Percussive'),
'Steel Drums': (115, 'Percussive'),
'Woodblock': (116, 'Percussive'),
'Taiko Drum': (117, 'Percussive'),
'Melodic Tom': (118, 'Percussive'),
'Synth Drum': (119, 'Percussive'),
'Reverse Cymbal': (120, 'Percussive'),
'Guitar Fret Noise': (121, 'Sound effects'),
'Breath Noise': (122, 'Sound effects'),
'Seashore': (123, 'Sound effects'),
'Bird Tweet': (124, 'Sound effects'),
'Telephone Ring': (125, 'Sound effects'),
'Helicopter': (126, 'Sound effects'),
'Applause': (127, 'Sound effects'),
'Gunshot': (128, 'Sound effects'),
}
percussion_dict = {
'Bass Drum 2': (35, 'Percussion'),
'Bass Drum 1': (36, 'Percussion'),
'Side Stick': (37, 'Percussion'),
'Snare Drum 1': (38, 'Percussion'),
'Hand Clap': (39, 'Percussion'),
'Snare Drum 2': (40, 'Percussion'),
'Low Tom 2': (41, 'Percussion'),
'Closed Hi-hat': (42, 'Percussion'),
'Low Tom 1': (43, 'Percussion'),
'Pedal Hi-hat': (44, 'Percussion'),
'Mid Tom 2': (45, 'Percussion'),
'Open Hi-hat': (46, 'Percussion'),
'Mid Tom 1': (47, 'Percussion'),
'High Tom 2': (48, 'Percussion'),
'Crash Cymbal 1': (49, 'Percussion'),
'High Tom 1': (50, 'Percussion'),
'Ride Cymbal 1': (51, 'Percussion'),
'Chinese Cymbal': (52, 'Percussion'),
'Ride Bell': (53, 'Percussion'),
'Tambourine': (54, 'Percussion'),
'Splash Cymbal': (55, 'Percussion'),
'Cowbell': (56, 'Percussion'),
'Crash Cymbal 2': (57, 'Percussion'),
'Vibra Slap': (58, 'Percussion'),
'Ride Cymbal 2': (59, 'Percussion'),
'High Bongo': (60, 'Percussion'),
'Low Bongo': (61, 'Percussion'),
'Mute High Conga': (62, 'Percussion'),
'Open High Conga': (63, 'Percussion'),
'Low Conga': (64, 'Percussion'),
'High Timbale': (65, 'Percussion'),
'Low Timbale': (66, 'Percussion'),
'High Agogo': (67, 'Percussion'),
'Low Agogo': (68, 'Percussion'),
'Cabasa': (69, 'Percussion'),
'Maracas': (70, 'Percussion'),
'Short Whistle': (71, 'Percussion'),
'Long Whistle': (72, 'Percussion'),
'Short Guiro': (73, 'Percussion'),
'Long Guiro': (74, 'Percussion'),
'Claves': (75, 'Percussion'),
'High Wood Block': (76, 'Percussion'),
'Low Wood Block': (77, 'Percussion'),
'Mute Cuica': (78, 'Percussion'),
'Open Cuica': (79, 'Percussion'),
'Mute Triangle': (80, 'Percussion'),
'Open Triangle': (81, 'Percussion'),
}
| true | true |
f7fa7dbdac4d1c703c06e35885863d083c8b03c2 | 12,341 | py | Python | sdk/python/pulumi_azure_native/network/v20150504preview/record_set.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/network/v20150504preview/record_set.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/network/v20150504preview/record_set.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['RecordSet']
class RecordSet(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
a_aaa_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AaaaRecordArgs']]]]] = None,
a_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ARecordArgs']]]]] = None,
c_name_record: Optional[pulumi.Input[pulumi.InputType['CnameRecordArgs']]] = None,
etag: Optional[pulumi.Input[str]] = None,
m_x_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MxRecordArgs']]]]] = None,
n_s_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NsRecordArgs']]]]] = None,
p_tr_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PtrRecordArgs']]]]] = None,
record_type: Optional[pulumi.Input[str]] = None,
relative_record_set_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
s_oa_record: Optional[pulumi.Input[pulumi.InputType['SoaRecordArgs']]] = None,
s_rv_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SrvRecordArgs']]]]] = None,
t_xt_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TxtRecordArgs']]]]] = None,
ttl: Optional[pulumi.Input[float]] = None,
zone_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Describes a DNS record set (a collection of DNS records with the same name and type).
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AaaaRecordArgs']]]] a_aaa_records: Gets or sets the list of AAAA records in the RecordSet.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ARecordArgs']]]] a_records: Gets or sets the list of A records in the RecordSet.
:param pulumi.Input[pulumi.InputType['CnameRecordArgs']] c_name_record: Gets or sets the CNAME record in the RecordSet.
:param pulumi.Input[str] etag: The etag of the record set.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MxRecordArgs']]]] m_x_records: Gets or sets the list of MX records in the RecordSet.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NsRecordArgs']]]] n_s_records: Gets or sets the list of NS records in the RecordSet.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PtrRecordArgs']]]] p_tr_records: Gets or sets the list of PTR records in the RecordSet.
:param pulumi.Input[str] record_type: The type of DNS record.
:param pulumi.Input[str] relative_record_set_name: The name of the RecordSet, relative to the name of the zone.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[pulumi.InputType['SoaRecordArgs']] s_oa_record: Gets or sets the SOA record in the RecordSet.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SrvRecordArgs']]]] s_rv_records: Gets or sets the list of SRV records in the RecordSet.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TxtRecordArgs']]]] t_xt_records: Gets or sets the list of TXT records in the RecordSet.
:param pulumi.Input[float] ttl: Gets or sets the TTL of the records in the RecordSet.
:param pulumi.Input[str] zone_name: The name of the zone without a terminating dot.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['a_aaa_records'] = a_aaa_records
__props__['a_records'] = a_records
__props__['c_name_record'] = c_name_record
__props__['etag'] = etag
__props__['m_x_records'] = m_x_records
__props__['n_s_records'] = n_s_records
__props__['p_tr_records'] = p_tr_records
if record_type is None and not opts.urn:
raise TypeError("Missing required property 'record_type'")
__props__['record_type'] = record_type
__props__['relative_record_set_name'] = relative_record_set_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['s_oa_record'] = s_oa_record
__props__['s_rv_records'] = s_rv_records
__props__['t_xt_records'] = t_xt_records
__props__['ttl'] = ttl
if zone_name is None and not opts.urn:
raise TypeError("Missing required property 'zone_name'")
__props__['zone_name'] = zone_name
__props__['fqdn'] = None
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20150504preview:RecordSet"), pulumi.Alias(type_="azure-native:network/v20160401:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20160401:RecordSet"), pulumi.Alias(type_="azure-native:network/v20170901:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20170901:RecordSet"), pulumi.Alias(type_="azure-native:network/v20171001:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20171001:RecordSet"), pulumi.Alias(type_="azure-native:network/v20180301preview:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20180301preview:RecordSet"), pulumi.Alias(type_="azure-native:network/v20180501:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20180501:RecordSet")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(RecordSet, __self__).__init__(
'azure-native:network/v20150504preview:RecordSet',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'RecordSet':
"""
Get an existing RecordSet resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["a_aaa_records"] = None
__props__["a_records"] = None
__props__["c_name_record"] = None
__props__["etag"] = None
__props__["fqdn"] = None
__props__["m_x_records"] = None
__props__["n_s_records"] = None
__props__["name"] = None
__props__["p_tr_records"] = None
__props__["s_oa_record"] = None
__props__["s_rv_records"] = None
__props__["t_xt_records"] = None
__props__["ttl"] = None
__props__["type"] = None
return RecordSet(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="aAAARecords")
def a_aaa_records(self) -> pulumi.Output[Optional[Sequence['outputs.AaaaRecordResponse']]]:
"""
Gets or sets the list of AAAA records in the RecordSet.
"""
return pulumi.get(self, "a_aaa_records")
@property
@pulumi.getter(name="aRecords")
def a_records(self) -> pulumi.Output[Optional[Sequence['outputs.ARecordResponse']]]:
"""
Gets or sets the list of A records in the RecordSet.
"""
return pulumi.get(self, "a_records")
@property
@pulumi.getter(name="cNAMERecord")
def c_name_record(self) -> pulumi.Output[Optional['outputs.CnameRecordResponse']]:
"""
Gets or sets the CNAME record in the RecordSet.
"""
return pulumi.get(self, "c_name_record")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
"""
The etag of the record set.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def fqdn(self) -> pulumi.Output[str]:
"""
Fully qualified domain name of the record set.
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="mXRecords")
def m_x_records(self) -> pulumi.Output[Optional[Sequence['outputs.MxRecordResponse']]]:
"""
Gets or sets the list of MX records in the RecordSet.
"""
return pulumi.get(self, "m_x_records")
@property
@pulumi.getter(name="nSRecords")
def n_s_records(self) -> pulumi.Output[Optional[Sequence['outputs.NsRecordResponse']]]:
"""
Gets or sets the list of NS records in the RecordSet.
"""
return pulumi.get(self, "n_s_records")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the record set.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pTRRecords")
def p_tr_records(self) -> pulumi.Output[Optional[Sequence['outputs.PtrRecordResponse']]]:
"""
Gets or sets the list of PTR records in the RecordSet.
"""
return pulumi.get(self, "p_tr_records")
@property
@pulumi.getter(name="sOARecord")
def s_oa_record(self) -> pulumi.Output[Optional['outputs.SoaRecordResponse']]:
"""
Gets or sets the SOA record in the RecordSet.
"""
return pulumi.get(self, "s_oa_record")
@property
@pulumi.getter(name="sRVRecords")
def s_rv_records(self) -> pulumi.Output[Optional[Sequence['outputs.SrvRecordResponse']]]:
"""
Gets or sets the list of SRV records in the RecordSet.
"""
return pulumi.get(self, "s_rv_records")
@property
@pulumi.getter(name="tXTRecords")
def t_xt_records(self) -> pulumi.Output[Optional[Sequence['outputs.TxtRecordResponse']]]:
"""
Gets or sets the list of TXT records in the RecordSet.
"""
return pulumi.get(self, "t_xt_records")
@property
@pulumi.getter
def ttl(self) -> pulumi.Output[Optional[float]]:
"""
Gets or sets the TTL of the records in the RecordSet.
"""
return pulumi.get(self, "ttl")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the record set.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 47.833333 | 784 | 0.653837 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['RecordSet']
class RecordSet(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
a_aaa_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AaaaRecordArgs']]]]] = None,
a_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ARecordArgs']]]]] = None,
c_name_record: Optional[pulumi.Input[pulumi.InputType['CnameRecordArgs']]] = None,
etag: Optional[pulumi.Input[str]] = None,
m_x_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MxRecordArgs']]]]] = None,
n_s_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NsRecordArgs']]]]] = None,
p_tr_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PtrRecordArgs']]]]] = None,
record_type: Optional[pulumi.Input[str]] = None,
relative_record_set_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
s_oa_record: Optional[pulumi.Input[pulumi.InputType['SoaRecordArgs']]] = None,
s_rv_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SrvRecordArgs']]]]] = None,
t_xt_records: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TxtRecordArgs']]]]] = None,
ttl: Optional[pulumi.Input[float]] = None,
zone_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['a_aaa_records'] = a_aaa_records
__props__['a_records'] = a_records
__props__['c_name_record'] = c_name_record
__props__['etag'] = etag
__props__['m_x_records'] = m_x_records
__props__['n_s_records'] = n_s_records
__props__['p_tr_records'] = p_tr_records
if record_type is None and not opts.urn:
raise TypeError("Missing required property 'record_type'")
__props__['record_type'] = record_type
__props__['relative_record_set_name'] = relative_record_set_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['s_oa_record'] = s_oa_record
__props__['s_rv_records'] = s_rv_records
__props__['t_xt_records'] = t_xt_records
__props__['ttl'] = ttl
if zone_name is None and not opts.urn:
raise TypeError("Missing required property 'zone_name'")
__props__['zone_name'] = zone_name
__props__['fqdn'] = None
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20150504preview:RecordSet"), pulumi.Alias(type_="azure-native:network/v20160401:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20160401:RecordSet"), pulumi.Alias(type_="azure-native:network/v20170901:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20170901:RecordSet"), pulumi.Alias(type_="azure-native:network/v20171001:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20171001:RecordSet"), pulumi.Alias(type_="azure-native:network/v20180301preview:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20180301preview:RecordSet"), pulumi.Alias(type_="azure-native:network/v20180501:RecordSet"), pulumi.Alias(type_="azure-nextgen:network/v20180501:RecordSet")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(RecordSet, __self__).__init__(
'azure-native:network/v20150504preview:RecordSet',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'RecordSet':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["a_aaa_records"] = None
__props__["a_records"] = None
__props__["c_name_record"] = None
__props__["etag"] = None
__props__["fqdn"] = None
__props__["m_x_records"] = None
__props__["n_s_records"] = None
__props__["name"] = None
__props__["p_tr_records"] = None
__props__["s_oa_record"] = None
__props__["s_rv_records"] = None
__props__["t_xt_records"] = None
__props__["ttl"] = None
__props__["type"] = None
return RecordSet(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="aAAARecords")
def a_aaa_records(self) -> pulumi.Output[Optional[Sequence['outputs.AaaaRecordResponse']]]:
return pulumi.get(self, "a_aaa_records")
@property
@pulumi.getter(name="aRecords")
def a_records(self) -> pulumi.Output[Optional[Sequence['outputs.ARecordResponse']]]:
return pulumi.get(self, "a_records")
@property
@pulumi.getter(name="cNAMERecord")
def c_name_record(self) -> pulumi.Output[Optional['outputs.CnameRecordResponse']]:
return pulumi.get(self, "c_name_record")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "etag")
@property
@pulumi.getter
def fqdn(self) -> pulumi.Output[str]:
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="mXRecords")
def m_x_records(self) -> pulumi.Output[Optional[Sequence['outputs.MxRecordResponse']]]:
return pulumi.get(self, "m_x_records")
@property
@pulumi.getter(name="nSRecords")
def n_s_records(self) -> pulumi.Output[Optional[Sequence['outputs.NsRecordResponse']]]:
return pulumi.get(self, "n_s_records")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pTRRecords")
def p_tr_records(self) -> pulumi.Output[Optional[Sequence['outputs.PtrRecordResponse']]]:
return pulumi.get(self, "p_tr_records")
@property
@pulumi.getter(name="sOARecord")
def s_oa_record(self) -> pulumi.Output[Optional['outputs.SoaRecordResponse']]:
return pulumi.get(self, "s_oa_record")
@property
@pulumi.getter(name="sRVRecords")
def s_rv_records(self) -> pulumi.Output[Optional[Sequence['outputs.SrvRecordResponse']]]:
return pulumi.get(self, "s_rv_records")
@property
@pulumi.getter(name="tXTRecords")
def t_xt_records(self) -> pulumi.Output[Optional[Sequence['outputs.TxtRecordResponse']]]:
return pulumi.get(self, "t_xt_records")
@property
@pulumi.getter
def ttl(self) -> pulumi.Output[Optional[float]]:
return pulumi.get(self, "ttl")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
f7fa7eeeef313acbe7bc2e241e47650887daf947 | 22,279 | py | Python | test/functional/importmulti.py | fujicoin/fujicoin-3.0-v0.15.3 | aa55bf9ac60f8261e38758884eb47aa78e5c53ac | [
"MIT"
] | 1 | 2018-11-12T22:40:44.000Z | 2018-11-12T22:40:44.000Z | test/functional/importmulti.py | fujicoin/fujicoin-3.0-v0.15.3 | aa55bf9ac60f8261e38758884eb47aa78e5c53ac | [
"MIT"
] | null | null | null | test/functional/importmulti.py | fujicoin/fujicoin-3.0-v0.15.3 | aa55bf9ac60f8261e38758884eb47aa78e5c53ac | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Fujicoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importmulti RPC."""
from test_framework.test_framework import FujicoinTestFramework
from test_framework.util import *
class ImportMultiTest (FujicoinTestFramework):
    """Functional test for the ``importmulti`` wallet RPC.

    Node 0 owns the keys/addresses; node 1 performs the imports. Each
    scenario checks the RPC result plus the resulting wallet state
    (``iswatchonly``/``ismine``/``timestamp`` from ``validateaddress``),
    and the error code/message for invalid request combinations.
    """
    def set_test_params(self):
        # Two nodes on a fresh (clean) chain.
        self.num_nodes = 2
        self.setup_clean_chain = True
    def setup_network(self):
        # Only start the nodes; no inter-node connections are required.
        self.setup_nodes()
    def run_test (self):
        """Drive importmulti through its success and failure cases on node 1."""
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        self.nodes[1].generate(1)
        # Median-time-past of node 1's tip: imports with timestamp "now"
        # are expected to record exactly this value.
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        # keyword definition
        # NOTE(review): these four constants are never referenced below;
        # kept for byte-compatibility with the upstream test.
        PRIV_KEY = 'privkey'
        PUB_KEY = 'pubkey'
        ADDRESS_KEY = 'address'
        SCRIPT_KEY = 'script'
        node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        #Check only one address
        assert_equal(node0_address1['ismine'], True)
        #Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(),1)
        #Address Test - before import
        address_info = self.nodes[1].validateaddress(node0_address1['address'])
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        # RPC importmulti -----------------------------------------------
        # Fujicoin Address
        self.log.info("Should import an address")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        # Remember this watch-only import; its timestamp is re-checked and
        # replaced near the end of the test.
        watchonly_address = address['address']
        watchonly_timestamp = timestamp
        self.log.info("Should not import an invalid address")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": "not valid address",
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Invalid address')
        # ScriptPubKey + internal
        self.log.info("Should import a scriptPubKey with internal flag")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "internal": True
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        # ScriptPubKey + !internal
        # A raw hex scriptPubKey without "internal" must be rejected.
        self.log.info("Should not import a scriptPubKey without internal flag")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # Address + Public key + !Internal
        self.log.info("Should import an address with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        # ScriptPubKey + Public key + internal
        self.log.info("Should import a scriptPubKey with internal and with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ],
            "internal": True
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        # ScriptPubKey + Public key + !internal
        self.log.info("Should not import a scriptPubKey without internal and with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ]
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # Address + Private key + !watchonly
        # Importing the private key makes the address spendable (ismine).
        self.log.info("Should import an address with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], True)
        assert_equal(address_assert['timestamp'], timestamp)
        self.log.info("Should not import an address with private key if is already imported")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -4)
        assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script')
        # Address + Private key + watchonly
        # "watchonly" and "keys" together are contradictory -> rejected.
        self.log.info("Should not import an address with private key and with watchonly")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
            "watchonly": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # ScriptPubKey + Private key + internal
        self.log.info("Should import a scriptPubKey with internal and with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
            "internal": True
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], True)
        assert_equal(address_assert['timestamp'], timestamp)
        # ScriptPubKey + Private key + !internal
        self.log.info("Should not import a scriptPubKey without internal and with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # P2SH address
        # 2-of-3 multisig; funded so listunspent can report on the output.
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        # NOTE(review): `transaction` is fetched but never used, here and in
        # the later P2SH scenarios (kept from the upstream test).
        transaction = self.nodes[1].gettransaction(transactionid)
        self.log.info("Should import a p2sh")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['isscript'], True)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], False)
        # P2SH + Redeem script
        # With the redeem script the output becomes solvable (not spendable).
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)
        self.log.info("Should import a p2sh with respective redeem script")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript']
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)
        # P2SH + Redeem script + Private Keys + !Watchonly
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)
        self.log.info("Should import a p2sh with respective redeem script and private keys")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript'],
            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)
        # P2SH + Redeem script + Private Keys + Watchonly
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)
        self.log.info("Should import a p2sh with respective redeem script and private keys")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript'],
            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
            "watchonly": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
        # Address + Public key + !Internal + Wrong pubkey
        # Mismatched key material fails the RPC's consistency check.
        self.log.info("Should not import an address with a wrong public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "pubkeys": [ address2['pubkey'] ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # ScriptPubKey + Public key + internal + Wrong pubkey
        self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address2['pubkey'] ],
            "internal": True
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # Address + Private key + !watchonly + Wrong private key
        self.log.info("Should not import an address with a wrong private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # ScriptPubKey + Private key + internal + Wrong private key
        self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
            "internal": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)
        # Importing existing watch only address with new timestamp should replace saved timestamp.
        assert_greater_than(timestamp, watchonly_timestamp)
        self.log.info("Should replace previously saved watch only timestamp.")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": watchonly_address,
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(watchonly_address)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        watchonly_timestamp = timestamp
        # restart nodes to check for proper serialization/deserialization of watch only address
        self.stop_nodes()
        self.start_nodes()
        address_assert = self.nodes[1].validateaddress(watchonly_address)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], watchonly_timestamp)
        # Bad or missing timestamps
        self.log.info("Should throw on invalid or missing timestamp values")
        assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
            self.nodes[1].importmulti, [{
                "scriptPubKey": address['scriptPubKey'],
            }])
        assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
            self.nodes[1].importmulti, [{
                "scriptPubKey": address['scriptPubKey'],
                "timestamp": "",
            }])
if __name__ == '__main__':
    # Script entry point: run the functional test.
    ImportMultiTest().main()
| 48.015086 | 137 | 0.631222 |
from test_framework.test_framework import FujicoinTestFramework
from test_framework.util import *
class ImportMultiTest (FujicoinTestFramework):
    def set_test_params(self):
        # Two nodes starting from a fresh (clean) chain.
        self.num_nodes = 2
        self.setup_clean_chain = True
    def setup_network(self):
        # Only start the nodes; no inter-node connections are required.
        self.setup_nodes()
def run_test (self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
PRIV_KEY = 'privkey'
PUB_KEY = 'pubkey'
ADDRESS_KEY = 'address'
SCRIPT_KEY = 'script'
node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
assert_equal(node0_address1['ismine'], True)
assert_equal(self.nodes[1].getblockcount(),1)
address_info = self.nodes[1].validateaddress(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
self.log.info("Should import an address")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
watchonly_address = address['address']
watchonly_timestamp = timestamp
self.log.info("Should not import an invalid address")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": "not valid address",
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Invalid address')
self.log.info("Should import a scriptPubKey with internal flag")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"internal": True
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import a scriptPubKey without internal flag")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should import an address with public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"pubkeys": [ address['pubkey'] ]
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should import a scriptPubKey with internal and with public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"pubkeys": [ address['pubkey'] ],
"internal": True
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import a scriptPubKey without internal and with public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"pubkeys": [ address['pubkey'] ]
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should import an address with private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import an address with private key if is already imported")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -4)
assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script')
self.log.info("Should not import an address with private key and with watchonly")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ],
"watchonly": True
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should import a scriptPubKey with internal and with private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ],
"internal": True
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import a scriptPubKey without internal and with private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
assert_equal(address_assert['isscript'], True)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], False)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
"redeemscript": multi_sig_script['redeemScript']
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script and private keys")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
"redeemscript": multi_sig_script['redeemScript'],
"keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script and private keys")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
"redeemscript": multi_sig_script['redeemScript'],
"keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
"watchonly": True
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
self.log.info("Should not import an address with a wrong public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"pubkeys": [ address2['pubkey'] ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"pubkeys": [ address2['pubkey'] ],
"internal": True
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should not import an address with a wrong private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
"internal": True
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
assert_greater_than(timestamp, watchonly_timestamp)
self.log.info("Should replace previously saved watch only timestamp.")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": watchonly_address,
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
watchonly_timestamp = timestamp
self.stop_nodes()
self.start_nodes()
address_assert = self.nodes[1].validateaddress(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], watchonly_timestamp)
self.log.info("Should throw on invalid or missing timestamp values")
assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
self.nodes[1].importmulti, [{
"scriptPubKey": address['scriptPubKey'],
}])
assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
self.nodes[1].importmulti, [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "",
}])
if __name__ == '__main__':
ImportMultiTest ().main ()
| true | true |
f7fa7f59103631d4a9e17477ebb50b9beb6b2037 | 658 | py | Python | model/test.py | delcacho/DataSciencePlatform | c19ac4c1aba54bafc0fed05cc534bb447ab3b631 | [
"BSD-3-Clause"
] | null | null | null | model/test.py | delcacho/DataSciencePlatform | c19ac4c1aba54bafc0fed05cc534bb447ab3b631 | [
"BSD-3-Clause"
] | null | null | null | model/test.py | delcacho/DataSciencePlatform | c19ac4c1aba54bafc0fed05cc534bb447ab3b631 | [
"BSD-3-Clause"
] | null | null | null | import warnings
warnings.filterwarnings("ignore")
from seldon_core.seldon_client import SeldonClient
import pandas as pd
data = pd.read_csv("wine-quality.csv")
data.head()
x_0 = data.drop(["quality"], axis=1).values[:1]
batch = x_0
sc = SeldonClient( \
deployment_name="mlflow-ab-test",
namespace = "default",
gateway_endpoint="api.bayescluster.com",
transport="rest",
debug = True \
)
colnames = data.columns.tolist()
colnames.remove("quality")
for i in range(1,2000):
r = sc.predict(data=batch, names=colnames, payload_type = "ndarray")
print(r)
assert(r.success==True)
#r = sc.explain(data=batch, predictor="quality")
#print(r)
| 20.5625 | 70 | 0.712766 | import warnings
warnings.filterwarnings("ignore")
from seldon_core.seldon_client import SeldonClient
import pandas as pd
data = pd.read_csv("wine-quality.csv")
data.head()
x_0 = data.drop(["quality"], axis=1).values[:1]
batch = x_0
sc = SeldonClient( \
deployment_name="mlflow-ab-test",
namespace = "default",
gateway_endpoint="api.bayescluster.com",
transport="rest",
debug = True \
)
colnames = data.columns.tolist()
colnames.remove("quality")
for i in range(1,2000):
r = sc.predict(data=batch, names=colnames, payload_type = "ndarray")
print(r)
assert(r.success==True)
| true | true |
f7fa7fa72d4d174527f7326fc3c3ad41e9c5f946 | 7,064 | py | Python | example/settings.py | guitarparty/django-socialregistration | d1d34890c9f72a191723d0192398791f71ee6956 | [
"MIT"
] | 1 | 2016-03-24T05:25:44.000Z | 2016-03-24T05:25:44.000Z | example/settings.py | assiotis/django-socialregistration | b9ac4ce0a8251bd915022816355e6f02e1cab291 | [
"MIT"
] | null | null | null | example/settings.py | assiotis/django-socialregistration | b9ac4ce0a8251bd915022816355e6f02e1cab291 | [
"MIT"
] | null | null | null | # Django settings for example project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '%s/db.sqlite' % os.path.dirname(__file__), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = '%s/media/' % os.path.dirname(__file__)
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '%s/static/' % os.path.dirname(__file__)
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '^@qj2z043lalviucgjjf7sl440^@)u7b^q^h^ik(t@-exze96h'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
ROOT_URLCONF = 'example.urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    # NOTE: the trailing comma is required -- without it the parentheses are
    # just grouping and TEMPLATE_DIRS becomes a plain string, which Django
    # would then iterate over character by character.
    '%s/templates' % os.path.dirname(__file__),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'socialregistration',
'socialregistration.contrib.openid',
'socialregistration.contrib.twitter',
'socialregistration.contrib.linkedin',
'socialregistration.contrib.github',
'socialregistration.contrib.facebook',
'socialregistration.contrib.foursquare',
'socialregistration.contrib.tumblr',
'example.app',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# Socialregistration specific settings - including extended settings
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'socialregistration.contrib.openid.auth.OpenIDAuth',
'socialregistration.contrib.twitter.auth.TwitterAuth',
'socialregistration.contrib.linkedin.auth.LinkedInAuth',
'socialregistration.contrib.github.auth.GithubAuth',
'socialregistration.contrib.facebook.auth.FacebookAuth',
'socialregistration.contrib.foursquare.auth.FoursquareAuth',
'socialregistration.contrib.tumblr.auth.TumblrAuth',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'socialregistration.contrib.facebook.middleware.FacebookMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.request",
)
# Add your Facebook API keys here
FACEBOOK_APP_ID = ''
FACEBOOK_SECRET_KEY = ''
FACEBOOK_REQUEST_PERMISSIONS = ''
# Add your Twitter API keys here
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET_KEY = ''
# Add your LinkedIn API keys here
LINKEDIN_CONSUMER_KEY = ''
LINKEDIN_CONSUMER_SECRET_KEY = ''
# Add your Github API keys here
GITHUB_CLIENT_ID = ''
GITHUB_CLIENT_SECRET = ''
GITHUB_REQUEST_PERMISSIONS = ''
# Add your Foursquare API keys here
FOURSQUARE_CLIENT_ID = ''
FOURSQUARE_CLIENT_SECRET = ''
FOURSQUARE_REQUEST_PERMISSIONS = ''
# Add your tumblr API keys here
TUMBLR_CONSUMER_KEY = ''
TUMBLR_CONSUMER_SECRET_KEY = ''
SOCIALREGISTRATION_USE_HTTPS = False
SOCIALREGISTRATION_GENERATE_USERNAME = False
LOGIN_REDIRECT_URL = '/'
| 33.164319 | 122 | 0.727775 |
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '%s/db.sqlite' % os.path.dirname(__file__),
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
TIME_ZONE = 'Europe/London'
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
MEDIA_ROOT = '%s/media/' % os.path.dirname(__file__)
MEDIA_URL = '/media/'
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
STATIC_ROOT = '%s/static/' % os.path.dirname(__file__)
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = (
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '^@qj2z043lalviucgjjf7sl440^@)u7b^q^h^ik(t@-exze96h'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'example.urls'
TEMPLATE_DIRS = (
'%s/templates' % os.path.dirname(__file__)
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'socialregistration',
'socialregistration.contrib.openid',
'socialregistration.contrib.twitter',
'socialregistration.contrib.linkedin',
'socialregistration.contrib.github',
'socialregistration.contrib.facebook',
'socialregistration.contrib.foursquare',
'socialregistration.contrib.tumblr',
'example.app',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# Socialregistration specific settings - including extended settings
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'socialregistration.contrib.openid.auth.OpenIDAuth',
'socialregistration.contrib.twitter.auth.TwitterAuth',
'socialregistration.contrib.linkedin.auth.LinkedInAuth',
'socialregistration.contrib.github.auth.GithubAuth',
'socialregistration.contrib.facebook.auth.FacebookAuth',
'socialregistration.contrib.foursquare.auth.FoursquareAuth',
'socialregistration.contrib.tumblr.auth.TumblrAuth',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'socialregistration.contrib.facebook.middleware.FacebookMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.request",
)
# Add your Facebook API keys here
FACEBOOK_APP_ID = ''
FACEBOOK_SECRET_KEY = ''
FACEBOOK_REQUEST_PERMISSIONS = ''
# Add your Twitter API keys here
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET_KEY = ''
# Add your LinkedIn API keys here
LINKEDIN_CONSUMER_KEY = ''
LINKEDIN_CONSUMER_SECRET_KEY = ''
# Add your Github API keys here
GITHUB_CLIENT_ID = ''
GITHUB_CLIENT_SECRET = ''
GITHUB_REQUEST_PERMISSIONS = ''
# Add your Foursquare API keys here
FOURSQUARE_CLIENT_ID = ''
FOURSQUARE_CLIENT_SECRET = ''
FOURSQUARE_REQUEST_PERMISSIONS = ''
# Add your tumblr API keys here
TUMBLR_CONSUMER_KEY = ''
TUMBLR_CONSUMER_SECRET_KEY = ''
SOCIALREGISTRATION_USE_HTTPS = False
SOCIALREGISTRATION_GENERATE_USERNAME = False
LOGIN_REDIRECT_URL = '/'
| true | true |
f7fa7fc9654e1fb96950ec82379c7a484155a4d0 | 77 | py | Python | savecode/threeyears/idownclient/scout/plugin/searchengine/bing/__init__.py | Octoberr/swm0920 | 8f05a6b91fc205960edd57f9076facec04f49a1a | [
"Apache-2.0"
] | 2 | 2019-05-19T11:54:26.000Z | 2019-05-19T12:03:49.000Z | savecode/threeyears/idownclient/scout/plugin/searchengine/bing/__init__.py | Octoberr/swm0920 | 8f05a6b91fc205960edd57f9076facec04f49a1a | [
"Apache-2.0"
] | 1 | 2020-11-27T07:55:15.000Z | 2020-11-27T07:55:15.000Z | savecode/threeyears/idownclient/scout/plugin/searchengine/bing/__init__.py | Octoberr/swm0920 | 8f05a6b91fc205960edd57f9076facec04f49a1a | [
"Apache-2.0"
] | 2 | 2021-09-06T18:06:12.000Z | 2021-12-31T07:44:43.000Z | from .bingsearchengine import BingSearchEngine
from .azbing import AzBingApi
| 25.666667 | 46 | 0.87013 | from .bingsearchengine import BingSearchEngine
from .azbing import AzBingApi
| true | true |
f7fa80365ba2f98a6c13f16142393a88055f4f49 | 2,556 | py | Python | runtime/python/Lib/site-packages/astroid/brain/brain_pytest.py | hwaipy/InteractionFreeNode | 88642b68430f57b028fd0f276a5709f89279e30d | [
"MIT"
] | 1 | 2021-12-30T19:05:22.000Z | 2021-12-30T19:05:22.000Z | runtime/python/Lib/site-packages/astroid/brain/brain_pytest.py | hwaipy/InteractionFreeNode | 88642b68430f57b028fd0f276a5709f89279e30d | [
"MIT"
] | 1 | 2021-08-24T05:13:20.000Z | 2021-08-24T05:13:20.000Z | runtime/python/Lib/site-packages/astroid/brain/brain_pytest.py | hwaipy/InteractionFreeNode | 88642b68430f57b028fd0f276a5709f89279e30d | [
"MIT"
] | 3 | 2021-08-28T14:22:36.000Z | 2021-10-06T18:59:41.000Z | # Copyright (c) 2014-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Jeff Quast <contact@jeffquast.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
"""Astroid hooks for pytest."""
from astroid.brain.helpers import register_module_extender
from astroid.builder import AstroidBuilder
from astroid.manager import AstroidManager
def pytest_transform():
    """Build a stub astroid module exposing the names ``pytest`` re-exports.

    The module source below probes several historical ``_pytest`` submodule
    layouts (helpers moved between submodules across pytest releases), so
    whichever pytest version is installed, the matching attributes are bound
    and astroid can infer them on the ``pytest`` / ``py.test`` modules.
    """
    return AstroidBuilder(AstroidManager()).string_build(
        """
try:
    import _pytest.mark
    import _pytest.recwarn
    import _pytest.runner
    import _pytest.python
    import _pytest.skipping
    import _pytest.assertion
except ImportError:
    pass
else:
    deprecated_call = _pytest.recwarn.deprecated_call
    warns = _pytest.recwarn.warns
    exit = _pytest.runner.exit
    fail = _pytest.runner.fail
    skip = _pytest.runner.skip
    importorskip = _pytest.runner.importorskip
    xfail = _pytest.skipping.xfail
    mark = _pytest.mark.MarkGenerator()
    raises = _pytest.python.raises
    # New in pytest 3.0
    try:
        approx = _pytest.python.approx
        register_assert_rewrite = _pytest.assertion.register_assert_rewrite
    except AttributeError:
        pass
    # Moved in pytest 3.0
    try:
        import _pytest.freeze_support
        freeze_includes = _pytest.freeze_support.freeze_includes
    except ImportError:
        try:
            import _pytest.genscript
            freeze_includes = _pytest.genscript.freeze_includes
        except ImportError:
            pass
    try:
        import _pytest.debugging
        set_trace = _pytest.debugging.pytestPDB().set_trace
    except ImportError:
        try:
            import _pytest.pdb
            set_trace = _pytest.pdb.pytestPDB().set_trace
        except ImportError:
            pass
    try:
        import _pytest.fixtures
        fixture = _pytest.fixtures.fixture
        yield_fixture = _pytest.fixtures.yield_fixture
    except ImportError:
        try:
            import _pytest.python
            fixture = _pytest.python.fixture
            yield_fixture = _pytest.python.yield_fixture
        except ImportError:
            pass
"""
    )
# Register the stub for both historical import spellings of pytest.
register_module_extender(AstroidManager(), "pytest", pytest_transform)
register_module_extender(AstroidManager(), "py.test", pytest_transform)
| 28.087912 | 85 | 0.728873 |
from astroid.brain.helpers import register_module_extender
from astroid.builder import AstroidBuilder
from astroid.manager import AstroidManager
def pytest_transform():
return AstroidBuilder(AstroidManager()).string_build(
"""
try:
import _pytest.mark
import _pytest.recwarn
import _pytest.runner
import _pytest.python
import _pytest.skipping
import _pytest.assertion
except ImportError:
pass
else:
deprecated_call = _pytest.recwarn.deprecated_call
warns = _pytest.recwarn.warns
exit = _pytest.runner.exit
fail = _pytest.runner.fail
skip = _pytest.runner.skip
importorskip = _pytest.runner.importorskip
xfail = _pytest.skipping.xfail
mark = _pytest.mark.MarkGenerator()
raises = _pytest.python.raises
# New in pytest 3.0
try:
approx = _pytest.python.approx
register_assert_rewrite = _pytest.assertion.register_assert_rewrite
except AttributeError:
pass
# Moved in pytest 3.0
try:
import _pytest.freeze_support
freeze_includes = _pytest.freeze_support.freeze_includes
except ImportError:
try:
import _pytest.genscript
freeze_includes = _pytest.genscript.freeze_includes
except ImportError:
pass
try:
import _pytest.debugging
set_trace = _pytest.debugging.pytestPDB().set_trace
except ImportError:
try:
import _pytest.pdb
set_trace = _pytest.pdb.pytestPDB().set_trace
except ImportError:
pass
try:
import _pytest.fixtures
fixture = _pytest.fixtures.fixture
yield_fixture = _pytest.fixtures.yield_fixture
except ImportError:
try:
import _pytest.python
fixture = _pytest.python.fixture
yield_fixture = _pytest.python.yield_fixture
except ImportError:
pass
"""
)
register_module_extender(AstroidManager(), "pytest", pytest_transform)
register_module_extender(AstroidManager(), "py.test", pytest_transform)
| true | true |
f7fa80421d318424c9a3043896cb53eafbe9fe51 | 1,449 | py | Python | notebooks/Applications/dambreak2D_adapt/modelmesh.py | burgreen/proteus | 033bbd3fd0ff11d53d8e85b2da1af49e10af9c5d | [
"MIT"
] | null | null | null | notebooks/Applications/dambreak2D_adapt/modelmesh.py | burgreen/proteus | 033bbd3fd0ff11d53d8e85b2da1af49e10af9c5d | [
"MIT"
] | null | null | null | notebooks/Applications/dambreak2D_adapt/modelmesh.py | burgreen/proteus | 033bbd3fd0ff11d53d8e85b2da1af49e10af9c5d | [
"MIT"
] | null | null | null | from proteus import Domain
import proteus.MeshTools
from proteus.MeshAdaptPUMI import MeshAdaptPUMI
nd = 2 #number of dimensions in the problem
parallelPartitioningType = proteus.MeshTools.MeshParallelPartitioningTypes.element #type of partitioning if parallel
nLayersOfOverlapForParallel = 0 #amount of ghosting if parallel
boundaries=['left','right','bottom','top'] #boundary tag dictionary
boundaryTags=dict([(key,i+1) for (i,key) in enumerate(boundaries)])
domain = Domain.PUMIDomain(dim=nd) #initialize the domain
domain.faceList=[[11],[13],[14],[12]] #model entities associated wtih boundary tags
adaptMesh = True #adapt the mesh?
adaptMesh_nSteps = 5 #amount of time steps before checking error?
hMax = 0.08
hMin = 0.00625
adaptMesh_numIter = 2 #number of iterations for mesh adaptation
errorType="ERM" #only just ERM at the moment
logSwitch="off" #on or off
target_error = 10.0
target_element_count = 8000
domain.PUMIMesh=MeshAdaptPUMI.MeshAdaptPUMI(hmax=hMax,
hmin=hMin,
numIter=adaptMesh_numIter,
sfConfig=errorType,
logType=logSwitch,
targetError=target_error,
targetElementCount=target_element_count)
domain.PUMIMesh.loadModelAndMesh("Dambreak.null","Dambreak.msh") | 45.28125 | 116 | 0.656315 | from proteus import Domain
import proteus.MeshTools
from proteus.MeshAdaptPUMI import MeshAdaptPUMI
nd = 2
parallelPartitioningType = proteus.MeshTools.MeshParallelPartitioningTypes.element
nLayersOfOverlapForParallel = 0
boundaries=['left','right','bottom','top']
boundaryTags=dict([(key,i+1) for (i,key) in enumerate(boundaries)])
domain = Domain.PUMIDomain(dim=nd)
domain.faceList=[[11],[13],[14],[12]]
adaptMesh = True
adaptMesh_nSteps = 5
hMax = 0.08
hMin = 0.00625
adaptMesh_numIter = 2
errorType="ERM"
logSwitch="off"
target_error = 10.0
target_element_count = 8000
domain.PUMIMesh=MeshAdaptPUMI.MeshAdaptPUMI(hmax=hMax,
hmin=hMin,
numIter=adaptMesh_numIter,
sfConfig=errorType,
logType=logSwitch,
targetError=target_error,
targetElementCount=target_element_count)
domain.PUMIMesh.loadModelAndMesh("Dambreak.null","Dambreak.msh") | true | true |
f7fa80b2109da284409cf8ee0ac84e9f3572b178 | 14,131 | py | Python | web/tests/functional/report_viewer_api/test_get_run_results.py | ryankurte/codechecker | 737424ee77c181304f242d5a2adef3e6d9369998 | [
"Apache-2.0"
] | null | null | null | web/tests/functional/report_viewer_api/test_get_run_results.py | ryankurte/codechecker | 737424ee77c181304f242d5a2adef3e6d9369998 | [
"Apache-2.0"
] | null | null | null | web/tests/functional/report_viewer_api/test_get_run_results.py | ryankurte/codechecker | 737424ee77c181304f242d5a2adef3e6d9369998 | [
"Apache-2.0"
] | 1 | 2021-01-27T21:45:14.000Z | 2021-01-27T21:45:14.000Z | #
# -------------------------------------------------------------------------
#
# Part of the CodeChecker project, under the Apache License v2.0 with
# LLVM Exceptions. See LICENSE for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
"""
Tests for getting the run results.
"""
import logging
import os
import re
import unittest
import codecs
from codechecker_api.codeCheckerDBAccess_v6.ttypes import Encoding, Order, \
ReportFilter, SortMode, SortType, RunSortMode, RunSortType
from codechecker_web.shared import convert
from libtest.debug_printer import print_run_results
from libtest.thrift_client_to_db import get_all_run_results
from libtest.result_compare import find_all
from libtest import env
class RunResults(unittest.TestCase):
_ccClient = None
    def setUp(self):
        """Connect to the test server and look up the run used by the tests.

        Reads the workspace path from the TEST_WORKSPACE environment
        variable, builds a viewer API client for it and stores the id of
        the first run (ordered by date, ascending) that belongs to this
        test in ``self._runid``.
        """
        test_workspace = os.environ['TEST_WORKSPACE']
        test_class = self.__class__.__name__
        print('Running ' + test_class + ' tests in ' + test_workspace)
        # Get the clang version which is tested.
        self._clang_to_test = env.clang_to_test()
        # Expected-bug configuration of the analyzed test project.
        self._testproject_data = env.setup_test_proj_cfg(test_workspace)
        self.assertIsNotNone(self._testproject_data)
        # Thrift client for the report viewer API.
        self._cc_client = env.setup_viewer_client(test_workspace)
        self.assertIsNotNone(self._cc_client)
        # Get the run names which belong to this test.
        run_names = env.get_run_names(test_workspace)
        sort_mode = RunSortMode(RunSortType.DATE, Order.ASC)
        runs = self._cc_client.getRunData(None, None, 0, sort_mode)
        # Keep only the runs created for this test and use the oldest one.
        test_runs = [run for run in runs if run.name in run_names]
        self._runid = test_runs[0].runId
def __check_bug_path_order(self, run_results, order):
"""
Checks the bug path length order of the run results.
:param run_results: Run results.
:param order: If it is a negative value, it checks that bug path length
of the results order is descending otherwise ascending.
"""
prev = None
for res in run_results:
self.assertGreater(res.bugPathLength, 0)
if not prev:
prev = res
continue
if order == Order.ASC:
self.assertGreaterEqual(res.bugPathLength, prev.bugPathLength)
else:
self.assertLessEqual(res.bugPathLength, prev.bugPathLength)
def test_get_run_results_no_filter(self):
""" Get all the run results without any filtering. """
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client, runid)
print_run_results(run_results)
self.assertIsNotNone(run_results)
self.assertEqual(run_result_count, len(run_results))
def test_get_run_results_checker_id_and_file_path(self):
""" Test if all the bugs are found based
on the test project configuration. """
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client, runid)
self.assertIsNotNone(run_results)
self.assertEqual(run_result_count, len(run_results))
test_project_results = self._testproject_data[
self._clang_to_test]['bugs']
for r in test_project_results:
print(r)
not_found = find_all(run_results, test_project_results)
print_run_results(run_results)
if not_found:
print("===================")
print('Not found bugs:')
for bug in not_found:
print(bug)
print("===================")
self.assertEqual(len(not_found), 0)
    def test_get_source_file_content(self):
        """
        Test getting the source file content stored to the database.
        Tests unicode support: the stored file can be decoded properly
        and the database content matches the original file on disk.
        """
        runid = self._runid
        # Restrict the query to C/C++ source files of this run.
        report_filter = ReportFilter(checkerName=['*'], filepath=['*.c*'])
        run_result_count = self._cc_client.getRunResultCount([runid],
                                                             report_filter,
                                                             None)
        self.assertTrue(run_result_count)
        run_results = get_all_run_results(self._cc_client,
                                          runid,
                                          [],
                                          report_filter)
        self.assertIsNotNone(run_results)
        for run_res in run_results:
            self.assertTrue(re.match(r'.*\.c(pp)?$', run_res.checkedFile))
            print('Getting the content of ' + run_res.checkedFile)
            # Fetch the file content with the default (plain text) encoding.
            file_data = self._cc_client.getSourceFileData(run_res.fileId,
                                                          True,
                                                          None)
            self.assertIsNotNone(file_data)
            file_content1 = file_data.fileContent
            self.assertIsNotNone(file_content1)
            # The stored content must match the file on disk, decoded as
            # UTF-8 with undecodable bytes ignored.
            with codecs.open(run_res.checkedFile, 'r', encoding='utf-8',
                             errors='ignore') as source_file:
                file_content2 = source_file.read()
            self.assertEqual(file_content1, file_content2)
            # The same content requested as BASE64 must decode to the
            # same text.
            file_data_b64 = self._cc_client.getSourceFileData(
                run_res.fileId, True, Encoding.BASE64)
            self.assertIsNotNone(file_data_b64)
            file_content1_b64 = convert.from_b64(file_data_b64.fileContent)
            self.assertIsNotNone(file_content1_b64)
            self.assertEqual(file_content1_b64, file_content2)
        print('got ' + str(len(run_results)) + ' files')
        self.assertEqual(run_result_count, len(run_results))
def test_get_source_file_content_latin1_encoding(self):
""" Test if the source file was saved with latin1 encoding.
Test if the source file can be read back from the
database even if it was not saved with utf-8 encoding.
"""
runid = self._runid
report_filter = ReportFilter(checkerName=['*'],
filepath=['*call_and_message.cpp*'])
run_result_count = self._cc_client.getRunResultCount([runid],
report_filter,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
[],
report_filter)
self.assertIsNotNone(run_results)
self.assertIsNotNone(run_results)
for run_res in run_results:
print('Getting the content of ' + run_res.checkedFile)
file_data = self._cc_client.getSourceFileData(run_res.fileId,
True,
None)
self.assertIsNotNone(file_data)
file_content1 = file_data.fileContent
self.assertIsNotNone(file_content1)
with codecs.open(run_res.checkedFile, 'r', encoding='utf-8',
errors='ignore') as source_file:
file_content2 = source_file.read()
self.assertEqual(file_content1, file_content2)
file_data_b64 = self._cc_client.getSourceFileData(
run_res.fileId, True, Encoding.BASE64)
self.assertIsNotNone(file_data_b64)
file_content1_b64 = convert.from_b64(file_data_b64.fileContent)
self.assertIsNotNone(file_content1_b64)
self.assertEqual(file_content1_b64, file_content2)
print('got ' + str(len(run_results)) + ' files')
self.assertEqual(run_result_count, len(run_results))
def test_get_run_results_severity_sort(self):
""" Get the run results and sort them by severity and filename ASC. """
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
sort_mode1 = SortMode(SortType.SEVERITY, Order.ASC)
sort_mode2 = SortMode(SortType.FILENAME, Order.ASC)
sort_types = [sort_mode1, sort_mode2]
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
sort_types,
None)
self.assertIsNotNone(run_results)
for i in range(run_result_count - 1):
bug1 = run_results[i]
bug2 = run_results[i + 1]
self.assertTrue(bug1.severity <= bug2.severity)
self.assertTrue((bug1.severity != bug2.severity) or
(bug1.checkedFile <= bug2.checkedFile))
print_run_results(run_results)
self.assertEqual(run_result_count, len(run_results))
def test_get_run_results_sorted2(self):
""" Get the run results and sort them by file name and
checker name ASC. """
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
sortMode1 = SortMode(SortType.FILENAME, Order.ASC)
sortMode2 = SortMode(SortType.CHECKER_NAME, Order.ASC)
sort_types = [sortMode1, sortMode2]
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
sort_types,
None)
self.assertIsNotNone(run_results)
print_run_results(run_results)
self.assertEqual(run_result_count, len(run_results))
for i in range(run_result_count - 1):
bug1 = run_results[i]
bug2 = run_results[i + 1]
self.assertTrue(bug1.checkedFile <= bug2.checkedFile)
self.assertTrue((bug1.checkedFile != bug2.checkedFile) or
(bug1.line <=
bug2.line) or
(bug1.checkerId <= bug2.checkerId))
def test_bug_path_length(self):
runid = self._runid
sortMode1 = SortMode(SortType.BUG_PATH_LENGTH, Order.ASC)
sortMode2 = SortMode(SortType.BUG_PATH_LENGTH, Order.DESC)
simple_filter = ReportFilter()
unique_filter = ReportFilter(isUnique=True)
run_results = self._cc_client.getRunResults([runid],
100,
0,
[sortMode1],
simple_filter,
None,
False)
self.__check_bug_path_order(run_results, Order.ASC)
run_results = self._cc_client.getRunResults([runid],
100,
0,
[sortMode2],
unique_filter,
None,
False)
self.__check_bug_path_order(run_results, Order.DESC)
def test_report_details(self):
"""
Get run results and check that report details are correctly set.
"""
runid = self._runid
simple_filter = ReportFilter()
run_results = self._cc_client.getRunResults([runid],
100,
0,
None,
simple_filter,
None,
True)
self.assertTrue(any(res.details for res in run_results))
def test_unqiue_report_details(self):
"""
Get uniqued run results and check that report details are correctly
set.
"""
runid = self._runid
unique_filter = ReportFilter(isUnique=True)
run_results = self._cc_client.getRunResults([runid],
100,
0,
None,
unique_filter,
None,
True)
self.assertTrue(any(res.details for res in run_results))
| 39.805634 | 79 | 0.518647 |
import logging
import os
import re
import unittest
import codecs
from codechecker_api.codeCheckerDBAccess_v6.ttypes import Encoding, Order, \
ReportFilter, SortMode, SortType, RunSortMode, RunSortType
from codechecker_web.shared import convert
from libtest.debug_printer import print_run_results
from libtest.thrift_client_to_db import get_all_run_results
from libtest.result_compare import find_all
from libtest import env
class RunResults(unittest.TestCase):
_ccClient = None
def setUp(self):
test_workspace = os.environ['TEST_WORKSPACE']
test_class = self.__class__.__name__
print('Running ' + test_class + ' tests in ' + test_workspace)
self._clang_to_test = env.clang_to_test()
self._testproject_data = env.setup_test_proj_cfg(test_workspace)
self.assertIsNotNone(self._testproject_data)
self._cc_client = env.setup_viewer_client(test_workspace)
self.assertIsNotNone(self._cc_client)
run_names = env.get_run_names(test_workspace)
sort_mode = RunSortMode(RunSortType.DATE, Order.ASC)
runs = self._cc_client.getRunData(None, None, 0, sort_mode)
test_runs = [run for run in runs if run.name in run_names]
self._runid = test_runs[0].runId
def __check_bug_path_order(self, run_results, order):
prev = None
for res in run_results:
self.assertGreater(res.bugPathLength, 0)
if not prev:
prev = res
continue
if order == Order.ASC:
self.assertGreaterEqual(res.bugPathLength, prev.bugPathLength)
else:
self.assertLessEqual(res.bugPathLength, prev.bugPathLength)
def test_get_run_results_no_filter(self):
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client, runid)
print_run_results(run_results)
self.assertIsNotNone(run_results)
self.assertEqual(run_result_count, len(run_results))
def test_get_run_results_checker_id_and_file_path(self):
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client, runid)
self.assertIsNotNone(run_results)
self.assertEqual(run_result_count, len(run_results))
test_project_results = self._testproject_data[
self._clang_to_test]['bugs']
for r in test_project_results:
print(r)
not_found = find_all(run_results, test_project_results)
print_run_results(run_results)
if not_found:
print("===================")
print('Not found bugs:')
for bug in not_found:
print(bug)
print("===================")
self.assertEqual(len(not_found), 0)
def test_get_source_file_content(self):
runid = self._runid
report_filter = ReportFilter(checkerName=['*'], filepath=['*.c*'])
run_result_count = self._cc_client.getRunResultCount([runid],
report_filter,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
[],
report_filter)
self.assertIsNotNone(run_results)
for run_res in run_results:
self.assertTrue(re.match(r'.*\.c(pp)?$', run_res.checkedFile))
print('Getting the content of ' + run_res.checkedFile)
file_data = self._cc_client.getSourceFileData(run_res.fileId,
True,
None)
self.assertIsNotNone(file_data)
file_content1 = file_data.fileContent
self.assertIsNotNone(file_content1)
with codecs.open(run_res.checkedFile, 'r', encoding='utf-8',
errors='ignore') as source_file:
file_content2 = source_file.read()
self.assertEqual(file_content1, file_content2)
file_data_b64 = self._cc_client.getSourceFileData(
run_res.fileId, True, Encoding.BASE64)
self.assertIsNotNone(file_data_b64)
file_content1_b64 = convert.from_b64(file_data_b64.fileContent)
self.assertIsNotNone(file_content1_b64)
self.assertEqual(file_content1_b64, file_content2)
print('got ' + str(len(run_results)) + ' files')
self.assertEqual(run_result_count, len(run_results))
def test_get_source_file_content_latin1_encoding(self):
runid = self._runid
report_filter = ReportFilter(checkerName=['*'],
filepath=['*call_and_message.cpp*'])
run_result_count = self._cc_client.getRunResultCount([runid],
report_filter,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
[],
report_filter)
self.assertIsNotNone(run_results)
self.assertIsNotNone(run_results)
for run_res in run_results:
print('Getting the content of ' + run_res.checkedFile)
file_data = self._cc_client.getSourceFileData(run_res.fileId,
True,
None)
self.assertIsNotNone(file_data)
file_content1 = file_data.fileContent
self.assertIsNotNone(file_content1)
with codecs.open(run_res.checkedFile, 'r', encoding='utf-8',
errors='ignore') as source_file:
file_content2 = source_file.read()
self.assertEqual(file_content1, file_content2)
file_data_b64 = self._cc_client.getSourceFileData(
run_res.fileId, True, Encoding.BASE64)
self.assertIsNotNone(file_data_b64)
file_content1_b64 = convert.from_b64(file_data_b64.fileContent)
self.assertIsNotNone(file_content1_b64)
self.assertEqual(file_content1_b64, file_content2)
print('got ' + str(len(run_results)) + ' files')
self.assertEqual(run_result_count, len(run_results))
def test_get_run_results_severity_sort(self):
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
sort_mode1 = SortMode(SortType.SEVERITY, Order.ASC)
sort_mode2 = SortMode(SortType.FILENAME, Order.ASC)
sort_types = [sort_mode1, sort_mode2]
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
sort_types,
None)
self.assertIsNotNone(run_results)
for i in range(run_result_count - 1):
bug1 = run_results[i]
bug2 = run_results[i + 1]
self.assertTrue(bug1.severity <= bug2.severity)
self.assertTrue((bug1.severity != bug2.severity) or
(bug1.checkedFile <= bug2.checkedFile))
print_run_results(run_results)
self.assertEqual(run_result_count, len(run_results))
def test_get_run_results_sorted2(self):
runid = self._runid
logging.debug('Get all run results from the db for runid: ' +
str(runid))
sortMode1 = SortMode(SortType.FILENAME, Order.ASC)
sortMode2 = SortMode(SortType.CHECKER_NAME, Order.ASC)
sort_types = [sortMode1, sortMode2]
run_result_count = self._cc_client.getRunResultCount([runid],
None,
None)
self.assertTrue(run_result_count)
run_results = get_all_run_results(self._cc_client,
runid,
sort_types,
None)
self.assertIsNotNone(run_results)
print_run_results(run_results)
self.assertEqual(run_result_count, len(run_results))
for i in range(run_result_count - 1):
bug1 = run_results[i]
bug2 = run_results[i + 1]
self.assertTrue(bug1.checkedFile <= bug2.checkedFile)
self.assertTrue((bug1.checkedFile != bug2.checkedFile) or
(bug1.line <=
bug2.line) or
(bug1.checkerId <= bug2.checkerId))
def test_bug_path_length(self):
runid = self._runid
sortMode1 = SortMode(SortType.BUG_PATH_LENGTH, Order.ASC)
sortMode2 = SortMode(SortType.BUG_PATH_LENGTH, Order.DESC)
simple_filter = ReportFilter()
unique_filter = ReportFilter(isUnique=True)
run_results = self._cc_client.getRunResults([runid],
100,
0,
[sortMode1],
simple_filter,
None,
False)
self.__check_bug_path_order(run_results, Order.ASC)
run_results = self._cc_client.getRunResults([runid],
100,
0,
[sortMode2],
unique_filter,
None,
False)
self.__check_bug_path_order(run_results, Order.DESC)
def test_report_details(self):
runid = self._runid
simple_filter = ReportFilter()
run_results = self._cc_client.getRunResults([runid],
100,
0,
None,
simple_filter,
None,
True)
self.assertTrue(any(res.details for res in run_results))
def test_unqiue_report_details(self):
runid = self._runid
unique_filter = ReportFilter(isUnique=True)
run_results = self._cc_client.getRunResults([runid],
100,
0,
None,
unique_filter,
None,
True)
self.assertTrue(any(res.details for res in run_results))
| true | true |
f7fa80f25c16b9a5c3a257d06722d7dd14a9ab4c | 977 | py | Python | drf_jwt_authentication/urls.py | Somsubhra1/Django-Rest-Framework-JWT-Authentication-simplejwt | b8e0097b27f241afde392c9a7e07547c2dacb9ab | [
"MIT"
] | 3 | 2021-06-25T17:37:09.000Z | 2022-01-14T21:19:05.000Z | drf_jwt_authentication/urls.py | Somsubhra1/Django-Rest-Framework-JWT-Authentication-simplejwt | b8e0097b27f241afde392c9a7e07547c2dacb9ab | [
"MIT"
] | null | null | null | drf_jwt_authentication/urls.py | Somsubhra1/Django-Rest-Framework-JWT-Authentication-simplejwt | b8e0097b27f241afde392c9a7e07547c2dacb9ab | [
"MIT"
] | null | null | null | """drf_jwt_authentication URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path("api/auth/", include('accounts.urls'))
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| 36.185185 | 78 | 0.730809 | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path("api/auth/", include('accounts.urls'))
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| true | true |
f7fa82914edb59417d74e5642644ea47ef481e01 | 903 | py | Python | sentinelone/komand_sentinelone/actions/create_ioc_threat/action.py | emartin-merrill-r7/insightconnect-plugins | a589745dbcc9f01d3e601431e77ab7221a84c117 | [
"MIT"
] | 1 | 2020-03-18T09:14:55.000Z | 2020-03-18T09:14:55.000Z | sentinelone/komand_sentinelone/actions/create_ioc_threat/action.py | OSSSP/insightconnect-plugins | 846758dab745170cf1a8c146211a8bea9592e8ff | [
"MIT"
] | 1 | 2021-02-23T23:57:37.000Z | 2021-02-23T23:57:37.000Z | sentinelone/komand_sentinelone/actions/create_ioc_threat/action.py | OSSSP/insightconnect-plugins | 846758dab745170cf1a8c146211a8bea9592e8ff | [
"MIT"
] | null | null | null | import komand
from .schema import CreateIocThreatInput, CreateIocThreatOutput, Input, Output
# Custom imports below
class CreateIocThreat(komand.Action):
def __init__(self):
super(self.__class__, self).__init__(
name='create_ioc_threat',
description='Create an IOC threat',
input=CreateIocThreatInput(),
output=CreateIocThreatOutput())
def run(self, params={}):
hash_ = params.get(Input.HASH)
group_id = params.get(Input.GROUP_ID)
path = params.get(Input.PATH)
agent_id = params.get(Input.AGENT_ID)
annotation = params.get(Input.ANNOTATION)
annotation_url = params.get(Input.ANNOTATION_URL)
affected = self.connection.create_ioc_threat(
hash_, group_id, path, agent_id, annotation, annotation_url
)
return {Output.AFFECTED: affected}
| 33.444444 | 78 | 0.65227 | import komand
from .schema import CreateIocThreatInput, CreateIocThreatOutput, Input, Output
class CreateIocThreat(komand.Action):
def __init__(self):
super(self.__class__, self).__init__(
name='create_ioc_threat',
description='Create an IOC threat',
input=CreateIocThreatInput(),
output=CreateIocThreatOutput())
def run(self, params={}):
hash_ = params.get(Input.HASH)
group_id = params.get(Input.GROUP_ID)
path = params.get(Input.PATH)
agent_id = params.get(Input.AGENT_ID)
annotation = params.get(Input.ANNOTATION)
annotation_url = params.get(Input.ANNOTATION_URL)
affected = self.connection.create_ioc_threat(
hash_, group_id, path, agent_id, annotation, annotation_url
)
return {Output.AFFECTED: affected}
| true | true |
f7fa83eeb6a81a9cecee9a964ef025cb0bb94f32 | 16,235 | py | Python | app/raw/docs/question.py | hupili/legco-watch | 06253a65d299564827972a38dc7d7dee5c1f6120 | [
"MIT"
] | null | null | null | app/raw/docs/question.py | hupili/legco-watch | 06253a65d299564827972a38dc7d7dee5c1f6120 | [
"MIT"
] | null | null | null | app/raw/docs/question.py | hupili/legco-watch | 06253a65d299564827972a38dc7d7dee5c1f6120 | [
"MIT"
] | 1 | 2020-05-24T17:08:57.000Z | 2020-05-24T17:08:57.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Document wrappers for LegCo questions (and replies)
"""
import logging
import lxml
from lxml import etree
import lxml.html
from lxml.html.clean import clean_html, Cleaner
import re
from lxml.html import HTMLParser
import urllib2
from urllib2 import HTTPError
from ..scraper.settings import USER_AGENT
logger = logging.getLogger('legcowatch-docs')
class CouncilQuestion(object):
"""
Object representing the Council Question document (actually the reply as well).
Some can be used to check against RawCouncilQuestion object
This class parses the document source, and has potential to give out below elements:
Urgent or not
Question_number
Subject
Date_of_meeting
Time_of_reply
Asker
Replier(s)
Question_content
Reply_content
"""
def __init__(self, uid, date, urgent, oral, src, subject, link,*args, **kwargs):
logger.debug(u'** Parsing question {}'.format(uid))
self.uid = uid
if uid[-1] == 'e':
self.english = True
else:
self.english = False
self.src =src
self.date = date
self.urgent = urgent
self.oral = oral
self.subject = subject ### this is the title used by asker
self.link = link
self.tree = None
self.tree_content = None
self.question_title = None ### although so named, this is actually the title from replier(s)
#self.question_number =None
self.question_content = None
self.asker = None
self.reply_content = None
self.repliers = None
self._load()
self._parse()
def __repr__(self):
return u'<CouncilQuestion: {}>'.format(self.uid)
def _load(self):
"""
Load the ElementTree from the source
"""
htm = self.src
## seems that the page used 'hkscs'(香港增補字符集) as charset
"""
#TESTING
# Use these code to get and test a source from Legco website directly
try:
req = urllib2.Request(self.link, headers={ 'User-Agent': USER_AGENT })
html = urllib2.urlopen(req).read()
except HTTPError:
logger.warn('Cannot get open reply link for question {]'.format(self.uid))
#get encoding
if html:
content_type_re = ur'(?s).*content-type" content="(?P<content>.*?)"'
content_type = re.match(content_type_re,html).group('content')
charset = content_type.split('=')[1]
print('Encoding detected: {}'.format(charset))
else:
logger.warn('Cannot get charset from reply link. Guess instead.')
charset = 'utf-8' if self.english else 'BIG5' # sometimes not working for Chinese
htm=html
#ENDTESTING
"""
# Use the lxml cleaner
if htm:
#Get rid of undesired characters here
#use a list of dict to replace them differently (optional)
list_undesired = [{'\x83\xdc':''},{'\x84P':''},{'\x84h':''},] #occasionally a character '财','绊' etc. is placed between newlines
#replace different types of colons
#list_undesired.append({'\xef\xbc\x9a':':'})
#list_undesired.append({'\xef\xb8\xb0':':'})
#list_undesired.append({'\xef\xb9\x95':':'})
#list_undesired.append({'\xa0':' '})
for item in list_undesired:
htm=htm.replace(item.keys()[0],item.values()[0])
# Assume 香港增補字符集(big5hkscs) is used
htm = htm.decode('hkscs',errors='ignore')
cleaner = Cleaner()
parser = HTMLParser(encoding='utf-8')
# Finally, load the cleaned string to an ElementTree
self.tree = cleaner.clean_html(lxml.html.fromstring(htm, parser=parser))
self.src = htm
#print('HTML source prepared.')
else:
self.tree = None
def _parse(self):
#only the 'pressrelease' part is needed
try:
main_tree = self.tree.xpath('id("pressrelease")')[0]
except IndexError:
logger.warn(u'HTML of question {} does not have a "pressrelease" field'.format(self.uid))
# break the main tree into 2 parts: title and main body
#1. title string, no newlines
#e.g. 'LCQ17: Babies born in Hong Kong to mainland women'
#title_str = main_tree.text.strip() #also functional
#more robust than the above one
#title_str here may be different from 'subject' field in model.
#Maybe this is the title used by repliers? Shall we include this in reply section?
title_str =self.tree.text
#print(self.tree.text.encode('utf-8'))
#2. main body, including question header, question content and reply
#e.g. 'Following is a question by the Hon Yeung Sum and a written reply...'
main_body = main_tree.xpath('p[1]')[0]
main_body_str = main_body.text_content() # do not strip, keep the format
#print('Main Body String:{}'.format(main_body_str.encode('utf-8')))
#=========================================================================#
#TODO: there are a lot of different encodings of colons, e.g.
#(:,:,︰,﹕) - with unicodes \u3A, \uFF1A, \uFE30, \ufe55 respectively (may be more)
#and can have with whitespace ahead of them (no need to care for behind)
#sometimes the colon is missing...
#Find a way to handle them concisely!
#e.g. convert all alias to one kind, ':'
#Currently, ur'\s?:' is used to deal with them - but be cautious!
#=========================================================================#
# Parse the title
#title_re_e = ur'LC(.*)?Q(?P<number>\d*)?:\s*(?P<subject>.+)' #e.g. 'LCQ17: Babies born in Hong Kong to mainland women'
#title_re_c = ur'立法會(急切質詢)?(?P<number>\S*題)?:\s*(?P<subject>.+)' #e.g. '立法會五題:旅發局浮薪程式'
# Simpler, no question number
#note that the complete title is available in html header
title_re_e = ur'(?s).+\s*(:|:|︰|﹕)\s*(?P<subject>.+)'
title_re_c = ur'(?s).+\s*(:|:|︰|﹕)\s*(?P<subject>.+)'
#notice the difference of colon (half- and full-width) in the above regex
#print(u'Title str: {}'.format(title_str))
match_pattern = title_re_e if self.english else title_re_c
match_title = re.match(match_pattern, title_str)
if match_title:
self.question_title = match_title.group('subject')
#print(u'Title: {}'.format(self.question_title))
## We choose not to deal with numbers here, since they are better handled by scraper
#if match_title['number']:
# self.question_number = match_title.group('number')
#else:
# self.question_number = '0'
else:
logger.warn('Cannot match title for question {}'.format(self.uid))
# Parse the main body - 3 parts
#1. header of question, including date, asker and replier(s)
header_re_e = ur'(?P<header>.+)Question(s?)\s?(:|:|︰|﹕)'
header_re_c = ur'(?P<header>.+)問題\s?(:|:|︰|﹕)'
# sometimes the phrase "問題:" is absent. Match up to the 1st colon instead.
# Below should be the most general case. Too general that I prefer not to use.
header_re_colon = ur'((?P<header>[^(:|:|︰|﹕)]*))'
match_pattern = header_re_e if self.english else header_re_c
match_header = re.match(match_pattern, main_body_str.strip()) #strip here make it easier - get rid of newline
if match_header is None:
match_pattern = header_re_colon
match_header = re.match(match_pattern, main_body_str.strip())
header_str = None
if match_header:
header_str = match_header.group('header')
#print(u'header str:{}'.format(header_str))
else:
logger.warn('Cannot match header for question {}'.format(self.uid))
#print('Error!')
#retrieve asker and replier(s) from header
if header_str: #no need to try if no header
#more complicated, need to pick regex based on urgent or not
#there are two formats, need to separately match the 2 cases
asker_re_e=[]
asker_re_c=[]
if self.urgent:
asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?) (\son.*?)?under (.*) (reply|answer)(\son.+)? by (?P<repliers>.+)(in|at) the Legislative Council')
asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to a question by (?P<asker>.*?)(\son.*?)? under (.*) (in|at) the Legislative Council')
asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to (a|an)(\s.*)? question by (?P<asker>.*?)(\son.*?)? under (.*) (in|at) the Legislative Council')
#sometimes a normal pattern is used for urgent question
asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?)(\son.*?)?(and)? (a|an)(.*?) (reply|answer)(\son.+)? by (?P<repliers>.+)(in|at) the Legislative Council')
asker_re_c.append(ur'(?s)(.*)以下(為|是)今日((?P<date>.+))在?立法會會議上(?P<asker>.+)根據(.*)質詢(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<askers>.+)今日((?P<date>.+))在立法會會議上根據(.*)(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)立法會(會議)?上(?P<asker>.+)(就.*?)?的提問(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會會議上就(?P<asker>.+)提出的急切質詢所作的(答|回)覆')
else:
asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?)(\son.*?)?(and)? (a|an)(.*?) (reply|answer)(\son.+)? by (?P<repliers>.+)(in|at) the Legislative Council')
asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to a question (raised )?by (?P<asker>.*?)(\son.*?)?(in|at) the Legislative Council')
asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?)(\son.*?)?(and)? (a|an)(.*?) (reply|answer)(\son.+)? by (?P<repliers>.+), today')
asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to a question(\son.*?)?(raised )?by (?P<asker>.*?)(in|at) the Legislative Council')
asker_re_c.append(ur'(?s)(.*)立法會(會議)?上?(?P<asker>.+)(就.*?)?的提問(和|及)(?P<repliers>.+)的(.{0,5})(答|回)覆')
asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會(會議)?上,?就(?P<asker>.+)的?提問(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)立法會(會議)?上,?(?P<repliers>.+)就(?P<asker>.+)的提問(所作)?的(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會會議上回應(?P<asker>.+)有關(.*?)提問(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)立法會(會議)?上(?P<asker>.+)的提問(,)?(和|及)(?P<repliers>.+)(就.*)的(.*?)(答|回)覆')#quite rare
asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會(會議)?(上|上,)?就(?P<asker>.+)有關(.*)提問(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))就(?P<asker>.+)有關(.*)提問(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)立法會會議上(?P<asker>.+)(就.*?)?的提問((.*))?(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
asker_re_c.append(ur'(?s)(.*)立法會(會議)?上?(?P<asker>.+)(就.*?)?的提問(和|及)(?P<repliers>.+)書面(答|回)覆')
header_str = header_str.replace('urder','under')
header_str = header_str.replace('rely','reply') #ask Legco to fix this, since 'rely' is a legal word
header_str = header_str.replace('council','Council')
header_str = header_str.replace('Legilsative','Legislative')
header_str = header_str.replace('Legisative','Legislative')
header_str = header_str.replace('Counil','Council')
header_str = header_str.replace(u'立法會會會',u'立法會會議')
header_str = header_str.replace(u'立會會議',u'立法會會議')
header_str = header_str.replace(u'立法會議',u'立法會會議')
#Very weird string in some cases:\xa0\xa0
header_str = header_str.replace(u'\xa0\xa0',' ')
match_patterns = asker_re_e if self.english else asker_re_c
for pattern in match_patterns:
match_asker = re.match(pattern, header_str)
if match_asker:
self.asker = match_asker.group('asker')
self.repliers = match_asker.group('repliers')
# Get rid of tailing comma/whitespace+comma/'的'
self.repliers = self.repliers.strip()
if self.repliers[-1]==',' or self.repliers[-1]==u'的':
self.repliers = self.repliers[:-1]
break
#postprocessing
if self.asker:
self.asker = self.asker.replace(u',', u'') #heading comma in a Chinese case
# get rid of titles, make it easier for Name Matcher
self.asker = self.asker.replace(u'the', u'')
self.asker = self.asker.replace(u'Hon', u'')
self.asker = self.asker.replace(u'Dr', u'')
#2. content of question
body = self.src.strip()
#print('body str: {}'.format(body.encode('utf-8')))
#body = main_body_str #main_body_str messes up with format structure. Match the src/htm instead.
q_content_re_e =[]
q_content_re_c =[]
q_content_re_e.append(ur'(?s).*Question(s?)\s?(:|:|︰|﹕)?(?P<q_content>(?s).*)(Reply|Answer)\s?(:|:|︰|﹕)?')
q_content_re_e.append(ur'(?s).*(:|:|︰|﹕)(?P<q_content>(?s).*)(Reply|Answer)\s?(:|:|︰|﹕)?')
q_content_re_e.append(ur'(?s).*Question(s?)\s?(:|:|︰|﹕)?(?P<q_content>(?s).*)(Madam)?(President|president)\s?(:|:|︰|﹕|,)?')
q_content_re_c.append(ur'(?s).*問題\s?(:|:|︰|﹕)(?P<q_content>(?s).*)(答|回)覆\s?(:|:|︰|﹕)')
q_content_re_c.append(ur'(?s).*(答|回)覆\s?(:|:|︰|﹕)?(?P<q_content>(?s).*)(答|回)覆\s?(:|:|︰|﹕)')
q_content_re_c.append(ur'(?s).*問題\s?(:|:|︰|﹕)(?P<q_content>(?s).*)(主席|主席女士)\s?(:|:|︰|﹕)')
q_content_re_c.append(ur'(?s).*問題(?P<q_content>(?s).*)(答|回)覆')#1 case only
q_content_re_c.append(ur'(?s).*(答|回)覆:(?P<q_content>(?s).*)主席女士')#1 case only
match_patterns = q_content_re_e if self.english else q_content_re_c
for pattern in match_patterns:
match_q_content = re.match(pattern, body)
if match_q_content:
self.question_content = match_q_content.group('q_content')
break
if match_q_content:
if self.question_content[-6:]=='Madam ':
self.question_content=self.question_content[:-6]
#if self.question_content[-11:-13]=='答覆':
# self.question_content=self.question_content[:-12]
else:
logger.warn('Cannot match question content for question {}'.format(self.uid))
#3. reply to question
reply_content_re_e = []
reply_content_re_c = []
reply_content_re_e.append(ur'(?s).*(President|Madam president)\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)Ends')
reply_content_re_e.append(ur'(?s).*(Reply|Answer)\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)Ends')
reply_content_re_c.append(ur'(?s).*(主席|主席女士)\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)完')
reply_content_re_c.append(ur'(?s).*(答|回)覆\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)完')#sometimes '主席|主席女士' was omitted
match_patterns = reply_content_re_e if self.english else reply_content_re_c
for pattern in match_patterns:
match_reply = re.match(pattern, body)
if match_reply:
self.reply_content = match_reply.group('reply_content')
break
#no needed postprocessing found yet
if match_reply is None:
logger.warn('Cannot match reply content for question {}'.format(self.uid)) | 52.370968 | 184 | 0.555528 |
"""
Document wrappers for LegCo questions (and replies)
"""
import logging
import lxml
from lxml import etree
import lxml.html
from lxml.html.clean import clean_html, Cleaner
import re
from lxml.html import HTMLParser
import urllib2
from urllib2 import HTTPError
from ..scraper.settings import USER_AGENT
logger = logging.getLogger('legcowatch-docs')
class CouncilQuestion(object):
"""
Object representing the Council Question document (actually the reply as well).
Some can be used to check against RawCouncilQuestion object
This class parses the document source, and has potential to give out below elements:
Urgent or not
Question_number
Subject
Date_of_meeting
Time_of_reply
Asker
Replier(s)
Question_content
Reply_content
"""
def __init__(self, uid, date, urgent, oral, src, subject, link,*args, **kwargs):
logger.debug(u'** Parsing question {}'.format(uid))
self.uid = uid
if uid[-1] == 'e':
self.english = True
else:
self.english = False
self.src =src
self.date = date
self.urgent = urgent
self.oral = oral
self.subject = subject self.tree_content = None
self.question_title = None s = None
self._load()
self._parse()
def __repr__(self):
return u'<CouncilQuestion: {}>'.format(self.uid)
def _load(self):
"""
Load the ElementTree from the source
"""
htm = self.src
to get and test a source from Legco website directly
try:
req = urllib2.Request(self.link, headers={ 'User-Agent': USER_AGENT })
html = urllib2.urlopen(req).read()
except HTTPError:
logger.warn('Cannot get open reply link for question {]'.format(self.uid))
#get encoding
if html:
content_type_re = ur'(?s).*content-type" content="(?P<content>.*?)"'
content_type = re.match(content_type_re,html).group('content')
charset = content_type.split('=')[1]
print('Encoding detected: {}'.format(charset))
else:
logger.warn('Cannot get charset from reply link. Guess instead.')
charset = 'utf-8' if self.english else 'BIG5' # sometimes not working for Chinese
htm=html
#ENDTESTING
"""
# Use the lxml cleaner
if htm:
#Get rid of undesired characters here
#use a list of dict to replace them differently (optional)
list_undesired = [{'\x83\xdc':''},{'\x84P':''},{'\x84h':''},] #occasionally a character '财','绊' etc. is placed between newlines
#replace different types of colons
#list_undesired.append({'\xef\xbc\x9a':':'})
#list_undesired.append({'\xef\xb8\xb0':':'})
#list_undesired.append({'\xef\xb9\x95':':'})
#list_undesired.append({'\xa0':' '})
for item in list_undesired:
htm=htm.replace(item.keys()[0],item.values()[0])
# Assume 香港增補字符集(big5hkscs) is used
htm = htm.decode('hkscs',errors='ignore')
cleaner = Cleaner()
parser = HTMLParser(encoding='utf-8')
# Finally, load the cleaned string to an ElementTree
self.tree = cleaner.clean_html(lxml.html.fromstring(htm, parser=parser))
self.src = htm
#print('HTML source prepared.')
else:
self.tree = None
    def _parse(self):
        """Split the cleaned HTML tree into title, question content and reply.

        Populates ``self.question_title``, ``self.asker``, ``self.repliers``,
        ``self.question_content`` and ``self.reply_content`` by matching a
        battery of English/Chinese regular expressions against the page text.
        Match failures are logged as warnings rather than raised.
        """
        #only the 'pressrelease' part is needed
        try:
            main_tree = self.tree.xpath('id("pressrelease")')[0]
        except IndexError:
            logger.warn(u'HTML of question {} does not have a "pressrelease" field'.format(self.uid))
        # break the main tree into 2 parts: title and main body
        #1. title string, no newlines
        #e.g. 'LCQ17: Babies born in Hong Kong to mainland women'
        #title_str = main_tree.text.strip() #also functional
        #more robust than the above one
        #title_str here may be different from 'subject' field in model.
        #Maybe this is the title used by repliers? Shall we include this in reply section?
        title_str =self.tree.text
        #print(self.tree.text.encode('utf-8'))
        #2. main body, including question header, question content and reply
        #e.g. 'Following is a question by the Hon Yeung Sum and a written reply...'
        main_body = main_tree.xpath('p[1]')[0]
        main_body_str = main_body.text_content() # do not strip, keep the format
        #print('Main Body String:{}'.format(main_body_str.encode('utf-8')))
        #=========================================================================#
        #TODO: there are a lot of different encodings of colons, e.g.
        #(:,:,︰,﹕) - with unicodes \u3A, \uFF1A, \uFE30, \ufe55 respectively (may be more)
        #and can have with whitespace ahead of them (no need to care for behind)
        #sometimes the colon is missing...
        #Find a way to handle them concisely!
        #e.g. convert all alias to one kind, ':'
        #Currently, ur'\s?:' is used to deal with them - but be cautious!
        #=========================================================================#
        # Parse the title
        #title_re_e = ur'LC(.*)?Q(?P<number>\d*)?:\s*(?P<subject>.+)' #e.g. 'LCQ17: Babies born in Hong Kong to mainland women'
        #title_re_c = ur'立法會(急切質詢)?(?P<number>\S*題)?:\s*(?P<subject>.+)' #e.g. '立法會五題:旅發局浮薪程式'
        # Simpler, no question number
        #note that the complete title is available in html header
        title_re_e = ur'(?s).+\s*(:|:|︰|﹕)\s*(?P<subject>.+)'
        title_re_c = ur'(?s).+\s*(:|:|︰|﹕)\s*(?P<subject>.+)'
        #notice the difference of colon (half- and full-width) in the above regex
        #print(u'Title str: {}'.format(title_str))
        match_pattern = title_re_e if self.english else title_re_c
        match_title = re.match(match_pattern, title_str)
        if match_title:
            self.question_title = match_title.group('subject')
            #print(u'Title: {}'.format(self.question_title))
            ## We choose not to deal with numbers here, since they are better handled by scraper
            #if match_title['number']:
            #    self.question_number = match_title.group('number')
            #else:
            #    self.question_number = '0'
        else:
            logger.warn('Cannot match title for question {}'.format(self.uid))
        # Parse the main body - 3 parts
        #1. header of question, including date, asker and replier(s)
        header_re_e = ur'(?P<header>.+)Question(s?)\s?(:|:|︰|﹕)'
        header_re_c = ur'(?P<header>.+)問題\s?(:|:|︰|﹕)'
        # sometimes the phrase "問題:" is absent. Match up to the 1st colon instead.
        # Below should be the most general case. Too general that I prefer not to use.
        header_re_colon = ur'((?P<header>[^(:|:|︰|﹕)]*))'
        match_pattern = header_re_e if self.english else header_re_c
        match_header = re.match(match_pattern, main_body_str.strip()) #strip here make it easier - get rid of newline
        # Fall back to the colon-only pattern when the language-specific one fails.
        if match_header is None:
            match_pattern = header_re_colon
            match_header = re.match(match_pattern, main_body_str.strip())
        header_str = None
        if match_header:
            header_str = match_header.group('header')
            #print(u'header str:{}'.format(header_str))
        else:
            logger.warn('Cannot match header for question {}'.format(self.uid))
            #print('Error!')
        #retrieve asker and replier(s) from header
        if header_str: #no need to try if no header
            #more complicated, need to pick regex based on urgent or not
            #there are two formats, need to separately match the 2 cases
            asker_re_e=[]
            asker_re_c=[]
            if self.urgent:
                asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?) (\son.*?)?under (.*) (reply|answer)(\son.+)? by (?P<repliers>.+)(in|at) the Legislative Council')
                asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to a question by (?P<asker>.*?)(\son.*?)? under (.*) (in|at) the Legislative Council')
                asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to (a|an)(\s.*)? question by (?P<asker>.*?)(\son.*?)? under (.*) (in|at) the Legislative Council')
                #sometimes a normal pattern is used for urgent question
                asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?)(\son.*?)?(and)? (a|an)(.*?) (reply|answer)(\son.+)? by (?P<repliers>.+)(in|at) the Legislative Council')
                asker_re_c.append(ur'(?s)(.*)以下(為|是)今日((?P<date>.+))在?立法會會議上(?P<asker>.+)根據(.*)質詢(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
                # NOTE(review): the next pattern names the group 'askers' (with an s);
                # if it is the one that matches, group('asker') below will fail — confirm.
                asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<askers>.+)今日((?P<date>.+))在立法會會議上根據(.*)(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)立法會(會議)?上(?P<asker>.+)(就.*?)?的提問(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會會議上就(?P<asker>.+)提出的急切質詢所作的(答|回)覆')
            else:
                asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?)(\son.*?)?(and)? (a|an)(.*?) (reply|answer)(\son.+)? by (?P<repliers>.+)(in|at) the Legislative Council')
                asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to a question (raised )?by (?P<asker>.*?)(\son.*?)?(in|at) the Legislative Council')
                asker_re_e.append(ur'(?s)(.*) by (?P<asker>.*?)(\son.*?)?(and)? (a|an)(.*?) (reply|answer)(\son.+)? by (?P<repliers>.+), today')
                asker_re_e.append(ur'(?s)(.*) (reply|answer) (by|of) (?P<repliers>.+) to a question(\son.*?)?(raised )?by (?P<asker>.*?)(in|at) the Legislative Council')
                asker_re_c.append(ur'(?s)(.*)立法會(會議)?上?(?P<asker>.+)(就.*?)?的提問(和|及)(?P<repliers>.+)的(.{0,5})(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會(會議)?上,?就(?P<asker>.+)的?提問(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)立法會(會議)?上,?(?P<repliers>.+)就(?P<asker>.+)的提問(所作)?的(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會會議上回應(?P<asker>.+)有關(.*?)提問(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)立法會(會議)?上(?P<asker>.+)的提問(,)?(和|及)(?P<repliers>.+)(就.*)的(.*?)(答|回)覆')#quite rare
                asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))在立法會(會議)?(上|上,)?就(?P<asker>.+)有關(.*)提問(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)以下(為|是)(?P<repliers>.+)今日((?P<date>.+))就(?P<asker>.+)有關(.*)提問(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)立法會會議上(?P<asker>.+)(就.*?)?的提問((.*))?(和|及)(?P<repliers>.+)的(.*?)(答|回)覆')
                asker_re_c.append(ur'(?s)(.*)立法會(會議)?上?(?P<asker>.+)(就.*?)?的提問(和|及)(?P<repliers>.+)書面(答|回)覆')
            # Normalise known typos/variants in the source pages before matching.
            header_str = header_str.replace('urder','under')
            header_str = header_str.replace('rely','reply') #ask Legco to fix this, since 'rely' is a legal word
            header_str = header_str.replace('council','Council')
            header_str = header_str.replace('Legilsative','Legislative')
            header_str = header_str.replace('Legisative','Legislative')
            header_str = header_str.replace('Counil','Council')
            header_str = header_str.replace(u'立法會會會',u'立法會會議')
            header_str = header_str.replace(u'立會會議',u'立法會會議')
            header_str = header_str.replace(u'立法會議',u'立法會會議')
            #Very weird string in some cases:\xa0\xa0
            header_str = header_str.replace(u'\xa0\xa0',' ')
            match_patterns = asker_re_e if self.english else asker_re_c
            # First pattern that matches wins.
            for pattern in match_patterns:
                match_asker = re.match(pattern, header_str)
                if match_asker:
                    self.asker = match_asker.group('asker')
                    self.repliers = match_asker.group('repliers')
                    # Get rid of tailing comma/whitespace+comma/'的'
                    self.repliers = self.repliers.strip()
                    if self.repliers[-1]==',' or self.repliers[-1]==u'的':
                        self.repliers = self.repliers[:-1]
                    break
        #postprocessing
        if self.asker:
            self.asker = self.asker.replace(u',', u'') #heading comma in a Chinese case
            # get rid of titles, make it easier for Name Matcher
            self.asker = self.asker.replace(u'the', u'')
            self.asker = self.asker.replace(u'Hon', u'')
            self.asker = self.asker.replace(u'Dr', u'')
        #2. content of question
        body = self.src.strip()
        #print('body str: {}'.format(body.encode('utf-8')))
        #body = main_body_str #main_body_str messes up with format structure. Match the src/htm instead.
        q_content_re_e =[]
        q_content_re_c =[]
        q_content_re_e.append(ur'(?s).*Question(s?)\s?(:|:|︰|﹕)?(?P<q_content>(?s).*)(Reply|Answer)\s?(:|:|︰|﹕)?')
        q_content_re_e.append(ur'(?s).*(:|:|︰|﹕)(?P<q_content>(?s).*)(Reply|Answer)\s?(:|:|︰|﹕)?')
        q_content_re_e.append(ur'(?s).*Question(s?)\s?(:|:|︰|﹕)?(?P<q_content>(?s).*)(Madam)?(President|president)\s?(:|:|︰|﹕|,)?')
        q_content_re_c.append(ur'(?s).*問題\s?(:|:|︰|﹕)(?P<q_content>(?s).*)(答|回)覆\s?(:|:|︰|﹕)')
        q_content_re_c.append(ur'(?s).*(答|回)覆\s?(:|:|︰|﹕)?(?P<q_content>(?s).*)(答|回)覆\s?(:|:|︰|﹕)')
        q_content_re_c.append(ur'(?s).*問題\s?(:|:|︰|﹕)(?P<q_content>(?s).*)(主席|主席女士)\s?(:|:|︰|﹕)')
        q_content_re_c.append(ur'(?s).*問題(?P<q_content>(?s).*)(答|回)覆')#1 case only
        q_content_re_c.append(ur'(?s).*(答|回)覆:(?P<q_content>(?s).*)主席女士')#1 case only
        match_patterns = q_content_re_e if self.english else q_content_re_c
        for pattern in match_patterns:
            match_q_content = re.match(pattern, body)
            if match_q_content:
                self.question_content = match_q_content.group('q_content')
                break
        if match_q_content:
            # Trim a dangling 'Madam ' left over by the third English pattern.
            if self.question_content[-6:]=='Madam ':
                self.question_content=self.question_content[:-6]
            #if self.question_content[-11:-13]=='答覆':
            #    self.question_content=self.question_content[:-12]
        else:
            logger.warn('Cannot match question content for question {}'.format(self.uid))
        #3. reply to question
        reply_content_re_e = []
        reply_content_re_c = []
        reply_content_re_e.append(ur'(?s).*(President|Madam president)\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)Ends')
        reply_content_re_e.append(ur'(?s).*(Reply|Answer)\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)Ends')
        reply_content_re_c.append(ur'(?s).*(主席|主席女士)\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)完')
        reply_content_re_c.append(ur'(?s).*(答|回)覆\s?(:|:|︰|﹕|,)(?P<reply_content>(?s).*)完')#sometimes '主席|主席女士' was omitted
        match_patterns = reply_content_re_e if self.english else reply_content_re_c
        for pattern in match_patterns:
            match_reply = re.match(pattern, body)
            if match_reply:
                self.reply_content = match_reply.group('reply_content')
                break
        #no needed postprocessing found yet
        if match_reply is None:
            logger.warn('Cannot match reply content for question {}'.format(self.uid))
f7fa843355231195d889437ff967786e3ec95e56 | 2,533 | py | Python | djangoerp/pluggets/forms.py | xarala221/django-erp | f84ee8ab193b3b06ee4522ac3c888ed47396e795 | [
"MIT"
] | 345 | 2015-02-16T17:36:32.000Z | 2022-01-13T11:04:35.000Z | djangoerp/pluggets/forms.py | xarala221/django-erp | f84ee8ab193b3b06ee4522ac3c888ed47396e795 | [
"MIT"
] | 22 | 2015-09-11T13:29:00.000Z | 2021-11-27T18:47:38.000Z | djangoerp/pluggets/forms.py | xarala221/django-erp | f84ee8ab193b3b06ee4522ac3c888ed47396e795 | [
"MIT"
] | 243 | 2015-03-31T12:08:30.000Z | 2022-01-29T08:23:53.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""This file is part of the django ERP project.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.5'
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from djangoerp.core.forms import enrich_form
from .models import *
from .loading import registry
class TextPluggetForm(forms.Form):
    """A form to set context variables of text plugget.

    Exposes a single required ``text`` field, rendered as a textarea; the
    submitted value becomes the plugget's displayed text.
    """
    text = forms.CharField(initial=_("Write something here..."), required=True, widget=forms.Textarea, label=_('Text'))
class SelectPluggetSourceForm(forms.Form):
    """A form to choose the plugget source from registered ones.

    The ``source_uid`` choices are populated per instance from the plugget
    source registry, so sources registered after class definition still
    appear.
    """
    source_uid = forms.ChoiceField(required=True, label=_('Type'))

    def __init__(self, *args, **kwargs):
        super(SelectPluggetSourceForm, self).__init__(*args, **kwargs)
        # Set choices at instantiation time: the registry contents can change
        # after this class is defined.
        self.fields['source_uid'].choices = registry.get_plugget_source_choices()
class CustomizePluggetSettingsForm(forms.ModelForm):
    """A form to customize the plugget appearance source-specific settings.

    Setting fields are added dynamically based on source registered form.
    Title uniqueness is validated within the plugget's region rather than
    globally.
    """
    class Meta:
        model = Plugget
        fields = ['title']

    def __init__(self, *args, **kwargs):
        # "region" is supplied by the caller and must be popped before the
        # parent ModelForm sees the kwargs.
        self.region = kwargs.pop("region", None)
        super(CustomizePluggetSettingsForm, self).__init__(*args, **kwargs)

    def clean_title(self):
        """Reject a title already used by a different plugget in this region."""
        title = self.cleaned_data['title'].strip(" ")
        try:
            plugget = Plugget.objects.get(title=title, region=self.region)
            # An existing plugget keeping its own title is still valid.
            if plugget != self.instance:
                raise forms.ValidationError(_("This title is already in use."))
        except Plugget.DoesNotExist:
            pass
        return title
# NOTE(review): enrich_form comes from djangoerp.core.forms — presumably it
# applies project-wide form enhancements; confirm its effect before relying
# on widget attributes of these forms.
enrich_form(SelectPluggetSourceForm)
enrich_form(CustomizePluggetSettingsForm)
| 35.180556 | 119 | 0.705882 |
from __future__ import unicode_literals
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.5'
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from djangoerp.core.forms import enrich_form
from .models import *
from .loading import registry
class TextPluggetForm(forms.Form):
text = forms.CharField(initial=_("Write something here..."), required=True, widget=forms.Textarea, label=_('Text'))
class SelectPluggetSourceForm(forms.Form):
source_uid = forms.ChoiceField(required=True, label=_('Type'))
def __init__(self, *args, **kwargs):
super(SelectPluggetSourceForm, self).__init__(*args, **kwargs)
self.fields['source_uid'].choices = registry.get_plugget_source_choices()
class CustomizePluggetSettingsForm(forms.ModelForm):
class Meta:
model = Plugget
fields = ['title']
def __init__(self, *args, **kwargs):
self.region = kwargs.pop("region", None)
super(CustomizePluggetSettingsForm, self).__init__(*args, **kwargs)
def clean_title(self):
title = self.cleaned_data['title'].strip(" ")
try:
plugget = Plugget.objects.get(title=title, region=self.region)
if plugget != self.instance:
raise forms.ValidationError(_("This title is already in use."))
except Plugget.DoesNotExist:
pass
return title
enrich_form(SelectPluggetSourceForm)
enrich_form(CustomizePluggetSettingsForm)
| true | true |
f7fa848cac5bbd992d8c928d858e2bad4d31124b | 1,746 | py | Python | 14/record_dataset.py | jason9075/ithome_tensorflow_series | e8f92de2a73a88e7b03a9ac58ece4c4a604f066e | [
"Apache-2.0"
] | 24 | 2019-10-06T17:11:23.000Z | 2022-03-15T08:52:22.000Z | 14/record_dataset.py | jason9075/ithome_tensorflow_series | e8f92de2a73a88e7b03a9ac58ece4c4a604f066e | [
"Apache-2.0"
] | 1 | 2020-07-03T10:13:10.000Z | 2020-07-08T06:55:12.000Z | 14/record_dataset.py | jason9075/ithome_tensorflow_series | e8f92de2a73a88e7b03a9ac58ece4c4a604f066e | [
"Apache-2.0"
] | 9 | 2019-10-06T17:11:25.000Z | 2022-03-15T08:52:26.000Z | import cv2
import tensorflow as tf
TFRECORD_PATH = '../tfrecord/member.tfrecord'
def main():
data_set = tf.data.TFRecordDataset(TFRECORD_PATH)
data_set = data_set.map(parse_function)
data_set = data_set.shuffle(buffer_size=9)
data_set = data_set.batch(3)
iterator = data_set.make_initializable_iterator()
next_element = iterator.get_next()
with tf.Session() as sess:
sess.run(iterator.initializer)
results, imgs = sess.run(next_element)
print('names: {}'.format(results['member/name']))
print('ages: {}'.format(results['member/age']))
print('heights: {}'.format(results['member/height']))
print('prefer_prods: {}'.format(results['member/prefer_prods']))
for img in imgs:
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
cv2.imshow('img', img)
cv2.waitKey(-1)
def parse_function(example_proto):
    """Parse one serialized Example into (features dict, augmented RGB image).

    Applies random brightness/saturation/contrast jitter and a random
    horizontal flip to the decoded image.
    """
    features = {'member/name': tf.io.FixedLenFeature([], tf.string),
                'member/encoded': tf.io.FixedLenFeature([], tf.string),
                'member/age': tf.io.FixedLenFeature([], tf.int64),
                'member/height': tf.io.VarLenFeature(tf.float32),
                'member/prefer_prods': tf.io.VarLenFeature(tf.int64)}
    features = tf.io.parse_single_example(example_proto, features)
    images = tf.image.decode_png(features['member/encoded'], channels=3)
    # The source PNGs have 4 channels, but the augmentation ops below fail on
    # 4-channel input, so the decode above forces 3 channels.
    images = tf.image.random_brightness(images, 0.1)
    images = tf.image.random_saturation(images, 0.7, 1.3)
    images = tf.image.random_contrast(images, 0.6, 1.5)
    images = tf.image.random_flip_left_right(images)
    return features, images
# Run the preview only when executed as a script.
if __name__ == '__main__':
    main()
| 34.92 | 72 | 0.658648 | import cv2
import tensorflow as tf
TFRECORD_PATH = '../tfrecord/member.tfrecord'
def main():
data_set = tf.data.TFRecordDataset(TFRECORD_PATH)
data_set = data_set.map(parse_function)
data_set = data_set.shuffle(buffer_size=9)
data_set = data_set.batch(3)
iterator = data_set.make_initializable_iterator()
next_element = iterator.get_next()
with tf.Session() as sess:
sess.run(iterator.initializer)
results, imgs = sess.run(next_element)
print('names: {}'.format(results['member/name']))
print('ages: {}'.format(results['member/age']))
print('heights: {}'.format(results['member/height']))
print('prefer_prods: {}'.format(results['member/prefer_prods']))
for img in imgs:
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
cv2.imshow('img', img)
cv2.waitKey(-1)
def parse_function(example_proto):
features = {'member/name': tf.io.FixedLenFeature([], tf.string),
'member/encoded': tf.io.FixedLenFeature([], tf.string),
'member/age': tf.io.FixedLenFeature([], tf.int64),
'member/height': tf.io.VarLenFeature(tf.float32),
'member/prefer_prods': tf.io.VarLenFeature(tf.int64)}
features = tf.io.parse_single_example(example_proto, features)
images = tf.image.decode_png(features['member/encoded'], channels=3)
images = tf.image.random_brightness(images, 0.1)
images = tf.image.random_saturation(images, 0.7, 1.3)
images = tf.image.random_contrast(images, 0.6, 1.5)
images = tf.image.random_flip_left_right(images)
return features, images
if __name__ == '__main__':
main()
| true | true |
f7fa84a12ee4da0afb4adfbe80c07dbc01295f4d | 2,013 | py | Python | day_16/task_1.py | Korred/advent_of_code_2021 | 89afcaae3343653106d36fb7ad08558c0fbb4732 | [
"Unlicense"
] | null | null | null | day_16/task_1.py | Korred/advent_of_code_2021 | 89afcaae3343653106d36fb7ad08558c0fbb4732 | [
"Unlicense"
] | null | null | null | day_16/task_1.py | Korred/advent_of_code_2021 | 89afcaae3343653106d36fb7ad08558c0fbb4732 | [
"Unlicense"
] | null | null | null | def hexstr_to_binstr(hex):
return f"{int(hex, 16):0>{len(hex)*4}b}"
def decode_packet(packet):
    """Decode one BITS packet from a binary string.

    Returns a dict with the unconsumed tail ("remaining"), the literal value
    ("value", always 0 for operator packets) and the summed versions of this
    packet and all of its sub-packets ("version_sum").
    """
    version_total = int(packet[:3], 2)
    type_id = int(packet[3:6], 2)
    stream = packet[6:]

    if type_id == 4:
        # Literal value: 5-bit groups, the final one flagged by a leading 0.
        bits = []
        more = True
        while more:
            more = stream[0] == "1"
            bits.append(stream[1:5])
            stream = stream[5:]
        return {
            "remaining": stream,
            "value": int("".join(bits), 2),
            "version_sum": version_total,
        }

    # Operator packet: length type bit selects how sub-packets are delimited.
    length_type, stream = stream[0], stream[1:]
    child_values = []
    if length_type == "0":
        # Next 15 bits give the total bit-length of all sub-packets.
        total_len = int(stream[:15], 2)
        window, stream = stream[15:15 + total_len], stream[15 + total_len:]
        while window:
            child = decode_packet(window)
            window = child["remaining"]
            child_values.append(child["value"])
            version_total += child["version_sum"]
    else:
        # Next 11 bits give the number of immediate sub-packets.
        count = int(stream[:11], 2)
        stream = stream[11:]
        for _ in range(count):
            child = decode_packet(stream)
            stream = child["remaining"]
            child_values.append(child["value"])
            version_total += child["version_sum"]
    # Task 1 only needs the version sum, so operator value stays 0.
    return {"remaining": stream, "value": 0, "version_sum": version_total}
# Decode the transmission from the puzzle input; the task-1 answer is the
# "version_sum" field of the printed dict.
bin_packet = hexstr_to_binstr(open("input.txt", "r").readline())
print(decode_packet(bin_packet))
| 27.202703 | 82 | 0.538003 | def hexstr_to_binstr(hex):
return f"{int(hex, 16):0>{len(hex)*4}b}"
def decode_packet(packet):
p_version = int(packet[:3], 2)
p_version_sum = p_version
packet = packet[3:]
p_type = int(packet[:3], 2)
packet = packet[3:]
if p_type == 4:
groups = []
while True:
group = packet[1:5]
num = packet[0]
groups.append(group)
packet = packet[5:]
if num == "0":
break
p_val = int("".join(groups), 2)
return {"remaining": packet, "value": p_val, "version_sum": p_version_sum}
else:
p_id = packet[0]
packet = packet[1:]
subpacket_vals = []
if p_id == "0":
p_total_length = int(packet[:15], 2)
packet = packet[15:]
subpackets = packet[:p_total_length]
while subpackets:
res = decode_packet(subpackets)
subpackets = res["remaining"]
subpacket_vals.append(res["value"])
p_version_sum += res["version_sum"]
packet = packet[p_total_length:]
else:
p_subpack_num = int(packet[:11], 2)
packet = packet[11:]
for _ in range(p_subpack_num):
res = decode_packet(packet)
packet = res["remaining"]
subpacket_vals.append(res["value"])
p_version_sum += res["version_sum"]
return {"remaining": packet, "value": 0, "version_sum": p_version_sum}
bin_packet = hexstr_to_binstr(open("input.txt", "r").readline())
print(decode_packet(bin_packet))
| true | true |
f7fa84d30542fa5ea8a7e04262d9b849da8e4971 | 4,690 | py | Python | pipeline/contrib/statistics/signals/handlers.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | [
"Apache-2.0"
] | 55 | 2021-09-07T11:50:35.000Z | 2022-03-23T13:19:38.000Z | pipeline/contrib/statistics/signals/handlers.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | [
"Apache-2.0"
] | 64 | 2021-09-07T12:04:12.000Z | 2022-03-29T03:47:18.000Z | pipeline/contrib/statistics/signals/handlers.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | [
"Apache-2.0"
] | 20 | 2021-09-07T11:52:08.000Z | 2022-03-28T08:05:22.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
import ujson as json
from django.db.models.signals import post_save
from django.dispatch import receiver
from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION
from pipeline.contrib.statistics.models import (
ComponentInTemplate,
TemplateInPipeline,
)
from pipeline.contrib.statistics.tasks import pipeline_post_save_statistics_task, pipeline_archive_statistics_task
from pipeline.contrib.statistics.utils import count_pipeline_tree_nodes
from pipeline.core.constants import PE
from pipeline.models import PipelineInstance, PipelineTemplate
from pipeline.signals import post_pipeline_finish, post_pipeline_revoke
logger = logging.getLogger("root")
@receiver(post_save, sender=PipelineTemplate)
def template_post_save_handler(sender, instance, created, **kwargs):
    """Rebuild component/node statistics whenever a template is saved.

    :param sender: the model class (PipelineTemplate)
    :param instance: the saved PipelineTemplate instance
    :param created: True when the row was just created (statistics are
        rebuilt for updates as well)
    :param kwargs: remaining signal arguments
    :return: None
    """
    template = instance
    template_id = template.template_id
    # Drop this template's previous statistics rows: whether this save is a
    # create or an update, they are rebuilt from scratch below.
    ComponentInTemplate.objects.filter(template_id=template_id).delete()
    data = template.data
    component_list = []
    # Record standard-plugin usage per activity node, including indirect
    # usage through subprocess nodes.
    for act_id, act in data[PE.activities].items():
        # Direct reference from a standard-plugin service activity.
        if act["type"] == PE.ServiceActivity:
            component = ComponentInTemplate(
                component_code=act["component"]["code"],
                template_id=template_id,
                node_id=act_id,
                version=act["component"].get("version", LEGACY_PLUGINS_VERSION),
            )
            component_list.append(component)
        # Indirect reference via a subprocess node: copy the child template's
        # component rows, extending their subprocess stack with this node.
        else:
            components = ComponentInTemplate.objects.filter(template_id=act["template_id"]).values(
                "subprocess_stack", "component_code", "node_id", "version"
            )
            for component_sub in components:
                # The subprocess execution stack (path of subprocess nodes).
                stack = json.loads(component_sub["subprocess_stack"])
                # Prepend this subprocess node's id to the stack.
                stack.insert(0, act_id)
                component = ComponentInTemplate(
                    component_code=component_sub["component_code"],
                    template_id=template_id,
                    node_id=component_sub["node_id"],
                    is_sub=True,
                    subprocess_stack=json.dumps(stack),
                    version=component_sub["version"],
                )
                component_list.append(component)
    ComponentInTemplate.objects.bulk_create(component_list)

    # Count totals of plugin nodes, subprocesses and gateways in the tree.
    atom_total, subprocess_total, gateways_total = count_pipeline_tree_nodes(template.data)
    TemplateInPipeline.objects.update_or_create(
        template_id=template_id,
        defaults={"atom_total": atom_total, "subprocess_total": subprocess_total, "gateways_total": gateways_total},
    )
@receiver(post_save, sender=PipelineInstance)
def pipeline_post_save_handler(sender, instance, created, **kwargs):
    """Enqueue statistics collection when a pipeline instance is created.

    Only newly created instances are processed. Failures to enqueue are
    logged and swallowed so the originating save never fails.
    """
    try:
        if created:
            pipeline_post_save_statistics_task.delay(instance_id=instance.instance_id)
    except Exception:
        # Log the pipeline instance_id (not the DB pk) so the value matches
        # the "instance_id=" label and the other handlers in this module.
        logger.exception("pipeline_post_save_handler[instance_id={}] send message error".format(instance.instance_id))
@receiver(post_pipeline_finish, sender=PipelineInstance)
def pipeline_post_finish_handler(sender, instance_id, **kwargs):
    """Enqueue statistics archiving when a pipeline instance finishes.

    Enqueue failures are logged and swallowed so signal dispatch never fails.
    """
    try:
        pipeline_archive_statistics_task.delay(instance_id=instance_id)
    except Exception:
        logger.exception("pipeline_post_finish_handler[instance_id={}] send message error".format(instance_id))
@receiver(post_pipeline_revoke, sender=PipelineInstance)
def pipeline_post_revoke_handler(sender, instance_id, **kwargs):
    """Enqueue statistics archiving when a pipeline instance is revoked.

    Enqueue failures are logged and swallowed so signal dispatch never fails.
    """
    try:
        pipeline_archive_statistics_task.delay(instance_id=instance_id)
    except Exception:
        logger.exception("pipeline_post_revoke_handler[instance_id={}] send message error".format(instance_id))
| 41.504425 | 116 | 0.716844 |
import logging
import ujson as json
from django.db.models.signals import post_save
from django.dispatch import receiver
from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION
from pipeline.contrib.statistics.models import (
ComponentInTemplate,
TemplateInPipeline,
)
from pipeline.contrib.statistics.tasks import pipeline_post_save_statistics_task, pipeline_archive_statistics_task
from pipeline.contrib.statistics.utils import count_pipeline_tree_nodes
from pipeline.core.constants import PE
from pipeline.models import PipelineInstance, PipelineTemplate
from pipeline.signals import post_pipeline_finish, post_pipeline_revoke
logger = logging.getLogger("root")
@receiver(post_save, sender=PipelineTemplate)
def template_post_save_handler(sender, instance, created, **kwargs):
template = instance
template_id = template.template_id
ComponentInTemplate.objects.filter(template_id=template_id).delete()
data = template.data
component_list = []
for act_id, act in data[PE.activities].items():
if act["type"] == PE.ServiceActivity:
component = ComponentInTemplate(
component_code=act["component"]["code"],
template_id=template_id,
node_id=act_id,
version=act["component"].get("version", LEGACY_PLUGINS_VERSION),
)
component_list.append(component)
else:
components = ComponentInTemplate.objects.filter(template_id=act["template_id"]).values(
"subprocess_stack", "component_code", "node_id", "version"
)
for component_sub in components:
stack = json.loads(component_sub["subprocess_stack"])
stack.insert(0, act_id)
component = ComponentInTemplate(
component_code=component_sub["component_code"],
template_id=template_id,
node_id=component_sub["node_id"],
is_sub=True,
subprocess_stack=json.dumps(stack),
version=component_sub["version"],
)
component_list.append(component)
ComponentInTemplate.objects.bulk_create(component_list)
atom_total, subprocess_total, gateways_total = count_pipeline_tree_nodes(template.data)
TemplateInPipeline.objects.update_or_create(
template_id=template_id,
defaults={"atom_total": atom_total, "subprocess_total": subprocess_total, "gateways_total": gateways_total},
)
@receiver(post_save, sender=PipelineInstance)
def pipeline_post_save_handler(sender, instance, created, **kwargs):
try:
if created:
pipeline_post_save_statistics_task.delay(instance_id=instance.instance_id)
except Exception:
logger.exception("pipeline_post_save_handler[instance_id={}] send message error".format(instance.id))
@receiver(post_pipeline_finish, sender=PipelineInstance)
def pipeline_post_finish_handler(sender, instance_id, **kwargs):
try:
pipeline_archive_statistics_task.delay(instance_id=instance_id)
except Exception:
logger.exception("pipeline_post_finish_handler[instance_id={}] send message error".format(instance_id))
@receiver(post_pipeline_revoke, sender=PipelineInstance)
def pipeline_post_revoke_handler(sender, instance_id, **kwargs):
try:
pipeline_archive_statistics_task.delay(instance_id=instance_id)
except Exception:
logger.exception("pipeline_post_revoke_handler[instance_id={}] send message error".format(instance_id))
| true | true |
f7fa850d57bed91e7514643f6128a61b647b89e8 | 740 | bzl | Python | tools/maven.bzl | google/inject-common | 587582c04da3ef701f3654eff654a1e02df83ccf | [
"Apache-2.0"
] | 9 | 2019-12-31T21:27:06.000Z | 2022-03-31T14:57:50.000Z | tools/maven.bzl | google/inject-common | 587582c04da3ef701f3654eff654a1e02df83ccf | [
"Apache-2.0"
] | null | null | null | tools/maven.bzl | google/inject-common | 587582c04da3ef701f3654eff654a1e02df83ccf | [
"Apache-2.0"
] | 7 | 2019-07-01T07:01:48.000Z | 2021-10-12T00:15:22.000Z | """Macros to simplify generating maven files.
"""
load("@google_bazel_common//tools/maven:pom_file.bzl", default_pom_file = "pom_file")
def pom_file(name, targets, artifact_name, artifact_id, packaging = None, **kwargs):
default_pom_file(
name = name,
targets = targets,
preferred_group_ids = [
"com.google.common.inject",
"com.google.inject",
"dagger",
"com.google",
],
template_file = "//tools:pom-template.xml",
substitutions = {
"{artifact_name}": artifact_name,
"{artifact_id}": artifact_id,
"{packaging}": packaging or "jar",
},
**kwargs
)
POM_VERSION = "${project.version}"
| 28.461538 | 85 | 0.572973 |
load("@google_bazel_common//tools/maven:pom_file.bzl", default_pom_file = "pom_file")
def pom_file(name, targets, artifact_name, artifact_id, packaging = None, **kwargs):
default_pom_file(
name = name,
targets = targets,
preferred_group_ids = [
"com.google.common.inject",
"com.google.inject",
"dagger",
"com.google",
],
template_file = "//tools:pom-template.xml",
substitutions = {
"{artifact_name}": artifact_name,
"{artifact_id}": artifact_id,
"{packaging}": packaging or "jar",
},
**kwargs
)
POM_VERSION = "${project.version}"
| true | true |
f7fa856af28f212df8f25c934a45353719af37b9 | 1,347 | py | Python | objects/CSCG/_3d/forms/standard/base/dofs/visualize/main.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | 1 | 2020-10-14T12:48:35.000Z | 2020-10-14T12:48:35.000Z | objects/CSCG/_3d/forms/standard/base/dofs/visualize/main.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | null | null | null | objects/CSCG/_3d/forms/standard/base/dofs/visualize/main.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | null | null | null | """"""
from screws.freeze.main import FrozenOnly
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._0sf import _3dCSCG_S0F_DOFs_Matplot
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._1sf import _3dCSCG_S1F_DOFs_Matplot
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._2sf import _3dCSCG_S2F_DOFs_Matplot
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._3sf import _3dCSCG_S3F_DOFs_Matplot
class _3dCSCG_SF_DOFs_VISUALIZE(FrozenOnly):
    """Visualization entry point for the dofs of a 3d CSCG standard form."""
    def __init__(self, dofs):
        """Bind the dofs object; the matplot backend is created lazily."""
        self._dofs_ = dofs
        self._matplot_ = None
        self._freeze_self_()

    def __call__(self, *args, **kwargs):
        # Calling the visualizer delegates straight to the matplot backend.
        return self.matplot(*args, **kwargs)

    @property
    def matplot(self):
        """The matplot visualizer matching the form degree ``k`` (0..3)."""
        if self._matplot_ is None:
            k = self._dofs_._sf_.k
            if k == 0:
                matplot_class = _3dCSCG_S0F_DOFs_Matplot
            elif k == 1:
                matplot_class = _3dCSCG_S1F_DOFs_Matplot
            elif k == 2:
                matplot_class = _3dCSCG_S2F_DOFs_Matplot
            elif k == 3:
                matplot_class = _3dCSCG_S3F_DOFs_Matplot
            else:
                raise Exception()
            self._matplot_ = matplot_class(self._dofs_)
        return self._matplot_
from screws.freeze.main import FrozenOnly
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._0sf import _3dCSCG_S0F_DOFs_Matplot
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._1sf import _3dCSCG_S1F_DOFs_Matplot
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._2sf import _3dCSCG_S2F_DOFs_Matplot
from objects.CSCG._3d.forms.standard.base.dofs.visualize.matplot._3sf import _3dCSCG_S3F_DOFs_Matplot
class _3dCSCG_SF_DOFs_VISUALIZE(FrozenOnly):
    """Visualizer for the dofs of a 3d CSCG standard form; dispatches
    to the matplot visualizer matching the form degree ``k``.
    """
    def __init__(self, dofs):
        # ``dofs``: the dofs object of a 3d CSCG standard form.
        self._dofs_ = dofs
        self._matplot_ = None  # lazily created by the ``matplot`` property
        self._freeze_self_()
    def __call__(self, *args, **kwargs):
        """Calling the visualizer delegates to ``matplot``."""
        return self.matplot(*args, **kwargs)
    @property
    def matplot(self):
        """The matplot dofs visualizer for this form, selected by the
        form degree ``self._dofs_._sf_.k`` and cached after first access.
        """
        if self._matplot_ is None:
            if self._dofs_._sf_.k == 0:
                self._matplot_ = _3dCSCG_S0F_DOFs_Matplot(self._dofs_)
            elif self._dofs_._sf_.k == 1:
                self._matplot_ = _3dCSCG_S1F_DOFs_Matplot(self._dofs_)
            elif self._dofs_._sf_.k == 2:
                self._matplot_ = _3dCSCG_S2F_DOFs_Matplot(self._dofs_)
            elif self._dofs_._sf_.k == 3:
                self._matplot_ = _3dCSCG_S3F_DOFs_Matplot(self._dofs_)
            else:
                # only k in {0, 1, 2, 3} is supported
                raise Exception()
        return self._matplot_
f7fa8715c06f78adaf05a9ecbbb6e1263d7d9967 | 26 | py | Python | __init__.py | Gu1nness/AsmInterpreter | 40316c3b8c103e4ea35504e71e26065c8bf4a82d | [
"MIT"
] | 1 | 2021-02-28T05:27:52.000Z | 2021-02-28T05:27:52.000Z | __init__.py | Gu1nness/AsmInterpreter | 40316c3b8c103e4ea35504e71e26065c8bf4a82d | [
"MIT"
] | null | null | null | __init__.py | Gu1nness/AsmInterpreter | 40316c3b8c103e4ea35504e71e26065c8bf4a82d | [
"MIT"
] | null | null | null | from . import interpreter
| 13 | 25 | 0.807692 | from . import interpreter
| true | true |
f7fa8716ce0c594456875110dbbcf7c396440882 | 3,351 | py | Python | feeder/util/mkcert.py | jordan-hamilton/petnet-feeder-service | 66c3192b0e66f4eefb5fd55cceb1219fa1ddb914 | [
"MIT"
] | 47 | 2020-04-23T20:28:27.000Z | 2020-10-07T19:49:10.000Z | feeder/util/mkcert.py | jordan-hamilton/petnet-feeder-service | 66c3192b0e66f4eefb5fd55cceb1219fa1ddb914 | [
"MIT"
] | 8 | 2020-04-26T01:45:21.000Z | 2020-10-07T22:17:31.000Z | feeder/util/mkcert.py | ericchapman80/petnet-api-hacking | 23cff84317d7380d7d1c0a2718cc153e83920906 | [
"MIT"
] | 9 | 2020-04-25T17:22:44.000Z | 2020-10-07T04:36:56.000Z | # https://nachtimwald.com/2019/11/14/python-self-signed-cert-gen/
import socket
import logging
import datetime
from typing import cast
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from cryptography.x509 import NameOID, ExtensionOID, DNSName, ExtensionNotFound
from feeder import settings
logger = logging.getLogger(__name__)
# Subject Alternative Names for the self-signed certificate.  The debug
# message in generate_self_signed_certificate() advertises both
# "<hostname>" and "*.<hostname>", but the original list contained the
# bare hostname twice (f"{socket.gethostname()}" is identical to
# socket.gethostname()) -- the wildcard entry was clearly intended.
sans = [
    x509.DNSName(socket.getfqdn()),
    x509.DNSName(socket.gethostname()),
    x509.DNSName(f"*.{socket.gethostname()}"),
    x509.DNSName("localhost"),
    x509.DNSName("*.localhost"),
]
if settings.domain:
    # Optionally cover the explicitly configured public domain too.
    sans.append(x509.DNSName(settings.domain))
def generate_self_signed_certificate():
    """Generate a self-signed TLS certificate for this host.

    The certificate uses the machine's hostname as both subject and
    issuer CN, carries the module-level ``sans`` as Subject Alternative
    Names, and is valid from one day ago until roughly five years out.

    :return: a ``(certificate_pem, private_key_pem)`` tuple of bytes.
    """
    one_day = datetime.timedelta(1, 0, 0)
    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    public_key = private_key.public_key()
    builder = x509.CertificateBuilder()
    builder = builder.subject_name(
        x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, socket.gethostname())])
    )
    # Self-signed: issuer is the same name as the subject.
    builder = builder.issuer_name(
        x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, socket.gethostname())])
    )
    # cryptography interprets naive datetimes as UTC; datetime.today()
    # returns naive *local* time, which skews the validity window on
    # hosts not running in UTC -- use utcnow() instead.
    now = datetime.datetime.utcnow()
    builder = builder.not_valid_before(now - one_day)
    builder = builder.not_valid_after(now + (one_day * 365 * 5))
    builder = builder.serial_number(x509.random_serial_number())
    builder = builder.public_key(public_key)
    logger.debug(
        "Adding SANs for %(hostname)s, *.%(hostname)s, localhost, and *.localhost",
        {"hostname": socket.gethostname()},
    )
    builder = builder.add_extension(
        x509.SubjectAlternativeName(sans),
        critical=False,
    )
    # This is a leaf certificate, not a CA.
    builder = builder.add_extension(
        x509.BasicConstraints(ca=False, path_length=None), critical=True
    )
    certificate = builder.sign(
        private_key=private_key, algorithm=hashes.SHA256(), backend=default_backend()
    )
    return (
        certificate.public_bytes(serialization.Encoding.PEM),
        private_key.private_bytes(
            serialization.Encoding.PEM,
            serialization.PrivateFormat.PKCS8,
            serialization.NoEncryption(),
        ),
    )
def domain_in_subjects(certificate_path: str, domain: str) -> bool:
    """Return True when *domain* is covered by the certificate's SANs.

    A domain matches if it appears verbatim among the DNS subject
    alternative names, or if the SANs contain a wildcard for its parent
    domain (e.g. ``pet.domain.com`` matches ``*.domain.com``).  Returns
    False when the certificate has no SAN extension.
    """
    with open(certificate_path, "r", encoding="utf-8") as handle:
        certificate = x509.load_pem_x509_certificate(
            handle.read().encode("utf-8"), default_backend()
        )
        try:
            san_extension = certificate.extensions.get_extension_for_oid(
                ExtensionOID.SUBJECT_ALTERNATIVE_NAME
            )
            san_value = cast(x509.SubjectAlternativeName, san_extension.value)
            dns_names = san_value.get_values_for_type(DNSName)
        except ExtensionNotFound:
            logger.warning(
                "Failed to load SAN extension, cannot read certificate SANs!"
            )
            return False
    # Wildcard covering the parent of *domain*, e.g. "*.domain.com"
    # when domain is "pet.domain.com".
    wildcard = "*.{}".format(".".join(domain.split(".")[1:]))
    return domain in dns_names or wildcard in dns_names
| 36.032258 | 86 | 0.697404 |
import socket
import logging
import datetime
from typing import cast
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from cryptography.x509 import NameOID, ExtensionOID, DNSName, ExtensionNotFound
from feeder import settings
logger = logging.getLogger(__name__)
# Subject Alternative Names for the self-signed certificate.  The
# original list repeated the bare hostname (f"{socket.gethostname()}"
# equals socket.gethostname()); the wildcard "*.<hostname>" entry that
# the debug log in generate_self_signed_certificate() advertises was
# missing -- fixed here.
sans = [
    x509.DNSName(socket.getfqdn()),
    x509.DNSName(socket.gethostname()),
    x509.DNSName(f"*.{socket.gethostname()}"),
    x509.DNSName("localhost"),
    x509.DNSName("*.localhost"),
]
if settings.domain:
    # Optionally cover the explicitly configured public domain too.
    sans.append(x509.DNSName(settings.domain))
def generate_self_signed_certificate():
    """Generate a self-signed TLS certificate for this host.

    Uses the machine's hostname as subject/issuer CN and the
    module-level ``sans`` as Subject Alternative Names; the certificate
    is valid from one day ago until roughly five years from now.

    :return: a ``(certificate_pem, private_key_pem)`` tuple of bytes.
    """
    one_day = datetime.timedelta(1, 0, 0)
    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    public_key = private_key.public_key()
    builder = x509.CertificateBuilder()
    builder = builder.subject_name(
        x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, socket.gethostname())])
    )
    # Self-signed: issuer is the same name as the subject.
    builder = builder.issuer_name(
        x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, socket.gethostname())])
    )
    # NOTE(review): datetime.today() is naive local time; cryptography
    # treats naive datetimes as UTC -- consider utcnow() here.
    builder = builder.not_valid_before(datetime.datetime.today() - one_day)
    builder = builder.not_valid_after(datetime.datetime.today() + (one_day * 365 * 5))
    builder = builder.serial_number(x509.random_serial_number())
    builder = builder.public_key(public_key)
    logger.debug(
        "Adding SANs for %(hostname)s, *.%(hostname)s, localhost, and *.localhost",
        {"hostname": socket.gethostname()},
    )
    builder = builder.add_extension(
        x509.SubjectAlternativeName(sans),
        critical=False,
    )
    # Leaf certificate, not a CA.
    builder = builder.add_extension(
        x509.BasicConstraints(ca=False, path_length=None), critical=True
    )
    certificate = builder.sign(
        private_key=private_key, algorithm=hashes.SHA256(), backend=default_backend()
    )
    return (
        certificate.public_bytes(serialization.Encoding.PEM),
        private_key.private_bytes(
            serialization.Encoding.PEM,
            serialization.PrivateFormat.PKCS8,
            serialization.NoEncryption(),
        ),
    )
def domain_in_subjects(certificate_path: str, domain: str) -> bool:
    """Return True when *domain* is listed in (or wildcard-covered by)
    the DNS Subject Alternative Names of the PEM certificate at
    *certificate_path*; False when the SAN extension is absent.
    """
    with open(certificate_path, "r", encoding="utf-8") as pem_file:
        pem_data = pem_file.read().encode("utf-8")
        cert = x509.load_pem_x509_certificate(pem_data, default_backend())
        try:
            extension = cert.extensions.get_extension_for_oid(
                ExtensionOID.SUBJECT_ALTERNATIVE_NAME
            )
            ext_value = cast(x509.SubjectAlternativeName, extension.value)
            alt_names = ext_value.get_values_for_type(DNSName)
        except ExtensionNotFound:
            logger.warning(
                "Failed to load SAN extension, cannot read certificate SANs!"
            )
            return False
    # Also accept a wildcard covering the parent domain, e.g.
    # "pet.domain.com" matches a SAN of "*.domain.com".
    parent_wildcard = f"*.{'.'.join(domain.split('.')[1:])}"
    return domain in alt_names or parent_wildcard in alt_names
| true | true |
f7fa875060640cd16025c9f19ee32fae2d2fd492 | 21,414 | py | Python | twisted/internet/posixbase.py | hawkowl/twisted | c413aac3888dea2202c0dc26f978d7f88b4b837a | [
"Unlicense",
"MIT"
] | null | null | null | twisted/internet/posixbase.py | hawkowl/twisted | c413aac3888dea2202c0dc26f978d7f88b4b837a | [
"Unlicense",
"MIT"
] | null | null | null | twisted/internet/posixbase.py | hawkowl/twisted | c413aac3888dea2202c0dc26f978d7f88b4b837a | [
"Unlicense",
"MIT"
] | null | null | null | # -*- test-case-name: twisted.test.test_internet,twisted.internet.test.test_posixbase -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Posix reactor base class
"""
from __future__ import division, absolute_import
import socket
import errno
import os
import sys
from zope.interface import implementer, classImplements
from twisted.internet import error, udp, tcp
from twisted.internet.base import ReactorBase, _SignalReactorMixin
from twisted.internet.main import CONNECTION_DONE, CONNECTION_LOST
from twisted.internet.interfaces import IReactorUNIX, IReactorUNIXDatagram
from twisted.internet.interfaces import IReactorTCP, IReactorUDP, IReactorSSL
from twisted.internet.interfaces import IReactorSocket, IHalfCloseableDescriptor
from twisted.internet.interfaces import IReactorProcess, IReactorMulticast
from twisted.python import log, failure, util
from twisted.python.runtime import platformType, platform
# Exceptions that doSelect might return frequently
_NO_FILENO = error.ConnectionFdescWentAway('Handler has no fileno method')
_NO_FILEDESC = error.ConnectionFdescWentAway('File descriptor lost')
try:
from twisted.protocols import tls
except ImportError:
tls = None
try:
from twisted.internet import ssl
except ImportError:
ssl = None
unixEnabled = (platformType == 'posix')
processEnabled = False
if unixEnabled:
from twisted.internet import fdesc, unix
from twisted.internet import process, _signals
processEnabled = True
if platform.isWindows():
try:
import win32process
processEnabled = True
except ImportError:
win32process = None
class _SocketWaker(log.Logger):
    """
    The I{self-pipe trick<http://cr.yp.to/docs/selfpipe.html>}, implemented
    using a pair of sockets rather than pipes (due to the lack of support in
    select() on Windows for pipes), used to wake up the main loop from
    another thread.
    """
    disconnected = 0

    def __init__(self, reactor):
        """Create the connected loopback socket pair."""
        self.reactor = reactor
        # Following select_trigger (from asyncore)'s example: listen on
        # an ephemeral loopback port, connect to it, and accept.
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        server.bind(('127.0.0.1', 0))
        server.listen(1)
        client.connect(server.getsockname())
        reader, clientaddr = server.accept()
        # The temporary listener has served its purpose; the original
        # code never closed it, leaking its fd for the process lifetime.
        server.close()
        client.setblocking(0)
        reader.setblocking(0)
        self.r = reader
        self.w = client
        self.fileno = self.r.fileno

    def wakeUp(self):
        """Send a byte to my connection."""
        try:
            util.untilConcludes(self.w.send, b'x')
        except socket.error as e:
            # On Windows a full send buffer just means the reactor is
            # already scheduled to wake; anything else is a real error.
            if e.args[0] != errno.WSAEWOULDBLOCK:
                raise

    def doRead(self):
        """Read (and discard) some data from my connection."""
        try:
            self.r.recv(8192)
        except socket.error:
            pass

    def connectionLost(self, reason):
        """Close both ends of the socket pair."""
        self.r.close()
        self.w.close()
class _FDWaker(log.Logger, object):
    """
    Base class for wakers backed by an C{os.pipe()} watched by the
    reactor: the I{self-pipe trick<http://cr.yp.to/docs/selfpipe.html>},
    used to wake the main loop from another thread or a signal handler.

    @ivar i: readable end of the pipe; monitor this descriptor to be
        awoken by this waker.
    @ivar o: writable end of the pipe; write here to wake a reactor
        monitoring this waker.
    """
    disconnected = 0

    i = None
    o = None

    def __init__(self, reactor):
        """Create the pipe, non-blocking and close-on-exec at both ends."""
        self.reactor = reactor
        self.i, self.o = os.pipe()
        for fd in (self.i, self.o):
            fdesc.setNonBlocking(fd)
            fdesc._setCloseOnExec(fd)
        self.fileno = lambda: self.i

    def doRead(self):
        """
        Drain and discard whatever bytes are pending on the pipe.
        """
        fdesc.readFromFD(self.fileno(), lambda data: None)

    def connectionLost(self, reason):
        """Close both ends of my pipe, tolerating already-closed fds."""
        if not hasattr(self, "o"):
            # Already torn down.
            return
        for fd in self.i, self.o:
            try:
                os.close(fd)
            except IOError:
                pass
        del self.i, self.o
class _UnixWaker(_FDWaker):
    """
    Pipe-based waker used on POSIX: threads or signal handlers call
    C{wakeUp} to make the event loop spin once.
    """

    def wakeUp(self):
        """Write one byte to the pipe, and flush it."""
        if self.o is None:
            return
        # fdesc.writeToFD is deliberately not used here: EINTR must be
        # retried (untilConcludes), while EAGAIN -- pipe already full --
        # means the reactor is already going to wake, so it is ignored.
        try:
            util.untilConcludes(os.write, self.o, b'x')
        except OSError as e:
            # XXX There is no unit test for raising the exception
            # for other errnos. See #4285.
            if e.errno != errno.EAGAIN:
                raise
# Pick the waker implementation for this platform: a pipe on POSIX, a
# loopback socket pair elsewhere (select() cannot watch pipes there).
if platformType == 'posix':
    _Waker = _UnixWaker
else:
    # Primarily Windows and Jython.
    _Waker = _SocketWaker
class _SIGCHLDWaker(_FDWaker):
    """
    L{_SIGCHLDWaker} can wake up a reactor whenever C{SIGCHLD} is
    received: its pipe's write end is registered as the signal
    handler's wakeup fd.

    @see: L{twisted.internet._signals}
    """
    def __init__(self, reactor):
        _FDWaker.__init__(self, reactor)

    def install(self):
        """
        Install the handler necessary to make this waker active.
        """
        _signals.installHandler(self.o)

    def uninstall(self):
        """
        Remove the handler which makes this waker active.
        """
        # -1 detaches the wakeup fd.
        _signals.installHandler(-1)

    def doRead(self):
        """
        Having woken up the reactor in response to receipt of
        C{SIGCHLD}, reap the process which exited.

        This is called whenever the reactor notices the waker pipe is
        writeable, which happens soon after any call to the C{wakeUp}
        method.
        """
        # Drain the pipe first, then reap any exited children.
        _FDWaker.doRead(self)
        process.reapAllProcesses()
class _DisconnectSelectableMixin(object):
    """
    Mixin providing the C{_disconnectSelectable} method.
    """

    def _disconnectSelectable(self, selectable, why, isRead, faildict={
        error.ConnectionDone: failure.Failure(error.ConnectionDone()),
        error.ConnectionLost: failure.Failure(error.ConnectionLost())
        }):
        """
        Utility function for disconnecting a selectable.

        Supports half-close notification, isRead should be boolean indicating
        whether error resulted from doRead().
        """
        # NOTE: the mutable default ``faildict`` is deliberate -- it is a
        # per-process cache of pre-built Failure instances and is never
        # mutated.
        self.removeReader(selectable)
        f = faildict.get(why.__class__)
        if f:
            # A clean ConnectionDone on the read side of a half-closeable
            # descriptor is a half-close, not a full disconnect: keep the
            # writer registered and only notify readConnectionLost.
            if (isRead and why.__class__ == error.ConnectionDone
                and IHalfCloseableDescriptor.providedBy(selectable)):
                selectable.readConnectionLost(f)
            else:
                self.removeWriter(selectable)
                selectable.connectionLost(f)
        else:
            # Unrecognized reason: wrap it in a fresh Failure.
            self.removeWriter(selectable)
            selectable.connectionLost(failure.Failure(why))
@implementer(IReactorTCP, IReactorUDP, IReactorMulticast)
class PosixReactorBase(_SignalReactorMixin, _DisconnectSelectableMixin,
                       ReactorBase):
    """
    A basis for reactors that use file descriptors.

    @ivar _childWaker: L{None} or a reference to the L{_SIGCHLDWaker}
        which is used to properly notice child process termination.
    """

    # Callable that creates a waker, overrideable so that subclasses can
    # substitute their own implementation:
    _wakerFactory = _Waker

    def installWaker(self):
        """
        Install a `waker' to allow threads and signals to wake up the IO thread.

        We use the self-pipe trick (http://cr.yp.to/docs/selfpipe.html) to wake
        the reactor. On Windows we use a pair of sockets.
        """
        if not self.waker:
            self.waker = self._wakerFactory(self)
            self._internalReaders.add(self.waker)
            self.addReader(self.waker)

    # Set lazily by _handleSignals on POSIX when process support exists.
    _childWaker = None

    def _handleSignals(self):
        """
        Extend the basic signal handling logic to also support
        handling SIGCHLD to know when to try to reap child processes.
        """
        _SignalReactorMixin._handleSignals(self)
        if platformType == 'posix' and processEnabled:
            if not self._childWaker:
                self._childWaker = _SIGCHLDWaker(self)
                self._internalReaders.add(self._childWaker)
                self.addReader(self._childWaker)
            self._childWaker.install()
            # Also reap all processes right now, in case we missed any
            # signals before we installed the SIGCHLD waker/handler.
            # This should only happen if someone used spawnProcess
            # before calling reactor.run (and the process also exited
            # already).
            process.reapAllProcesses()

    def _uninstallHandler(self):
        """
        If a child waker was created and installed, uninstall it now.

        Since this disables reactor functionality and is only called
        when the reactor is stopping, it doesn't provide any directly
        useful functionality, but the cleanup of reactor-related
        process-global state that it does helps in unit tests
        involving multiple reactors and is generally just a nice
        thing.
        """
        # XXX This would probably be an alright place to put all of
        # the cleanup code for all internal readers (here and in the
        # base class, anyway). See #3063 for that cleanup task.
        if self._childWaker:
            self._childWaker.uninstall()

    # IReactorProcess

    def spawnProcess(self, processProtocol, executable, args=(),
                     env={}, path=None,
                     uid=None, gid=None, usePTY=0, childFDs=None):
        """
        Spawn a child process.  POSIX supports PTYs, custom child file
        descriptors and uid/gid switching; Windows supports none of
        those options and raises L{ValueError} when they are requested.
        """
        args, env = self._checkProcessArgs(args, env)
        if platformType == 'posix':
            if usePTY:
                if childFDs is not None:
                    raise ValueError("Using childFDs is not supported with usePTY=True.")
                return process.PTYProcess(self, executable, args, env, path,
                                          processProtocol, uid, gid, usePTY)
            else:
                return process.Process(self, executable, args, env, path,
                                       processProtocol, uid, gid, childFDs)
        elif platformType == "win32":
            if uid is not None:
                raise ValueError("Setting UID is unsupported on this platform.")
            if gid is not None:
                raise ValueError("Setting GID is unsupported on this platform.")
            if usePTY:
                raise ValueError("The usePTY parameter is not supported on Windows.")
            if childFDs:
                raise ValueError("Customizing childFDs is not supported on Windows.")

            if win32process:
                from twisted.internet._dumbwin32proc import Process
                return Process(self, processProtocol, executable, args, env, path)
            else:
                raise NotImplementedError(
                    "spawnProcess not available since pywin32 is not installed.")
        else:
            raise NotImplementedError(
                "spawnProcess only available on Windows or POSIX.")

    # IReactorUDP

    def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
        """Connects a given L{DatagramProtocol} to the given numeric UDP port.

        @returns: object conforming to L{IListeningPort}.
        """
        p = udp.Port(port, protocol, interface, maxPacketSize, self)
        p.startListening()
        return p

    # IReactorMulticast

    def listenMulticast(self, port, protocol, interface='', maxPacketSize=8192, listenMultiple=False):
        """Connects a given DatagramProtocol to the given numeric UDP port.

        EXPERIMENTAL.

        @returns: object conforming to IListeningPort.
        """
        p = udp.MulticastPort(port, protocol, interface, maxPacketSize, self, listenMultiple)
        p.startListening()
        return p

    # IReactorUNIX

    def connectUNIX(self, address, factory, timeout=30, checkPID=0):
        """Connect a client factory to the UNIX socket at C{address}."""
        assert unixEnabled, "UNIX support is not present"
        c = unix.Connector(address, factory, timeout, self, checkPID)
        c.connect()
        return c

    def listenUNIX(self, address, factory, backlog=50, mode=0o666, wantPID=0):
        """Listen on the UNIX socket at C{address} with C{factory}."""
        assert unixEnabled, "UNIX support is not present"
        p = unix.Port(address, factory, backlog, mode, self, wantPID)
        p.startListening()
        return p

    # IReactorUNIXDatagram

    def listenUNIXDatagram(self, address, protocol, maxPacketSize=8192,
                           mode=0o666):
        """
        Connects a given L{DatagramProtocol} to the given path.

        EXPERIMENTAL.

        @returns: object conforming to L{IListeningPort}.
        """
        assert unixEnabled, "UNIX support is not present"
        p = unix.DatagramPort(address, protocol, maxPacketSize, mode, self)
        p.startListening()
        return p

    def connectUNIXDatagram(self, address, protocol, maxPacketSize=8192,
                            mode=0o666, bindAddress=None):
        """
        Connects a L{ConnectedDatagramProtocol} instance to a path.

        EXPERIMENTAL.
        """
        assert unixEnabled, "UNIX support is not present"
        p = unix.ConnectedDatagramPort(address, protocol, maxPacketSize, mode, bindAddress, self)
        p.startListening()
        return p

    # IReactorSocket (but not on Windows)

    def adoptStreamPort(self, fileDescriptor, addressFamily, factory):
        """
        Create a new L{IListeningPort} from an already-initialized socket.

        This just dispatches to a suitable port implementation (eg from
        L{IReactorTCP}, etc) based on the specified C{addressFamily}.

        @see: L{twisted.internet.interfaces.IReactorSocket.adoptStreamPort}
        """
        if addressFamily not in (socket.AF_INET, socket.AF_INET6):
            raise error.UnsupportedAddressFamily(addressFamily)

        p = tcp.Port._fromListeningDescriptor(
            self, fileDescriptor, addressFamily, factory)
        p.startListening()
        return p

    def adoptStreamConnection(self, fileDescriptor, addressFamily, factory):
        """
        @see:
            L{twisted.internet.interfaces.IReactorSocket.adoptStreamConnection}
        """
        if addressFamily not in (socket.AF_INET, socket.AF_INET6):
            raise error.UnsupportedAddressFamily(addressFamily)

        return tcp.Server._fromConnectedSocket(
            fileDescriptor, addressFamily, factory, self)

    def adoptDatagramPort(self, fileDescriptor, addressFamily, protocol,
                          maxPacketSize=8192):
        """
        Create a listening UDP port from an already-initialized socket;
        only C{AF_INET} and C{AF_INET6} are accepted.
        """
        if addressFamily not in (socket.AF_INET, socket.AF_INET6):
            raise error.UnsupportedAddressFamily(addressFamily)

        p = udp.Port._fromListeningDescriptor(
            self, fileDescriptor, addressFamily, protocol,
            maxPacketSize=maxPacketSize)
        p.startListening()
        return p

    # IReactorTCP

    def listenTCP(self, port, factory, backlog=50, interface=''):
        """Listen for TCP connections on C{port} with C{factory}."""
        p = tcp.Port(port, factory, backlog, interface, self)
        p.startListening()
        return p

    def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
        """Connect a TCP client factory to C{host}:C{port}."""
        c = tcp.Connector(host, port, factory, timeout, bindAddress, self)
        c.connect()
        return c

    # IReactorSSL (sometimes, not implemented)

    def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
        """Connect over TLS, preferring the memory-BIO implementation
        (C{twisted.protocols.tls}) and falling back to the legacy
        C{ssl} module support when only that is available."""
        if tls is not None:
            tlsFactory = tls.TLSMemoryBIOFactory(contextFactory, True, factory)
            return self.connectTCP(host, port, tlsFactory, timeout, bindAddress)
        elif ssl is not None:
            c = ssl.Connector(
                host, port, factory, contextFactory, timeout, bindAddress, self)
            c.connect()
            return c
        else:
            assert False, "SSL support is not present"

    def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
        """Listen for TLS connections, preferring the memory-BIO
        implementation and falling back to the legacy C{ssl} module."""
        if tls is not None:
            tlsFactory = tls.TLSMemoryBIOFactory(contextFactory, False, factory)
            port = self.listenTCP(port, tlsFactory, backlog, interface)
            port._type = 'TLS'
            return port
        elif ssl is not None:
            p = ssl.Port(
                port, factory, contextFactory, backlog, interface, self)
            p.startListening()
            return p
        else:
            assert False, "SSL support is not present"

    def _removeAll(self, readers, writers):
        """
        Remove all readers and writers, and list of removed L{IReadDescriptor}s
        and L{IWriteDescriptor}s.

        Meant for calling from subclasses, to implement removeAll, like::

          def removeAll(self):
              return self._removeAll(self._reads, self._writes)

        where C{self._reads} and C{self._writes} are iterables.
        """
        # Internal readers (wakers) are deliberately kept registered.
        removedReaders = set(readers) - self._internalReaders
        for reader in removedReaders:
            self.removeReader(reader)

        removedWriters = set(writers)
        for writer in removedWriters:
            self.removeWriter(writer)

        return list(removedReaders | removedWriters)
class _PollLikeMixin(object):
    """
    Mixin for poll-like reactors.

    Subclasses must define the following attributes::

      - _POLL_DISCONNECTED - Bitmask for events indicating a connection was
        lost.
      - _POLL_IN - Bitmask for events indicating there is input to read.
      - _POLL_OUT - Bitmask for events indicating output can be written.

    Must be mixed in to a subclass of PosixReactorBase (for
    _disconnectSelectable).
    """

    def _doReadOrWrite(self, selectable, fd, event):
        """
        fd is available for read or write, do the work and raise errors if
        necessary.

        @param selectable: the descriptor object to service.
        @param fd: the file descriptor number the event was reported for.
        @param event: the platform event bitmask (a combination of
            C{_POLL_IN}, C{_POLL_OUT} and C{_POLL_DISCONNECTED}).
        """
        why = None
        inRead = False
        if event & self._POLL_DISCONNECTED and not (event & self._POLL_IN):
            # Handle disconnection.  But only if we finished processing all
            # the pending input.
            if fd in self._reads:
                # If we were reading from the descriptor then this is a
                # clean shutdown.  We know there are no read events pending
                # because we just checked above.  It also might be a
                # half-close (which is why we have to keep track of inRead).
                inRead = True
                why = CONNECTION_DONE
            else:
                # If we weren't reading, this is an error shutdown of some
                # sort.
                why = CONNECTION_LOST
        else:
            # Any non-disconnect event turns into a doRead or a doWrite.
            try:
                # First check to see if the descriptor is still valid.  This
                # gives fileno() a chance to raise an exception, too.
                # Ideally, disconnection would always be indicated by the
                # return value of doRead or doWrite (or an exception from
                # one of those methods), but calling fileno here helps make
                # buggy applications more transparent.
                if selectable.fileno() == -1:
                    # -1 is sort of a historical Python artifact.  Python
                    # files and sockets used to change their file descriptor
                    # to -1 when they closed.  For the time being, we'll
                    # continue to support this anyway in case applications
                    # replicated it, plus abstract.FileDescriptor.fileno
                    # returns -1.  Eventually it'd be good to deprecate this
                    # case.
                    why = _NO_FILEDESC
                else:
                    if event & self._POLL_IN:
                        # Handle a read event.
                        why = selectable.doRead()
                        inRead = True
                    if not why and event & self._POLL_OUT:
                        # Handle a write event, as long as doRead didn't
                        # disconnect us.
                        why = selectable.doWrite()
                        inRead = False
            except:
                # Any exception from application code gets logged and will
                # cause us to disconnect the selectable.
                why = sys.exc_info()[1]
                log.err()
        if why:
            # A non-None/non-falsy "why" means the connection is over.
            self._disconnectSelectable(selectable, why, inRead)
# Conditionally declare the optional interfaces PosixReactorBase supports,
# based on which capabilities were importable at module load time.
if tls is not None or ssl is not None:
    classImplements(PosixReactorBase, IReactorSSL)
if unixEnabled:
    classImplements(PosixReactorBase, IReactorUNIX, IReactorUNIXDatagram)
if processEnabled:
    classImplements(PosixReactorBase, IReactorProcess)
if getattr(socket, 'fromfd', None) is not None:
    # socket.fromfd is required to adopt pre-existing file descriptors.
    classImplements(PosixReactorBase, IReactorSocket)

__all__ = ["PosixReactorBase"]
| 34.317308 | 102 | 0.621556 |
from __future__ import division, absolute_import
import socket
import errno
import os
import sys
from zope.interface import implementer, classImplements
from twisted.internet import error, udp, tcp
from twisted.internet.base import ReactorBase, _SignalReactorMixin
from twisted.internet.main import CONNECTION_DONE, CONNECTION_LOST
from twisted.internet.interfaces import IReactorUNIX, IReactorUNIXDatagram
from twisted.internet.interfaces import IReactorTCP, IReactorUDP, IReactorSSL
from twisted.internet.interfaces import IReactorSocket, IHalfCloseableDescriptor
from twisted.internet.interfaces import IReactorProcess, IReactorMulticast
from twisted.python import log, failure, util
from twisted.python.runtime import platformType, platform
_NO_FILENO = error.ConnectionFdescWentAway('Handler has no fileno method')
_NO_FILEDESC = error.ConnectionFdescWentAway('File descriptor lost')
try:
from twisted.protocols import tls
except ImportError:
tls = None
try:
from twisted.internet import ssl
except ImportError:
ssl = None
unixEnabled = (platformType == 'posix')
processEnabled = False
if unixEnabled:
from twisted.internet import fdesc, unix
from twisted.internet import process, _signals
processEnabled = True
if platform.isWindows():
try:
import win32process
processEnabled = True
except ImportError:
win32process = None
class _SocketWaker(log.Logger):
    """
    Self-pipe trick implemented with a loopback TCP socket pair, since
    select() on Windows cannot monitor pipes; used to wake the main
    loop from another thread.
    """
    disconnected = 0
    def __init__(self, reactor):
        # Build a connected socket pair via a temporary loopback listener.
        self.reactor = reactor
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        server.bind(('127.0.0.1', 0))
        server.listen(1)
        client.connect(server.getsockname())
        reader, clientaddr = server.accept()
        client.setblocking(0)
        reader.setblocking(0)
        self.r = reader  # read end, monitored by the reactor
        self.w = client  # write end, used by wakeUp()
        self.fileno = self.r.fileno
    def wakeUp(self):
        """Send one byte so a blocked select() returns."""
        try:
            util.untilConcludes(self.w.send, b'x')
        except socket.error as e:
            # A full send buffer just means a wake-up is already pending.
            if e.args[0] != errno.WSAEWOULDBLOCK:
                raise
    def doRead(self):
        """Drain pending wake-up bytes."""
        try:
            self.r.recv(8192)
        except socket.error:
            pass
    def connectionLost(self, reason):
        """Close both sockets of the pair."""
        self.r.close()
        self.w.close()
class _FDWaker(log.Logger, object):
    """
    Base class for wakers backed by an os.pipe() watched by the reactor
    (the self-pipe trick).

    ``i`` is the readable end (monitored by the reactor); ``o`` is the
    writable end (written to wake the reactor).
    """
    disconnected = 0
    i = None
    o = None
    def __init__(self, reactor):
        # Both ends non-blocking and close-on-exec.
        self.reactor = reactor
        self.i, self.o = os.pipe()
        fdesc.setNonBlocking(self.i)
        fdesc._setCloseOnExec(self.i)
        fdesc.setNonBlocking(self.o)
        fdesc._setCloseOnExec(self.o)
        self.fileno = lambda: self.i
    def doRead(self):
        """Drain and discard pending bytes from the pipe."""
        fdesc.readFromFD(self.fileno(), lambda data: None)
    def connectionLost(self, reason):
        """Close both pipe ends, tolerating already-closed fds."""
        if not hasattr(self, "o"):
            return
        for fd in self.i, self.o:
            try:
                os.close(fd)
            except IOError:
                pass
        del self.i, self.o
class _UnixWaker(_FDWaker):
    """Pipe-based waker used on POSIX."""
    def wakeUp(self):
        """Write one byte to the pipe to wake the reactor."""
        # We don't use fdesc.writeToFD since we need to distinguish
        # EINTR (retried by untilConcludes) from EAGAIN (pipe full,
        # wake-up already pending -- ignored).
        if self.o is not None:
            try:
                util.untilConcludes(os.write, self.o, b'x')
            except OSError as e:
                if e.errno != errno.EAGAIN:
                    raise
# Platform waker selection: pipe on POSIX, socket pair elsewhere
# (primarily Windows, where select() cannot monitor pipes).
if platformType == 'posix':
    _Waker = _UnixWaker
else:
    _Waker = _SocketWaker
class _SIGCHLDWaker(_FDWaker):
    """
    Waker whose pipe is wired to the SIGCHLD handler, so the reactor
    wakes (and reaps children) whenever a child process exits.
    """
    def __init__(self, reactor):
        _FDWaker.__init__(self, reactor)
    def install(self):
        """Point the SIGCHLD handler's wakeup fd at our pipe."""
        _signals.installHandler(self.o)
    def uninstall(self):
        """Detach the SIGCHLD wakeup fd (-1 means none)."""
        _signals.installHandler(-1)
    def doRead(self):
        """Drain the pipe, then reap any exited child processes."""
        _FDWaker.doRead(self)
        process.reapAllProcesses()
class _DisconnectSelectableMixin(object):
    """Mixin providing the ``_disconnectSelectable`` utility."""
    def _disconnectSelectable(self, selectable, why, isRead, faildict={
        error.ConnectionDone: failure.Failure(error.ConnectionDone()),
        error.ConnectionLost: failure.Failure(error.ConnectionLost())
        }):
        """
        Disconnect a selectable; ``isRead`` indicates whether the error
        came from doRead().  Supports half-close notification.
        """
        # NOTE: the mutable default ``faildict`` is a deliberate cache of
        # pre-built Failure instances; it is never mutated.
        self.removeReader(selectable)
        f = faildict.get(why.__class__)
        if f:
            # Clean ConnectionDone on the read side of a half-closeable
            # descriptor is a half-close: keep the writer registered.
            if (isRead and why.__class__ == error.ConnectionDone
                and IHalfCloseableDescriptor.providedBy(selectable)):
                selectable.readConnectionLost(f)
            else:
                self.removeWriter(selectable)
                selectable.connectionLost(f)
        else:
            self.removeWriter(selectable)
            selectable.connectionLost(failure.Failure(why))
@implementer(IReactorTCP, IReactorUDP, IReactorMulticast)
class PosixReactorBase(_SignalReactorMixin, _DisconnectSelectableMixin,
ReactorBase):
_wakerFactory = _Waker
    def installWaker(self):
        """
        Install a waker (self-pipe trick; socket pair on Windows) so
        threads and signals can wake the IO thread.
        """
        if not self.waker:
            self.waker = self._wakerFactory(self)
            self._internalReaders.add(self.waker)
            self.addReader(self.waker)
_childWaker = None
    def _handleSignals(self):
        """
        Extend the basic signal handling to install a SIGCHLD waker so
        exited child processes get reaped promptly (POSIX only).
        """
        _SignalReactorMixin._handleSignals(self)
        if platformType == 'posix' and processEnabled:
            if not self._childWaker:
                self._childWaker = _SIGCHLDWaker(self)
                self._internalReaders.add(self._childWaker)
                self.addReader(self._childWaker)
            self._childWaker.install()
            # Reap immediately too, in case a child spawned before
            # reactor.run() has already exited.
            process.reapAllProcesses()
def _uninstallHandler(self):
:
self._childWaker.uninstall()
    def spawnProcess(self, processProtocol, executable, args=(),
                     env={}, path=None,
                     uid=None, gid=None, usePTY=0, childFDs=None):
        """
        Spawn a child process.  POSIX supports PTYs, custom child file
        descriptors and uid/gid switching; Windows supports none of
        those options and raises ValueError when they are requested.
        """
        args, env = self._checkProcessArgs(args, env)
        if platformType == 'posix':
            if usePTY:
                if childFDs is not None:
                    raise ValueError("Using childFDs is not supported with usePTY=True.")
                return process.PTYProcess(self, executable, args, env, path,
                                          processProtocol, uid, gid, usePTY)
            else:
                return process.Process(self, executable, args, env, path,
                                       processProtocol, uid, gid, childFDs)
        elif platformType == "win32":
            if uid is not None:
                raise ValueError("Setting UID is unsupported on this platform.")
            if gid is not None:
                raise ValueError("Setting GID is unsupported on this platform.")
            if usePTY:
                raise ValueError("The usePTY parameter is not supported on Windows.")
            if childFDs:
                raise ValueError("Customizing childFDs is not supported on Windows.")
            if win32process:
                from twisted.internet._dumbwin32proc import Process
                return Process(self, processProtocol, executable, args, env, path)
            else:
                raise NotImplementedError(
                    "spawnProcess not available since pywin32 is not installed.")
        else:
            raise NotImplementedError(
                "spawnProcess only available on Windows or POSIX.")
    def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
        """Connect a DatagramProtocol to the given numeric UDP port."""
        p = udp.Port(port, protocol, interface, maxPacketSize, self)
        p.startListening()
        return p
    def listenMulticast(self, port, protocol, interface='', maxPacketSize=8192, listenMultiple=False):
        """Connect a DatagramProtocol to a multicast UDP port (EXPERIMENTAL)."""
        p = udp.MulticastPort(port, protocol, interface, maxPacketSize, self, listenMultiple)
        p.startListening()
        return p
    def connectUNIX(self, address, factory, timeout=30, checkPID=0):
        """Connect a client factory to the UNIX socket at ``address``."""
        assert unixEnabled, "UNIX support is not present"
        c = unix.Connector(address, factory, timeout, self, checkPID)
        c.connect()
        return c
    def listenUNIX(self, address, factory, backlog=50, mode=0o666, wantPID=0):
        """Listen on the UNIX socket at ``address`` with ``factory``."""
        assert unixEnabled, "UNIX support is not present"
        p = unix.Port(address, factory, backlog, mode, self, wantPID)
        p.startListening()
        return p
    def listenUNIXDatagram(self, address, protocol, maxPacketSize=8192,
                           mode=0o666):
        """Connect a DatagramProtocol to a UNIX datagram path (EXPERIMENTAL)."""
        assert unixEnabled, "UNIX support is not present"
        p = unix.DatagramPort(address, protocol, maxPacketSize, mode, self)
        p.startListening()
        return p
def connectUNIXDatagram(self, address, protocol, maxPacketSize=8192,
mode=0o666, bindAddress=None):
assert unixEnabled, "UNIX support is not present"
p = unix.ConnectedDatagramPort(address, protocol, maxPacketSize, mode, bindAddress, self)
p.startListening()
return p
def adoptStreamPort(self, fileDescriptor, addressFamily, factory):
if addressFamily not in (socket.AF_INET, socket.AF_INET6):
raise error.UnsupportedAddressFamily(addressFamily)
p = tcp.Port._fromListeningDescriptor(
self, fileDescriptor, addressFamily, factory)
p.startListening()
return p
def adoptStreamConnection(self, fileDescriptor, addressFamily, factory):
if addressFamily not in (socket.AF_INET, socket.AF_INET6):
raise error.UnsupportedAddressFamily(addressFamily)
return tcp.Server._fromConnectedSocket(
fileDescriptor, addressFamily, factory, self)
def adoptDatagramPort(self, fileDescriptor, addressFamily, protocol,
maxPacketSize=8192):
if addressFamily not in (socket.AF_INET, socket.AF_INET6):
raise error.UnsupportedAddressFamily(addressFamily)
p = udp.Port._fromListeningDescriptor(
self, fileDescriptor, addressFamily, protocol,
maxPacketSize=maxPacketSize)
p.startListening()
return p
def listenTCP(self, port, factory, backlog=50, interface=''):
p = tcp.Port(port, factory, backlog, interface, self)
p.startListening()
return p
def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
c = tcp.Connector(host, port, factory, timeout, bindAddress, self)
c.connect()
return c
def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
if tls is not None:
tlsFactory = tls.TLSMemoryBIOFactory(contextFactory, True, factory)
return self.connectTCP(host, port, tlsFactory, timeout, bindAddress)
elif ssl is not None:
c = ssl.Connector(
host, port, factory, contextFactory, timeout, bindAddress, self)
c.connect()
return c
else:
assert False, "SSL support is not present"
def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
if tls is not None:
tlsFactory = tls.TLSMemoryBIOFactory(contextFactory, False, factory)
port = self.listenTCP(port, tlsFactory, backlog, interface)
port._type = 'TLS'
return port
elif ssl is not None:
p = ssl.Port(
port, factory, contextFactory, backlog, interface, self)
p.startListening()
return p
else:
assert False, "SSL support is not present"
def _removeAll(self, readers, writers):
removedReaders = set(readers) - self._internalReaders
for reader in removedReaders:
self.removeReader(reader)
removedWriters = set(writers)
for writer in removedWriters:
self.removeWriter(writer)
return list(removedReaders | removedWriters)
class _PollLikeMixin(object):
    """
    Mixin for reactors whose notification mechanism reports poll()-style
    event bitmasks.  Concrete subclasses are expected to provide the mask
    attributes ``_POLL_IN``, ``_POLL_OUT`` and ``_POLL_DISCONNECTED``, a
    ``_reads`` container of watched read descriptors, and a
    ``_disconnectSelectable`` method (all referenced below but defined
    elsewhere -- assumed contract, confirm against the concrete reactor).
    """
    def _doReadOrWrite(self, selectable, fd, event):
        """
        Dispatch a single poll-style *event* for *selectable* (watched via
        file descriptor *fd*) to its doRead/doWrite methods, disconnecting
        the selectable if either reports or raises a failure.
        """
        why = None
        inRead = False
        if event & self._POLL_DISCONNECTED and not (event & self._POLL_IN):
            # Disconnect event with no pending input left to process.
            if fd in self._reads:
                # We were reading from the descriptor, so treat this as a
                # clean connection shutdown.
                inRead = True
                why = CONNECTION_DONE
            else:
                # We were not reading, so this is an error shutdown of some
                # sort.
                why = CONNECTION_LOST
        else:
            # Any non-disconnect event turns into a doRead or a doWrite.
            try:
                # First check to see if the descriptor is still valid. This
                # gives fileno() a chance to raise an exception, too.
                # Ideally, disconnection would always be indicated by the
                # return value of doRead or doWrite (or an exception from
                # one of those methods), but calling fileno here helps make
                # buggy applications more transparent.
                if selectable.fileno() == -1:
                    # -1 is sort of a historical Python artifact. Python
                    # files and sockets used to change their file descriptor
                    # to -1 when they closed. For the time being, we'll
                    # continue to support this case.
                    why = _NO_FILEDESC
                else:
                    if event & self._POLL_IN:
                        # Handle a read event.
                        why = selectable.doRead()
                        inRead = True
                    if not why and event & self._POLL_OUT:
                        # Handle a write event, as long as doRead didn't
                        # already disconnect us.
                        why = selectable.doWrite()
                        inRead = False
            except:
                # Bare except is deliberate: any failure while servicing the
                # selectable must disconnect it rather than kill the reactor.
                why = sys.exc_info()[1]
                log.err()
        if why:
            self._disconnectSelectable(selectable, why, inRead)
# Declare optional reactor interfaces on PosixReactorBase only when the
# corresponding optional dependency or platform feature was importable above.
if tls is not None or ssl is not None:
    classImplements(PosixReactorBase, IReactorSSL)
if unixEnabled:
    classImplements(PosixReactorBase, IReactorUNIX, IReactorUNIXDatagram)
if processEnabled:
    classImplements(PosixReactorBase, IReactorProcess)
if getattr(socket, 'fromfd', None) is not None:
    # socket.fromfd is needed to adopt inherited descriptors (IReactorSocket).
    classImplements(PosixReactorBase, IReactorSocket)
__all__ = ["PosixReactorBase"]
| true | true |
f7fa87ae49b91a52a4c262bf01fc163a8a1e56b4 | 807 | py | Python | pyqtgraph/flowchart/library/__init__.py | hishizuka/pyqtgraph | 4820625d93ffb41f324431d0d29b395cf91f339e | [
"MIT"
] | 2,762 | 2015-01-02T14:34:10.000Z | 2022-03-30T14:06:07.000Z | pyqtgraph/flowchart/library/__init__.py | hishizuka/pyqtgraph | 4820625d93ffb41f324431d0d29b395cf91f339e | [
"MIT"
] | 1,901 | 2015-01-12T03:20:30.000Z | 2022-03-31T16:33:36.000Z | pyqtgraph/flowchart/library/__init__.py | hishizuka/pyqtgraph | 4820625d93ffb41f324431d0d29b395cf91f339e | [
"MIT"
] | 1,038 | 2015-01-01T04:05:49.000Z | 2022-03-31T11:57:51.000Z | # -*- coding: utf-8 -*-
from collections import OrderedDict
import os, types
from ...debug import printExc
from ..NodeLibrary import NodeLibrary, isNodeClass
from ... import reload as reload
# Build the default node library shared by all flowcharts.
LIBRARY = NodeLibrary()
# For backward compatibility, expose the default library's properties here:
NODE_LIST = LIBRARY.nodeList
NODE_TREE = LIBRARY.nodeTree
registerNodeType = LIBRARY.addNodeType
getNodeTree = LIBRARY.getNodeTree
getNodeType = LIBRARY.getNodeType
# Add all nodes to the default library
from . import Data, Display, Filters, Operators
for mod in [Data, Display, Filters, Operators]:
    # Register every Node subclass found in the module under a tree branch
    # named after the module itself (e.g. "Data", "Display").
    nodes = [getattr(mod, name) for name in dir(mod) if isNodeClass(getattr(mod, name))]
    for node in nodes:
        LIBRARY.addNodeType(node, [(mod.__name__.split('.')[-1],)])
| 27.827586 | 88 | 0.743494 |
from collections import OrderedDict
import os, types
from ...debug import printExc
from ..NodeLibrary import NodeLibrary, isNodeClass
from ... import reload as reload
LIBRARY = NodeLibrary()
NODE_LIST = LIBRARY.nodeList
NODE_TREE = LIBRARY.nodeTree
registerNodeType = LIBRARY.addNodeType
getNodeTree = LIBRARY.getNodeTree
getNodeType = LIBRARY.getNodeType
# Add all nodes to the default library
from . import Data, Display, Filters, Operators
for mod in [Data, Display, Filters, Operators]:
nodes = [getattr(mod, name) for name in dir(mod) if isNodeClass(getattr(mod, name))]
for node in nodes:
LIBRARY.addNodeType(node, [(mod.__name__.split('.')[-1],)])
| true | true |
f7fa880994b68638a07ab5a5999b28466ace20cc | 20,998 | py | Python | tools/wptrunner/wptrunner/wptrunner.py | qanat/wpt | 7c61a4594a95682531367b6956d1c37f8b8fd486 | [
"BSD-3-Clause"
] | null | null | null | tools/wptrunner/wptrunner/wptrunner.py | qanat/wpt | 7c61a4594a95682531367b6956d1c37f8b8fd486 | [
"BSD-3-Clause"
] | null | null | null | tools/wptrunner/wptrunner/wptrunner.py | qanat/wpt | 7c61a4594a95682531367b6956d1c37f8b8fd486 | [
"BSD-3-Clause"
] | null | null | null | import json
import os
import sys
from datetime import datetime, timedelta
import wptserve
from wptserve import sslutils
from . import environment as env
from . import instruments
from . import mpcontext
from . import products
from . import testloader
from . import wptcommandline
from . import wptlogging
from . import wpttest
from mozlog import capture, handlers
from .font import FontInstaller
from .testrunner import ManagerGroup
here = os.path.dirname(__file__)
logger = None
"""Runner for web-platform-tests
The runner has several design goals:
* Tests should run with no modification from upstream.
* Tests should be regarded as "untrusted" so that errors, timeouts and even
crashes in the tests can be handled without failing the entire test run.
* For performance, tests can be run in multiple browsers in parallel.
The upstream repository has the facility for creating a test manifest in JSON
format. This manifest is used directly to determine which tests exist. Local
metadata files are used to store the expected test results.
"""
def setup_logging(*args, **kwargs):
    """Create the module-level mozlog logger and return it.

    All arguments are forwarded to :func:`wptlogging.setup`.
    """
    global logger
    logger = wptlogging.setup(*args, **kwargs)
    return logger
def get_loader(test_paths, product, debug=None, run_info_extras=None, chunker_kwargs=None,
               test_groups=None, **kwargs):
    """Build the run_info properties and a TestLoader for *product*.

    ``kwargs`` is the parsed command-line argument dict; only a subset of
    its keys is consumed here.  Returns a ``(run_info, test_loader)`` tuple.
    """
    if run_info_extras is None:
        run_info_extras = {}
    run_info = wpttest.get_run_info(kwargs["run_info"], product,
                                    browser_version=kwargs.get("browser_version"),
                                    browser_channel=kwargs.get("browser_channel"),
                                    verify=kwargs.get("verify"),
                                    debug=debug,
                                    extras=run_info_extras,
                                    enable_webrender=kwargs.get("enable_webrender"),
                                    device_serials=kwargs.get("device_serial"),
                                    adb_binary=kwargs.get("adb_binary"))
    test_manifests = testloader.ManifestLoader(test_paths, force_manifest_update=kwargs["manifest_update"],
                                               manifest_download=kwargs["manifest_download"]).load()
    manifest_filters = []
    # Assemble the effective include list from --include, --include-file and
    # any --test-groups file.
    # NOTE(review): include.extend() mutates the caller-supplied list held in
    # kwargs["include"] -- confirm callers do not reuse it.
    include = kwargs["include"]
    if kwargs["include_file"]:
        include = include or []
        include.extend(testloader.read_include_from_file(kwargs["include_file"]))
    if test_groups:
        include = testloader.update_include_for_groups(test_groups, include)
    if include or kwargs["exclude"] or kwargs["include_manifest"] or kwargs["default_exclude"]:
        manifest_filters.append(testloader.TestFilter(include=include,
                                                      exclude=kwargs["exclude"],
                                                      manifest_path=kwargs["include_manifest"],
                                                      test_manifests=test_manifests,
                                                      explicit=kwargs["default_exclude"]))
    # Tests requiring https/h2 are only included when the server can provide them.
    ssl_enabled = sslutils.get_cls(kwargs["ssl_type"]).ssl_enabled
    h2_enabled = wptserve.utils.http2_compatible()
    test_loader = testloader.TestLoader(test_manifests,
                                        kwargs["test_types"],
                                        run_info,
                                        manifest_filters=manifest_filters,
                                        chunk_type=kwargs["chunk_type"],
                                        total_chunks=kwargs["total_chunks"],
                                        chunk_number=kwargs["this_chunk"],
                                        include_https=ssl_enabled,
                                        include_h2=h2_enabled,
                                        include_webtransport_h3=kwargs["enable_webtransport_h3"],
                                        skip_timeout=kwargs["skip_timeout"],
                                        skip_implementation_status=kwargs["skip_implementation_status"],
                                        chunker_kwargs=chunker_kwargs)
    return run_info, test_loader
def list_test_groups(test_paths, product, **kwargs):
    """Print the sorted names of all test groups for the selected product."""
    env.do_delayed_imports(logger, test_paths)
    extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
    _, loader = get_loader(test_paths, product,
                           run_info_extras=extras, **kwargs)
    for group in sorted(loader.groups(kwargs["test_types"])):
        print(group)
def list_disabled(test_paths, product, **kwargs):
    """Print a JSON summary of every disabled test and its disabling reason."""
    env.do_delayed_imports(logger, test_paths)
    extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
    _, loader = get_loader(test_paths, product,
                           run_info_extras=extras, **kwargs)
    report = [{"test": test.id, "reason": test.disabled()}
              for tests in loader.disabled_tests.values()
              for test in tests]
    print(json.dumps(report, indent=2))
def list_tests(test_paths, product, **kwargs):
    """Print the id of every test that would be run."""
    env.do_delayed_imports(logger, test_paths)
    extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
    _, loader = get_loader(test_paths, product,
                           run_info_extras=extras, **kwargs)
    for test_id in loader.test_ids:
        print(test_id)
def get_pause_after_test(test_loader, **kwargs):
    """Decide whether the browser should pause after each test completes.

    An explicit --pause-after-test value always wins; otherwise pausing is
    enabled only for a single non-repeated testharness test or when
    --debug-test is set.
    """
    explicit = kwargs["pause_after_test"]
    if explicit is not None:
        return explicit
    if kwargs["repeat_until_unexpected"] or kwargs["headless"]:
        return False
    if kwargs["debug_test"]:
        return True
    tests = test_loader.tests
    only_one_test = sum(len(group) for group in tests.values()) == 1
    single_testharness = only_one_test and len(tests.get("testharness", [])) == 1
    return (kwargs["repeat"] == 1 and kwargs["rerun"] == 1 and
            single_testharness)
def run_test_iteration(test_status, test_loader, test_source_kwargs, test_source_cls, run_info,
                       recording, test_environment, product, run_test_kwargs):
    """Runs the entire test suite.
    This is called for each repeat run requested.

    Counts from the run are accumulated onto *test_status*.  Returns False
    if the tests could not be grouped/loaded, True otherwise.
    """
    tests = []
    for test_type in test_loader.test_types:
        tests.extend(test_loader.tests[test_type])
    try:
        test_groups = test_source_cls.tests_by_group(
            tests, **test_source_kwargs)
    except Exception:
        logger.critical("Loading tests failed")
        return False
    logger.suite_start(test_groups,
                       name='web-platform-test',
                       run_info=run_info,
                       extra={"run_by_dir": run_test_kwargs["run_by_dir"]})
    for test_type in run_test_kwargs["test_types"]:
        logger.info(f"Running {test_type} tests")
        # Browser/executor classes and their kwargs are resolved per test type.
        browser_cls = product.get_browser_cls(test_type)
        browser_kwargs = product.get_browser_kwargs(logger,
                                                    test_type,
                                                    run_info,
                                                    config=test_environment.config,
                                                    num_test_groups=len(test_groups),
                                                    **run_test_kwargs)
        executor_cls = product.executor_classes.get(test_type)
        executor_kwargs = product.get_executor_kwargs(logger,
                                                      test_type,
                                                      test_environment,
                                                      run_info,
                                                      **run_test_kwargs)
        if executor_cls is None:
            logger.error(f"Unsupported test type {test_type} for product {product.name}")
            continue
        # Disabled tests are reported as SKIP without being run.
        for test in test_loader.disabled_tests[test_type]:
            logger.test_start(test.id)
            logger.test_end(test.id, status="SKIP")
            test_status.skipped += 1
        if test_type == "testharness":
            # Filter out testharness tests the chosen executor cannot run
            # (testdriver / jsshell support), reporting them as SKIP.
            run_tests = {"testharness": []}
            for test in test_loader.tests["testharness"]:
                if ((test.testdriver and not executor_cls.supports_testdriver) or
                    (test.jsshell and not executor_cls.supports_jsshell)):
                    logger.test_start(test.id)
                    logger.test_end(test.id, status="SKIP")
                    test_status.skipped += 1
                else:
                    run_tests["testharness"].append(test)
        else:
            run_tests = test_loader.tests
        recording.pause()
        with ManagerGroup("web-platform-tests",
                          run_test_kwargs["processes"],
                          test_source_cls,
                          test_source_kwargs,
                          browser_cls,
                          browser_kwargs,
                          executor_cls,
                          executor_kwargs,
                          run_test_kwargs["rerun"],
                          run_test_kwargs["pause_after_test"],
                          run_test_kwargs["pause_on_unexpected"],
                          run_test_kwargs["restart_on_unexpected"],
                          run_test_kwargs["debug_info"],
                          not run_test_kwargs["no_capture_stdio"],
                          recording=recording) as manager_group:
            try:
                manager_group.run(test_type, run_tests)
            except KeyboardInterrupt:
                logger.critical("Main thread got signal")
                manager_group.stop()
                raise
        test_status.total_tests += manager_group.test_count()
        test_status.unexpected += manager_group.unexpected_count()
        test_status.unexpected_pass += manager_group.unexpected_pass_count()
    return True
def evaluate_runs(test_status, run_test_kwargs):
    """Evaluate accumulated result counts once all repeat runs have finished.

    Returns True when the run should be treated as a success.
    """
    if test_status.total_tests == 0:
        if test_status.skipped > 0:
            # Everything was skipped; fall through to the unexpected-count
            # check below rather than failing outright.
            logger.warning("All requested tests were skipped")
        elif run_test_kwargs["default_exclude"]:
            logger.info("No tests ran")
            return True
        else:
            logger.critical("No tests ran")
            return False

    if test_status.unexpected and not run_test_kwargs["fail_on_unexpected"]:
        logger.info(f"Tolerating {test_status.unexpected} unexpected results")
        return True

    only_unexpected_passes = (test_status.unexpected and
                              test_status.unexpected == test_status.unexpected_pass)
    if only_unexpected_passes and not run_test_kwargs["fail_on_unexpected_pass"]:
        logger.info(f"Tolerating {test_status.unexpected_pass} unexpected results "
                    "because they all PASS")
        return True

    return test_status.unexpected == 0
class TestStatus:
    """Accumulates result counts across test runs for later reference."""
    def __init__(self):
        # Number of tests that produced a result, summed over all iterations.
        self.total_tests = 0
        # Number of tests reported as SKIP.
        self.skipped = 0
        # Number of results that did not match expectations.
        self.unexpected = 0
        # Subset of `unexpected` whose status was PASS.
        self.unexpected_pass = 0
        # Number of repeat iterations actually executed.
        self.repeated_runs = 0
        # NOTE(review): run_tests assigns `expected_repeat` rather than this
        # attribute -- confirm which name consumers actually read.
        self.expected_repeated_runs = 0
        # True when the only iteration skipped every loaded test.
        self.all_skipped = False
def run_tests(config, test_paths, product, **kwargs):
    """Set up the test environment, load the list of tests to be executed, and
    invoke the remainder of the code to execute tests.

    Returns a ``(success, test_status)`` tuple where *success* is the result
    of :func:`evaluate_runs` (or False if an iteration failed to load).
    """
    mp = mpcontext.get_context()
    if kwargs["instrument_to_file"] is None:
        recorder = instruments.NullInstrument()
    else:
        recorder = instruments.Instrument(kwargs["instrument_to_file"])
    with recorder as recording, capture.CaptureIO(logger,
                                                  not kwargs["no_capture_stdio"],
                                                  mp_context=mp):
        recording.set(["startup"])
        env.do_delayed_imports(logger, test_paths)
        product = products.Product(config, product)
        env_extras = product.get_env_extras(**kwargs)
        product.check_args(**kwargs)
        if kwargs["install_fonts"]:
            env_extras.append(FontInstaller(
                logger,
                font_dir=kwargs["font_dir"],
                ahem=os.path.join(test_paths["/"]["tests_path"], "fonts/Ahem.ttf")
            ))
        recording.set(["startup", "load_tests"])
        test_groups = (testloader.TestGroupsFile(logger, kwargs["test_groups_file"])
                       if kwargs["test_groups_file"] else None)
        (test_source_cls,
         test_source_kwargs,
         chunker_kwargs) = testloader.get_test_src(logger=logger,
                                                   test_groups=test_groups,
                                                   **kwargs)
        run_info, test_loader = get_loader(test_paths,
                                           product.name,
                                           run_info_extras=product.run_info_extras(**kwargs),
                                           chunker_kwargs=chunker_kwargs,
                                           test_groups=test_groups,
                                           **kwargs)
        logger.info("Using %i client processes" % kwargs["processes"])
        test_status = TestStatus()
        repeat = kwargs["repeat"]
        # TestStatus declares ``expected_repeated_runs``; historically this
        # code set a new ``expected_repeat`` attribute instead, leaving the
        # declared one stuck at 0.  Set both so the declared attribute is
        # correct while remaining backward compatible with any reader of
        # ``expected_repeat``.
        test_status.expected_repeated_runs = repeat
        test_status.expected_repeat = repeat
        if len(test_loader.test_ids) == 0 and kwargs["test_list"]:
            logger.critical("Unable to find any tests at the path(s):")
            for path in kwargs["test_list"]:
                logger.critical("  %s" % path)
            logger.critical("Please check spelling and make sure there are tests in the specified path(s).")
            return False, test_status
        kwargs["pause_after_test"] = get_pause_after_test(test_loader, **kwargs)
        ssl_config = {"type": kwargs["ssl_type"],
                      "openssl": {"openssl_binary": kwargs["openssl_binary"]},
                      "pregenerated": {"host_key_path": kwargs["host_key_path"],
                                       "host_cert_path": kwargs["host_cert_path"],
                                       "ca_cert_path": kwargs["ca_cert_path"]}}
        testharness_timeout_multiplier = product.get_timeout_multiplier("testharness",
                                                                        run_info,
                                                                        **kwargs)
        mojojs_path = kwargs["mojojs_path"] if kwargs["enable_mojojs"] else None
        recording.set(["startup", "start_environment"])
        with env.TestEnvironment(test_paths,
                                 testharness_timeout_multiplier,
                                 kwargs["pause_after_test"],
                                 kwargs["debug_test"],
                                 kwargs["debug_info"],
                                 product.env_options,
                                 ssl_config,
                                 env_extras,
                                 kwargs["enable_webtransport_h3"],
                                 mojojs_path) as test_environment:
            recording.set(["startup", "ensure_environment"])
            try:
                test_environment.ensure_started()
                start_time = datetime.now()
            except env.TestEnvironmentError as e:
                logger.critical("Error starting test environment: %s" % e)
                raise
            recording.set(["startup"])
            max_time = None
            if "repeat_max_time" in kwargs:
                max_time = timedelta(minutes=kwargs["repeat_max_time"])
            repeat_until_unexpected = kwargs["repeat_until_unexpected"]
            # keep track of longest time taken to complete a test suite iteration
            # so that the runs can be stopped to avoid a possible TC timeout.
            longest_iteration_time = timedelta()
            while test_status.repeated_runs < repeat or repeat_until_unexpected:
                # if the next repeat run could cause the TC timeout to be reached,
                # stop now and use the test results we have.
                # Pad the total time by 10% to ensure ample time for the next iteration(s).
                estimate = (datetime.now() +
                            timedelta(seconds=(longest_iteration_time.total_seconds() * 1.1)))
                if not repeat_until_unexpected and max_time and estimate >= start_time + max_time:
                    logger.info(f"Ran {test_status.repeated_runs} of {repeat} iterations.")
                    break
                # begin tracking runtime of the test suite
                iteration_start = datetime.now()
                test_status.repeated_runs += 1
                if repeat_until_unexpected:
                    logger.info(f"Repetition {test_status.repeated_runs}")
                elif repeat > 1:
                    logger.info(f"Repetition {test_status.repeated_runs} / {repeat}")
                iter_success = run_test_iteration(test_status, test_loader, test_source_kwargs,
                                                  test_source_cls, run_info, recording,
                                                  test_environment, product, kwargs)
                # if there were issues with the suite run(tests not loaded, etc.) return
                if not iter_success:
                    return False, test_status
                recording.set(["after-end"])
                logger.info(f"Got {test_status.unexpected} unexpected results, "
                            f"with {test_status.unexpected_pass} unexpected passes")
                logger.suite_end()
                # Note this iteration's runtime
                iteration_runtime = datetime.now() - iteration_start
                # determine the longest test suite runtime seen.
                longest_iteration_time = max(longest_iteration_time,
                                             iteration_runtime)
                if repeat_until_unexpected and test_status.unexpected > 0:
                    break
                if test_status.repeated_runs == 1 and len(test_loader.test_ids) == test_status.skipped:
                    test_status.all_skipped = True
                    break
    # Return the evaluation of the runs and the number of repeated iterations that were run.
    return evaluate_runs(test_status, kwargs), test_status
def check_stability(**kwargs):
    """Run the stability/verify checker, translating legacy --stability flags
    into the equivalent --verify options first."""
    from . import stability
    if kwargs["stability"]:
        logger.warning("--stability is deprecated; please use --verify instead!")
        repeat = kwargs['repeat']
        kwargs.update({
            'verify_max_time': None,
            'verify_chaos_mode': False,
            'verify_repeat_loop': 0,
            'verify_repeat_restart': 10 if repeat == 1 else repeat,
            'verify_output_results': True,
        })
    return stability.check_stability(logger,
                                     max_time=kwargs['verify_max_time'],
                                     chaos_mode=kwargs['verify_chaos_mode'],
                                     repeat_loop=kwargs['verify_repeat_loop'],
                                     repeat_restart=kwargs['verify_repeat_restart'],
                                     output_results=kwargs['verify_output_results'],
                                     **kwargs)
def start(**kwargs):
    """Dispatch to the requested sub-command and return a truthy failure flag.

    A run also counts as failed when anything was logged at CRITICAL level.
    """
    assert logger is not None
    logged_critical = wptlogging.LoggedAboveLevelHandler("CRITICAL")
    handler = handlers.LogLevelFilter(logged_critical, "CRITICAL")
    logger.add_handler(handler)
    failed = False
    try:
        if kwargs["list_test_groups"]:
            list_test_groups(**kwargs)
        elif kwargs["list_disabled"]:
            list_disabled(**kwargs)
        elif kwargs["list_tests"]:
            list_tests(**kwargs)
        elif kwargs["verify"] or kwargs["stability"]:
            failed = check_stability(**kwargs) or logged_critical.has_log
        else:
            failed = not run_tests(**kwargs)[0] or logged_critical.has_log
    finally:
        logger.shutdown()
        logger.remove_handler(handler)
    return failed
def main():
    """Main entry point when calling from the command line"""
    kwargs = wptcommandline.parse_args()
    try:
        if kwargs["prefs_root"] is None:
            kwargs["prefs_root"] = os.path.abspath(os.path.join(here, "prefs"))
        setup_logging(kwargs, {"raw": sys.stdout})
        return start(**kwargs)
    except Exception:
        # Without --pdb, propagate the error; otherwise drop into the debugger.
        if not kwargs["pdb"]:
            raise
        import pdb
        import traceback
        print(traceback.format_exc())
        pdb.post_mortem()
| 42.592292 | 108 | 0.564054 | import json
import os
import sys
from datetime import datetime, timedelta
import wptserve
from wptserve import sslutils
from . import environment as env
from . import instruments
from . import mpcontext
from . import products
from . import testloader
from . import wptcommandline
from . import wptlogging
from . import wpttest
from mozlog import capture, handlers
from .font import FontInstaller
from .testrunner import ManagerGroup
here = os.path.dirname(__file__)
logger = None
def setup_logging(*args, **kwargs):
global logger
logger = wptlogging.setup(*args, **kwargs)
return logger
def get_loader(test_paths, product, debug=None, run_info_extras=None, chunker_kwargs=None,
test_groups=None, **kwargs):
if run_info_extras is None:
run_info_extras = {}
run_info = wpttest.get_run_info(kwargs["run_info"], product,
browser_version=kwargs.get("browser_version"),
browser_channel=kwargs.get("browser_channel"),
verify=kwargs.get("verify"),
debug=debug,
extras=run_info_extras,
enable_webrender=kwargs.get("enable_webrender"),
device_serials=kwargs.get("device_serial"),
adb_binary=kwargs.get("adb_binary"))
test_manifests = testloader.ManifestLoader(test_paths, force_manifest_update=kwargs["manifest_update"],
manifest_download=kwargs["manifest_download"]).load()
manifest_filters = []
include = kwargs["include"]
if kwargs["include_file"]:
include = include or []
include.extend(testloader.read_include_from_file(kwargs["include_file"]))
if test_groups:
include = testloader.update_include_for_groups(test_groups, include)
if include or kwargs["exclude"] or kwargs["include_manifest"] or kwargs["default_exclude"]:
manifest_filters.append(testloader.TestFilter(include=include,
exclude=kwargs["exclude"],
manifest_path=kwargs["include_manifest"],
test_manifests=test_manifests,
explicit=kwargs["default_exclude"]))
ssl_enabled = sslutils.get_cls(kwargs["ssl_type"]).ssl_enabled
h2_enabled = wptserve.utils.http2_compatible()
test_loader = testloader.TestLoader(test_manifests,
kwargs["test_types"],
run_info,
manifest_filters=manifest_filters,
chunk_type=kwargs["chunk_type"],
total_chunks=kwargs["total_chunks"],
chunk_number=kwargs["this_chunk"],
include_https=ssl_enabled,
include_h2=h2_enabled,
include_webtransport_h3=kwargs["enable_webtransport_h3"],
skip_timeout=kwargs["skip_timeout"],
skip_implementation_status=kwargs["skip_implementation_status"],
chunker_kwargs=chunker_kwargs)
return run_info, test_loader
def list_test_groups(test_paths, product, **kwargs):
env.do_delayed_imports(logger, test_paths)
run_info_extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
run_info, test_loader = get_loader(test_paths, product,
run_info_extras=run_info_extras, **kwargs)
for item in sorted(test_loader.groups(kwargs["test_types"])):
print(item)
def list_disabled(test_paths, product, **kwargs):
env.do_delayed_imports(logger, test_paths)
rv = []
run_info_extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
run_info, test_loader = get_loader(test_paths, product,
run_info_extras=run_info_extras, **kwargs)
for test_type, tests in test_loader.disabled_tests.items():
for test in tests:
rv.append({"test": test.id, "reason": test.disabled()})
print(json.dumps(rv, indent=2))
def list_tests(test_paths, product, **kwargs):
env.do_delayed_imports(logger, test_paths)
run_info_extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
run_info, test_loader = get_loader(test_paths, product,
run_info_extras=run_info_extras, **kwargs)
for test in test_loader.test_ids:
print(test)
def get_pause_after_test(test_loader, **kwargs):
if kwargs["pause_after_test"] is None:
if kwargs["repeat_until_unexpected"]:
return False
if kwargs["headless"]:
return False
if kwargs["debug_test"]:
return True
tests = test_loader.tests
is_single_testharness = (sum(len(item) for item in tests.values()) == 1 and
len(tests.get("testharness", [])) == 1)
if kwargs["repeat"] == 1 and kwargs["rerun"] == 1 and is_single_testharness:
return True
return False
return kwargs["pause_after_test"]
def run_test_iteration(test_status, test_loader, test_source_kwargs, test_source_cls, run_info,
recording, test_environment, product, run_test_kwargs):
tests = []
for test_type in test_loader.test_types:
tests.extend(test_loader.tests[test_type])
try:
test_groups = test_source_cls.tests_by_group(
tests, **test_source_kwargs)
except Exception:
logger.critical("Loading tests failed")
return False
logger.suite_start(test_groups,
name='web-platform-test',
run_info=run_info,
extra={"run_by_dir": run_test_kwargs["run_by_dir"]})
for test_type in run_test_kwargs["test_types"]:
logger.info(f"Running {test_type} tests")
browser_cls = product.get_browser_cls(test_type)
browser_kwargs = product.get_browser_kwargs(logger,
test_type,
run_info,
config=test_environment.config,
num_test_groups=len(test_groups),
**run_test_kwargs)
executor_cls = product.executor_classes.get(test_type)
executor_kwargs = product.get_executor_kwargs(logger,
test_type,
test_environment,
run_info,
**run_test_kwargs)
if executor_cls is None:
logger.error(f"Unsupported test type {test_type} for product {product.name}")
continue
for test in test_loader.disabled_tests[test_type]:
logger.test_start(test.id)
logger.test_end(test.id, status="SKIP")
test_status.skipped += 1
if test_type == "testharness":
run_tests = {"testharness": []}
for test in test_loader.tests["testharness"]:
if ((test.testdriver and not executor_cls.supports_testdriver) or
(test.jsshell and not executor_cls.supports_jsshell)):
logger.test_start(test.id)
logger.test_end(test.id, status="SKIP")
test_status.skipped += 1
else:
run_tests["testharness"].append(test)
else:
run_tests = test_loader.tests
recording.pause()
with ManagerGroup("web-platform-tests",
run_test_kwargs["processes"],
test_source_cls,
test_source_kwargs,
browser_cls,
browser_kwargs,
executor_cls,
executor_kwargs,
run_test_kwargs["rerun"],
run_test_kwargs["pause_after_test"],
run_test_kwargs["pause_on_unexpected"],
run_test_kwargs["restart_on_unexpected"],
run_test_kwargs["debug_info"],
not run_test_kwargs["no_capture_stdio"],
recording=recording) as manager_group:
try:
manager_group.run(test_type, run_tests)
except KeyboardInterrupt:
logger.critical("Main thread got signal")
manager_group.stop()
raise
test_status.total_tests += manager_group.test_count()
test_status.unexpected += manager_group.unexpected_count()
test_status.unexpected_pass += manager_group.unexpected_pass_count()
return True
def evaluate_runs(test_status, run_test_kwargs):
if test_status.total_tests == 0:
if test_status.skipped > 0:
logger.warning("All requested tests were skipped")
else:
if run_test_kwargs["default_exclude"]:
logger.info("No tests ran")
return True
else:
logger.critical("No tests ran")
return False
if test_status.unexpected and not run_test_kwargs["fail_on_unexpected"]:
logger.info(f"Tolerating {test_status.unexpected} unexpected results")
return True
all_unexpected_passed = (test_status.unexpected and
test_status.unexpected == test_status.unexpected_pass)
if all_unexpected_passed and not run_test_kwargs["fail_on_unexpected_pass"]:
logger.info(f"Tolerating {test_status.unexpected_pass} unexpected results "
"because they all PASS")
return True
return test_status.unexpected == 0
class TestStatus:
def __init__(self):
self.total_tests = 0
self.skipped = 0
self.unexpected = 0
self.unexpected_pass = 0
self.repeated_runs = 0
self.expected_repeated_runs = 0
self.all_skipped = False
def run_tests(config, test_paths, product, **kwargs):
mp = mpcontext.get_context()
if kwargs["instrument_to_file"] is None:
recorder = instruments.NullInstrument()
else:
recorder = instruments.Instrument(kwargs["instrument_to_file"])
with recorder as recording, capture.CaptureIO(logger,
not kwargs["no_capture_stdio"],
mp_context=mp):
recording.set(["startup"])
env.do_delayed_imports(logger, test_paths)
product = products.Product(config, product)
env_extras = product.get_env_extras(**kwargs)
product.check_args(**kwargs)
if kwargs["install_fonts"]:
env_extras.append(FontInstaller(
logger,
font_dir=kwargs["font_dir"],
ahem=os.path.join(test_paths["/"]["tests_path"], "fonts/Ahem.ttf")
))
recording.set(["startup", "load_tests"])
test_groups = (testloader.TestGroupsFile(logger, kwargs["test_groups_file"])
if kwargs["test_groups_file"] else None)
(test_source_cls,
test_source_kwargs,
chunker_kwargs) = testloader.get_test_src(logger=logger,
test_groups=test_groups,
**kwargs)
run_info, test_loader = get_loader(test_paths,
product.name,
run_info_extras=product.run_info_extras(**kwargs),
chunker_kwargs=chunker_kwargs,
test_groups=test_groups,
**kwargs)
logger.info("Using %i client processes" % kwargs["processes"])
test_status = TestStatus()
repeat = kwargs["repeat"]
test_status.expected_repeat = repeat
if len(test_loader.test_ids) == 0 and kwargs["test_list"]:
logger.critical("Unable to find any tests at the path(s):")
for path in kwargs["test_list"]:
logger.critical(" %s" % path)
logger.critical("Please check spelling and make sure there are tests in the specified path(s).")
return False, test_status
kwargs["pause_after_test"] = get_pause_after_test(test_loader, **kwargs)
ssl_config = {"type": kwargs["ssl_type"],
"openssl": {"openssl_binary": kwargs["openssl_binary"]},
"pregenerated": {"host_key_path": kwargs["host_key_path"],
"host_cert_path": kwargs["host_cert_path"],
"ca_cert_path": kwargs["ca_cert_path"]}}
testharness_timeout_multipler = product.get_timeout_multiplier("testharness",
run_info,
**kwargs)
mojojs_path = kwargs["mojojs_path"] if kwargs["enable_mojojs"] else None
recording.set(["startup", "start_environment"])
with env.TestEnvironment(test_paths,
testharness_timeout_multipler,
kwargs["pause_after_test"],
kwargs["debug_test"],
kwargs["debug_info"],
product.env_options,
ssl_config,
env_extras,
kwargs["enable_webtransport_h3"],
mojojs_path) as test_environment:
recording.set(["startup", "ensure_environment"])
try:
test_environment.ensure_started()
start_time = datetime.now()
except env.TestEnvironmentError as e:
logger.critical("Error starting test environment: %s" % e)
raise
recording.set(["startup"])
max_time = None
if "repeat_max_time" in kwargs:
max_time = timedelta(minutes=kwargs["repeat_max_time"])
repeat_until_unexpected = kwargs["repeat_until_unexpected"]
longest_iteration_time = timedelta()
while test_status.repeated_runs < repeat or repeat_until_unexpected:
estimate = (datetime.now() +
timedelta(seconds=(longest_iteration_time.total_seconds() * 1.1)))
if not repeat_until_unexpected and max_time and estimate >= start_time + max_time:
logger.info(f"Ran {test_status.repeated_runs} of {repeat} iterations.")
break
iteration_start = datetime.now()
test_status.repeated_runs += 1
if repeat_until_unexpected:
logger.info(f"Repetition {test_status.repeated_runs}")
elif repeat > 1:
logger.info(f"Repetition {test_status.repeated_runs} / {repeat}")
iter_success = run_test_iteration(test_status, test_loader, test_source_kwargs,
test_source_cls, run_info, recording,
test_environment, product, kwargs)
if not iter_success:
return False, test_status
recording.set(["after-end"])
logger.info(f"Got {test_status.unexpected} unexpected results, "
f"with {test_status.unexpected_pass} unexpected passes")
logger.suite_end()
iteration_runtime = datetime.now() - iteration_start
# determine the longest test suite runtime seen.
longest_iteration_time = max(longest_iteration_time,
iteration_runtime)
if repeat_until_unexpected and test_status.unexpected > 0:
break
if test_status.repeated_runs == 1 and len(test_loader.test_ids) == test_status.skipped:
test_status.all_skipped = True
break
# Return the evaluation of the runs and the number of repeated iterations that were run.
return evaluate_runs(test_status, kwargs), test_status
def check_stability(**kwargs):
    """Run the stability/verify checker using the parsed command-line options.

    The deprecated --stability flag is translated into the equivalent
    --verify settings before delegating to ``stability.check_stability``.
    """
    from . import stability
    if kwargs["stability"]:
        logger.warning("--stability is deprecated; please use --verify instead!")
        # Map the legacy --stability behaviour onto the newer verify options.
        repeat = kwargs['repeat']
        kwargs.update({
            'verify_max_time': None,
            'verify_chaos_mode': False,
            'verify_repeat_loop': 0,
            'verify_repeat_restart': repeat if repeat != 1 else 10,
            'verify_output_results': True,
        })
    return stability.check_stability(logger,
                                     max_time=kwargs['verify_max_time'],
                                     chaos_mode=kwargs['verify_chaos_mode'],
                                     repeat_loop=kwargs['verify_repeat_loop'],
                                     repeat_restart=kwargs['verify_repeat_restart'],
                                     output_results=kwargs['verify_output_results'],
                                     **kwargs)
def start(**kwargs):
    """Dispatch to the requested action (listing tests or running them).

    Returns a truthy value when the run failed or a CRITICAL message was
    logged, so callers can use it directly as an exit status.
    """
    assert logger is not None
    # Track whether anything CRITICAL is logged during this invocation.
    logged_critical = wptlogging.LoggedAboveLevelHandler("CRITICAL")
    critical_filter = handlers.LogLevelFilter(logged_critical, "CRITICAL")
    logger.add_handler(critical_filter)
    failed = False
    try:
        if kwargs["list_test_groups"]:
            list_test_groups(**kwargs)
        elif kwargs["list_disabled"]:
            list_disabled(**kwargs)
        elif kwargs["list_tests"]:
            list_tests(**kwargs)
        elif kwargs["verify"] or kwargs["stability"]:
            failed = check_stability(**kwargs) or logged_critical.has_log
        else:
            results = run_tests(**kwargs)
            failed = not results[0] or logged_critical.has_log
    finally:
        # Always tear down logging, even if the action raised.
        logger.shutdown()
        logger.remove_handler(critical_filter)
    return failed
def main():
    """Command-line entry point: parse args, set up logging, run tests."""
    kwargs = wptcommandline.parse_args()
    try:
        if kwargs["prefs_root"] is None:
            kwargs["prefs_root"] = os.path.abspath(os.path.join(here, "prefs"))
        setup_logging(kwargs, {"raw": sys.stdout})
        return start(**kwargs)
    except Exception:
        if not kwargs["pdb"]:
            raise
        # --pdb: print the traceback and drop into the post-mortem debugger.
        import pdb
        import traceback
        print(traceback.format_exc())
        pdb.post_mortem()
| true | true |
f7fa8854998422b639ba3f671bff082dd4c39411 | 4,376 | py | Python | model-optimizer/extensions/front/onnx/detection_output.py | monroid/openvino | 8272b3857ef5be0aaa8abbf7bd0d5d5615dc40b6 | [
"Apache-2.0"
] | 2,406 | 2020-04-22T15:47:54.000Z | 2022-03-31T10:27:37.000Z | model-optimizer/extensions/front/onnx/detection_output.py | thomas-yanxin/openvino | 031e998a15ec738c64cc2379d7f30fb73087c272 | [
"Apache-2.0"
] | 4,948 | 2020-04-22T15:12:39.000Z | 2022-03-31T18:45:42.000Z | model-optimizer/extensions/front/onnx/detection_output.py | thomas-yanxin/openvino | 031e998a15ec738c64cc2379d7f30fb73087c272 | [
"Apache-2.0"
] | 991 | 2020-04-23T18:21:09.000Z | 2022-03-31T18:40:57.000Z | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from extensions.ops.DetectionOutput import DetectionOutput
from mo.front.extractor import FrontExtractorOp
from mo.front.onnx.extractors.utils import onnx_attr
from mo.utils.error import Error
class DetectionOutputFrontExtractor(FrontExtractorOp):
    """Front extractor translating an ONNX DetectionOutput node's attributes
    into Model Optimizer's DetectionOutput operation attributes (expressed as
    Caffe-style parameter strings).
    """
    op = 'DetectionOutput'
    enabled = True

    @staticmethod
    def _decode_enum_attr(node, name, mapping, default):
        """Read the string attribute `name` from `node` and translate it via `mapping`.

        `default` is the *raw* (bytes) attribute value used when the attribute
        is absent and must itself be a key of `mapping`.  Raises Error for any
        value not present in `mapping`.
        """
        value = onnx_attr(node, name, 's', default=default)
        try:
            return mapping[value]
        except KeyError:
            raise Error("Incorrect value of {} parameter {}".format(name, value))

    @classmethod
    def extract(cls, node):
        """Populate `node` with DetectionOutput attributes; returns cls.enabled."""
        nms_threshold = onnx_attr(node, 'nms_threshold', 'f', default=0.0)
        eta = onnx_attr(node, 'eta', 'f', default=0.0)
        top_k = onnx_attr(node, 'top_k', 'i', default=-1)

        # Bug fix: previously the default was the already-mapped string
        # (code_type_values[b"CORNER"]), which then failed the mapping lookup,
        # so a node *without* a 'code_type' attribute raised Error instead of
        # defaulting to CORNER.  The default must be the raw bytes value.
        code_type = cls._decode_enum_attr(node, 'code_type', {
            b"CORNER": "caffe.PriorBoxParameter.CORNER",
            b"CENTER_SIZE": "caffe.PriorBoxParameter.CENTER_SIZE",
        }, default=b"CORNER")

        resize_mode = cls._decode_enum_attr(node, 'resize_mode', {
            b"": "",
            b"WARP": "caffe.ResizeParameter.WARP",
            b"FIT_SMALL_SIZE": "caffe.ResizeParameter.FIT_SMALL_SIZE",
            b"FIT_LARGE_SIZE_AND_PAD": "caffe.ResizeParameter.FIT_LARGE_SIZE_AND_PAD",
        }, default=b"")

        pad_mode = cls._decode_enum_attr(node, 'pad_mode', {
            b"": "",
            b"CONSTANT": "caffe.ResizeParameter.CONSTANT",
            b"MIRRORED": "caffe.ResizeParameter.MIRRORED",
            b"REPEAT_NEAREST": "caffe.ResizeParameter.REPEAT_NEAREST",
        }, default=b"")

        interp_mode = cls._decode_enum_attr(node, 'interp_mode', {
            b"": "",
            b"LINEAR": "caffe.ResizeParameter.LINEAR",
            b"AREA": "caffe.ResizeParameter.AREA",
            b"NEAREST": "caffe.ResizeParameter.NEAREST",
            b"CUBIC": "caffe.ResizeParameter.CUBIC",
            b"LANCZOS4": "caffe.ResizeParameter.LANCZOS4",
        }, default=b"")

        attrs = {
            'num_classes': onnx_attr(node, 'num_classes', 'i', default=0),
            'share_location': onnx_attr(node, 'share_location', 'i', default=0),
            'background_label_id': onnx_attr(node, 'background_label_id', 'i', default=0),
            'code_type': code_type,
            'variance_encoded_in_target': onnx_attr(node, 'variance_encoded_in_target', 'i', default=0),
            'keep_top_k': onnx_attr(node, 'keep_top_k', 'i', default=0),
            'confidence_threshold': onnx_attr(node, 'confidence_threshold', 'f', default=0),
            'visualize_threshold': onnx_attr(node, 'visualize_threshold', 'f', default=0.6),
            # nms_param
            'nms_threshold': nms_threshold,
            'top_k': top_k,
            'eta': eta,
            # save_output_param.resize_param
            'prob': onnx_attr(node, 'prob', 'f', default=0),
            'resize_mode': resize_mode,
            'height': onnx_attr(node, 'height', 'i', default=0),
            'width': onnx_attr(node, 'width', 'i', default=0),
            'height_scale': onnx_attr(node, 'height_scale', 'i', default=0),
            'width_scale': onnx_attr(node, 'width_scale', 'i', default=0),
            'pad_mode': pad_mode,
            # NOTE(review): default is a str "" while present values are bytes;
            # kept as-is for compatibility -- confirm downstream handling.
            'pad_value': onnx_attr(node, 'pad_value', 's', default=""),
            'interp_mode': interp_mode,
            'input_width': onnx_attr(node, 'input_width', 'i', default=1),
            'input_height': onnx_attr(node, 'input_height', 'i', default=1),
            'normalized': onnx_attr(node, 'normalized', 'i', default=1),
        }

        # update the attributes of the node
        DetectionOutput.update_node_stat(node, attrs)
        return cls.enabled
| 43.76 | 104 | 0.609461 |
from extensions.ops.DetectionOutput import DetectionOutput
from mo.front.extractor import FrontExtractorOp
from mo.front.onnx.extractors.utils import onnx_attr
from mo.utils.error import Error
class DetectionOutputFrontExtractor(FrontExtractorOp):
    """Front extractor translating an ONNX DetectionOutput node's attributes
    into Model Optimizer's DetectionOutput operation attributes (expressed as
    Caffe-style parameter strings).
    """
    op = 'DetectionOutput'
    enabled = True

    @staticmethod
    def _decode_enum_attr(node, name, mapping, default):
        """Read the string attribute `name` from `node` and translate it via `mapping`.

        `default` is the *raw* (bytes) attribute value used when the attribute
        is absent and must itself be a key of `mapping`.  Raises Error for any
        value not present in `mapping`.
        """
        value = onnx_attr(node, name, 's', default=default)
        try:
            return mapping[value]
        except KeyError:
            raise Error("Incorrect value of {} parameter {}".format(name, value))

    @classmethod
    def extract(cls, node):
        """Populate `node` with DetectionOutput attributes; returns cls.enabled."""
        nms_threshold = onnx_attr(node, 'nms_threshold', 'f', default=0.0)
        eta = onnx_attr(node, 'eta', 'f', default=0.0)
        top_k = onnx_attr(node, 'top_k', 'i', default=-1)

        # Bug fix: previously the default was the already-mapped string
        # (code_type_values[b"CORNER"]), which then failed the mapping lookup,
        # so a node *without* a 'code_type' attribute raised Error instead of
        # defaulting to CORNER.  The default must be the raw bytes value.
        code_type = cls._decode_enum_attr(node, 'code_type', {
            b"CORNER": "caffe.PriorBoxParameter.CORNER",
            b"CENTER_SIZE": "caffe.PriorBoxParameter.CENTER_SIZE",
        }, default=b"CORNER")

        resize_mode = cls._decode_enum_attr(node, 'resize_mode', {
            b"": "",
            b"WARP": "caffe.ResizeParameter.WARP",
            b"FIT_SMALL_SIZE": "caffe.ResizeParameter.FIT_SMALL_SIZE",
            b"FIT_LARGE_SIZE_AND_PAD": "caffe.ResizeParameter.FIT_LARGE_SIZE_AND_PAD",
        }, default=b"")

        pad_mode = cls._decode_enum_attr(node, 'pad_mode', {
            b"": "",
            b"CONSTANT": "caffe.ResizeParameter.CONSTANT",
            b"MIRRORED": "caffe.ResizeParameter.MIRRORED",
            b"REPEAT_NEAREST": "caffe.ResizeParameter.REPEAT_NEAREST",
        }, default=b"")

        interp_mode = cls._decode_enum_attr(node, 'interp_mode', {
            b"": "",
            b"LINEAR": "caffe.ResizeParameter.LINEAR",
            b"AREA": "caffe.ResizeParameter.AREA",
            b"NEAREST": "caffe.ResizeParameter.NEAREST",
            b"CUBIC": "caffe.ResizeParameter.CUBIC",
            b"LANCZOS4": "caffe.ResizeParameter.LANCZOS4",
        }, default=b"")

        attrs = {
            'num_classes': onnx_attr(node, 'num_classes', 'i', default=0),
            'share_location': onnx_attr(node, 'share_location', 'i', default=0),
            'background_label_id': onnx_attr(node, 'background_label_id', 'i', default=0),
            'code_type': code_type,
            'variance_encoded_in_target': onnx_attr(node, 'variance_encoded_in_target', 'i', default=0),
            'keep_top_k': onnx_attr(node, 'keep_top_k', 'i', default=0),
            'confidence_threshold': onnx_attr(node, 'confidence_threshold', 'f', default=0),
            'visualize_threshold': onnx_attr(node, 'visualize_threshold', 'f', default=0.6),
            # nms_param
            'nms_threshold': nms_threshold,
            'top_k': top_k,
            'eta': eta,
            # save_output_param.resize_param
            'prob': onnx_attr(node, 'prob', 'f', default=0),
            'resize_mode': resize_mode,
            'height': onnx_attr(node, 'height', 'i', default=0),
            'width': onnx_attr(node, 'width', 'i', default=0),
            'height_scale': onnx_attr(node, 'height_scale', 'i', default=0),
            'width_scale': onnx_attr(node, 'width_scale', 'i', default=0),
            'pad_mode': pad_mode,
            # NOTE(review): default is a str "" while present values are bytes;
            # kept as-is for compatibility -- confirm downstream handling.
            'pad_value': onnx_attr(node, 'pad_value', 's', default=""),
            'interp_mode': interp_mode,
            'input_width': onnx_attr(node, 'input_width', 'i', default=1),
            'input_height': onnx_attr(node, 'input_height', 'i', default=1),
            'normalized': onnx_attr(node, 'normalized', 'i', default=1),
        }

        DetectionOutput.update_node_stat(node, attrs)
        return cls.enabled
| true | true |
f7fa88fb1e122c97d664dcd2445d538e23bc3c3b | 805 | py | Python | run.py | TheJarX/python-time-lapse | 4cb0dfce78545bca237c520d94ae71aeed78539c | [
"MIT"
] | 2 | 2019-09-28T03:46:25.000Z | 2019-12-05T14:42:30.000Z | run.py | TheJarX/Python-time-lapse | 4cb0dfce78545bca237c520d94ae71aeed78539c | [
"MIT"
] | null | null | null | run.py | TheJarX/Python-time-lapse | 4cb0dfce78545bca237c520d94ae71aeed78539c | [
"MIT"
] | null | null | null | from flask import Flask, request, render_template, redirect,json
app = Flask(__name__)
@app.route('/')
def index():
    # Root endpoint: static greeting, doubles as a liveness check.
    return 'Hello, world!'
@app.route('/cheers')
def cheers():
    """Greet the caller by the optional ``?name=`` query parameter."""
    visitor = request.args.get('name', 'Mr. Anonymous')
    return 'Hello, {}'.format(visitor)
@app.route( "/upload-image", methods = ["GET","POST"] )
def upload_image():
    """Acknowledge an image upload.

    A POST carrying a request body gets a 200 JSON {"status": true} reply.
    Anything else (GET, or an empty POST) now gets an explicit 400 JSON
    response instead of falling through and returning None, which previously
    made Flask raise a 500.
    """
    if request.method == 'POST' and request.data:
        # Bug fix: `app.response._class` is not a Flask attribute and raised
        # AttributeError at runtime; the response factory is `app.response_class`.
        return app.response_class(
            response=json.dumps({'status': True}),
            status=200,
            mimetype='application/json'
        )
    return app.response_class(
        response=json.dumps({'status': False}),
        status=400,
        mimetype='application/json'
    )
if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment.
    app.run(debug = True, port = 5000)
| 25.15625 | 83 | 0.590062 | from flask import Flask, request, render_template, redirect,json
app = Flask(__name__)
@app.route('/')
def index():
    # Root endpoint: static greeting, doubles as a liveness check.
    return 'Hello, world!'
@app.route('/cheers')
def cheers():
    """Greet the caller by the optional ``?name=`` query parameter."""
    visitor = request.args.get('name', 'Mr. Anonymous')
    return 'Hello, {}'.format(visitor)
@app.route( "/upload-image", methods = ["GET","POST"] )
def upload_image():
    """Acknowledge an image upload.

    A POST carrying a request body gets a 200 JSON {"status": true} reply.
    Anything else (GET, or an empty POST) now gets an explicit 400 JSON
    response instead of falling through and returning None, which previously
    made Flask raise a 500.
    """
    if request.method == 'POST' and request.data:
        # Bug fix: `app.response._class` is not a Flask attribute and raised
        # AttributeError at runtime; the response factory is `app.response_class`.
        return app.response_class(
            response=json.dumps({'status': True}),
            status=200,
            mimetype='application/json'
        )
    return app.response_class(
        response=json.dumps({'status': False}),
        status=400,
        mimetype='application/json'
    )
if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment.
    app.run(debug = True, port = 5000)
| false | true |
f7fa89a814ce4edf7375322d329a7b9bcab7588a | 554 | py | Python | hanlp/datasets/parsing/ctb5.py | antfootAlex/HanLP | e8044b27ae1de54b9070db08549853d3ca8271e2 | [
"Apache-2.0"
] | 27,208 | 2015-03-27T10:25:45.000Z | 2022-03-31T13:26:32.000Z | hanlp/datasets/parsing/ctb5.py | hushaoyun/HanLP | 967b52404c9d0adbc0cff2699690c127ecfca36e | [
"Apache-2.0"
] | 1,674 | 2015-03-30T06:36:44.000Z | 2022-03-16T01:52:56.000Z | hanlp/datasets/parsing/ctb5.py | hushaoyun/HanLP | 967b52404c9d0adbc0cff2699690c127ecfca36e | [
"Apache-2.0"
] | 7,710 | 2015-03-27T08:07:57.000Z | 2022-03-31T14:57:23.000Z | # -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-28 18:44
from hanlp_common.constant import HANLP_URL
# Archive bundling the SUDA-LA-CIP CTB5 data; the trailing '#' lets loaders
# address individual files inside the zip.
_CTB_HOME = HANLP_URL + 'embeddings/SUDA-LA-CIP_20200109_021624.zip#'
# Folder holding the CoNLL-format CTB5 dependency-parsing splits.
_CTB5_DEP_HOME = _CTB_HOME + 'BPNN/data/ctb5/'
CTB5_DEP_TRAIN = _CTB5_DEP_HOME + 'train.conll'
'''Training set for ctb5 dependency parsing.'''
CTB5_DEP_DEV = _CTB5_DEP_HOME + 'dev.conll'
'''Dev set for ctb5 dependency parsing.'''
CTB5_DEP_TEST = _CTB5_DEP_HOME + 'test.conll'
'''Test set for ctb5 dependency parsing.'''
# CIP 100-dimensional Chinese word embeddings shipped in the same archive.
CIP_W2V_100_CN = _CTB_HOME + 'BPNN/data/embed.txt'
| 30.777778 | 69 | 0.741877 |
from hanlp_common.constant import HANLP_URL
# Archive bundling the SUDA-LA-CIP CTB5 data; the trailing '#' lets loaders
# address individual files inside the zip.
_CTB_HOME = HANLP_URL + 'embeddings/SUDA-LA-CIP_20200109_021624.zip#'
# Folder holding the CoNLL-format CTB5 dependency-parsing splits.
_CTB5_DEP_HOME = _CTB_HOME + 'BPNN/data/ctb5/'
# Train / dev / test splits for CTB5 dependency parsing.
CTB5_DEP_TRAIN = _CTB5_DEP_HOME + 'train.conll'
CTB5_DEP_DEV = _CTB5_DEP_HOME + 'dev.conll'
CTB5_DEP_TEST = _CTB5_DEP_HOME + 'test.conll'
# CIP 100-dimensional Chinese word embeddings shipped in the same archive.
CIP_W2V_100_CN = _CTB_HOME + 'BPNN/data/embed.txt'
| true | true |
f7fa8b51fb7593e4b4bb5e270c3e1f1db23870a2 | 4,337 | py | Python | neybuz/settings.py | RKatana/neybuzz | 147e5b9369c10dd1698ec8d1609bdddcc55224d9 | [
"MIT"
] | 1 | 2021-04-19T08:03:57.000Z | 2021-04-19T08:03:57.000Z | neybuz/settings.py | RKatana/neybuzz | 147e5b9369c10dd1698ec8d1609bdddcc55224d9 | [
"MIT"
] | null | null | null | neybuz/settings.py | RKatana/neybuzz | 147e5b9369c10dd1698ec8d1609bdddcc55224d9 | [
"MIT"
] | null | null | null | """
Django settings for neybuz project.
Generated by 'django-admin startproject' using Django 3.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
import cloudinary
import cloudinary.uploader
import cloudinary.api
import django_heroku
import dj_database_url
from decouple import config
from pathlib import Path
from dotenv import load_dotenv
# Load variables from a local .env file so decouple/os.environ can see them.
load_dotenv()
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECURITY(review): this key is committed to version control; rotate it and
# load it from the environment (e.g. config('SECRET_KEY')) before deploying.
SECRET_KEY = 'django-insecure-7k+@^-yaz#t^y3=w$3ffgvdw!yda6zf9ltoq*%vn(khn9$1-th'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is True while ALLOWED_HOSTS includes the Heroku host --
# confirm this never reaches production with DEBUG enabled.
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', 'neybuzz.herokuapp.com']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'whitenoise.runserver_nostatic',
    'hood',
    'cloudinary',
    'bootstrap4',
    'django_registration',
    'rest_framework',
    'rest_framework.authtoken',
]
# SECURITY(review): Cloudinary credentials are hard-coded; move them to
# environment variables and revoke this api_secret.
cloudinary.config(
  cloud_name = "msyoka37",
  api_key = "174158974217617",
  api_secret = "7LjgdbpLuseNQnRknQtyQGv8Ua0",
)
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'neybuz.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'neybuz.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# Credentials come from the environment via python-decouple (second argument
# is the default used when the variable is unset).
DATABASES = {
    'default':{
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': config('DB_NAME', 'postgres'),
        'USER': config('DB_USER', 'postgres'),
        'PASSWORD': config('DB_PASSWORD', ''),
        'HOST': config('DB_HOST', 'localhost'),
        'PORT': '',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    BASE_DIR / "static"
]
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR / "media"
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# Heroku: a DATABASE_URL env var (when present) overrides the defaults above.
# NOTE(review): django_heroku is imported at the top of this file but
# django_heroku.settings() is never called -- confirm that is intentional.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' | 26.445122 | 91 | 0.705326 |
import os
import cloudinary
import cloudinary.uploader
import cloudinary.api
import django_heroku
import dj_database_url
from decouple import config
from pathlib import Path
from dotenv import load_dotenv
# Load variables from a local .env file so decouple/os.environ can see them.
load_dotenv()
BASE_DIR = Path(__file__).resolve().parent.parent
# SECURITY(review): hard-coded SECRET_KEY committed to version control --
# rotate it and read it from the environment before deploying.
SECRET_KEY = 'django-insecure-7k+@^-yaz#t^y3=w$3ffgvdw!yda6zf9ltoq*%vn(khn9$1-th'
# SECURITY(review): DEBUG must be False in production.
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', 'neybuzz.herokuapp.com']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'whitenoise.runserver_nostatic',
    'hood',
    'cloudinary',
    'bootstrap4',
    'django_registration',
    'rest_framework',
    'rest_framework.authtoken',
]
# SECURITY(review): Cloudinary credentials are hard-coded; move them to
# environment variables and revoke this api_secret.
cloudinary.config(
  cloud_name = "msyoka37",
  api_key = "174158974217617",
  api_secret = "7LjgdbpLuseNQnRknQtyQGv8Ua0",
)
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'neybuz.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'neybuz.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    'default':{
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': config('DB_NAME', 'postgres'),
        'USER': config('DB_USER', 'postgres'),
        'PASSWORD': config('DB_PASSWORD', ''),
        'HOST': config('DB_HOST', 'localhost'),
        'PORT': '',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    BASE_DIR / "static"
]
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR / "media"
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# Heroku: a DATABASE_URL env var (when present) overrides the defaults above.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' | true | true |
f7fa8cc0965da7f9a7749d851548a7128d432f26 | 7,006 | py | Python | temboo/core/Library/eBay/Trading/GetItem.py | jordanemedlock/psychtruths | 52e09033ade9608bd5143129f8a1bfac22d634dd | [
"Apache-2.0"
] | 7 | 2016-03-07T02:07:21.000Z | 2022-01-21T02:22:41.000Z | temboo/core/Library/eBay/Trading/GetItem.py | jordanemedlock/psychtruths | 52e09033ade9608bd5143129f8a1bfac22d634dd | [
"Apache-2.0"
] | null | null | null | temboo/core/Library/eBay/Trading/GetItem.py | jordanemedlock/psychtruths | 52e09033ade9608bd5143129f8a1bfac22d634dd | [
"Apache-2.0"
] | 8 | 2016-06-14T06:01:11.000Z | 2020-04-22T09:21:44.000Z | # -*- coding: utf-8 -*-
###############################################################################
#
# GetItem
# Returns item data such as title, description, price information, and seller information.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetItem(Choreography):
    """Temboo Choreo wrapper for the eBay Trading GetItem call.

    Ties together the input set, result set, and execution classes below.
    """

    def __init__(self, temboo_session):
        """
        Create a new instance of the GetItem Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(GetItem, self).__init__(temboo_session, '/Library/eBay/Trading/GetItem')

    def new_input_set(self):
        # Factory for this Choreo's input container.
        return GetItemInputSet()

    def _make_result_set(self, result, path):
        # Wrap raw execution results in the GetItem-specific result set.
        return GetItemResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Track an in-flight execution of this Choreo.
        return GetItemChoreographyExecution(session, exec_id, path)
class GetItemInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the GetItem
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    Each setter stores one named input via InputSet._set_input; this class is
    Temboo-generated boilerplate, so the setter bodies are intentionally uniform.
    """
    def set_DetailLevel(self, value):
        """
        Set the value of the DetailLevel input for this Choreo. ((optional, string) The response detail level. Valid values are: ItemReturnAttributes, ItemReturnDescription, and ReturnAll.)
        """
        super(GetItemInputSet, self)._set_input('DetailLevel', value)
    def set_IncludeItemSpecifics(self, value):
        """
        Set the value of the IncludeItemSpecifics input for this Choreo. ((optional, boolean) If set to true, the response returns the ItemSpecifics node (if the listing has custom Item Specifics).)
        """
        super(GetItemInputSet, self)._set_input('IncludeItemSpecifics', value)
    def set_IncludeTaxTable(self, value):
        """
        Set the value of the IncludeTaxTable input for this Choreo. ((optional, boolean) If set to true, an associated tax table is returned in the response.)
        """
        super(GetItemInputSet, self)._set_input('IncludeTaxTable', value)
    def set_IncludeWatchCount(self, value):
        """
        Set the value of the IncludeWatchCount input for this Choreo. ((optional, boolean) Indicates if the caller wants to include watch count for that item in the response when set to true. Only the seller is allowed to use this argument.)
        """
        super(GetItemInputSet, self)._set_input('IncludeWatchCount', value)
    def set_ItemID(self, value):
        """
        Set the value of the ItemID input for this Choreo. ((required, string) The ItemID that uniquely identifies the item listing to retrieve.)
        """
        super(GetItemInputSet, self)._set_input('ItemID', value)
    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
        """
        super(GetItemInputSet, self)._set_input('ResponseFormat', value)
    def set_SandboxMode(self, value):
        """
        Set the value of the SandboxMode input for this Choreo. ((conditional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.)
        """
        super(GetItemInputSet, self)._set_input('SandboxMode', value)
    def set_SiteID(self, value):
        """
        Set the value of the SiteID input for this Choreo. ((optional, string) The eBay site ID that you want to access. Defaults to 0 indicating the US site.)
        """
        super(GetItemInputSet, self)._set_input('SiteID', value)
    def set_TransactionID(self, value):
        """
        Set the value of the TransactionID input for this Choreo. ((optional, string) A unique identifier for a transaction (i.e. an order line item). An order line item is created when the buyer commits to purchasing an item.)
        """
        super(GetItemInputSet, self)._set_input('TransactionID', value)
    def set_UserToken(self, value):
        """
        Set the value of the UserToken input for this Choreo. ((required, string) A valid eBay Auth Token.)
        """
        super(GetItemInputSet, self)._set_input('UserToken', value)
class GetItemResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the GetItem Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, str):
        # NOTE(review): the parameter name shadows the builtin `str`; kept
        # unchanged for compatibility with Temboo-generated callers.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from eBay.)
        """
        return self._output.get('Response', None)
    def get_CurrentPrice(self):
        """
        Retrieve the value for the "CurrentPrice" output from this Choreo execution. ((decimal) The current price for the item.)
        """
        return self._output.get('CurrentPrice', None)
    def get_DaysLeft(self):
        """
        Retrieve the value for the "DaysLeft" output from this Choreo execution. ((integer) The number of days until the auction ends.)
        """
        return self._output.get('DaysLeft', None)
    def get_HoursLeft(self):
        """
        Retrieve the value for the "HoursLeft" output from this Choreo execution. ((integer) The number of hours until the auction ends.)
        """
        return self._output.get('HoursLeft', None)
    def get_MinutesLeft(self):
        """
        Retrieve the value for the "MinutesLeft" output from this Choreo execution. ((integer) The number of minutes until the auction ends.)
        """
        return self._output.get('MinutesLeft', None)
    def get_SecondsLeft(self):
        """
        Retrieve the value for the "SecondsLeft" output from this Choreo execution. ((integer) The number of seconds until the auction ends.)
        """
        return self._output.get('SecondsLeft', None)
class GetItemChoreographyExecution(ChoreographyExecution):
    """ChoreographyExecution producing GetItem-specific result sets."""
    def _make_result_set(self, response, path):
        return GetItemResultSet(response, path)
| 45.79085 | 241 | 0.676848 | true | true | |
f7fa8e89abf63baa88cd1495217dea2b95cc4a5f | 661 | py | Python | operations using tkinter/textBox.py | XORsalaria/python-GUI | f12bd0636c5726160784e491e31bf60235bec56d | [
"MIT"
] | null | null | null | operations using tkinter/textBox.py | XORsalaria/python-GUI | f12bd0636c5726160784e491e31bf60235bec56d | [
"MIT"
] | null | null | null | operations using tkinter/textBox.py | XORsalaria/python-GUI | f12bd0636c5726160784e491e31bf60235bec56d | [
"MIT"
] | null | null | null | from tkinter import *
myGui = Tk()
def Hello():
    """Read the username entry (StringVar `a`) and show it as a label."""
    username = a.get()
    # Label(...).pack() returns None, so the old `myLabel3 = ...` binding
    # kept nothing useful; just create and pack the widget.
    Label(text=username, fg='white', bg='black').pack()
def Bye():
    """Show a static goodbye label.

    The original read ``c.get()`` into a variable that was never used;
    StringVar.get() has no side effects, so the dead read is removed.
    """
    Label(text='Bye, Bot!').pack()
# Variables bound to the two Entry widgets below (read by Hello/Bye).
a = StringVar()
c = StringVar()
myGui.title("Python-GUI")
myGui.geometry("500x500+100+50")
# NOTE(review): .pack() returns None, so myLabel1/text1/... hold None;
# keep references to the widgets themselves if they need to be updated later.
myLabel1 = Label(text='Label One',fg='red',font=30).pack()
# Class is Button and object is myButton1
myButton1 = Button(text='enter username',font=30,command = Hello).pack()
text1 = Entry(textvariable=a).pack()
# NOTE(review): labelled as a password field but has no show='*' masking --
# confirm whether masking was intended.
myButton2 = Button(text='enterpassword',font=30,command = Bye).pack()
text2 = Entry(textvariable=c).pack()
# Enter the Tk event loop (blocks until the window is closed).
myGui.mainloop()
| 25.423077 | 72 | 0.636914 | from tkinter import *
myGui = Tk()
def Hello():
b = a.get()
myLabel3 = Label(text=b,fg='white',bg='black').pack()
def Bye():
d = c.get()
myLabel4 = Label(text='Bye, Bot!').pack()
a = StringVar()
c = StringVar()
myGui.title("Python-GUI")
myGui.geometry("500x500+100+50")
myLabel1 = Label(text='Label One',fg='red',font=30).pack()
myButton1 = Button(text='enter username',font=30,command = Hello).pack()
text1 = Entry(textvariable=a).pack()
myButton2 = Button(text='enterpassword',font=30,command = Bye).pack()
text2 = Entry(textvariable=c).pack()
myGui.mainloop()
| true | true |
f7fa8ee844a4b369657a7e4bda3955090c424495 | 5,812 | py | Python | source/NACDVOCDetection.py | ogrenenmakine/censusEnumerators | e63b5f888a0aaefa69dbc0413d567b1643c5c503 | [
"Apache-2.0"
] | null | null | null | source/NACDVOCDetection.py | ogrenenmakine/censusEnumerators | e63b5f888a0aaefa69dbc0413d567b1643c5c503 | [
"Apache-2.0"
] | null | null | null | source/NACDVOCDetection.py | ogrenenmakine/censusEnumerators | e63b5f888a0aaefa69dbc0413d567b1643c5c503 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
import os
import logging
import numpy as np
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
import mxnet as mx
from source.base import VisionDataset
class NACDDetection(VisionDataset):
"""Pascal VOC detection Dataset.
Parameters
----------
root : str, default '~/mxnet/datasets/voc'
Path to folder storing the dataset.
splits : list of tuples, default ((2007, 'trainval'), (2012, 'trainval'))
List of combinations of (year, name)
For years, candidates can be: 2007, 2012.
For names, candidates can be: 'train', 'val', 'trainval', 'test'.
transform : callable, defaut None
A function that takes data and label and transforms them. Refer to
:doc:`./transforms` for examples.
A transform function for object detection should take label into consideration,
because any geometric modification will require label to be modified.
index_map : dict, default None
In default, the 20 classes are mapped into indices from 0 to 19. We can
customize it by providing a str to int dict specifying how to map class
names to indicies. Use by advanced users only, when you want to swap the orders
of class labels.
preload_label : bool, default True
If True, then parse and load all labels into memory during
initialization. It often accelerate speed but require more memory
usage. Typical preloaded labels took tens of MB. You only need to disable it
when your dataset is extreamly large.
"""
CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor', 'negative', 'cluster')
    def __init__(self, root=os.path.join('~', '.mxnet', 'datasets', 'voc'),
                 splits=((2007, 'trainval'), (2012, 'trainval')),
                 transform=None, index_map=None, preload_label=True):
        super(NACDDetection, self).__init__(root)
        # Per-sample (width, height) cache, filled lazily by _load_label.
        self._im_shapes = {}
        self._root = os.path.expanduser(root)
        self._transform = transform
        self._splits = splits
        self._items = self._load_items(splits)
        # Path templates; each item is a (dataset_root, image_id) pair fed
        # to .format(*item).
        self._anno_path = os.path.join('{}', 'Annotations', '{}.xml')
        self._image_path = os.path.join('{}', 'JPEGImages', '{}.jpg')
        # Class name -> contiguous index, unless the caller supplies a map.
        self.index_map = index_map or dict(zip(self.classes, range(self.num_class)))
        # Optionally parse all annotations up front (speed vs. memory).
        self._label_cache = self._preload_labels() if preload_label else None
def __str__(self):
detail = ','.join([str(s[0]) + s[1] for s in self._splits])
return self.__class__.__name__ + '(' + detail + ')'
    @property
    def classes(self):
        """Category names (tuple of str, see CLASSES)."""
        return type(self).CLASSES
    def __len__(self):
        """Number of samples listed across all requested splits."""
        return len(self._items)
def __getitem__(self, idx):
img_id = self._items[idx]
img_path = self._image_path.format(*img_id)
label = self._label_cache[idx] if self._label_cache else self._load_label(idx)
img = mx.image.imread(img_path, 1)
if self._transform is not None:
return self._transform(img, label)
return img, label
def _load_items(self, splits):
"""Load individual image indices from splits."""
ids = []
for year, name in splits:
root = os.path.join(self._root, 'VOC' + str(year))
lf = os.path.join(root, 'ImageSets', 'Main', name + '.txt')
with open(lf, 'r') as f:
ids += [(root, line.strip()) for line in f.readlines()]
return ids
    def _load_label(self, idx):
        """Parse the VOC-style xml annotation for image ``idx``.

        Returns an ndarray of rows [xmin, ymin, xmax, ymax, cls_id, difficult];
        coordinates are shifted by -1 (VOC boxes are 1-based on disk).
        NOTE(review): for an image with no <object> entries this returns an
        empty array of shape (0,), not (0, 6) — confirm downstream consumers.
        """
        img_id = self._items[idx]
        anno_path = self._anno_path.format(*img_id)
        root = ET.parse(anno_path).getroot()
        size = root.find('size')
        width = float(size.find('width').text)
        height = float(size.find('height').text)
        if idx not in self._im_shapes:
            # store the shapes for later usage
            self._im_shapes[idx] = (width, height)
        label = []
        for obj in root.iter('object'):
            # hard-coded to 0: the xml <difficult> tag is ignored here —
            # presumably intentional (treat every object as non-difficult); verify
            difficult = 0
            cls_name = obj.find('name').text.strip().lower()
            # KeyError here means the xml names a class missing from index_map
            cls_id = self.index_map[cls_name]
            xml_box = obj.find('bndbox')
            # shift to 0-based pixel coordinates
            xmin = (float(xml_box.find('xmin').text) - 1)
            ymin = (float(xml_box.find('ymin').text) - 1)
            xmax = (float(xml_box.find('xmax').text) - 1)
            ymax = (float(xml_box.find('ymax').text) - 1)
            try:
                self._validate_label(xmin, ymin, xmax, ymax, width, height)
            except AssertionError as e:
                raise RuntimeError("Invalid label at {}, {}".format(anno_path, e))
            label.append([xmin, ymin, xmax, ymax, cls_id, difficult])
        return np.array(label)
    def _validate_label(self, xmin, ymin, xmax, ymax, width, height):
        """Validate one (already 0-based-shifted) box against the image size.

        The -1 lower bound and width/height + 1 upper bound tolerate the -1
        coordinate shift applied by _load_label. Raises AssertionError with a
        descriptive message on violation.
        NOTE: these are ``assert`` statements, so they vanish under ``python -O``.
        """
        assert xmin >= -1 and xmin < width, (
            "xmin must in [0, {}), given {}".format(width, xmin))
        assert ymin >= -1 and ymin < height, (
            "ymin must in [0, {}), given {}".format(height, ymin))
        assert xmax > xmin and xmax <= width + 1, (
            "xmax must in (xmin, {}], given {}".format(width, xmax))
        assert ymax > ymin and ymax <= height + 1, (
            "ymax must in (ymin, {}], given {}".format(height, ymax))
def _preload_labels(self):
"""Preload all labels into memory."""
logging.debug("Preloading %s labels into memory...", str(self))
return [self._load_label(idx) for idx in range(len(self))]
| 44.030303 | 101 | 0.602891 | from __future__ import absolute_import
from __future__ import division
import os
import logging
import numpy as np
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
import mxnet as mx
from source.base import VisionDataset
class NACDDetection(VisionDataset):
CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor', 'negative', 'cluster')
def __init__(self, root=os.path.join('~', '.mxnet', 'datasets', 'voc'),
splits=((2007, 'trainval'), (2012, 'trainval')),
transform=None, index_map=None, preload_label=True):
super(NACDDetection, self).__init__(root)
self._im_shapes = {}
self._root = os.path.expanduser(root)
self._transform = transform
self._splits = splits
self._items = self._load_items(splits)
self._anno_path = os.path.join('{}', 'Annotations', '{}.xml')
self._image_path = os.path.join('{}', 'JPEGImages', '{}.jpg')
self.index_map = index_map or dict(zip(self.classes, range(self.num_class)))
self._label_cache = self._preload_labels() if preload_label else None
def __str__(self):
detail = ','.join([str(s[0]) + s[1] for s in self._splits])
return self.__class__.__name__ + '(' + detail + ')'
@property
def classes(self):
return type(self).CLASSES
def __len__(self):
return len(self._items)
def __getitem__(self, idx):
img_id = self._items[idx]
img_path = self._image_path.format(*img_id)
label = self._label_cache[idx] if self._label_cache else self._load_label(idx)
img = mx.image.imread(img_path, 1)
if self._transform is not None:
return self._transform(img, label)
return img, label
def _load_items(self, splits):
ids = []
for year, name in splits:
root = os.path.join(self._root, 'VOC' + str(year))
lf = os.path.join(root, 'ImageSets', 'Main', name + '.txt')
with open(lf, 'r') as f:
ids += [(root, line.strip()) for line in f.readlines()]
return ids
def _load_label(self, idx):
img_id = self._items[idx]
anno_path = self._anno_path.format(*img_id)
root = ET.parse(anno_path).getroot()
size = root.find('size')
width = float(size.find('width').text)
height = float(size.find('height').text)
if idx not in self._im_shapes:
self._im_shapes[idx] = (width, height)
label = []
for obj in root.iter('object'):
difficult = 0
cls_name = obj.find('name').text.strip().lower()
cls_id = self.index_map[cls_name]
xml_box = obj.find('bndbox')
xmin = (float(xml_box.find('xmin').text) - 1)
ymin = (float(xml_box.find('ymin').text) - 1)
xmax = (float(xml_box.find('xmax').text) - 1)
ymax = (float(xml_box.find('ymax').text) - 1)
try:
self._validate_label(xmin, ymin, xmax, ymax, width, height)
except AssertionError as e:
raise RuntimeError("Invalid label at {}, {}".format(anno_path, e))
label.append([xmin, ymin, xmax, ymax, cls_id, difficult])
return np.array(label)
def _validate_label(self, xmin, ymin, xmax, ymax, width, height):
assert xmin >= -1 and xmin < width, (
"xmin must in [0, {}), given {}".format(width, xmin))
assert ymin >= -1 and ymin < height, (
"ymin must in [0, {}), given {}".format(height, ymin))
assert xmax > xmin and xmax <= width + 1, (
"xmax must in (xmin, {}], given {}".format(width, xmax))
assert ymax > ymin and ymax <= height + 1, (
"ymax must in (ymin, {}], given {}".format(height, ymax))
def _preload_labels(self):
logging.debug("Preloading %s labels into memory...", str(self))
return [self._load_label(idx) for idx in range(len(self))]
| true | true |
f7fa8f3afd9e46a4248c41b3a0518cdd30e45368 | 7,399 | py | Python | pilot/workflow/generic.py | davidgcameron/pilot2 | b2557caecfefe08a1695efb0508fc68cffdaa5c2 | [
"Apache-2.0"
] | null | null | null | pilot/workflow/generic.py | davidgcameron/pilot2 | b2557caecfefe08a1695efb0508fc68cffdaa5c2 | [
"Apache-2.0"
] | null | null | null | pilot/workflow/generic.py | davidgcameron/pilot2 | b2557caecfefe08a1695efb0508fc68cffdaa5c2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Mario Lassnig, mario.lassnig@cern.ch, 2016-2017
# - Daniel Drizhuk, d.drizhuk@gmail.com, 2017
# - Paul Nilsson, paul.nilsson@cern.ch, 2017-2018
from __future__ import print_function
import functools
import signal
import threading
from time import time, sleep
from sys import stderr
from os import getpid
from shutil import rmtree
try:
import Queue as queue # noqa: N813
except Exception:
import queue # python 3
from collections import namedtuple
from pilot.common.exception import ExcThread
from pilot.control import job, payload, data, monitor
from pilot.util.constants import SUCCESS, PILOT_KILL_SIGNAL, MAX_KILL_WAIT_TIME
from pilot.util.processes import kill_processes
from pilot.util.timing import add_to_pilot_timing
import logging
logger = logging.getLogger(__name__)
def interrupt(args, signum, frame):
    """
    Interrupt function on the receiving end of kill signals.
    This function is forwarded any incoming signals (SIGINT, SIGTERM, etc) and will set abort_job which instructs
    the threads to abort the job.

    :param args: pilot arguments.
    :param signum: signal.
    :param frame: stack/execution frame pointing to the frame that was interrupted by the signal.
    :return:
    """
    # bug fix: iteritems() is Python 2 only and made the handler itself crash
    # on Python 3; items() works on both (this module already carries a
    # py2/py3 Queue import fallback, so both interpreters must be supported)
    sig = [v for v, k in signal.__dict__.items() if k == signum][0]
    args.signal_counter += 1

    # keep track of when first kill signal arrived, any stuck loops should abort at a defined cut off time
    if args.kill_time == 0:
        args.kill_time = int(time())

    max_kill_wait_time = MAX_KILL_WAIT_TIME + 60  # add another minute of grace to let threads finish
    current_time = int(time())
    if args.kill_time and current_time - args.kill_time > max_kill_wait_time:
        # grace period exhausted: clean up and hard-kill the pilot process tree
        logger.warning('passed maximum waiting time after first kill signal - will commit suicide - farewell')
        try:
            rmtree(args.sourcedir)
        except Exception as e:
            logger.warning(e)
        logging.shutdown()
        kill_processes(getpid())

    add_to_pilot_timing('0', PILOT_KILL_SIGNAL, time(), args)
    logger.warning('caught signal: %s' % sig)

    args.signal = sig
    logger.warning('will instruct threads to abort and update the server')
    args.abort_job.set()

    logger.warning('waiting for threads to finish')
    args.job_aborted.wait()

    logger.warning('setting graceful stop (in case it was not set already), pilot will abort')
    args.graceful_stop.set()
def run(args):
    """
    Main execution function for the generic workflow.
    The function sets up the internal queues which handle the flow of jobs,
    installs signal handlers, starts the job/payload/data/monitor control
    threads, and then watches them until only daemon threads remain.

    :param args: pilot arguments.
    :returns: traces (namedtuple whose ``pilot`` dict carries state/error_code).
    """
    logger.info('setting up signal handling')
    signal.signal(signal.SIGINT, functools.partial(interrupt, args))
    signal.signal(signal.SIGTERM, functools.partial(interrupt, args))
    signal.signal(signal.SIGQUIT, functools.partial(interrupt, args))
    signal.signal(signal.SIGSEGV, functools.partial(interrupt, args))
    signal.signal(signal.SIGXCPU, functools.partial(interrupt, args))
    signal.signal(signal.SIGUSR1, functools.partial(interrupt, args))
    signal.signal(signal.SIGBUS, functools.partial(interrupt, args))
    logger.info('setting up queues')
    # the namedtuple is used only as a plain namespace: queue attributes are
    # assigned onto it below, one Queue per job/payload/data life-cycle stage
    queues = namedtuple('queues', ['jobs', 'payloads', 'data_in', 'data_out', 'current_data_in',
                                   'validated_jobs', 'validated_payloads', 'monitored_payloads',
                                   'finished_jobs', 'finished_payloads', 'finished_data_in', 'finished_data_out',
                                   'failed_jobs', 'failed_payloads', 'failed_data_in', 'failed_data_out',
                                   'completed_jobs'])
    queues.jobs = queue.Queue()
    queues.payloads = queue.Queue()
    queues.data_in = queue.Queue()
    queues.data_out = queue.Queue()
    queues.current_data_in = queue.Queue()
    queues.validated_jobs = queue.Queue()
    queues.validated_payloads = queue.Queue()
    queues.monitored_payloads = queue.Queue()
    queues.finished_jobs = queue.Queue()
    queues.finished_payloads = queue.Queue()
    queues.finished_data_in = queue.Queue()
    queues.finished_data_out = queue.Queue()
    queues.failed_jobs = queue.Queue()
    queues.failed_payloads = queue.Queue()
    queues.failed_data_in = queue.Queue()
    queues.failed_data_out = queue.Queue()
    queues.completed_jobs = queue.Queue()
    logger.info('setting up tracing')
    traces = namedtuple('traces', ['pilot'])
    traces.pilot = {'state': SUCCESS,
                    'nr_jobs': 0,
                    'error_code': 0,
                    'command': None}
    # initial sanity check defined by pilot user
    # NOTE(review): __import__ level -1 is Python 2-only; on Python 3 this
    # raises and the except branch silently skips the sanity check — confirm
    # whether that degradation is intended
    try:
        user = __import__('pilot.user.%s.common' % args.pilot_user.lower(), globals(), locals(), [args.pilot_user.lower()], -1)
        exit_code = user.sanity_check()
    except Exception as e:
        logger.info('skipping sanity check since: %s' % e)
    else:
        if exit_code != 0:
            logger.info('aborting workflow since sanity check failed')
            traces.pilot['error_code'] = exit_code
            return traces
        else:
            logger.info('passed sanity check')
    # define the threads; each control function reports fatal exceptions via
    # its ExcThread bucket queue
    targets = {'job': job.control, 'payload': payload.control, 'data': data.control, 'monitor': monitor.control}
    threads = [ExcThread(bucket=queue.Queue(), target=target, kwargs={'queues': queues, 'traces': traces, 'args': args},
                         name=name) for name, target in targets.items()]
    logger.info('starting threads')
    [thread.start() for thread in threads]
    logger.info('waiting for interrupts')
    # watchdog loop: drain exception buckets and poll thread liveness until
    # only this (main) thread plus ignorable daemon threads remain
    thread_count = threading.activeCount()
    while threading.activeCount() > 1:
        for thread in threads:
            bucket = thread.get_bucket()
            try:
                exc = bucket.get(block=False)
            except queue.Empty:
                pass
            else:
                exc_type, exc_obj, exc_trace = exc
                # deal with the exception
                print('received exception from bucket queue in generic workflow: %s' % exc_obj, file=stderr)
                # logger.fatal('caught exception: %s' % exc_obj)
            thread.join(0.1)
        abort = False
        if thread_count != threading.activeCount():
            thread_count = threading.activeCount()
            logger.debug('thread count now at %d threads' % thread_count)
            logger.debug('enumerate: %s' % str(threading.enumerate()))
            # count all non-daemon threads
            daemon_threads = 0
            for thread in threading.enumerate():
                if thread.isDaemon():  # ignore any daemon threads, they will be aborted when python ends
                    daemon_threads += 1
            if thread_count - daemon_threads == 1:
                logger.debug('aborting since there is[are] %d daemon thread[s] which can be ignored' % daemon_threads)
                abort = True
        if abort:
            break
        sleep(0.1)
    logger.info('end of generic workflow (traces error code: %d)' % traces.pilot['error_code'])
    return traces
| 37.558376 | 127 | 0.656035 |
from __future__ import print_function
import functools
import signal
import threading
from time import time, sleep
from sys import stderr
from os import getpid
from shutil import rmtree
try:
import Queue as queue
except Exception:
import queue
from collections import namedtuple
from pilot.common.exception import ExcThread
from pilot.control import job, payload, data, monitor
from pilot.util.constants import SUCCESS, PILOT_KILL_SIGNAL, MAX_KILL_WAIT_TIME
from pilot.util.processes import kill_processes
from pilot.util.timing import add_to_pilot_timing
import logging
logger = logging.getLogger(__name__)
def interrupt(args, signum, frame):
sig = [v for v, k in signal.__dict__.iteritems() if k == signum][0]
args.signal_counter += 1
if args.kill_time == 0:
args.kill_time = int(time())
max_kill_wait_time = MAX_KILL_WAIT_TIME + 60
current_time = int(time())
if args.kill_time and current_time - args.kill_time > max_kill_wait_time:
logger.warning('passed maximum waiting time after first kill signal - will commit suicide - farewell')
try:
rmtree(args.sourcedir)
except Exception as e:
logger.warning(e)
logging.shutdown()
kill_processes(getpid())
add_to_pilot_timing('0', PILOT_KILL_SIGNAL, time(), args)
logger.warning('caught signal: %s' % sig)
args.signal = sig
logger.warning('will instruct threads to abort and update the server')
args.abort_job.set()
logger.warning('waiting for threads to finish')
args.job_aborted.wait()
logger.warning('setting graceful stop (in case it was not set already), pilot will abort')
args.graceful_stop.set()
def run(args):
logger.info('setting up signal handling')
signal.signal(signal.SIGINT, functools.partial(interrupt, args))
signal.signal(signal.SIGTERM, functools.partial(interrupt, args))
signal.signal(signal.SIGQUIT, functools.partial(interrupt, args))
signal.signal(signal.SIGSEGV, functools.partial(interrupt, args))
signal.signal(signal.SIGXCPU, functools.partial(interrupt, args))
signal.signal(signal.SIGUSR1, functools.partial(interrupt, args))
signal.signal(signal.SIGBUS, functools.partial(interrupt, args))
logger.info('setting up queues')
queues = namedtuple('queues', ['jobs', 'payloads', 'data_in', 'data_out', 'current_data_in',
'validated_jobs', 'validated_payloads', 'monitored_payloads',
'finished_jobs', 'finished_payloads', 'finished_data_in', 'finished_data_out',
'failed_jobs', 'failed_payloads', 'failed_data_in', 'failed_data_out',
'completed_jobs'])
queues.jobs = queue.Queue()
queues.payloads = queue.Queue()
queues.data_in = queue.Queue()
queues.data_out = queue.Queue()
queues.current_data_in = queue.Queue()
queues.validated_jobs = queue.Queue()
queues.validated_payloads = queue.Queue()
queues.monitored_payloads = queue.Queue()
queues.finished_jobs = queue.Queue()
queues.finished_payloads = queue.Queue()
queues.finished_data_in = queue.Queue()
queues.finished_data_out = queue.Queue()
queues.failed_jobs = queue.Queue()
queues.failed_payloads = queue.Queue()
queues.failed_data_in = queue.Queue()
queues.failed_data_out = queue.Queue()
queues.completed_jobs = queue.Queue()
logger.info('setting up tracing')
traces = namedtuple('traces', ['pilot'])
traces.pilot = {'state': SUCCESS,
'nr_jobs': 0,
'error_code': 0,
'command': None}
try:
user = __import__('pilot.user.%s.common' % args.pilot_user.lower(), globals(), locals(), [args.pilot_user.lower()], -1)
exit_code = user.sanity_check()
except Exception as e:
logger.info('skipping sanity check since: %s' % e)
else:
if exit_code != 0:
logger.info('aborting workflow since sanity check failed')
traces.pilot['error_code'] = exit_code
return traces
else:
logger.info('passed sanity check')
targets = {'job': job.control, 'payload': payload.control, 'data': data.control, 'monitor': monitor.control}
threads = [ExcThread(bucket=queue.Queue(), target=target, kwargs={'queues': queues, 'traces': traces, 'args': args},
name=name) for name, target in targets.items()]
logger.info('starting threads')
[thread.start() for thread in threads]
logger.info('waiting for interrupts')
thread_count = threading.activeCount()
while threading.activeCount() > 1:
for thread in threads:
bucket = thread.get_bucket()
try:
exc = bucket.get(block=False)
except queue.Empty:
pass
else:
exc_type, exc_obj, exc_trace = exc
print('received exception from bucket queue in generic workflow: %s' % exc_obj, file=stderr)
thread.join(0.1)
abort = False
if thread_count != threading.activeCount():
thread_count = threading.activeCount()
logger.debug('thread count now at %d threads' % thread_count)
logger.debug('enumerate: %s' % str(threading.enumerate()))
daemon_threads = 0
for thread in threading.enumerate():
if thread.isDaemon():
daemon_threads += 1
if thread_count - daemon_threads == 1:
logger.debug('aborting since there is[are] %d daemon thread[s] which can be ignored' % daemon_threads)
abort = True
if abort:
break
sleep(0.1)
logger.info('end of generic workflow (traces error code: %d)' % traces.pilot['error_code'])
return traces
| true | true |
f7fa8f449c323a09ff6e15ae88bbe6ae711680b3 | 60,381 | py | Python | openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_vendored_deps/library/oc_serviceaccount_secret.py | fahlmant/openshift-tools | dbb4f16ccde3404c36c23108c45ca7b67138ee12 | [
"Apache-2.0"
] | 164 | 2015-07-29T17:35:04.000Z | 2021-12-16T16:38:04.000Z | openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_vendored_deps/library/oc_serviceaccount_secret.py | fahlmant/openshift-tools | dbb4f16ccde3404c36c23108c45ca7b67138ee12 | [
"Apache-2.0"
] | 3,634 | 2015-06-09T13:49:15.000Z | 2022-03-23T20:55:44.000Z | openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_vendored_deps/library/oc_serviceaccount_secret.py | fahlmant/openshift-tools | dbb4f16ccde3404c36c23108c45ca7b67138ee12 | [
"Apache-2.0"
] | 250 | 2015-06-08T19:53:11.000Z | 2022-03-01T04:51:23.000Z | #!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/serviceaccount_secret -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_serviceaccount_secret
short_description: Module to manage openshift service account secrets
description:
- Manage openshift service account secrets programmatically.
options:
state:
description:
- If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
required: false
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: false
aliases: []
service_account:
description:
- Name of the service account.
required: true
default: None
aliases: []
namespace:
description:
- Namespace of the service account and secret.
required: true
default: None
aliases: []
secret:
description:
- The secret that should be linked to the service account.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: get secrets of a service account
oc_serviceaccount_secret:
state: list
service_account: builder
namespace: default
register: sasecretout
- name: Link a service account to a specific secret
oc_serviceaccount_secret:
service_account: builder
secret: mynewsecret
namespace: default
register: sasecretout
'''
# -*- -*- -*- End included fragment: doc/serviceaccount_secret -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
    ''' Exception raised by Yedit for load/parse/write and key-path errors. '''
    pass
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
    def __init__(self,
                 filename=None,
                 content=None,
                 content_type='yaml',
                 separator='.',
                 backup_ext=None,
                 backup=False):
        '''Bind the editor to an optional file and/or preloaded content.

        :param filename: path of the yaml/json file to edit; may be None for
            purely in-memory use.
        :param content: preloaded document (dict used as-is, str parsed);
            takes precedence over the file in load().
        :param content_type: 'yaml' or 'json'.
        :param separator: key-path separator character (default '.').
        :param backup_ext: suffix for backup copies; defaults to a timestamp.
        :param backup: when True, write() first copies the file aside.
        '''
        self.content = content
        self._separator = separator
        self.filename = filename
        self.__yaml_dict = content
        self.content_type = content_type
        self.backup = backup
        if backup_ext is None:
            # default backup suffix: ".YYYYmmddTHHMMSS" timestamp
            self.backup_ext = ".{}".format(time.strftime("%Y%m%dT%H%M%S"))
        else:
            self.backup_ext = backup_ext
        # parse file/content immediately; fall back to an empty document
        self.load(content_type=self.content_type)
        if self.__yaml_dict is None:
            self.__yaml_dict = {}
    @property
    def separator(self):
        ''' getter for the key-path separator character '''
        return self._separator
    @separator.setter
    def separator(self, inc_sep):
        ''' setter for the key-path separator character '''
        self._separator = inc_sep
    @property
    def yaml_dict(self):
        ''' getter for the parsed in-memory document '''
        return self.__yaml_dict
    @yaml_dict.setter
    def yaml_dict(self, value):
        ''' setter for the parsed in-memory document '''
        self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def remove_entry(data, key, index=None, value=None, sep='.'):
        ''' Remove data at location ``key``.

        With key == '': clears/removes from the top-level container itself
        (by ``value``, by ``index`` for lists, or everything).
        Otherwise walks the dotted path and deletes the final element.
        Returns True on success, False/None when nothing was removed.
        '''
        if key == '' and isinstance(data, dict):
            if value is not None:
                data.pop(value)
            elif index is not None:
                raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
            else:
                data.clear()
            return True
        elif key == '' and isinstance(data, list):
            ind = None
            if value is not None:
                try:
                    ind = data.index(value)
                except ValueError:
                    return False
            elif index is not None:
                ind = index
            else:
                # neither value nor index given: empty the whole list
                del data[:]
            if ind is not None:
                data.pop(ind)
            return True
        if not (key and Yedit.valid_key(key, sep)) and \
           isinstance(data, (list, dict)):
            return None
        key_indexes = Yedit.parse_key(key, sep)
        # walk every path component except the last
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None
        # process last index for remove
        # expected list entry
        if key_indexes[-1][0]:
            if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
                del data[int(key_indexes[-1][0])]
                return True
        # expected dict entry
        elif key_indexes[-1][1]:
            if isinstance(data, dict):
                del data[key_indexes[-1][1]]
                return True
    @staticmethod
    def add_entry(data, key, item=None, sep='.'):
        ''' Set ``item`` at the dotted-path ``key`` inside nested ``data``,
            creating intermediate dicts as needed.
            e.g. data = {'a': {'b': 'c'}}, key = 'a.b', item = 'd' -> 'd' stored at a.b
            Returns the stored item, or None for an invalid key; raises
            YeditException when the path crosses an incompatible type.
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None
        key_indexes = Yedit.parse_key(key, sep)
        # walk (and auto-create) every path component except the last
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key:
                if isinstance(data, dict) and dict_key in data and data[dict_key]:  # noqa: E501
                    data = data[dict_key]
                    continue
                elif data and not isinstance(data, dict):
                    raise YeditException("Unexpected item type found while going through key " +
                                         "path: {} (at key: {})".format(key, dict_key))
                data[dict_key] = {}
                data = data[dict_key]
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                raise YeditException("Unexpected item type found while going through key path: {}".format(key))
        if key == '':
            data = item
        # process last index for add
        # expected list entry
        elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
            data[int(key_indexes[-1][0])] = item
        # expected dict entry
        elif key_indexes[-1][1] and isinstance(data, dict):
            data[key_indexes[-1][1]] = item
        # didn't add/update to an existing list, nor add/update key to a dict
        # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
        # non-existent array
        else:
            raise YeditException("Error adding to object at path: {}".format(key))
        return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
yfd.write(contents)
fcntl.flock(yfd, fcntl.LOCK_UN)
os.rename(tmp_filename, filename)
    def write(self):
        ''' Serialize the in-memory document back to ``self.filename``.

        Optionally copies the current file aside first (``self.backup``).
        Returns (True, yaml_dict); raises YeditException when no filename is
        set or the content_type is unsupported.
        '''
        if not self.filename:
            raise YeditException('Please specify a filename.')
        if self.backup and self.file_exists():
            shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))
        # Try to set format attributes if supported (ruamel.yaml documents only)
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass
        # Try to use RoundTripDumper if supported (ruamel.yaml); otherwise
        # fall back to plain PyYAML safe_dump.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')
        return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
    def load(self, content_type='yaml'):
        ''' Parse ``self.content`` / the backing file into ``self.yaml_dict``.

        Preloaded content wins over the file: a dict is adopted as-is, a str
        is parsed. Returns the parsed document (None when there is nothing to
        parse); raises YeditException on parse errors.
        '''
        contents = self.read()
        if not contents and not self.content:
            return None
        if self.content:
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content
        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                # Try to set format attributes if supported (ruamel.yaml only)
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass
                # Try to use RoundTripLoader if supported (ruamel.yaml);
                # otherwise fall back to PyYAML safe_load.
                try:
                    self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
                except AttributeError:
                    self.yaml_dict = yaml.safe_load(contents)
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass
            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. {}'.format(err))
        return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
    def pop(self, path, key_or_item):
        ''' Remove a key (for a dict at ``path``) or an item (for a list at
            ``path``). Returns a (changed, document) tuple. '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if entry is None:
            return (False, self.yaml_dict)
        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if key_or_item in entry:
                entry.pop(key_or_item)
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)
        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            try:
                # only the first occurrence is removed
                ind = entry.index(key_or_item)
            except ValueError:
                return (False, self.yaml_dict)
            entry.pop(ind)
            return (True, self.yaml_dict)
        # scalar entry: nothing to pop from
        return (False, self.yaml_dict)
def delete(self, path, index=None, value=None):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
    # pylint: disable=too-many-arguments
    def update(self, path, value, index=None, curr_value=None):
        ''' Merge ``value`` into the dict at ``path``, or update/append it in
            the list at ``path`` (target element chosen by ``curr_value`` or
            ``index``). Returns a (changed, document) tuple. '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if not isinstance(value, dict):
                raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
                                     'value=[{}] type=[{}]'.format(value, type(value)))
            entry.update(value)
            return (True, self.yaml_dict)
        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            if curr_value:
                try:
                    ind = entry.index(curr_value)
                except ValueError:
                    return (False, self.yaml_dict)
            elif index is not None:
                ind = index
            if ind is not None and entry[ind] != value:
                entry[ind] = value
                return (True, self.yaml_dict)
            # see if it exists in the list
            try:
                ind = entry.index(value)
            except ValueError:
                # doesn't exist, append it
                entry.append(value)
                return (True, self.yaml_dict)
            # already exists, return
            if ind is not None:
                return (False, self.yaml_dict)
            return (False, self.yaml_dict)
def put(self, path, value):
    ''' put path, value into a dict

    The change is first applied to a working copy so a failed add_entry
    leaves self.yaml_dict untouched. Returns (changed, yaml_dict).
    '''
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None
    if entry == value:
        # Already the requested value: nothing to do.
        return (False, self.yaml_dict)
    # deepcopy didn't work
    # Try to use ruamel.yaml and fallback to pyyaml
    # (round-tripping through the dumper preserves ruamel formatting;
    # plain pyyaml has no round_trip_dump, hence the AttributeError path)
    try:
        tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                  default_flow_style=False),
                             yaml.RoundTripLoader)
    except AttributeError:
        tmp_copy = copy.deepcopy(self.yaml_dict)
    # set the format attributes if available
    try:
        tmp_copy.fa.set_block_style()
    except AttributeError:
        pass
    result = Yedit.add_entry(tmp_copy, path, value, self.separator)
    if result is None:
        return (False, self.yaml_dict)
    # When path equals "" it is a special case.
    # "" refers to the root of the document
    # Only update the root path (entire document) when its a list or dict
    if path == '':
        if isinstance(result, list) or isinstance(result, dict):
            self.yaml_dict = result
            return (True, self.yaml_dict)
        return (False, self.yaml_dict)
    self.yaml_dict = tmp_copy
    return (True, self.yaml_dict)
def create(self, path, value):
    '''Create the yaml file seeded with *value* at *path*.

    A no-op returning ``(False, yaml_dict)`` when the file already exists.
    '''
    if self.file_exists():
        return (False, self.yaml_dict)
    # deepcopy does not preserve ruamel formatting, so round-trip through
    # the ruamel dumper when available and fall back to deepcopy (pyyaml).
    try:
        working = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                 default_flow_style=False),
                            yaml.RoundTripLoader)
    except AttributeError:
        working = copy.deepcopy(self.yaml_dict)
    # set the format attributes if available
    try:
        working.fa.set_block_style()
    except AttributeError:
        pass
    if Yedit.add_entry(working, path, value, self.separator) is not None:
        self.yaml_dict = working
        return (True, self.yaml_dict)
    return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
curr_value = yaml.safe_load(str(invalue))
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
# There is a special case where '' will turn into None after yaml loading it so skip
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
@staticmethod
def process_edits(edits, yamlfile):
    '''Apply each edit dict in *edits* to *yamlfile*, one by one.

    Each edit supplies a 'key', a 'value', an optional 'value_type', and
    an optional 'action' ('update', 'append', or default put). Returns a
    {'changed': bool, 'results': [...]} summary of the edits that took.
    '''
    applied = []
    for edit in edits:
        value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
        action = edit.get('action')
        if action == 'update':
            # pylint: disable=line-too-long
            curr_value = Yedit.get_curr_value(
                Yedit.parse_value(edit.get('curr_value')),
                edit.get('curr_value_format'))
            changed, result = yamlfile.update(edit['key'], value,
                                              edit.get('index'), curr_value)
        elif action == 'append':
            changed, result = yamlfile.append(edit['key'], value)
        else:
            changed, result = yamlfile.put(edit['key'], value)
        if changed:
            applied.append({'key': edit['key'], 'edit': result})
    return {'changed': len(applied) > 0, 'results': applied}
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
    '''perform the idempotent crud operations

    Dispatches on params['state'] ('list', 'absent', 'present') and
    returns an Ansible-style result dict. BUGFIX: corrected two typos in
    user-facing messages ('it is has' and 'Unkown state passed').
    '''
    yamlfile = Yedit(filename=params['src'],
                     backup=params['backup'],
                     content_type=params['content_type'],
                     backup_ext=params['backup_ext'],
                     separator=params['separator'])
    state = params['state']
    if params['src']:
        rval = yamlfile.load()
        if yamlfile.yaml_dict is None and state != 'present':
            return {'failed': True,
                    'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
                           'file exists, that it has correct permissions, and is valid yaml.'}
    if state == 'list':
        if params['content']:
            content = Yedit.parse_value(params['content'], params['content_type'])
            yamlfile.yaml_dict = content
        if params['key']:
            rval = yamlfile.get(params['key'])
        return {'changed': False, 'result': rval, 'state': state}
    elif state == 'absent':
        if params['content']:
            content = Yedit.parse_value(params['content'], params['content_type'])
            yamlfile.yaml_dict = content
        if params['update']:
            rval = yamlfile.pop(params['key'], params['value'])
        else:
            rval = yamlfile.delete(params['key'], params['index'], params['value'])
        if rval[0] and params['src']:
            yamlfile.write()
        return {'changed': rval[0], 'result': rval[1], 'state': state}
    elif state == 'present':
        # check if content is different than what is in the file
        if params['content']:
            content = Yedit.parse_value(params['content'], params['content_type'])
            # We had no edits to make and the contents are the same
            if yamlfile.yaml_dict == content and \
               params['value'] is None:
                return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
            yamlfile.yaml_dict = content
        # If we were passed a key, value then
        # we enapsulate it in a list and process it
        # Key, Value passed to the module : Converted to Edits list #
        edits = []
        _edit = {}
        if params['value'] is not None:
            _edit['value'] = params['value']
            _edit['value_type'] = params['value_type']
            _edit['key'] = params['key']
            if params['update']:
                _edit['action'] = 'update'
                _edit['curr_value'] = params['curr_value']
                _edit['curr_value_format'] = params['curr_value_format']
                _edit['index'] = params['index']
            elif params['append']:
                _edit['action'] = 'append'
            edits.append(_edit)
        elif params['edits'] is not None:
            edits = params['edits']
        if edits:
            results = Yedit.process_edits(edits, yamlfile)
            # if there were changes and a src provided to us we need to write
            if results['changed'] and params['src']:
                yamlfile.write()
            return {'changed': results['changed'], 'result': results['results'], 'state': state}
        # no edits to make
        if params['src']:
            # pylint: disable=redefined-variable-type
            rval = yamlfile.write()
            return {'changed': rval[0],
                    'result': rval[1],
                    'state': state}
        # We were passed content but no src, key or value, or edits. Return contents in memory
        return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
    return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    '''Raised for failures coming from the `oc` command-line wrapper.'''
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]


def locate_oc_binary():
    '''Locate the `oc` binary and return its path (or bare 'oc').

    # https://github.com/openshift/openshift-ansible/issues/3410
    `oc` may live in /usr/local/bin, which ansible/sudo often strip from
    $PATH, so a few extra locations are searched as well.
    '''
    search_paths = os.environ.get("PATH", os.defpath).split(os.pathsep)
    search_paths += ADDITIONAL_PATH_LOOKUPS
    binary = 'oc'
    try:
        # shutil.which is Python 3 only; older interpreters fall through
        # to the naive directory scan below.
        located = shutil.which(binary, path=os.pathsep.join(search_paths))
        if located is not None:
            binary = located
    except AttributeError:
        for candidate in search_paths:
            full_path = os.path.join(candidate, binary)
            if os.path.exists(full_path):
                binary = full_path
                break
    return binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
    ''' Class to wrap the command line tools

    Each public-ish `_` helper builds an `oc`/`oc adm` argument list and
    funnels it through openshift_cmd(), which handles namespacing, JSON
    decoding, and error capture.
    '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        ''' Constructor for OpenshiftCLI '''
        self.namespace = namespace
        self.verbose = verbose
        # Work on a private temp copy so this run cannot clobber the
        # shared kubeconfig used by concurrent invocations.
        self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
        self.all_namespaces = all_namespaces
        self.oc_binary = locate_oc_binary()

    # Pylint allows only 5 arguments to be passed.
    # pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
        ''' replace the current object with the content

        Fetches the live object, applies `content` key/value puts or the
        `edits` list via Yedit, and calls `oc replace` only when something
        actually changed.
        '''
        res = self._get(resource, rname)
        if not res['results']:
            return res
        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, res['results'][0], separator=sep)
        updated = False
        if content is not None:
            changes = []
            for key, value in content.items():
                changes.append(yed.put(key, value))
            if any([change[0] for change in changes]):
                updated = True
        elif edits is not None:
            results = Yedit.process_edits(edits, yed)
            if results['changed']:
                updated = True
        if updated:
            yed.write()
            atexit.register(Utils.cleanup, [fname])
            return self._replace(fname, force)
        return {'returncode': 0, 'updated': False}

    def _replace(self, fname, force=False):
        '''replace the current object with oc replace'''
        # We are removing the 'resourceVersion' to handle
        # a race condition when modifying oc objects
        yed = Yedit(fname)
        results = yed.delete('metadata.resourceVersion')
        if results[0]:
            yed.write()
        cmd = ['replace', '-f', fname]
        if force:
            cmd.append('--force')
        return self.openshift_cmd(cmd)

    def _create_from_content(self, rname, content):
        '''create a temporary file and then call oc create on it'''
        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, content=content)
        yed.write()
        atexit.register(Utils.cleanup, [fname])
        return self._create(fname)

    def _create(self, fname):
        '''call oc create on a filename'''
        return self.openshift_cmd(['create', '-f', fname])

    def _delete(self, resource, name=None, selector=None):
        '''call oc delete on a resource

        Exactly one of *name* or *selector* must be supplied; *selector*
        wins when both are given.
        '''
        cmd = ['delete', resource]
        if selector is not None:
            cmd.append('--selector={}'.format(selector))
        elif name is not None:
            cmd.append(name)
        else:
            raise OpenShiftCLIError('Either name or selector is required when calling delete.')
        return self.openshift_cmd(cmd)

    def _process(self, template_name, create=False, params=None, template_data=None):  # noqa: E501
        '''process a template

        template_name: the name of the template to process
        create: whether to send to oc create after processing
        params: the parameters for the template
        template_data: the incoming template's data; instead of a file
        '''
        cmd = ['process']
        if template_data:
            cmd.extend(['-f', '-'])
        else:
            cmd.append(template_name)
        if params:
            param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
            cmd.append('-p')
            cmd.extend(param_str)
        results = self.openshift_cmd(cmd, output=True, input_data=template_data)
        if results['returncode'] != 0 or not create:
            return results
        fname = Utils.create_tmpfile(template_name + '-')
        yed = Yedit(fname, results['results'])
        yed.write()
        atexit.register(Utils.cleanup, [fname])
        return self.openshift_cmd(['create', '-f', fname])

    def _get(self, resource, name=None, selector=None, field_selector=None):
        '''return a resource by name '''
        cmd = ['get', resource]
        if selector is not None:
            cmd.append('--selector={}'.format(selector))
        if field_selector is not None:
            cmd.append('--field-selector={}'.format(field_selector))
        # Name cannot be used with selector or field_selector.
        if selector is None and field_selector is None and name is not None:
            cmd.append(name)
        cmd.extend(['-o', 'json'])
        rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are retuned in an array
        if 'items' in rval:
            rval['results'] = rval['items']
        elif not isinstance(rval['results'], list):
            rval['results'] = [rval['results']]
        return rval

    def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node scheduable '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        cmd.append('--schedulable={}'.format(schedulable))
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')  # noqa: E501

    def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm list pods

        node: the node in which to list pods
        selector: the label selector filter if provided
        pod_selector: the pod selector filter if provided
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))
        cmd.extend(['--list-pods', '-o', 'json'])
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    # pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        ''' perform oadm manage-node evacuate '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        if dry_run:
            cmd.append('--dry-run')
        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))
        if grace_period:
            cmd.append('--grace-period={}'.format(int(grace_period)))
        if force:
            cmd.append('--force')
        cmd.append('--evacuate')
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    def _version(self):
        ''' return the openshift version'''
        return self.openshift_cmd(['version'], output=True, output_type='raw')

    def _import_image(self, url=None, name=None, tag=None):
        ''' perform image import '''
        cmd = ['import-image']
        image = '{0}'.format(name)
        if tag:
            image += ':{0}'.format(tag)
        cmd.append(image)
        if url:
            cmd.append('--from={0}/{1}'.format(url, image))
        cmd.append('-n{0}'.format(self.namespace))
        cmd.append('--confirm')
        return self.openshift_cmd(cmd)

    def _run(self, cmds, input_data):
        ''' Actually executes the command. This makes mocking easier. '''
        curr_env = os.environ.copy()
        curr_env.update({'KUBECONFIG': self.kubeconfig})
        proc = subprocess.Popen(cmds,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=curr_env)
        stdout, stderr = proc.communicate(input_data)
        return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')

    # pylint: disable=too-many-arguments,too-many-branches
    def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
        '''Base command for oc

        Prepends the binary (and 'adm' when oadm=True), appends the
        namespace flags, runs the process, and decodes stdout according
        to output_type.
        '''
        cmds = [self.oc_binary]
        if oadm:
            cmds.append('adm')
        cmds.extend(cmd)
        if self.all_namespaces:
            cmds.extend(['--all-namespaces'])
        # NOTE(review): 'emtpy' looks like a typo for 'empty'; the literal is
        # preserved because existing callers may pass it — confirm before fixing.
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']:  # E501
            cmds.extend(['-n', self.namespace])
        if self.verbose:
            print(' '.join(cmds))
        try:
            returncode, stdout, stderr = self._run(cmds, input_data)
        except OSError as ex:
            returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
        rval = {"returncode": returncode,
                "cmd": ' '.join(cmds)}
        if output_type == 'json':
            rval['results'] = {}
            if output and stdout:
                try:
                    rval['results'] = json.loads(stdout)
                except ValueError as verr:
                    if "No JSON object could be decoded" in verr.args:
                        rval['err'] = verr.args
        elif output_type == 'raw':
            rval['results'] = stdout if output else ''
        if self.verbose:
            print("STDOUT: {0}".format(stdout))
            print("STDERR: {0}".format(stderr))
        if 'err' in rval or returncode != 0:
            rval.update({"stderr": stderr,
                         "stdout": stdout})
        return rval
class Utils(object):  # pragma: no cover
    ''' utilities for openshiftcli modules '''
    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        with open(filename, 'w') as sfd:
            sfd.write(str(contents))

    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents'''
        tmp = Utils.create_tmpfile(prefix=rname)
        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)
        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp

    @staticmethod
    def create_tmpfile_copy(inc_file):
        '''create a temporary copy of a file'''
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        # NOTE(review): the file handle from open() is never closed here;
        # harmless under CPython refcounting but worth a `with` block.
        Utils._write(tmpfile, open(inc_file).read())
        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])
        return tmpfile

    @staticmethod
    def create_tmpfile(prefix='tmp'):
        ''' Generates and returns a temporary file name '''
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name

    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array'''
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Clean up on exit '''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False
        if Utils.find_result(results, _name):
            return True
        return False

    @staticmethod
    def find_result(results, _name):
        ''' Find the specified result by name'''
        rval = None
        for result in results:
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break
        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        ''' return the service file '''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()
        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)
        return contents

    @staticmethod
    def filter_versions(stdout):
        ''' filter the oc version output

        Parses `oc version` output into a {tool: version-string} dict.
        '''
        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']
        for line in stdout.strip().split('\n'):
            for term in version_search:
                if not line:
                    continue
                if line.startswith(term):
                    version_dict[term] = line.split()[-1]
        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version in 3.2 does not return an "openshift" version
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]
        return version_dict

    @staticmethod
    def add_custom_versions(versions):
        ''' create custom versions strings

        For each tool, derives '<tool>_numeric' (e.g. '3.3.0.33') and
        '<tool>_short' (e.g. '3.3') keys from the raw version string.
        '''
        versions_dict = {}
        for tech, version in versions.items():
            # clean up "-" from version
            if "-" in version:
                version = version.split("-")[0]
            if version.startswith('v'):
                version = version[1:]  # Remove the 'v' prefix
            versions_dict[tech + '_numeric'] = version.split('+')[0]
            # "3.3.0.33" is what we have, we want "3.3"
            versions_dict[tech + '_short'] = "{}.{}".format(*version.split('.'))
        return versions_dict

    @staticmethod
    def openshift_installed():
        ''' check if openshift is installed '''
        import rpm
        transaction_set = rpm.TransactionSet()
        rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
        return rpmquery.count() > 0

    # Disabling too-many-branches. This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query.

        Recursively compares lists and dicts, skipping autogenerated keys
        ('metadata', 'status' plus any *skip_keys*). Returns True when every
        remaining key/value in result_def matches user_def.
        '''
        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)
        for key, value in result_def.items():
            if key in skip:
                continue
            # Both are lists
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False
                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False
                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False
                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print('list compare returned false')
                            return False
                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False
            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False
                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False
                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False
            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False
        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    '''Generic Config

    Holds a resource name, namespace, kubeconfig path, and an options dict
    of the form {name: {'value': ..., 'include': bool}}, and renders the
    included options as `--name=value` CLI parameters.
    '''
    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        ''' return config options '''
        return self._options

    def to_option_list(self, ascommalist=''):
        '''return all options as a string
        if ascommalist is set to the name of a key, and
        the value of that key is a dict, format the dict
        as a list of comma delimited key=value pairs'''
        return self.stringify(ascommalist)

    def stringify(self, ascommalist=''):
        ''' return the options hash as cli params in a string
        if ascommalist is set to the name of a key, and
        the value of that key is a dict, format the dict
        as a list of comma delimited key=value pairs '''
        params = []
        for option in sorted(self.config_options.keys()):
            meta = self.config_options[option]
            # Skip excluded options and bare None values (ints, including
            # zero and bools, are always rendered).
            if not meta['include']:
                continue
            if meta['value'] is None and not isinstance(meta['value'], int):
                continue
            if option == ascommalist:
                rendered = ','.join('{}={}'.format(key, val)
                                    for key, val in sorted(meta['value'].items()))
            else:
                rendered = meta['value']
            params.append('--{}={}'.format(option.replace('_', '-'), rendered))
        return params
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/serviceaccount.py -*- -*- -*-
class ServiceAccountConfig(object):
    '''Service account config class

    This class stores the options and returns a default service account
    '''
    # pylint: disable=too-many-arguments
    def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
        self.name = sname
        self.kubeconfig = kubeconfig
        self.namespace = namespace
        self.secrets = secrets or []
        self.image_pull_secrets = image_pull_secrets or []
        self.data = {}
        self.create_dict()

    def create_dict(self):
        ''' instantiate a properly structured volume '''
        self.data['apiVersion'] = 'v1'
        self.data['kind'] = 'ServiceAccount'
        self.data['metadata'] = {'name': self.name, 'namespace': self.namespace}
        # Secrets are rendered as lists of {'name': ...} references.
        self.data['secrets'] = [{"name": sec} for sec in self.secrets]
        self.data['imagePullSecrets'] = [{"name": sec} for sec in self.image_pull_secrets]
class ServiceAccount(Yedit):
    ''' Class to wrap the oc command line tools

    Yedit view over a ServiceAccount definition with helpers for managing
    the 'secrets' and 'imagePullSecrets' lists.
    '''
    image_pull_secrets_path = "imagePullSecrets"
    secrets_path = "secrets"

    def __init__(self, content):
        '''ServiceAccount constructor'''
        super(ServiceAccount, self).__init__(content=content)
        self._secrets = None
        self._image_pull_secrets = None

    @property
    def image_pull_secrets(self):
        ''' property for image_pull_secrets '''
        if self._image_pull_secrets is None:
            self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
        return self._image_pull_secrets

    @image_pull_secrets.setter
    def image_pull_secrets(self, secrets):
        ''' setter for image_pull_secrets '''
        self._image_pull_secrets = secrets

    @property
    def secrets(self):
        ''' property for secrets '''
        if not self._secrets:
            self._secrets = self.get(ServiceAccount.secrets_path) or []
        return self._secrets

    @secrets.setter
    def secrets(self, secrets):
        ''' setter for secrets '''
        self._secrets = secrets

    def delete_secret(self, inc_secret):
        ''' remove a secret; returns True when it was found and removed '''
        remove_idx = None
        for idx, sec in enumerate(self.secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break
        # BUGFIX: `if remove_idx:` treated index 0 as "not found", so the
        # first secret in the list could never be deleted.
        if remove_idx is not None:
            del self.secrets[remove_idx]
            return True
        return False

    def delete_image_pull_secret(self, inc_secret):
        ''' remove a image_pull_secret; returns True when removed '''
        remove_idx = None
        for idx, sec in enumerate(self.image_pull_secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break
        # BUGFIX: same index-0 truthiness defect as delete_secret above.
        if remove_idx is not None:
            del self.image_pull_secrets[remove_idx]
            return True
        return False

    def find_secret(self, inc_secret):
        '''find secret by name; returns the entry dict or None'''
        for secret in self.secrets:
            if secret['name'] == inc_secret:
                return secret
        return None

    def find_image_pull_secret(self, inc_secret):
        '''find image pull secret by name; returns the entry dict or None'''
        for secret in self.image_pull_secrets:
            if secret['name'] == inc_secret:
                return secret
        return None

    def add_secret(self, inc_secret):
        '''add secret'''
        if self.secrets:
            self.secrets.append({"name": inc_secret})  # pylint: disable=no-member
        else:
            self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])

    def add_image_pull_secret(self, inc_secret):
        '''add image_pull_secret'''
        if self.image_pull_secrets:
            self.image_pull_secrets.append({"name": inc_secret})  # pylint: disable=no-member
        else:
            self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
# -*- -*- -*- End included fragment: lib/serviceaccount.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
class OCServiceAccountSecret(OpenShiftCLI):
    ''' Class to wrap the oc command line tools for service account secrets '''
    kind = 'sa'

    def __init__(self, config, verbose=False):
        ''' Constructor for OpenshiftOC '''
        super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
        self.config = config
        self.verbose = verbose
        self._service_account = None

    @property
    def service_account(self):
        ''' Property for the service account; lazily fetched from the master. '''
        if not self._service_account:
            self.get()
        return self._service_account

    @service_account.setter
    def service_account(self, data):
        ''' setter for the service account '''
        self._service_account = data

    def exists(self, in_secret):
        ''' verifies if secret exists in the service account '''
        result = self.service_account.find_secret(in_secret)
        if not result:
            return False
        return True

    def get(self):
        ''' get the service account definition from the master '''
        sao = self._get(OCServiceAccountSecret.kind, self.config.name)
        if sao['returncode'] == 0:
            self.service_account = ServiceAccount(content=sao['results'][0])
            sao['results'] = self.service_account.get('secrets')
        return sao

    def delete(self):
        ''' delete secrets listed in the config from the service account '''
        modified = []
        for rem_secret in self.config.secrets:
            modified.append(self.service_account.delete_secret(rem_secret))
        if any(modified):
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
        return {'returncode': 0, 'changed': False}

    def put(self):
        ''' place secrets into sa '''
        modified = False
        for add_secret in self.config.secrets:
            if not self.service_account.find_secret(add_secret):
                self.service_account.add_secret(add_secret)
                modified = True
        if modified:
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
        return {'returncode': 0, 'changed': False}

    @staticmethod
    # pylint: disable=too-many-return-statements,too-many-branches
    # TODO: This function should be refactored into its individual parts.
    def run_ansible(params, check_mode):
        ''' run the ansible idempotent code '''
        sconfig = ServiceAccountConfig(params['service_account'],
                                       params['namespace'],
                                       params['kubeconfig'],
                                       [params['secret']],
                                       None)
        oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
        state = params['state']
        api_rval = oc_sa_sec.get()

        #####
        # Get
        #####
        if state == 'list':
            return {'changed': False, 'results': api_rval['results'], 'state': "list"}

        ########
        # Delete
        ########
        if state == 'absent':
            if oc_sa_sec.exists(params['secret']):
                if check_mode:
                    # BUGFIX: the original message was a malformed string
                    # continuation that literally emitted `" + \ "` inside
                    # the user-facing text.
                    return {'changed': True,
                            'msg': 'Would have removed the secret from the service account.'}
                api_rval = oc_sa_sec.delete()
                return {'changed': True, 'results': api_rval, 'state': "absent"}
            return {'changed': False, 'state': "absent"}

        if state == 'present':
            ########
            # Create
            ########
            if not oc_sa_sec.exists(params['secret']):
                if check_mode:
                    return {'changed': True,
                            'msg': 'Would have added the secret to the service account.'}
                # Create it here
                api_rval = oc_sa_sec.put()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}
                # return the created object
                api_rval = oc_sa_sec.get()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}
                return {'changed': True, 'results': api_rval, 'state': "present"}
            return {'changed': False, 'results': api_rval, 'state': "present"}

        return {'failed': True,
                'changed': False,
                'msg': 'Unknown state passed. %s' % state,
                'state': 'unknown'}
# -*- -*- -*- End included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
def main():
    '''
    ansible oc module to manage service account secrets.
    '''
    arg_spec = dict(
        kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
        state=dict(default='present', type='str',
                   choices=['present', 'absent', 'list']),
        debug=dict(default=False, type='bool'),
        namespace=dict(default=None, required=True, type='str'),
        secret=dict(default=None, type='str'),
        service_account=dict(required=True, type='str'),
    )
    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
    rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
    if 'failed' in rval:
        module.fail_json(**rval)
    module.exit_json(**rval)


if __name__ == '__main__':
    main()
# -*- -*- -*- End included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
| 34.307386 | 301 | 0.551879 |
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
DOCUMENTATION = '''
---
module: oc_serviceaccount_secret
short_description: Module to manage openshift service account secrets
description:
- Manage openshift service account secrets programmatically.
options:
state:
description:
- If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
required: false
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: false
aliases: []
service_account:
description:
- Name of the service account.
required: true
default: None
aliases: []
namespace:
description:
- Namespace of the service account and secret.
required: true
default: None
aliases: []
secret:
description:
- The secret that should be linked to the service account.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: get secrets of a service account
oc_serviceaccount_secret:
state: list
service_account: builder
namespace: default
register: sasecretout
- name: Link a service account to a specific secret
oc_serviceaccount_secret:
service_account: builder
secret: mynewsecret
namespace: default
register: sasecretout
'''
class YeditException(Exception):
    """Raised by Yedit for editing errors (bad key paths, load failures,
    unsupported content types)."""
    pass
class Yedit(object):
    """Edit YAML/JSON documents in memory via separator-delimited key paths
    (e.g. 'a.b[0].c'), with optional file load/save and backups.

    Works with either ruamel.yaml (round-trip preserving) or plain PyYAML,
    falling back via AttributeError probing on the `yaml` module.
    """
    # Pattern validating a whole key path; note: it contains no `{}` fields,
    # so the .format() call in valid_key leaves it unchanged (upstream quirk,
    # preserved for compatibility).
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
    # Pattern extracting (array_index, dict_key) pairs from a key path.
    re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
    # Characters that may serve as path separators.
    com_sep = set(['.', '#', '|', ':'])

    # pylint: disable=too-many-arguments
    def __init__(self,
                 filename=None,
                 content=None,
                 content_type='yaml',
                 separator='.',
                 backup_ext=None,
                 backup=False):
        """Load *content* (or the file at *filename*) for editing.

        :param filename: path to read from / write to (optional).
        :param content: pre-parsed dict or serialized string (optional).
        :param content_type: 'yaml' or 'json'.
        :param separator: character separating path components.
        :param backup_ext: backup suffix; defaults to a timestamp suffix.
        :param backup: when True, copy the file aside before writing.
        """
        self.content = content
        self._separator = separator
        self.filename = filename
        self.__yaml_dict = content
        self.content_type = content_type
        self.backup = backup
        if backup_ext is None:
            self.backup_ext = ".{}".format(time.strftime("%Y%m%dT%H%M%S"))
        else:
            self.backup_ext = backup_ext
        self.load(content_type=self.content_type)
        if self.__yaml_dict is None:
            self.__yaml_dict = {}

    @property
    def separator(self):
        """Path separator character."""
        return self._separator

    @separator.setter
    def separator(self, inc_sep):
        """Set the path separator character."""
        self._separator = inc_sep

    @property
    def yaml_dict(self):
        """The parsed document (dict or list)."""
        return self.__yaml_dict

    @yaml_dict.setter
    def yaml_dict(self, value):
        """Replace the parsed document."""
        self.__yaml_dict = value

    @staticmethod
    def parse_key(key, sep='.'):
        """Split *key* into (array_index, dict_key) tuples."""
        common_separators = list(Yedit.com_sep - set([sep]))
        return re.findall(Yedit.re_key.format(''.join(common_separators)), key)

    @staticmethod
    def valid_key(key, sep='.'):
        """Return True when *key* is a well-formed path expression."""
        common_separators = list(Yedit.com_sep - set([sep]))
        if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
            return False
        return True

    @staticmethod
    def remove_entry(data, key, index=None, value=None, sep='.'):
        """Remove *key* (or *index*/*value* at the root, key == '') from
        *data* in place.

        :returns: True on success, False when a root value is absent,
                  None when the path cannot be resolved.
        :raises YeditException: index given for a dict root.
        """
        if key == '' and isinstance(data, dict):
            if value is not None:
                data.pop(value)
            elif index is not None:
                raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
            else:
                data.clear()
            return True
        elif key == '' and isinstance(data, list):
            ind = None
            if value is not None:
                try:
                    ind = data.index(value)
                except ValueError:
                    return False
            elif index is not None:
                ind = index
            else:
                del data[:]
            if ind is not None:
                data.pop(ind)
            return True
        if not (key and Yedit.valid_key(key, sep)) and \
           isinstance(data, (list, dict)):
            return None
        key_indexes = Yedit.parse_key(key, sep)
        # Walk down to the parent of the final path component.
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None
        # Remove the final component (list index or dict key).
        if key_indexes[-1][0]:
            if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
                del data[int(key_indexes[-1][0])]
                return True
        elif key_indexes[-1][1]:
            if isinstance(data, dict):
                del data[key_indexes[-1][1]]
                return True

    @staticmethod
    def add_entry(data, key, item=None, sep='.'):
        """Create the path *key* in *data* (in place) and set it to *item*.

        Intermediate dicts are created as needed; list indexes must already
        exist.

        :returns: the placed item, or None for an invalid key.
        :raises YeditException: path traverses an incompatible type.
        """
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None
        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key:
                if isinstance(data, dict) and dict_key in data and data[dict_key]:
                    data = data[dict_key]
                    continue
                elif data and not isinstance(data, dict):
                    raise YeditException("Unexpected item type found while going through key " +
                                         "path: {} (at key: {})".format(key, dict_key))
                data[dict_key] = {}
                data = data[dict_key]
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                raise YeditException("Unexpected item type found while going through key path: {}".format(key))
        if key == '':
            data = item
        elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
            data[int(key_indexes[-1][0])] = item
        elif key_indexes[-1][1] and isinstance(data, dict):
            data[key_indexes[-1][1]] = item
        # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
        # non-existent array
        else:
            raise YeditException("Error adding to object at path: {}".format(key))
        return data

    @staticmethod
    def get_entry(data, key, sep='.'):
        """Return the value at *key* in *data*, or None if unresolvable."""
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None
        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None
        return data

    @staticmethod
    def _write(filename, contents):
        """Atomically replace *filename*: write a '.yedit' temp file under an
        exclusive flock, then rename over the target."""
        tmp_filename = filename + '.yedit'
        with open(tmp_filename, 'w') as yfd:
            fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            yfd.write(contents)
            fcntl.flock(yfd, fcntl.LOCK_UN)
        os.rename(tmp_filename, filename)

    def write(self):
        """Serialize the document to self.filename.

        :returns: (True, document).
        :raises YeditException: no filename, or unsupported content_type.
        """
        if not self.filename:
            raise YeditException('Please specify a filename.')
        if self.backup and self.file_exists():
            shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))
        # Try to set format attributes if supported (ruamel.yaml only).
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass
        # Try to use RoundTripDumper if supported; fall back to safe_dump.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')
        return (True, self.yaml_dict)

    def read(self):
        """Return the raw contents of self.filename, or None if unreadable."""
        # check if it exists
        if self.filename is None or not self.file_exists():
            return None
        contents = None
        with open(self.filename) as yfd:
            contents = yfd.read()
        return contents

    def file_exists(self):
        """Return True when self.filename exists on disk."""
        if os.path.exists(self.filename):
            return True
        return False

    def load(self, content_type='yaml'):
        """Parse self.content or the file into self.yaml_dict.

        :returns: the parsed document, or None when there is nothing to load.
        :raises YeditException: yaml parse failure.
        """
        contents = self.read()
        if not contents and not self.content:
            return None
        if self.content:
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content
        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass
                # Try to use RoundTripLoader if supported.
                try:
                    self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
                except AttributeError:
                    self.yaml_dict = yaml.safe_load(contents)
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass
            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. {}'.format(err))
        return self.yaml_dict

    def get(self, key):
        """Return the value at path *key*, or None."""
        try:
            entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
        except KeyError:
            entry = None
        return entry

    def pop(self, path, key_or_item):
        """Remove *key_or_item* from the dict or list at *path*.

        :returns: (changed, document).
        """
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if entry is None:
            return (False, self.yaml_dict)
        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if key_or_item in entry:
                entry.pop(key_or_item)
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)
        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            try:
                ind = entry.index(key_or_item)
            except ValueError:
                return (False, self.yaml_dict)
            entry.pop(ind)
            return (True, self.yaml_dict)
        return (False, self.yaml_dict)

    def delete(self, path, index=None, value=None):
        """Remove the entry at *path* (optionally by *index*/*value*).

        :returns: (changed, document).
        """
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if entry is None:
            return (False, self.yaml_dict)
        result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
        if not result:
            return (False, self.yaml_dict)
        return (True, self.yaml_dict)

    def exists(self, path, value):
        """Return truthy when *value* is present at *path* (membership for
        lists/dicts, equality otherwise)."""
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if isinstance(entry, list):
            if value in entry:
                return True
            return False
        elif isinstance(entry, dict):
            if isinstance(value, dict):
                rval = False
                for key, val in value.items():
                    if entry[key] != val:
                        rval = False
                        break
                    else:
                        rval = True
                return rval
            return value in entry
        return entry == value

    def append(self, path, value):
        """Append *value* to the list at *path*, creating the list if absent.

        :returns: (changed, document).
        """
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if entry is None:
            self.put(path, [])
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        if not isinstance(entry, list):
            return (False, self.yaml_dict)
        # AUDIT:maybe-no-member makes sense due to loading data from
        # a serialized format.
        # pylint: disable=maybe-no-member
        entry.append(value)
        return (True, self.yaml_dict)

    # pylint: disable=too-many-arguments
    def update(self, path, value, index=None, curr_value=None):
        """Update the dict or list at *path* with *value*.

        For lists, *curr_value* or *index* selects the element to replace;
        otherwise *value* is appended when not already present.

        :returns: (changed, document).
        :raises YeditException: non-dict value for a dict entry.
        """
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if not isinstance(value, dict):
                raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
                                     'value=[{}] type=[{}]'.format(value, type(value)))
            entry.update(value)
            return (True, self.yaml_dict)
        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            if curr_value:
                try:
                    ind = entry.index(curr_value)
                except ValueError:
                    return (False, self.yaml_dict)
            elif index is not None:
                ind = index
            if ind is not None and entry[ind] != value:
                entry[ind] = value
                return (True, self.yaml_dict)
            # see if it exists in the list
            try:
                ind = entry.index(value)
            except ValueError:
                # doesn't exist, append it
                entry.append(value)
                return (True, self.yaml_dict)
            # already exists, return
            if ind is not None:
                return (False, self.yaml_dict)
        return (False, self.yaml_dict)

    def put(self, path, value):
        """Set *path* to *value*, creating intermediate dicts as needed.

        :returns: (changed, document).
        """
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None
        if entry == value:
            return (False, self.yaml_dict)
        # Try to use ruamel.yaml and fallback to pyyaml
        try:
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                      default_flow_style=False),
                                 yaml.RoundTripLoader)
        except AttributeError:
            tmp_copy = copy.deepcopy(self.yaml_dict)
        # set the format attributes if available
        try:
            tmp_copy.fa.set_block_style()
        except AttributeError:
            pass
        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if result is None:
            return (False, self.yaml_dict)
        # When path equals "" it is a special case.
        # "" refers to the root of the document
        # Only update the root path (entire document) when its a list or dict
        if path == '':
            if isinstance(result, list) or isinstance(result, dict):
                self.yaml_dict = result
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)
        self.yaml_dict = tmp_copy
        return (True, self.yaml_dict)

    def create(self, path, value):
        """Set *path* to *value* only when the backing file does not exist.

        :returns: (changed, document).
        """
        if not self.file_exists():
            # deepcopy didn't work
            try:
                tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                          default_flow_style=False),
                                     yaml.RoundTripLoader)
            except AttributeError:
                tmp_copy = copy.deepcopy(self.yaml_dict)
            # set the format attributes if available
            try:
                tmp_copy.fa.set_block_style()
            except AttributeError:
                pass
            result = Yedit.add_entry(tmp_copy, path, value, self.separator)
            if result is not None:
                self.yaml_dict = tmp_copy
                return (True, self.yaml_dict)
        return (False, self.yaml_dict)

    @staticmethod
    def get_curr_value(invalue, val_type):
        """Parse *invalue* according to *val_type* ('yaml'/'json'), or return
        it unchanged."""
        if invalue is None:
            return None
        curr_value = invalue
        if val_type == 'yaml':
            curr_value = yaml.safe_load(str(invalue))
        elif val_type == 'json':
            curr_value = json.loads(invalue)
        return curr_value

    @staticmethod
    def parse_value(inc_value, vtype=''):
        """Coerce *inc_value* according to the requested *vtype*.

        :raises YeditException: non-boolean string when vtype requires bool,
            or an unparsable value.
        """
        true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                      'on', 'On', 'ON', ]
        false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                       'off', 'Off', 'OFF']
        # we will convert to bool if it matches any of the above cases
        if isinstance(inc_value, str) and 'bool' in vtype:
            if inc_value not in true_bools and inc_value not in false_bools:
                raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
        elif isinstance(inc_value, bool) and 'str' in vtype:
            inc_value = str(inc_value)
        # There is a special case where '' will turn into None after yaml loading it so skip
        if isinstance(inc_value, str) and inc_value == '':
            pass
        # If vtype is not str then go ahead and attempt to yaml load it.
        elif isinstance(inc_value, str) and 'str' not in vtype:
            try:
                inc_value = yaml.safe_load(inc_value)
            except Exception:
                raise YeditException('Could not determine type of incoming value. ' +
                                     'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
        return inc_value

    @staticmethod
    def process_edits(edits, yamlfile):
        """Apply a list of edit dicts (update/append/put) to *yamlfile*.

        :returns: {'changed': bool, 'results': [applied edits]}.
        """
        results = []
        for edit in edits:
            value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
            if edit.get('action') == 'update':
                # pylint: disable=line-too-long
                curr_value = Yedit.get_curr_value(
                    Yedit.parse_value(edit.get('curr_value')),
                    edit.get('curr_value_format'))
                rval = yamlfile.update(edit['key'],
                                       value,
                                       edit.get('index'),
                                       curr_value)
            elif edit.get('action') == 'append':
                rval = yamlfile.append(edit['key'], value)
            else:
                rval = yamlfile.put(edit['key'], value)
            if rval[0]:
                results.append({'key': edit['key'], 'edit': rval[1]})
        return {'changed': len(results) > 0, 'results': results}

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def run_ansible(params):
        """Run the yedit Ansible module logic for *params*.

        :returns: an Ansible-style result dict for the requested state
            ('list', 'absent' or 'present').
        """
        yamlfile = Yedit(filename=params['src'],
                         backup=params['backup'],
                         content_type=params['content_type'],
                         backup_ext=params['backup_ext'],
                         separator=params['separator'])
        state = params['state']
        if params['src']:
            rval = yamlfile.load()
            if yamlfile.yaml_dict is None and state != 'present':
                return {'failed': True,
                        'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
                               'file exists, that it has correct permissions, and is valid yaml.'}
        if state == 'list':
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                yamlfile.yaml_dict = content
            if params['key']:
                rval = yamlfile.get(params['key'])
            return {'changed': False, 'result': rval, 'state': state}
        elif state == 'absent':
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                yamlfile.yaml_dict = content
            if params['update']:
                rval = yamlfile.pop(params['key'], params['value'])
            else:
                rval = yamlfile.delete(params['key'], params['index'], params['value'])
            if rval[0] and params['src']:
                yamlfile.write()
            return {'changed': rval[0], 'result': rval[1], 'state': state}
        elif state == 'present':
            # check if content is different than what is in the file
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                # We had no edits to make and the contents are the same
                if yamlfile.yaml_dict == content and \
                   params['value'] is None:
                    return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
                yamlfile.yaml_dict = content
            # If we were passed a key, value then
            # we encapsulate it in a list and process it
            # Key, Value passed to the module : Converted to Edits list #
            edits = []
            _edit = {}
            if params['value'] is not None:
                _edit['value'] = params['value']
                _edit['value_type'] = params['value_type']
                _edit['key'] = params['key']
                if params['update']:
                    _edit['action'] = 'update'
                    _edit['curr_value'] = params['curr_value']
                    _edit['curr_value_format'] = params['curr_value_format']
                    _edit['index'] = params['index']
                elif params['append']:
                    _edit['action'] = 'append'
                edits.append(_edit)
            elif params['edits'] is not None:
                edits = params['edits']
            if edits:
                results = Yedit.process_edits(edits, yamlfile)
                # if there were changes and a src provided to us we need to write
                if results['changed'] and params['src']:
                    yamlfile.write()
                return {'changed': results['changed'], 'result': results['results'], 'state': state}
            # no edits to make
            if params['src']:
                # pylint: disable=redefined-variable-type
                rval = yamlfile.write()
                return {'changed': rval[0],
                        'result': rval[1],
                        'state': state}
            # We were passed content but no src, key or value, or edits. Return contents in memory
            return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        # Fixed runtime message typo: was 'Unkown state passed'.
        return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    """Raised by OpenShiftCLI helpers for invalid call arguments."""
    pass
# Extra directories searched for `oc` beyond $PATH.
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
    """Return the full path to the `oc` client, or the bare name 'oc'.

    Searches $PATH plus ADDITIONAL_PATH_LOOKUPS, because `oc` may live in
    /usr/local/bin which ansible/sudo can drop from $PATH
    (https://github.com/openshift/openshift-ansible/issues/3410).
    """
    search_dirs = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
    binary = 'oc'
    try:
        # Preferred: shutil.which (Python 3).
        found = shutil.which(binary, path=os.pathsep.join(search_dirs))
        return found if found is not None else binary
    except AttributeError:
        # Python 2 fallback: naive first-match directory scan.
        for directory in search_dirs:
            candidate = os.path.join(directory, binary)
            if os.path.exists(candidate):
                return candidate
        return binary
# pylint: disable=too-few-public-methods
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
    """Thin wrapper around the `oc` command-line client.

    Provides generic primitives (get/create/replace/delete/process) that
    resource-specific subclasses build on. All commands run against a
    private temp-file copy of the kubeconfig.
    """
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        """Bind the wrapper to *namespace* (or all namespaces)."""
        self.namespace = namespace
        self.verbose = verbose
        # Work on a temp copy of the kubeconfig so the original is untouched.
        self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
        self.all_namespaces = all_namespaces
        self.oc_binary = locate_oc_binary()
    # Pylint allows only 5 arguments to be passed.
    # pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
        """Fetch the named resource, apply *content* key/value pairs (or
        *edits*) via Yedit, and `oc replace` it when anything changed.

        Returns the `oc` result dict, or {'returncode': 0, 'updated': False}
        when no change was needed.
        """
        res = self._get(resource, rname)
        if not res['results']:
            return res
        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, res['results'][0], separator=sep)
        updated = False
        if content is not None:
            changes = []
            for key, value in content.items():
                changes.append(yed.put(key, value))
            if any([change[0] for change in changes]):
                updated = True
        elif edits is not None:
            results = Yedit.process_edits(edits, yed)
            if results['changed']:
                updated = True
        if updated:
            yed.write()
            atexit.register(Utils.cleanup, [fname])
            return self._replace(fname, force)
        return {'returncode': 0, 'updated': False}
    def _replace(self, fname, force=False):
        """Run `oc replace -f fname` (optionally --force)."""
        # We are removing the 'resourceVersion' to handle
        # a race condition when modifying oc objects
        yed = Yedit(fname)
        results = yed.delete('metadata.resourceVersion')
        if results[0]:
            yed.write()
        cmd = ['replace', '-f', fname]
        if force:
            cmd.append('--force')
        return self.openshift_cmd(cmd)
    def _create_from_content(self, rname, content):
        """Serialize *content* to a temp file and `oc create` it."""
        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, content=content)
        yed.write()
        atexit.register(Utils.cleanup, [fname])
        return self._create(fname)
    def _create(self, fname):
        """Run `oc create -f fname`."""
        return self.openshift_cmd(['create', '-f', fname])
    def _delete(self, resource, name=None, selector=None):
        """Run `oc delete` by *name* or *selector* (one is required)."""
        cmd = ['delete', resource]
        if selector is not None:
            cmd.append('--selector={}'.format(selector))
        elif name is not None:
            cmd.append(name)
        else:
            raise OpenShiftCLIError('Either name or selector is required when calling delete.')
        return self.openshift_cmd(cmd)
    def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
        """Run `oc process` on a template (by name or inline data), optionally
        creating the resulting objects."""
        cmd = ['process']
        if template_data:
            cmd.extend(['-f', '-'])
        else:
            cmd.append(template_name)
        if params:
            param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
            cmd.append('-p')
            cmd.extend(param_str)
        results = self.openshift_cmd(cmd, output=True, input_data=template_data)
        if results['returncode'] != 0 or not create:
            return results
        fname = Utils.create_tmpfile(template_name + '-')
        yed = Yedit(fname, results['results'])
        yed.write()
        atexit.register(Utils.cleanup, [fname])
        return self.openshift_cmd(['create', '-f', fname])
    def _get(self, resource, name=None, selector=None, field_selector=None):
        """Run `oc get -o json`; 'results' is always normalized to a list."""
        cmd = ['get', resource]
        if selector is not None:
            cmd.append('--selector={}'.format(selector))
        if field_selector is not None:
            cmd.append('--field-selector={}'.format(field_selector))
        # Name cannot be used with selector or field_selector.
        if selector is None and field_selector is None and name is not None:
            cmd.append(name)
        cmd.extend(['-o', 'json'])
        rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
        if 'items' in rval:
            rval['results'] = rval['items']
        elif not isinstance(rval['results'], list):
            rval['results'] = [rval['results']]
        return rval
    def _schedulable(self, node=None, selector=None, schedulable=True):
        """Run `oc adm manage-node --schedulable=...` on nodes by name/selector."""
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        cmd.append('--schedulable={}'.format(schedulable))
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
    def _list_pods(self, node=None, selector=None, pod_selector=None):
        """List pods on nodes via `oc adm manage-node --list-pods`."""
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))
        cmd.extend(['--list-pods', '-o', 'json'])
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
    # pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        """Evacuate pods from nodes via `oc adm manage-node --evacuate`."""
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        if dry_run:
            cmd.append('--dry-run')
        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))
        if grace_period:
            cmd.append('--grace-period={}'.format(int(grace_period)))
        if force:
            cmd.append('--force')
        cmd.append('--evacuate')
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
    def _version(self):
        """Return raw `oc version` output."""
        return self.openshift_cmd(['version'], output=True, output_type='raw')
    def _import_image(self, url=None, name=None, tag=None):
        """Run `oc import-image` for name[:tag], optionally from *url*."""
        cmd = ['import-image']
        image = '{0}'.format(name)
        if tag:
            image += ':{0}'.format(tag)
        cmd.append(image)
        if url:
            cmd.append('--from={0}/{1}'.format(url, image))
        cmd.append('-n{0}'.format(self.namespace))
        cmd.append('--confirm')
        return self.openshift_cmd(cmd)
    def _run(self, cmds, input_data):
        """Execute *cmds* with KUBECONFIG set; return (rc, stdout, stderr)."""
        curr_env = os.environ.copy()
        curr_env.update({'KUBECONFIG': self.kubeconfig})
        proc = subprocess.Popen(cmds,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=curr_env)
        stdout, stderr = proc.communicate(input_data)
        return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
    # pylint: disable=too-many-arguments,too-many-branches
    def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
        """Build and run an `oc` (or `oc adm`) command.

        Returns a dict with 'returncode', 'cmd', optionally 'results'
        (parsed JSON or raw stdout) and, on error, 'stdout'/'stderr'.
        """
        cmds = [self.oc_binary]
        if oadm:
            cmds.append('adm')
        cmds.extend(cmd)
        if self.all_namespaces:
            cmds.extend(['--all-namespaces'])
        # NOTE(review): 'emtpy' looks like a typo for 'empty'; confirm whether
        # callers rely on the literal 'emtpy' before changing it.
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']: # E501
            cmds.extend(['-n', self.namespace])
        if self.verbose:
            print(' '.join(cmds))
        try:
            returncode, stdout, stderr = self._run(cmds, input_data)
        except OSError as ex:
            returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
        rval = {"returncode": returncode,
                "cmd": ' '.join(cmds)}
        if output_type == 'json':
            rval['results'] = {}
            if output and stdout:
                try:
                    rval['results'] = json.loads(stdout)
                except ValueError as verr:
                    if "No JSON object could be decoded" in verr.args:
                        rval['err'] = verr.args
        elif output_type == 'raw':
            rval['results'] = stdout if output else ''
        if self.verbose:
            print("STDOUT: {0}".format(stdout))
            print("STDERR: {0}".format(stderr))
        if 'err' in rval or returncode != 0:
            rval.update({"stderr": stderr,
                         "stdout": stdout})
        return rval
class Utils(object): # pragma: no cover
    """Static helpers: temp-file management, result searching, version
    parsing, and deep comparison of resource definitions."""
    @staticmethod
    def _write(filename, contents):
        """Write str(contents) to *filename* (truncating)."""
        with open(filename, 'w') as sfd:
            sfd.write(str(contents))
    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        """Serialize *data* (yaml/json/raw) to a temp file; returns its path.
        The file is removed at interpreter exit."""
        tmp = Utils.create_tmpfile(prefix=rname)
        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)
        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp
    @staticmethod
    def create_tmpfile_copy(inc_file):
        """Copy *inc_file* into a temp file (cleaned up at exit); returns its path."""
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        Utils._write(tmpfile, open(inc_file).read())
        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])
        return tmpfile
    @staticmethod
    def create_tmpfile(prefix='tmp'):
        """Create an empty named temp file (not auto-deleted); returns its path."""
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name
    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        """Write one temp file per {'path', 'data'} item; returns
        [{'name', 'path'}] entries."""
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files
    @staticmethod
    def cleanup(files):
        """Delete each existing path in *files* (file or directory tree)."""
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)
    @staticmethod
    def exists(results, _name):
        """Return True when a result with metadata.name == *_name* exists."""
        if not results:
            return False
        if Utils.find_result(results, _name):
            return True
        return False
    @staticmethod
    def find_result(results, _name):
        """Return the first result whose metadata.name == *_name*, else None."""
        rval = None
        for result in results:
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break
        return rval
    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        """Read and parse *sfile* as yaml or json; returns the parsed object."""
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()
        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)
        return contents
    @staticmethod
    def filter_versions(stdout):
        """Parse `oc version` output into {'oc': ..., 'openshift': ...,
        'kubernetes': ...} (last whitespace-separated token per line)."""
        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']
        for line in stdout.strip().split('\n'):
            for term in version_search:
                if not line:
                    continue
                if line.startswith(term):
                    version_dict[term] = line.split()[-1]
        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version in 3.2 does not return an "openshift" version
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]
        return version_dict
    @staticmethod
    def add_custom_versions(versions):
        """Derive '<tech>_numeric' and '<tech>_short' keys from version
        strings (strips leading 'v', '-...' and '+...' suffixes)."""
        versions_dict = {}
        for tech, version in versions.items():
            # clean up "-" from version
            if "-" in version:
                version = version.split("-")[0]
            if version.startswith('v'):
                version = version[1:]
            versions_dict[tech + '_numeric'] = version.split('+')[0]
            # "3.3.0.33" is what we have, we want "3.3"
            versions_dict[tech + '_short'] = "{}.{}".format(*version.split('.'))
        return versions_dict
    @staticmethod
    def openshift_installed():
        """Return True when the atomic-openshift RPM is installed (RPM DB query)."""
        import rpm
        transaction_set = rpm.TransactionSet()
        rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
        return rpmquery.count() > 0
    # Disabling too-many-branches.  This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        """Deep-compare two resource definition dicts, ignoring 'metadata',
        'status' and any *skip_keys*; returns True when equal."""
        if skip_keys:
            skip = skip_keys
        else:
            skip = []
        for key, value in result_def.items():
            if key in skip:
                continue
            # Both list and dict can return a list of possible results.
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False
                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False
                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False
                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print('list compare returned false')
                            return False
                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False
            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False
                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False
                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False
            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False
        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    """Holds oc command options and renders them as '--key=value' flags."""

    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        """The raw options mapping: name -> {'include': bool, 'value': ...}."""
        return self._options

    def to_option_list(self, ascommalist=''):
        """Alias for stringify(); see that method for *ascommalist*."""
        return self.stringify(ascommalist)

    def stringify(self, ascommalist=''):
        """Return sorted '--flag=value' strings for every included option
        whose value is non-None (ints such as 0 are kept).

        When an option's name matches *ascommalist*, its dict value is
        rendered as sorted, comma-separated 'key=value' pairs.
        """
        flags = []
        for name in sorted(self.config_options):
            opt = self.config_options[name]
            wanted = opt['include'] and (opt['value'] is not None or
                                         isinstance(opt['value'], int))
            if not wanted:
                continue
            if name == ascommalist:
                rendered = ','.join('{}={}'.format(k, v)
                                    for k, v in sorted(opt['value'].items()))
            else:
                rendered = opt['value']
            flags.append('--{}={}'.format(name.replace('_', '-'), rendered))
        return flags
class ServiceAccountConfig(object):
    """Value object describing a desired v1 ServiceAccount resource."""

    def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
        self.name = sname
        self.kubeconfig = kubeconfig
        self.namespace = namespace
        # Normalize None to empty lists so create_dict can iterate freely.
        self.secrets = secrets or []
        self.image_pull_secrets = image_pull_secrets or []
        self.data = {}
        self.create_dict()

    def create_dict(self):
        """Populate self.data with the ServiceAccount resource dict."""
        self.data['apiVersion'] = 'v1'
        self.data['kind'] = 'ServiceAccount'
        self.data['metadata'] = {'name': self.name, 'namespace': self.namespace}
        self.data['secrets'] = [{"name": sec} for sec in self.secrets]
        self.data['imagePullSecrets'] = [{"name": sec} for sec in self.image_pull_secrets]
class ServiceAccount(Yedit):
    """Yedit wrapper over a ServiceAccount resource document."""
    image_pull_secrets_path = "imagePullSecrets"
    secrets_path = "secrets"

    def __init__(self, content):
        """``content``: parsed ServiceAccount resource dict."""
        super(ServiceAccount, self).__init__(content=content)
        self._secrets = None
        self._image_pull_secrets = None

    @property
    def image_pull_secrets(self):
        """Lazily fetch ``.imagePullSecrets``; cached after first access."""
        if self._image_pull_secrets is None:
            self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
        return self._image_pull_secrets

    @image_pull_secrets.setter
    def image_pull_secrets(self, secrets):
        """Replace the cached imagePullSecrets list."""
        self._image_pull_secrets = secrets

    @property
    def secrets(self):
        """Lazily fetch ``.secrets``.

        NOTE(review): uses truthiness, so an empty cached list triggers a
        refetch on every access (unlike image_pull_secrets) — kept as-is
        because callers may rely on that refresh behavior.
        """
        if not self._secrets:
            self._secrets = self.get(ServiceAccount.secrets_path) or []
        return self._secrets

    @secrets.setter
    def secrets(self, secrets):
        """Replace the cached secrets list."""
        self._secrets = secrets

    def delete_secret(self, inc_secret):
        """Remove the secret named ``inc_secret``; return True when removed.

        Bug fix: compare the found index against ``None``.  The previous
        ``if remove_idx:`` truthiness test silently failed to delete a
        secret sitting at index 0.
        """
        remove_idx = None
        for idx, sec in enumerate(self.secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break
        if remove_idx is not None:
            del self.secrets[remove_idx]
            return True
        return False

    def delete_image_pull_secret(self, inc_secret):
        """Remove the image pull secret named ``inc_secret``.

        Same index-0 bug fix as :meth:`delete_secret`.
        """
        remove_idx = None
        for idx, sec in enumerate(self.image_pull_secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break
        if remove_idx is not None:
            del self.image_pull_secrets[remove_idx]
            return True
        return False

    def find_secret(self, inc_secret):
        """Return the secret dict with a matching name, or None."""
        for secret in self.secrets:
            if secret['name'] == inc_secret:
                return secret
        return None

    def find_image_pull_secret(self, inc_secret):
        """Return the image pull secret dict with a matching name, or None."""
        for secret in self.image_pull_secrets:
            if secret['name'] == inc_secret:
                return secret
        return None

    def add_secret(self, inc_secret):
        """Append a secret reference, creating the list in the yaml if absent."""
        if self.secrets:
            self.secrets.append({"name": inc_secret})
        else:
            self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])

    def add_image_pull_secret(self, inc_secret):
        """Append an image pull secret reference, creating the list if absent."""
        if self.image_pull_secrets:
            self.image_pull_secrets.append({"name": inc_secret})
        else:
            self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
class OCServiceAccountSecret(OpenShiftCLI):
    """Manage the secrets attached to a service account via ``oc``."""
    kind = 'sa'

    def __init__(self, config, verbose=False):
        """``config``: ServiceAccountConfig naming the sa and its secrets."""
        super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
        self.config = config
        self.verbose = verbose
        self._service_account = None

    @property
    def service_account(self):
        """Lazily fetch and cache the ServiceAccount wrapper."""
        if not self._service_account:
            self.get()
        return self._service_account

    @service_account.setter
    def service_account(self, data):
        """Replace the cached ServiceAccount wrapper."""
        self._service_account = data

    def exists(self, in_secret):
        """Return True when the named secret is attached to the sa."""
        result = self.service_account.find_secret(in_secret)
        if not result:
            return False
        return True

    def get(self):
        """Fetch the service account; ``results`` becomes its secrets list."""
        sao = self._get(OCServiceAccountSecret.kind, self.config.name)
        if sao['returncode'] == 0:
            self.service_account = ServiceAccount(content=sao['results'][0])
            sao['results'] = self.service_account.get('secrets')
        return sao

    def delete(self):
        """Detach every configured secret; replace the resource if changed."""
        modified = []
        for rem_secret in self.config.secrets:
            modified.append(self.service_account.delete_secret(rem_secret))
        if any(modified):
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
        return {'returncode': 0, 'changed': False}

    def put(self):
        """Attach any missing configured secrets; replace the resource if changed."""
        modified = False
        for add_secret in self.config.secrets:
            if not self.service_account.find_secret(add_secret):
                self.service_account.add_secret(add_secret)
                modified = True
        if modified:
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
        return {'returncode': 0, 'changed': False}

    @staticmethod
    def run_ansible(params, check_mode):
        """Idempotently drive list/absent/present states for the module.

        Fixes over the previous revision: restored the mangled
        ``if state == 'list':`` statement (was ``f state == 'list':``),
        reinstated the missing ``if state == 'absent':`` guard (without it
        the delete branch ran for every non-list state), completed the
        truncated ``if not oc_sa_sec.exists(...)`` line, and repaired the
        mismatched quotes in the check-mode messages.
        """
        sconfig = ServiceAccountConfig(params['service_account'],
                                       params['namespace'],
                                       params['kubeconfig'],
                                       [params['secret']],
                                       None)
        oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
        state = params['state']
        api_rval = oc_sa_sec.get()
        if state == 'list':
            return {'changed': False, 'results': api_rval['results'], 'state': "list"}
        if state == 'absent':
            if oc_sa_sec.exists(params['secret']):
                if check_mode:
                    return {'changed': True, 'msg': 'Would have removed the ' +
                            'secret from the service account.'}
                api_rval = oc_sa_sec.delete()
                return {'changed': True, 'results': api_rval, 'state': "absent"}
            return {'changed': False, 'state': "absent"}
        if state == 'present':
            if not oc_sa_sec.exists(params['secret']):
                if check_mode:
                    return {'changed': True, 'msg': 'Would have added the ' +
                            'secret to the service account.'}
                api_rval = oc_sa_sec.put()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}
                # Re-read so the caller sees the post-change state.
                api_rval = oc_sa_sec.get()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}
                return {'changed': True, 'results': api_rval, 'state': "present"}
            return {'changed': False, 'results': api_rval, 'state': "present"}
        return {'failed': True,
                'changed': False,
                'msg': 'Unknown state passed. %s' % state,
                'state': 'unknown'}
def main():
    """Ansible entry point: parse module arguments and run the action."""
    arg_spec = {
        'kubeconfig': {'default': '/etc/origin/master/admin.kubeconfig', 'type': 'str'},
        'state': {'default': 'present', 'type': 'str',
                  'choices': ['present', 'absent', 'list']},
        'debug': {'default': False, 'type': 'bool'},
        'namespace': {'default': None, 'required': True, 'type': 'str'},
        'secret': {'default': None, 'type': 'str'},
        'service_account': {'required': True, 'type': 'str'},
    }
    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
    rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
    # run_ansible only includes 'failed' on error; report accordingly.
    if 'failed' in rval:
        module.fail_json(**rval)
    module.exit_json(**rval)
# Run the Ansible module only when executed directly (not on import).
if __name__ == '__main__':
    main()
| true | true |
f7fa8f960ff94e07e4dbe18c0b7667db558354bf | 645 | py | Python | hashable/tools.py | minmax/hashable | b38e325354c253fd41e9c92572eb5ba1e8b2ce8f | [
"MIT"
] | null | null | null | hashable/tools.py | minmax/hashable | b38e325354c253fd41e9c92572eb5ba1e8b2ce8f | [
"MIT"
] | null | null | null | hashable/tools.py | minmax/hashable | b38e325354c253fd41e9c92572eb5ba1e8b2ce8f | [
"MIT"
] | null | null | null | from operator import attrgetter, methodcaller
def create_getters_list(attributes=None, methods=None):
    """Return getter callables for the given names.

    Produces one ``attrgetter`` per attribute name followed by one
    ``methodcaller`` per method name; a ``None`` argument contributes
    nothing.
    """
    getters = []
    if attributes is not None:
        getters += [attrgetter(name) for name in attributes]
    if methods is not None:
        getters += [methodcaller(name) for name in methods]
    return getters
def get_super_not_buildin_method_or_none(obj, name):
    """Return ``getattr(obj, name)`` unless it is missing or a builtin
    default (e.g. object's ``__eq__``/``__hash__`` slot wrappers), in
    which case return None."""
    method = getattr(obj, name, None)
    if method is None or isinstance(method, build_in_methods_types):
        return None
    return method


# Sample the types of the default __eq__/__hash__ on a plain class so
# user-defined overrides can be told apart from the builtin defaults.
class _C(object):
    pass


build_in_methods_types = (type(_C.__eq__), type(_C.__hash__))
del _C  # only needed to sample the types above
| 24.807692 | 61 | 0.717829 | from operator import attrgetter, methodcaller
def create_getters_list(attributes=None, methods=None):
getters_list = []
if attributes is not None:
getters_list.extend(map(attrgetter, attributes))
if methods is not None:
getters_list.extend(map(methodcaller, methods))
return getters_list
def get_super_not_buildin_method_or_none(obj, name):
method = getattr(obj, name, None)
if method is None:
return None
if isinstance(method, build_in_methods_types):
return None
return method
class _C(object):
pass
build_in_methods_types = (type(_C.__eq__), type(_C.__hash__))
del _C
| true | true |
f7fa8fad476aa9040c6103d5e030d4f5f09f8191 | 367 | py | Python | src/crime_prediction/preprocessingWeatherData.py | ras9841/UP-STAT-2018 | cad06bfac3c12b4cb14c3b703e23c52cc391383a | [
"MIT"
] | null | null | null | src/crime_prediction/preprocessingWeatherData.py | ras9841/UP-STAT-2018 | cad06bfac3c12b4cb14c3b703e23c52cc391383a | [
"MIT"
] | 1 | 2018-05-08T12:16:50.000Z | 2018-05-08T21:28:40.000Z | src/crime_prediction/preprocessingWeatherData.py | ras9841/UP-STAT-2018 | cad06bfac3c12b4cb14c3b703e23c52cc391383a | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 29 16:09:02 2018
@author: valentinarodriguez
"""
def dataCleaning(data, columnsToDrop):
    """Recode class labels and drop unused columns.

    Replaces the label "NVC" with -1 and "VC" with +1, drops the
    columns listed in ``columnsToDrop``, echoes the cleaned frame to
    stdout and returns it (the input frame is not mutated).
    """
    cleaned = (data
               .replace(to_replace="NVC", value=-1)
               .replace(to_replace="VC", value=+1)
               .drop(columnsToDrop, axis=1))
    print("dataCleaning")
    print(cleaned)
    return cleaned
| 18.35 | 50 | 0.659401 |
def dataCleaning(data, columnsToDrop):
data = data.replace(to_replace="NVC",value=-1)
data = data.replace(to_replace="VC",value=+1)
data = data.drop(columnsToDrop, axis=1)
print("dataCleaning")
print(data)
return data
| true | true |
f7fa904110ed31639e372d170acecafaf2cdadfb | 477 | py | Python | nomi/migrations/0070_auto_20170614_1754.py | TheDjangoBoys/Gymkhana-Nominations | 6ce13fb3a21fe91630e0c8fdaf597e61c87f2d06 | [
"MIT"
] | null | null | null | nomi/migrations/0070_auto_20170614_1754.py | TheDjangoBoys/Gymkhana-Nominations | 6ce13fb3a21fe91630e0c8fdaf597e61c87f2d06 | [
"MIT"
] | null | null | null | nomi/migrations/0070_auto_20170614_1754.py | TheDjangoBoys/Gymkhana-Nominations | 6ce13fb3a21fe91630e0c8fdaf597e61c87f2d06 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-14 17:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax UserProfile.room_no to an optional CharField."""

    # Must be applied after 0069_auto_20170614_1645 within the same app.
    dependencies = [
        ('nomi', '0069_auto_20170614_1645'),
    ]
    # room_no becomes nullable/blankable with a 10-character limit.
    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='room_no',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
    ]
| 22.714286 | 73 | 0.624738 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('nomi', '0069_auto_20170614_1645'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='room_no',
field=models.CharField(blank=True, max_length=10, null=True),
),
]
| true | true |
f7fa90743322737adb8f67cd5f6dff918c6c7607 | 711 | py | Python | Ekeopara_Praise/Phase 1/Python Basic 2/Day20 Tasks/Task10.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 6 | 2020-05-23T19:53:25.000Z | 2021-05-08T20:21:30.000Z | Ekeopara_Praise/Phase 1/Python Basic 2/Day20 Tasks/Task10.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 8 | 2020-05-14T18:53:12.000Z | 2020-07-03T00:06:20.000Z | Ekeopara_Praise/Phase 1/Python Basic 2/Day20 Tasks/Task10.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 39 | 2020-05-10T20:55:02.000Z | 2020-09-12T17:40:59.000Z | '''10. Write a Python program to check whether a point (x,y) is in a triangle or not. There is a triangle formed by three points.
Input:
x1,y1,x2,y2,x3,y3,xp,yp separated by a single space.
Input three coordinate of the circle:
9 3 6 8 3 6
Radius of the said circle:
3.358
Central coordinate (x, y) of the circle:
6.071 4.643'''
# Interactive point-in-triangle test: read all eight coordinates from one line.
print("Input x1,y1,x2,y2,x3,y3,xp,yp:")
x1,y1,x2,y2,x3,y3,xp,yp = map(float, input().split())
# z-component of the cross product of each directed edge with the vector
# from the edge start to the point; its sign says which side the point is on.
c1 = (x2-x1)*(yp-y1)-(y2-y1)*(xp-x1)
c2 = (x3-x2)*(yp-y2)-(y3-y2)*(xp-x2)
c3 = (x1-x3)*(yp-y3)-(y1-y3)*(xp-x3)
# Strictly inside when all three signs agree; a point exactly on an edge
# (some cross product == 0) is reported as outside.
if (c1<0 and c2<0 and c3<0) or (c1>0 and c2>0 and c3>0):
    print("The point is in the triangle.")
else:
    print("The point is outside the triangle.")
#Reference: w3reource | 33.857143 | 129 | 0.662447 |
print("Input x1,y1,x2,y2,x3,y3,xp,yp:")
x1,y1,x2,y2,x3,y3,xp,yp = map(float, input().split())
c1 = (x2-x1)*(yp-y1)-(y2-y1)*(xp-x1)
c2 = (x3-x2)*(yp-y2)-(y3-y2)*(xp-x2)
c3 = (x1-x3)*(yp-y3)-(y1-y3)*(xp-x3)
if (c1<0 and c2<0 and c3<0) or (c1>0 and c2>0 and c3>0):
print("The point is in the triangle.")
else:
print("The point is outside the triangle.")
| true | true |
f7fa909795cb9691247faf1b0d065dff915ecda9 | 265 | py | Python | whimsical-woodpeckers/anon/migrations/0008_merge_20200809_0006.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 40 | 2020-08-02T07:38:22.000Z | 2021-07-26T01:46:50.000Z | whimsical-woodpeckers/anon/migrations/0008_merge_20200809_0006.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 134 | 2020-07-31T12:15:45.000Z | 2020-12-13T04:42:19.000Z | whimsical-woodpeckers/anon/migrations/0008_merge_20200809_0006.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 101 | 2020-07-31T12:00:47.000Z | 2021-11-01T09:06:58.000Z | # Generated by Django 3.1 on 2020-08-09 04:06
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration: reconciles the 0007 and 0002 branches of the
    'anon' app's migration graph; intentionally performs no operations."""

    dependencies = [
        ('anon', '0007_auto_20200807_2303'),
        ('anon', '0002_auto_20200804_1744'),
    ]
    operations = [
    ]
| 17.666667 | 45 | 0.641509 |
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('anon', '0007_auto_20200807_2303'),
('anon', '0002_auto_20200804_1744'),
]
operations = [
]
| true | true |
f7fa90edbafb8af36e90f40679d278feaafdcd0b | 759 | py | Python | old_filter.py | SeriousDim/gr09 | db23a064c422c146ade39c81bb287ee958038731 | [
"MIT"
] | null | null | null | old_filter.py | SeriousDim/gr09 | db23a064c422c146ade39c81bb287ee958038731 | [
"MIT"
] | 1 | 2021-11-11T14:51:12.000Z | 2021-11-11T14:51:12.000Z | old_filter.py | SeriousDim/gr09 | db23a064c422c146ade39c81bb287ee958038731 | [
"MIT"
] | 13 | 2021-11-10T13:43:16.000Z | 2021-11-24T11:17:03.000Z | from PIL import Image
import numpy as np
# Mosaic filter: for each 10x10 tile, average the per-pixel brightness
# (R+G+B), quantize it to steps of 50, and paint the tile that grey level.
img = Image.open("img2.jpg")
arr = np.array(img)
height = len(arr)
width = len(arr[1])
i = 0
while i < height - 11:
    j = 0
    while j < width - 11:
        # Sum the brightness over the 10x10 tile.
        s = 0
        for n in range(i, i + 10):
            for n1 in range(j, j + 10):
                # Bug fix: the old code assigned the red channel to the
                # loop variable n1, so the green/blue reads then indexed
                # the row with a pixel *value* instead of the column.
                # int() also prevents uint8 wrap-around when summing.
                r = int(arr[n][n1][0])
                g = int(arr[n][n1][1])
                b = int(arr[n][n1][2])
                s += r + g + b
        # Mean brightness of the 100 pixels, quantized to multiples of 50.
        s = int(s // 100)
        level = int(s // 50) * 50
        for n in range(i, i + 10):
            for n1 in range(j, j + 10):
                arr[n][n1][0] = level
                arr[n][n1][1] = level
                arr[n][n1][2] = level
        j = j + 10
    i = i + 10
res = Image.fromarray(arr)
res.save('res.jpg')
| 26.172414 | 49 | 0.392622 | from PIL import Image
import numpy as np
img = Image.open("img2.jpg")
arr = np.array(img)
a = len(arr)
a1 = len(arr[1])
i = 0
while i < a - 11:
j = 0
while j < a1 - 11:
s = 0
for n in range(i, i + 10):
for n1 in range(j, j + 10):
n1 = arr[n][n1][0]
n2 = arr[n][n1][1]
n3 = arr[n][n1][2]
M = n1 + n2 + n3
s += M
s = int(s // 100)
for n in range(i, i + 10):
for n1 in range(j, j + 10):
arr[n][n1][0] = int(s // 50) * 50
arr[n][n1][1] = int(s // 50) * 50
arr[n][n1][2] = int(s // 50) * 50
j = j + 10
i = i + 10
res = Image.fromarray(arr)
res.save('res.jpg')
| true | true |
f7fa925649218e2c995b08979fb24fe352795975 | 7,979 | py | Python | graalpython/com.oracle.graal.python.test/src/tests/test_set.py | cmueh/graalpython | 4267e1be9d36377d6ccb5911d5404e9d77011211 | [
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null | graalpython/com.oracle.graal.python.test/src/tests/test_set.py | cmueh/graalpython | 4267e1be9d36377d6ccb5911d5404e9d77011211 | [
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null | graalpython/com.oracle.graal.python.test/src/tests/test_set.py | cmueh/graalpython | 4267e1be9d36377d6ccb5911d5404e9d77011211 | [
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null | # Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# The Universal Permissive License (UPL), Version 1.0
#
# Subject to the condition set forth below, permission is hereby granted to any
# person obtaining a copy of this software, associated documentation and/or
# data (collectively the "Software"), free of charge and under any and all
# copyright rights in the Software, and any and all patent rights owned or
# freely licensable by each licensor hereunder covering either (i) the
# unmodified Software as contributed to or provided by such licensor, or (ii)
# the Larger Works (as defined below), to deal in both
#
# (a) the Software, and
#
# (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
# one is included with the Software each a "Larger Work" to which the Software
# is contributed by such licensors),
#
# without restriction, including without limitation the rights to copy, create
# derivative works of, display, perform, and distribute the Software and make,
# use, sell, offer for sale, import, export, have made, and have sold the
# Software and the Larger Work(s), and to sublicense the foregoing rights on
# either these or other terms.
#
# This license is subject to the following condition:
#
# The above copyright notice and either this complete permission notice or at a
# minimum a reference to the UPL must be included in all copies or substantial
# portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ankitv 10/10/13
# Iterating by Sequence Index
def assert_raises(err, fn, *args, **kwargs):
    """Assert that calling ``fn(*args, **kwargs)`` raises ``err``.

    Any other exception type propagates unchanged; if nothing is raised
    the final assert fails.
    """
    try:
        fn(*args, **kwargs)
    except err:
        raised = True
    else:
        raised = False
    assert raised
class PassThru(Exception):
    """Sentinel exception used to verify it passes through set methods."""
    pass


def check_pass_thru():
    """Generator that raises PassThru on its first advance (never yields).

    The unreachable ``yield`` is deliberate: it makes this a generator
    function, so the exception fires inside the consuming set method.
    """
    raise PassThru
    yield 1
def test_set_or_union():
    """The | operator and set.union must agree for disjoint, overlapping
    and identical operands."""
    base = {1, 2, 3}
    cases = [
        ({4, 5, 6}, {1, 2, 3, 4, 5, 6}),   # disjoint
        ({1, 2, 4}, {1, 2, 3, 4}),         # overlapping
        ({1, 2, 3}, {1, 2, 3}),            # identical
    ]
    for other, expected in cases:
        assert (base | other) == expected
        assert base.union(other) == expected
def test_set_union():
    """set.union accepts arbitrary iterables: dict (its keys), list, tuple."""
    base = {1, 2, 3}
    assert base.union({1: 'a', 2: 'b', 4: 'd'}) == {1, 2, 3, 4}
    assert base.union([2, 3, 4, 5]) == {1, 2, 3, 4, 5}
    assert base.union((3, 4, 5, 6)) == {1, 2, 3, 4, 5, 6}
def test_set_remove():
    """remove() deletes an existing element in place."""
    values = {1, 2, 3}
    assert values == {1, 2, 3}
    values.remove(3)
    assert values == {1, 2}
def test_set_le():
    """The <= operator performs a subset test."""
    small, big = set("a"), set("abc")
    assert small <= big
def test_difference():
    """set.difference: element-wise semantics, non-mutation, error
    propagation, and acceptance of arbitrary iterable arguments."""
    word = 'simsalabim'
    otherword = 'madagascar'
    letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    s = set(word)
    d = dict.fromkeys(word)
    i = s.difference(otherword)
    # A letter is in the difference iff it is in `word` but not `otherword`.
    for c in letters:
        assert (c in i) == (c in d and c not in otherword)
    # The receiver must be unchanged and the result a plain set.
    assert s == set(word)
    assert type(i) == set
    # Exceptions raised while consuming the argument must pass through;
    # unhashable elements must raise TypeError.
    assert_raises(PassThru, s.difference, check_pass_thru())
    assert_raises(TypeError, s.difference, [[]])
    # Any iterable type is accepted as the argument (dict contributes keys).
    for C in set, frozenset, dict.fromkeys, str, list, tuple:
        assert set('abcba').difference(C('cdc')) == set('ab')
        assert set('abcba').difference(C('efgfe')) == set('abc')
        assert set('abcba').difference(C('ccb')) == set('a')
        assert set('abcba').difference(C('ef')) == set('abc')
        assert set('abcba').difference() == set('abc')
        assert set('abcba').difference(C('a'), C('b')) == set('c')
def test_difference_update():
    """set.difference_update: in-place semantics, None return, error
    propagation, and acceptance of arbitrary iterable arguments."""
    word = 'simsalabim'
    otherword = 'madagascar'
    # `letters` is unused here; kept for symmetry with the other tests.
    letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    s = set(word)
    d = dict.fromkeys(word)
    retval = s.difference_update(otherword)
    # In-place mutators return None.
    assert retval == None
    # After the update, s holds exactly the letters of word not in otherword.
    for c in (word + otherword):
        if c in word and c not in otherword:
            assert c in s
        else:
            assert c not in s
    # Exceptions from consuming the argument propagate; unhashable
    # elements raise TypeError.
    assert_raises(PassThru, s.difference_update, check_pass_thru())
    assert_raises(TypeError, s.difference_update, [[]])
    # assert_raises(TypeError, s.symmetric_difference_update, [[]])
    # Every iterable type works as argument; also multi-argument form.
    for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')):
        for C in set, frozenset, dict.fromkeys, str, list, tuple:
            s = set('abcba')
            assert s.difference_update(C(p)) == None
            assert s == set(q)
            s = set('abcdefghih')
            s.difference_update()
            assert s == set('abcdefghih')
            s = set('abcdefghih')
            s.difference_update(C('aba'))
            assert s == set('cdefghih')
            s = set('abcdefghih')
            s.difference_update(C('cdc'), C('aba'))
            assert s == set('efghih')
def test_sub_and_super():
    """Subset/superset comparisons for both set and frozenset, plus the
    issubset/issuperset methods on raw iterables."""
    for make in (set, frozenset):
        small, mid, other = map(make, ['ab', 'abcde', 'def'])
        # Proper and improper subset/superset relations.
        assert small < mid
        assert small <= mid
        assert mid <= mid
        assert mid > small
        assert mid >= small
        # Incomparable sets answer False to every ordering operator.
        assert not mid < other
        assert not mid <= other
        assert not mid > other
        assert not mid >= other
        # The methods accept plain iterables, not just sets.
        assert set('a').issubset('abc')
        assert set('abc').issuperset('a')
        assert not set('a').issubset('cbs')
        assert not set('cbs').issuperset('a')
def test_intersection():
    """set.intersection: element-wise semantics, non-mutation, error
    propagation, and acceptance of arbitrary iterable arguments."""
    word = 'simsalabim'
    otherword = 'madagascar'
    letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    s = set(word)
    d = dict.fromkeys(word)
    i = s.intersection(otherword)
    # A letter is in the intersection iff it is in both words.
    for c in letters:
        assert (c in i) == (c in d and c in otherword)
    # The receiver must not be mutated.
    assert s == set(word)
    # assert type(i) == set
    # Exceptions raised while consuming the argument must pass through.
    assert_raises(PassThru, s.intersection, check_pass_thru())
    # Any iterable type is accepted (dict contributes keys); duplicates
    # in the argument collapse, so e.g. 'cc' below still means {'c'}.
    for C in set, frozenset, dict.fromkeys, str, list, tuple:
        assert set('abcba').intersection(C('cdc')) == set('cc')
        assert set('abcba').intersection(C('efgfe')) == set('')
        assert set('abcba').intersection(C('ccb')) == set('bc')
        assert set('abcba').intersection(C('ef')) == set('')
        assert set('abcba').intersection(C('cbcf'), C('bag')) == set('b')
    # TODO: currently the id function behaves a bit differently than the one in cPython
    # s = set('abcba')
    # z = s.intersection()
    # if set == frozenset():
    #     assert id(s) == id(z)
    # else:
    #     assert id(s) != id(z)
def test_same_id():
    """All empty frozensets share one identity, so the ids collapse."""
    ids = {id(frozenset()) for _ in range(100)}
    assert len(ids) == 1
def test_rich_compare():
    """A non-set right operand gets the reflected comparison: set's own
    rich comparison returns NotImplemented, so the mirrored dunder on the
    other object must be called (e.g. ``set < obj`` -> ``obj.__gt__``)."""
    class TestRichSetCompare:
        def __gt__(self, some_set):
            self.gt_called = True
            return False
        def __lt__(self, some_set):
            self.lt_called = True
            return False
        def __ge__(self, some_set):
            self.ge_called = True
            return False
        def __le__(self, some_set):
            self.le_called = True
            return False
    # This first tries the builtin rich set comparison, which doesn't know
    # how to handle the custom object. Upon returning NotImplemented, the
    # corresponding comparison on the right object is invoked.
    myset = {1, 2, 3}
    myobj = TestRichSetCompare()
    myset < myobj
    assert myobj.gt_called
    myobj = TestRichSetCompare()
    myset > myobj
    assert myobj.lt_called
    myobj = TestRichSetCompare()
    myset <= myobj
    assert myobj.ge_called
    myobj = TestRichSetCompare()
    myset >= myobj
    assert myobj.le_called
| 31.290196 | 87 | 0.621632 |
def assert_raises(err, fn, *args, **kwargs):
raised = False
try:
fn(*args, **kwargs)
except err:
raised = True
assert raised
class PassThru(Exception):
pass
def check_pass_thru():
raise PassThru
yield 1
def test_set_or_union():
s1 = {1, 2, 3}
s2 = {4, 5, 6}
s3 = {1, 2, 4}
s4 = {1, 2, 3}
or_result = s1 | s2
union_result = s1.union(s2)
assert or_result == {1, 2, 3, 4, 5, 6}
assert union_result == {1, 2, 3, 4, 5, 6}
or_result = s1 | s3
union_result = s1.union(s3)
assert or_result == {1, 2, 3, 4}
assert union_result == {1, 2, 3, 4}
or_result = s1 | s4
union_result = s1.union(s4)
assert or_result == {1, 2, 3}
assert union_result == {1, 2, 3}
def test_set_union():
assert {1, 2, 3}.union({1: 'a', 2: 'b', 4: 'd'}) == {1, 2, 3, 4}
assert {1, 2, 3}.union([2, 3, 4, 5]) == {1, 2, 3, 4, 5}
assert {1, 2, 3}.union((3, 4, 5, 6)) == {1, 2, 3, 4, 5, 6}
def test_set_remove():
s = {1, 2, 3}
assert s == {1, 2, 3}
s.remove(3)
assert s == {1, 2}
def test_set_le():
assert set("a") <= set("abc")
def test_difference():
word = 'simsalabim'
otherword = 'madagascar'
letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
s = set(word)
d = dict.fromkeys(word)
i = s.difference(otherword)
for c in letters:
assert (c in i) == (c in d and c not in otherword)
assert s == set(word)
assert type(i) == set
assert_raises(PassThru, s.difference, check_pass_thru())
assert_raises(TypeError, s.difference, [[]])
for C in set, frozenset, dict.fromkeys, str, list, tuple:
assert set('abcba').difference(C('cdc')) == set('ab')
assert set('abcba').difference(C('efgfe')) == set('abc')
assert set('abcba').difference(C('ccb')) == set('a')
assert set('abcba').difference(C('ef')) == set('abc')
assert set('abcba').difference() == set('abc')
assert set('abcba').difference(C('a'), C('b')) == set('c')
def test_difference_update():
word = 'simsalabim'
otherword = 'madagascar'
letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
s = set(word)
d = dict.fromkeys(word)
retval = s.difference_update(otherword)
assert retval == None
for c in (word + otherword):
if c in word and c not in otherword:
assert c in s
else:
assert c not in s
assert_raises(PassThru, s.difference_update, check_pass_thru())
assert_raises(TypeError, s.difference_update, [[]])
for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')):
for C in set, frozenset, dict.fromkeys, str, list, tuple:
s = set('abcba')
assert s.difference_update(C(p)) == None
assert s == set(q)
s = set('abcdefghih')
s.difference_update()
assert s == set('abcdefghih')
s = set('abcdefghih')
s.difference_update(C('aba'))
assert s == set('cdefghih')
s = set('abcdefghih')
s.difference_update(C('cdc'), C('aba'))
assert s == set('efghih')
def test_sub_and_super():
for thetype in [set, frozenset]:
p, q, r = map(thetype, ['ab', 'abcde', 'def'])
assert p < q
assert p <= q
assert q <= q
assert q > p
assert q >= p
assert not q < r
assert not q <= r
assert not q > r
assert not q >= r
assert set('a').issubset('abc')
assert set('abc').issuperset('a')
assert not set('a').issubset('cbs')
assert not set('cbs').issuperset('a')
def test_intersection():
word = 'simsalabim'
otherword = 'madagascar'
letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
s = set(word)
d = dict.fromkeys(word)
i = s.intersection(otherword)
for c in letters:
assert (c in i) == (c in d and c in otherword)
assert s == set(word)
assert_raises(PassThru, s.intersection, check_pass_thru())
for C in set, frozenset, dict.fromkeys, str, list, tuple:
assert set('abcba').intersection(C('cdc')) == set('cc')
assert set('abcba').intersection(C('efgfe')) == set('')
assert set('abcba').intersection(C('ccb')) == set('bc')
assert set('abcba').intersection(C('ef')) == set('')
assert set('abcba').intersection(C('cbcf'), C('bag')) == set('b')
def test_same_id():
empty_ids = set([id(frozenset()) for i in range(100)])
assert len(empty_ids) == 1
def test_rich_compare():
class TestRichSetCompare:
def __gt__(self, some_set):
self.gt_called = True
return False
def __lt__(self, some_set):
self.lt_called = True
return False
def __ge__(self, some_set):
self.ge_called = True
return False
def __le__(self, some_set):
self.le_called = True
return False
# how to handle the custom object. Upon returning NotImplemented, the
# corresponding comparison on the right object is invoked.
myset = {1, 2, 3}
myobj = TestRichSetCompare()
myset < myobj
assert myobj.gt_called
myobj = TestRichSetCompare()
myset > myobj
assert myobj.lt_called
myobj = TestRichSetCompare()
myset <= myobj
assert myobj.ge_called
myobj = TestRichSetCompare()
myset >= myobj
assert myobj.le_called
| true | true |
f7fa9298602b9714532538ca56331fd22e1e1f5e | 1,222 | py | Python | opennebula_gc/cli.py | cloudlinux/opennebula-gc | 50711d14d7426c6e7fc65099c652270976630022 | [
"MIT"
] | 1 | 2017-03-29T12:47:23.000Z | 2017-03-29T12:47:23.000Z | opennebula_gc/cli.py | cloudlinux/opennebula-gc | 50711d14d7426c6e7fc65099c652270976630022 | [
"MIT"
] | null | null | null | opennebula_gc/cli.py | cloudlinux/opennebula-gc | 50711d14d7426c6e7fc65099c652270976630022 | [
"MIT"
] | null | null | null | import sys
import argparse
from opennebula_gc import clean_opennebula
def cli():
    """Build the argument parser, print help when invoked with no
    arguments, and return the parsed namespace.

    The 'clean' subcommand binds ``func=clean_opennebula`` so the caller
    can dispatch via ``args.func``.
    """
    parser = argparse.ArgumentParser(
        prog="Clean OpenNebula Resources",
        description="CLI tool to clean outdated resources in OpenNebula")
    commands = parser.add_subparsers(
        title="Commands",
        dest="command")
    cmd_clean = commands.add_parser(
        "clean",
        help="Delete outdated resources",
        description="Delete outdated resources in OpenShift cloud" if False else "Delete outdated resources in OpenNebula cloud")
    cmd_clean.set_defaults(func=clean_opennebula)
    # Age threshold in hours; resources older than this are deleted.
    cmd_clean.add_argument(
        "-O", "--outdated-hours", default=2, dest='hours')
    cmd_clean.add_argument(
        "-n", "--network", dest='network')
    cmd_clean.add_argument(
        "-H", "--endpoint", dest="nebula_endpoint")
    # NOTE(review): dest is "user", so callers must read args.user
    # (main() below reads args.user_name — see its fix).
    cmd_clean.add_argument(
        "-u", "--user", dest="user")
    cmd_clean.add_argument(
        "-p", "--password", dest="pwd")
    # With no CLI arguments at all, show usage and exit non-zero.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parser.parse_args()
def main():
    """CLI entry point: parse arguments and dispatch the chosen command."""
    args = cli()
    # Bug fix: argparse stores the --user value under ``args.user``
    # (dest="user" in cli()); the old ``args.user_name`` raised
    # AttributeError on every invocation.
    args.func(
        hours=args.hours,
        network=args.network,
        user=args.user,
        pwd=args.pwd,
        endpoint=args.nebula_endpoint,
    )
| 24.938776 | 73 | 0.621931 | import sys
import argparse
from opennebula_gc import clean_opennebula
def cli():
parser = argparse.ArgumentParser(
prog="Clean OpenNebula Resources",
description="CLI tool to clean outdated resources in OpenNebula")
commands = parser.add_subparsers(
title="Commands",
dest="command")
cmd_clean = commands.add_parser(
"clean",
help="Delete outdated resources",
description="Delete outdated resources in OpenNebula cloud")
cmd_clean.set_defaults(func=clean_opennebula)
cmd_clean.add_argument(
"-O", "--outdated-hours", default=2, dest='hours')
cmd_clean.add_argument(
"-n", "--network", dest='network')
cmd_clean.add_argument(
"-H", "--endpoint", dest="nebula_endpoint")
cmd_clean.add_argument(
"-u", "--user", dest="user")
cmd_clean.add_argument(
"-p", "--password", dest="pwd")
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def main():
args = cli()
args.func(
hours=args.hours,
network=args.network,
user=args.user_name,
pwd=args.pwd,
endpoint=args.nebula_endpoint,
)
| true | true |
f7fa94054efbf16cdd7e617e40023ae3c262e949 | 3,201 | py | Python | app/process_engine/action/remote_call_action.py | patromi/tracardi | cd5029bd9eb20d4cea210cc7a62fa12e8327686e | [
"MIT"
] | null | null | null | app/process_engine/action/remote_call_action.py | patromi/tracardi | cd5029bd9eb20d4cea210cc7a62fa12e8327686e | [
"MIT"
] | null | null | null | app/process_engine/action/remote_call_action.py | patromi/tracardi | cd5029bd9eb20d4cea210cc7a62fa12e8327686e | [
"MIT"
] | null | null | null | import json
from datetime import datetime
import aiohttp
from tracardi_plugin_sdk.domain.register import Plugin, Spec, MetaData
from tracardi_plugin_sdk.domain.result import Result
from tracardi_plugin_sdk.action_runner import ActionRunner
class RemoteCallAction(ActionRunner):
    """Tracardi flow action that forwards the payload to a remote HTTP API."""
    def __init__(self, **kwargs):
        """Validate plugin config: required ``url``; optional ``payloadIn``
        ('body' or 'params', default 'body'), ``timeout`` (seconds,
        default 30) and HTTP ``method`` (default 'get')."""
        if 'url' not in kwargs:
            raise ValueError("Url is not set. Define it in config section.")
        self.url = kwargs['url']
        if 'payloadIn' in kwargs:
            payload_in = kwargs['payloadIn']
            if payload_in not in ['body', 'params']:
                raise ValueError("Param `payloadIn` must be either 'body' or 'params'. {} given".format(payload_in))
            self.payload_in = payload_in
        else:
            self.payload_in = "body"
        if 'timeout' in kwargs:
            self.timeout = int(kwargs['timeout'])
        else:
            self.timeout = 30
        if 'method' not in kwargs:
            self.method = 'get'
        else:
            self.method = kwargs['method']
    @staticmethod
    def _datetime_handler(date):
        """json.dumps fallback: serialize datetimes as ISO-8601 strings."""
        if isinstance(date, datetime):
            return date.isoformat()
        raise TypeError("Unknown type")
    async def run(self, payload):
        """Send the payload to the configured URL and route the outcome.

        Returns a pair of Results: 2xx responses go to the "response"
        port, everything else to "error" (the other port gets None).
        """
        if self.payload_in == 'params':
            # Query parameters can only carry scalar values.
            if type(payload) not in [str, bool, int, float]:
                raise ValueError("Payload is not str, bool, int or float. Can not convert it URL into params.")
        timeout = aiohttp.ClientTimeout(total=self.timeout)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            # dumps/loads round-trip normalizes the payload to JSON-safe
            # types (datetimes become ISO strings via _datetime_handler).
            payload = json.dumps(payload, default=self._datetime_handler)
            payload = json.loads(payload)
            # Payload goes into the request body or the query string
            # depending on configuration.
            kwargs = {
                "json" if self.payload_in == 'body' else 'params': payload
            }
            async with session.request(
                    method=self.method,
                    url=self.url,
                    **kwargs
            ) as response:
                result = {
                    "status": response.status,
                    "content": await response.json()
                }
                if response.status in [200, 201, 202, 203]:
                    return Result(port="response", value=result), Result(port="error", value=None)
                else:
                    return Result(port="response", value=None), Result(port="error", value=result)
def register() -> Plugin:
    """Describe the RemoteCallAction plugin for the Tracardi registry."""
    spec = Spec(
        module='app.process_engine.action.remote_call_action',
        className='RemoteCallAction',
        inputs=['payload'],
        outputs=["response", "error"],
        init={
            "method": "undefined",
            "url": "undefined",
            "payloadIn": "body|params"
        },
        manual="remote_call_action"
    )
    meta = MetaData(
        name='Remote call',
        desc='Sends request to remote API endpoint.',
        type='flowNode',
        width=200,
        height=100,
        icon='cloud',
        group=["Integration"]
    )
    return Plugin(start=False, spec=spec, metadata=meta)
| 32.663265 | 116 | 0.542643 | import json
from datetime import datetime
import aiohttp
from tracardi_plugin_sdk.domain.register import Plugin, Spec, MetaData
from tracardi_plugin_sdk.domain.result import Result
from tracardi_plugin_sdk.action_runner import ActionRunner
class RemoteCallAction(ActionRunner):
def __init__(self, **kwargs):
if 'url' not in kwargs:
raise ValueError("Url is not set. Define it in config section.")
self.url = kwargs['url']
if 'payloadIn' in kwargs:
payload_in = kwargs['payloadIn']
if payload_in not in ['body', 'params']:
raise ValueError("Param `payloadIn` must be either 'body' or 'params'. {} given".format(payload_in))
self.payload_in = payload_in
else:
self.payload_in = "body"
if 'timeout' in kwargs:
self.timeout = int(kwargs['timeout'])
else:
self.timeout = 30
if 'method' not in kwargs:
self.method = 'get'
else:
self.method = kwargs['method']
@staticmethod
def _datetime_handler(date):
if isinstance(date, datetime):
return date.isoformat()
raise TypeError("Unknown type")
async def run(self, payload):
if self.payload_in == 'params':
if type(payload) not in [str, bool, int, float]:
raise ValueError("Payload is not str, bool, int or float. Can not convert it URL into params.")
timeout = aiohttp.ClientTimeout(total=self.timeout)
async with aiohttp.ClientSession(timeout=timeout) as session:
payload = json.dumps(payload, default=self._datetime_handler)
payload = json.loads(payload)
kwargs = {
"json" if self.payload_in == 'body' else 'params': payload
}
async with session.request(
method=self.method,
url=self.url,
**kwargs
) as response:
result = {
"status": response.status,
"content": await response.json()
}
if response.status in [200, 201, 202, 203]:
return Result(port="response", value=result), Result(port="error", value=None)
else:
return Result(port="response", value=None), Result(port="error", value=result)
def register() -> Plugin:
return Plugin(
start=False,
spec=Spec(
module='app.process_engine.action.remote_call_action',
className='RemoteCallAction',
inputs=['payload'],
outputs=["response", "error"],
init={
"method": "undefined",
"url": "undefined",
"payloadIn": "body|params"
},
manual="remote_call_action"
),
metadata=MetaData(
name='Remote call',
desc='Sends request to remote API endpoint.',
type='flowNode',
width=200,
height=100,
icon='cloud',
group=["Integration"]
)
)
| true | true |
f7fa942da094dc5d83f8d491d941434cb724d1f1 | 196 | py | Python | ihome/ihome/settings/csrf.py | xyuzi/ihome | ab74e4efb73c14d2b410f6c19c6b2a35f8fd720e | [
"MIT"
] | null | null | null | ihome/ihome/settings/csrf.py | xyuzi/ihome | ab74e4efb73c14d2b410f6c19c6b2a35f8fd720e | [
"MIT"
] | 10 | 2021-03-19T11:31:11.000Z | 2022-02-10T10:44:16.000Z | ihome/ihome/settings/csrf.py | xyuzi/ihome | ab74e4efb73c14d2b410f6c19c6b2a35f8fd720e | [
"MIT"
] | null | null | null | from django.utils.deprecation import MiddlewareMixin
class DisableCSRFCheck(MiddlewareMixin):
def process_request(self, request):
setattr(request, '_dont_enforce_csrf_checks', True)
| 28 | 59 | 0.790816 | from django.utils.deprecation import MiddlewareMixin
class DisableCSRFCheck(MiddlewareMixin):
def process_request(self, request):
setattr(request, '_dont_enforce_csrf_checks', True)
| true | true |
f7fa94d1c418d9ee22f452e15cd24f8accbad154 | 1,187 | py | Python | modules/crlibm/doc/log_rz.py | brycelelbach/nt2 | 73d7e8dd390fa4c8d251c6451acdae65def70e0b | [
"BSL-1.0"
] | 1 | 2022-03-24T03:35:10.000Z | 2022-03-24T03:35:10.000Z | modules/crlibm/doc/log_rz.py | brycelelbach/nt2 | 73d7e8dd390fa4c8d251c6451acdae65def70e0b | [
"BSL-1.0"
] | null | null | null | modules/crlibm/doc/log_rz.py | brycelelbach/nt2 | 73d7e8dd390fa4c8d251c6451acdae65def70e0b | [
"BSL-1.0"
] | null | null | null | [ ## this file was manually modified by jt
{
'functor' : {
'arity' : '1',
'call_types' : [],
'ret_arity' : '0',
'rturn' : {
'default' : 'T',
},
'simd_types' : [],
'special' : ['crlibm'],
'type_defs' : [],
'types' : ['real_'],
},
'info' : 'manually modified',
'unit' : {
'global_header' : {
'first_stamp' : 'created by jt the 02/03/2011',
'included' : ['#include <nt2/include/functions/log.hpp>'],
'notes' : [],
'stamp' : 'modified by jt the 02/03/2011',
},
'ranges' : {
'default' : [['T(-100)', 'T(100)']],
},
'specific_values' : {
},
'verif_test' : {
'property_call' : {
'default' : ['nt2::crlibm::log_rz(a0)'],
},
'property_value' : {
'default' : ['nt2::log(a0)'],
},
'simd' : {
},
'ulp_thresh' : {
'default' : ['1'],
},
},
},
},
]
| 26.977273 | 71 | 0.333614 | [ ty' : '1',
'call_types' : [],
'ret_arity' : '0',
'rturn' : {
'default' : 'T',
},
'simd_types' : [],
'special' : ['crlibm'],
'type_defs' : [],
'types' : ['real_'],
},
'info' : 'manually modified',
'unit' : {
'global_header' : {
'first_stamp' : 'created by jt the 02/03/2011',
'included' : ['#include <nt2/include/functions/log.hpp>'],
'notes' : [],
'stamp' : 'modified by jt the 02/03/2011',
},
'ranges' : {
'default' : [['T(-100)', 'T(100)']],
},
'specific_values' : {
},
'verif_test' : {
'property_call' : {
'default' : ['nt2::crlibm::log_rz(a0)'],
},
'property_value' : {
'default' : ['nt2::log(a0)'],
},
'simd' : {
},
'ulp_thresh' : {
'default' : ['1'],
},
},
},
},
]
| true | true |
f7fa951dadf854827f170957c5a10be64d8433a8 | 5,873 | py | Python | sdk/python/pulumi_checkly/provider.py | stack72/pulumi-checkly-1 | d63bdf5ea95b54a69a9ce07f42210bf3483491ba | [
"MIT"
] | 7 | 2022-02-16T15:04:48.000Z | 2022-03-30T16:40:17.000Z | sdk/python/pulumi_checkly/provider.py | checkly/pulumi-chekly | 0e4b7f0e20e857765876fce2661e49bc8f197a20 | [
"MIT"
] | 5 | 2022-03-11T17:01:17.000Z | 2022-03-16T02:33:14.000Z | sdk/python/pulumi_checkly/provider.py | checkly/pulumi-chekly | 0e4b7f0e20e857765876fce2661e49bc8f197a20 | [
"MIT"
] | 1 | 2022-03-11T16:55:01.000Z | 2022-03-11T16:55:01.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ProviderArgs', 'Provider']
@pulumi.input_type
class ProviderArgs:
def __init__(__self__, *,
api_key: pulumi.Input[str],
account_id: Optional[pulumi.Input[str]] = None,
api_url: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Provider resource.
"""
pulumi.set(__self__, "api_key", api_key)
if account_id is not None:
pulumi.set(__self__, "account_id", account_id)
if api_url is not None:
pulumi.set(__self__, "api_url", api_url)
@property
@pulumi.getter(name="apiKey")
def api_key(self) -> pulumi.Input[str]:
return pulumi.get(self, "api_key")
@api_key.setter
def api_key(self, value: pulumi.Input[str]):
pulumi.set(self, "api_key", value)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "account_id")
@account_id.setter
def account_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_id", value)
@property
@pulumi.getter(name="apiUrl")
def api_url(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "api_url")
@api_url.setter
def api_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_url", value)
class Provider(pulumi.ProviderResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
api_key: Optional[pulumi.Input[str]] = None,
api_url: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
The provider type for the checkly package. By default, resources use package-wide configuration
settings, however an explicit `Provider` instance may be created and passed during resource
construction to achieve fine-grained programmatic control over provider settings. See the
[documentation](https://www.pulumi.com/docs/reference/programming-model/#providers) for more information.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ProviderArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The provider type for the checkly package. By default, resources use package-wide configuration
settings, however an explicit `Provider` instance may be created and passed during resource
construction to achieve fine-grained programmatic control over provider settings. See the
[documentation](https://www.pulumi.com/docs/reference/programming-model/#providers) for more information.
:param str resource_name: The name of the resource.
:param ProviderArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ProviderArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
api_key: Optional[pulumi.Input[str]] = None,
api_url: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.plugin_download_url is None:
opts.plugin_download_url = _utilities.get_plugin_download_url()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ProviderArgs.__new__(ProviderArgs)
__props__.__dict__["account_id"] = account_id
if api_key is None and not opts.urn:
raise TypeError("Missing required property 'api_key'")
__props__.__dict__["api_key"] = api_key
__props__.__dict__["api_url"] = api_url
super(Provider, __self__).__init__(
'checkly',
resource_name,
__props__,
opts)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "account_id")
@property
@pulumi.getter(name="apiKey")
def api_key(self) -> pulumi.Output[str]:
return pulumi.get(self, "api_key")
@property
@pulumi.getter(name="apiUrl")
def api_url(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "api_url")
| 40.784722 | 134 | 0.640729 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ProviderArgs', 'Provider']
@pulumi.input_type
class ProviderArgs:
def __init__(__self__, *,
api_key: pulumi.Input[str],
account_id: Optional[pulumi.Input[str]] = None,
api_url: Optional[pulumi.Input[str]] = None):
pulumi.set(__self__, "api_key", api_key)
if account_id is not None:
pulumi.set(__self__, "account_id", account_id)
if api_url is not None:
pulumi.set(__self__, "api_url", api_url)
@property
@pulumi.getter(name="apiKey")
def api_key(self) -> pulumi.Input[str]:
return pulumi.get(self, "api_key")
@api_key.setter
def api_key(self, value: pulumi.Input[str]):
pulumi.set(self, "api_key", value)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "account_id")
@account_id.setter
def account_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_id", value)
@property
@pulumi.getter(name="apiUrl")
def api_url(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "api_url")
@api_url.setter
def api_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_url", value)
class Provider(pulumi.ProviderResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
api_key: Optional[pulumi.Input[str]] = None,
api_url: Optional[pulumi.Input[str]] = None,
__props__=None):
...
@overload
def __init__(__self__,
resource_name: str,
args: ProviderArgs,
opts: Optional[pulumi.ResourceOptions] = None):
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ProviderArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
api_key: Optional[pulumi.Input[str]] = None,
api_url: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.plugin_download_url is None:
opts.plugin_download_url = _utilities.get_plugin_download_url()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ProviderArgs.__new__(ProviderArgs)
__props__.__dict__["account_id"] = account_id
if api_key is None and not opts.urn:
raise TypeError("Missing required property 'api_key'")
__props__.__dict__["api_key"] = api_key
__props__.__dict__["api_url"] = api_url
super(Provider, __self__).__init__(
'checkly',
resource_name,
__props__,
opts)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "account_id")
@property
@pulumi.getter(name="apiKey")
def api_key(self) -> pulumi.Output[str]:
return pulumi.get(self, "api_key")
@property
@pulumi.getter(name="apiUrl")
def api_url(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "api_url")
| true | true |
f7fa959d161b335557b336d45d312ebe6a257756 | 1,051 | py | Python | src/OTLMOW/PostenMapping/Model/Post060379901.py | davidvlaminck/OTLClassPython | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | 2 | 2022-02-01T08:58:11.000Z | 2022-02-08T13:35:17.000Z | src/OTLMOW/PostenMapping/Model/Post060379901.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | null | null | null | src/OTLMOW/PostenMapping/Model/Post060379901.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | null | null | null | # coding=utf-8
from OTLMOW.PostenMapping.StandaardPost import StandaardPost
from OTLMOW.PostenMapping.StandaardPostMapping import StandaardPostMapping
# Generated with PostenCreator. To modify: extend, do not edit
class Post060379901(StandaardPost):
def __init__(self):
super().__init__(
nummer='0603.79901',
beschrijving='Heropvoegen van natuursteentegels volgens 6-3.8',
meetstaateenheid='M2',
mappings=[StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
attribuutURI='',
dotnotatie='',
defaultWaarde='',
range='',
usagenote='',
isMeetstaatAttr=0,
isAltijdInTeVullen=1,
isBasisMapping=1,
mappingStatus='wordt gemapt 2.0',
mappingOpmerking='Activiteit [Heropvoegen] komt niet voor in de OTL',
standaardpostnummer='0603.79901')])
| 40.423077 | 111 | 0.615604 |
from OTLMOW.PostenMapping.StandaardPost import StandaardPost
from OTLMOW.PostenMapping.StandaardPostMapping import StandaardPostMapping
class Post060379901(StandaardPost):
def __init__(self):
super().__init__(
nummer='0603.79901',
beschrijving='Heropvoegen van natuursteentegels volgens 6-3.8',
meetstaateenheid='M2',
mappings=[StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
attribuutURI='',
dotnotatie='',
defaultWaarde='',
range='',
usagenote='',
isMeetstaatAttr=0,
isAltijdInTeVullen=1,
isBasisMapping=1,
mappingStatus='wordt gemapt 2.0',
mappingOpmerking='Activiteit [Heropvoegen] komt niet voor in de OTL',
standaardpostnummer='0603.79901')])
| true | true |
f7fa9705f784566bd8f5aec4a40f6908ad4e076a | 5,321 | py | Python | movienightbot/actions/__init__.py | itsTheFae/MovieNightBot | b7c0eff0c9f3c5a8b403140267bfbc2f6ace78dc | [
"WTFPL"
] | null | null | null | movienightbot/actions/__init__.py | itsTheFae/MovieNightBot | b7c0eff0c9f3c5a8b403140267bfbc2f6ace78dc | [
"WTFPL"
] | null | null | null | movienightbot/actions/__init__.py | itsTheFae/MovieNightBot | b7c0eff0c9f3c5a8b403140267bfbc2f6ace78dc | [
"WTFPL"
] | null | null | null | from abc import ABC, abstractmethod
from typing import Dict, Tuple, Union, List
from pathlib import Path
from importlib import import_module
import ast
import logging
import discord
from movienightbot.db.controllers import ServerController
__ALL__ = ["KNOWN_ACTIONS", "unknown_default_action"]
logger = logging.getLogger("movienightbot")
async def unknown_default_action(msg: discord.message, command: str) -> None:
await msg.channel.send(
f"Unknown command {command} given, try reading the tutorial at `m!help` "
f"to see what commands are available!"
)
class BaseAction(ABC):
# action name is what the action will be called on discord
action_name = None
admin = False
guild_only = True
async def _check_proceed(self, msg: discord.message) -> bool:
if self.guild_only and msg.guild is None:
logging.debug(f"User {msg.author.name} trying non-DM action in a DM")
await msg.author.send("You can't do this command from a DM!")
return False
server_settings = ServerController().get_by_id(msg.guild.id)
if msg.channel.id != server_settings.channel:
logging.debug(
f"User {msg.author.name} using non-permitted channel {msg.channel.name} "
f"instead of {server_settings.channel}"
)
return False
if not msg.author.guild_permissions.administrator and (
self.admin
and server_settings.admin_role not in {r.name for r in msg.author.roles}
):
logging.debug(f"User {msg.author.name} does not have admin")
await msg.channel.send("Hey now, you're not an admin on this server!")
return False
return True
async def __call__(self, msg: discord.message) -> None:
error_message = (
"OOPSIE WOOPSIE!! UwU We made a fucky wucky!! A wittle fucko boingo! The code "
"monkeys at our headquarters are working VEWY HAWD to fix this!"
)
try:
if not await self._check_proceed(msg):
return
except Exception as e:
logger.error(e, exc_info=e)
await msg.channel.send(error_message)
guild = msg.guild.name if msg.guild is not None else "DM"
logger.info(f"Running action {self.action_name} on server {guild}")
try:
await self.action(msg)
except discord.Forbidden as e:
if e.code == 50007:
await msg.channel.send(f"I can't DM you {msg.author.name}!")
return
else:
logger.error(e, exc_info=e)
await msg.channel.send(error_message)
except Exception as e:
logger.error(e, exc_info=e)
await msg.channel.send(error_message)
@staticmethod
def get_message_data(
msg: discord.message, data_parts: int = 1
) -> Union[str, Tuple[str]]:
"""Gets and sanitizes the message data associated with the command
Parameters
----------
msg
The discord message object
data_parts
The number of pieces of information expected, space separated. Default 1.
For example, if the message text is "m!suggest Some Movie Name" and we set to 1,
this will return "Some Movie Name". Set to 2, it will return ("Some", "Movie Name")
Notes
-----
Will return an empty string if the data_parts is set to 1 but no data is given. Will return an empty tuple
if data_parts is > 1 and no data given.
"""
data = msg.content.strip().split(" ", data_parts)[1:]
# sanitize the input to only have one space in case multiple put in
data = tuple(" ".join(s.split()) for s in data)
if data_parts == 1:
return "" if not data else data[0]
return data
@property
@abstractmethod
def help_text(self) -> str:
return
@property
def help_options(self) -> List[str]:
return []
@abstractmethod
async def action(self, msg: discord.message) -> None:
return
def _get_actions() -> Dict[str, BaseAction]:
"""Loads all actions in the submodule to a dict
Returns
-------
dict of str
The actions, with class name as key and an instantiated class as value
Notes
-----
Any Action class must be a child of the BaseAction ABC to be added to this dict.
Done this way so you can create a new file in the actions submodule and it will auto-import and register.
"""
base_dir = Path(__file__).parent
actions = {}
for file in base_dir.iterdir():
if file.is_dir() or file.name.startswith("__") or not file.name.endswith(".py"):
continue
with file.open() as f:
parsed = ast.parse(f.read())
classes = [
node.name for node in ast.walk(parsed) if isinstance(node, ast.ClassDef)
]
rc = import_module(f"movienightbot.actions.{file.stem}")
for class_name in classes:
class_def = rc.__dict__[class_name]
if not issubclass(class_def, BaseAction):
continue
actions[class_def.action_name] = class_def()
return actions
KNOWN_ACTIONS = _get_actions()
| 35.238411 | 114 | 0.61605 | from abc import ABC, abstractmethod
from typing import Dict, Tuple, Union, List
from pathlib import Path
from importlib import import_module
import ast
import logging
import discord
from movienightbot.db.controllers import ServerController
__ALL__ = ["KNOWN_ACTIONS", "unknown_default_action"]
logger = logging.getLogger("movienightbot")
async def unknown_default_action(msg: discord.message, command: str) -> None:
await msg.channel.send(
f"Unknown command {command} given, try reading the tutorial at `m!help` "
f"to see what commands are available!"
)
class BaseAction(ABC):
action_name = None
admin = False
guild_only = True
async def _check_proceed(self, msg: discord.message) -> bool:
if self.guild_only and msg.guild is None:
logging.debug(f"User {msg.author.name} trying non-DM action in a DM")
await msg.author.send("You can't do this command from a DM!")
return False
server_settings = ServerController().get_by_id(msg.guild.id)
if msg.channel.id != server_settings.channel:
logging.debug(
f"User {msg.author.name} using non-permitted channel {msg.channel.name} "
f"instead of {server_settings.channel}"
)
return False
if not msg.author.guild_permissions.administrator and (
self.admin
and server_settings.admin_role not in {r.name for r in msg.author.roles}
):
logging.debug(f"User {msg.author.name} does not have admin")
await msg.channel.send("Hey now, you're not an admin on this server!")
return False
return True
async def __call__(self, msg: discord.message) -> None:
error_message = (
"OOPSIE WOOPSIE!! UwU We made a fucky wucky!! A wittle fucko boingo! The code "
"monkeys at our headquarters are working VEWY HAWD to fix this!"
)
try:
if not await self._check_proceed(msg):
return
except Exception as e:
logger.error(e, exc_info=e)
await msg.channel.send(error_message)
guild = msg.guild.name if msg.guild is not None else "DM"
logger.info(f"Running action {self.action_name} on server {guild}")
try:
await self.action(msg)
except discord.Forbidden as e:
if e.code == 50007:
await msg.channel.send(f"I can't DM you {msg.author.name}!")
return
else:
logger.error(e, exc_info=e)
await msg.channel.send(error_message)
except Exception as e:
logger.error(e, exc_info=e)
await msg.channel.send(error_message)
@staticmethod
def get_message_data(
msg: discord.message, data_parts: int = 1
) -> Union[str, Tuple[str]]:
data = msg.content.strip().split(" ", data_parts)[1:]
# sanitize the input to only have one space in case multiple put in
data = tuple(" ".join(s.split()) for s in data)
if data_parts == 1:
return "" if not data else data[0]
return data
@property
@abstractmethod
def help_text(self) -> str:
return
@property
def help_options(self) -> List[str]:
return []
@abstractmethod
async def action(self, msg: discord.message) -> None:
return
def _get_actions() -> Dict[str, BaseAction]:
base_dir = Path(__file__).parent
actions = {}
for file in base_dir.iterdir():
if file.is_dir() or file.name.startswith("__") or not file.name.endswith(".py"):
continue
with file.open() as f:
parsed = ast.parse(f.read())
classes = [
node.name for node in ast.walk(parsed) if isinstance(node, ast.ClassDef)
]
rc = import_module(f"movienightbot.actions.{file.stem}")
for class_name in classes:
class_def = rc.__dict__[class_name]
if not issubclass(class_def, BaseAction):
continue
actions[class_def.action_name] = class_def()
return actions
KNOWN_ACTIONS = _get_actions()
| true | true |
f7fa974f827032fc53735e0c88b047a45dea81b4 | 3,641 | py | Python | wd/wdapp/timeseries_analysis.py | LiYangCom1994/companylair | e8d085e3357b08f178b089c4a52e5dc2f9eb103f | [
"MIT"
] | null | null | null | wd/wdapp/timeseries_analysis.py | LiYangCom1994/companylair | e8d085e3357b08f178b089c4a52e5dc2f9eb103f | [
"MIT"
] | null | null | null | wd/wdapp/timeseries_analysis.py | LiYangCom1994/companylair | e8d085e3357b08f178b089c4a52e5dc2f9eb103f | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import statsmodels.api as sm
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import learning_curve
from sklearn.metrics import fbeta_score, make_scorer
from sklearn.metrics import r2_score, mean_squared_error, make_scorer
from sklearn.grid_search import GridSearchCV
def plot_learning_curve(clf, title):
train_sizes, train_scores, test_scores = learning_curve(clf,
X,
Y,
cv=10,
n_jobs=-1,
train_sizes=np.linspace(.1, 1., 10),
verbose=0)
train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)
plt.figure()
plt.title(title)
plt.xlabel("Training examples")
plt.ylabel("Score")
# Plot the average training and test score lines at each training set size
plt.plot(train_sizes, train_scores_mean, 'o-', color="b", label="Training score")
plt.plot(train_sizes, test_scores_mean, 'o-', color="r", label="Test score")
# Plot the std deviation as a transparent range at each training set size
plt.fill_between(train_sizes, train_scores_mean - train_scores_std, train_scores_mean + train_scores_std, alpha=0.1, color="b")
plt.fill_between(train_sizes, test_scores_mean - test_scores_std, test_scores_mean + test_scores_std, alpha=0.1, color="r")
plt.legend(loc="best")
def encode(x):
if (x.dtype is np.dtype('O') and x.name != 'sales') or x.name == 'date':
return x.astype('category').cat.codes
return x
df_tmp = df_tmp.apply(encode)
x_col = df_tmp.columns.values[df_tmp.columns.values != 'sales']
X = df_tmp[x_col].values
Y = df_tmp['sales'].values
display(X)
display(Y)
dsr = DecisionTreeRegressor(random_state = 0, min_samples_split = 15, max_depth = 10)
scores = cross_val_score(dsr, X, Y, cv = 15)
display(scores)
print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
rfr = RandomForestRegressor(n_estimators = 10)
scores = cross_val_score(rfr, X, Y, cv = 10)
display(scores)
print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
dsr.fit(X, Y)
pre_y_by_dsr = dsr.predict(X)
rfr.fit(X, Y)
pre_y_by_rfr = rfr.predict(X)
f = pd.DataFrame(index=df_tmp.index)
df['month'] = list(map(lambda x: x.month, df_tmp.index.date))
df['pred_by_decision_tree_regressor'] = pre_y_by_dsr
df['pred_by_random_forest_regressor'] = pre_y_by_rfr
df['country'] = df_tmp['country']
df['actual'] = Y
m = df.groupby(['country', 'date'])['pred_by_decision_tree_regressor', 'pred_by_random_forest_regressor', 'actual'].mean()
v = df.groupby(['country', 'date'])['pred_by_decision_tree_regressor', 'pred_by_random_forest_regressor', 'actual'].var()
fig, axes = plt.subplots(len(countries), 2, figsize=(20, 25))
for i in range(3):
m.xs(i).plot(title = countries[i] + " (Mean)", ax = axes[i, 0])
v.xs(i).plot(title = countries[i] + " (Variance)", ax = axes[i, 1])
plt.legend(loc='best')
plot_learning_curve(dsr, "Decision_Tree_Regressor")
plot_learning_curve(rfr, "Random_Forest_Regressor") | 36.777778 | 132 | 0.652568 | import pandas as pd
import numpy as np
import statsmodels.api as sm
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import learning_curve
from sklearn.metrics import fbeta_score, make_scorer
from sklearn.metrics import r2_score, mean_squared_error, make_scorer
from sklearn.grid_search import GridSearchCV
def plot_learning_curve(clf, title):
train_sizes, train_scores, test_scores = learning_curve(clf,
X,
Y,
cv=10,
n_jobs=-1,
train_sizes=np.linspace(.1, 1., 10),
verbose=0)
train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)
plt.figure()
plt.title(title)
plt.xlabel("Training examples")
plt.ylabel("Score")
plt.plot(train_sizes, train_scores_mean, 'o-', color="b", label="Training score")
plt.plot(train_sizes, test_scores_mean, 'o-', color="r", label="Test score")
plt.fill_between(train_sizes, train_scores_mean - train_scores_std, train_scores_mean + train_scores_std, alpha=0.1, color="b")
plt.fill_between(train_sizes, test_scores_mean - test_scores_std, test_scores_mean + test_scores_std, alpha=0.1, color="r")
plt.legend(loc="best")
def encode(x):
if (x.dtype is np.dtype('O') and x.name != 'sales') or x.name == 'date':
return x.astype('category').cat.codes
return x
df_tmp = df_tmp.apply(encode)
x_col = df_tmp.columns.values[df_tmp.columns.values != 'sales']
X = df_tmp[x_col].values
Y = df_tmp['sales'].values
display(X)
display(Y)
dsr = DecisionTreeRegressor(random_state = 0, min_samples_split = 15, max_depth = 10)
scores = cross_val_score(dsr, X, Y, cv = 15)
display(scores)
print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
rfr = RandomForestRegressor(n_estimators = 10)
scores = cross_val_score(rfr, X, Y, cv = 10)
display(scores)
print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
dsr.fit(X, Y)
pre_y_by_dsr = dsr.predict(X)
rfr.fit(X, Y)
pre_y_by_rfr = rfr.predict(X)
f = pd.DataFrame(index=df_tmp.index)
df['month'] = list(map(lambda x: x.month, df_tmp.index.date))
df['pred_by_decision_tree_regressor'] = pre_y_by_dsr
df['pred_by_random_forest_regressor'] = pre_y_by_rfr
df['country'] = df_tmp['country']
df['actual'] = Y
m = df.groupby(['country', 'date'])['pred_by_decision_tree_regressor', 'pred_by_random_forest_regressor', 'actual'].mean()
v = df.groupby(['country', 'date'])['pred_by_decision_tree_regressor', 'pred_by_random_forest_regressor', 'actual'].var()
fig, axes = plt.subplots(len(countries), 2, figsize=(20, 25))
for i in range(3):
m.xs(i).plot(title = countries[i] + " (Mean)", ax = axes[i, 0])
v.xs(i).plot(title = countries[i] + " (Variance)", ax = axes[i, 1])
plt.legend(loc='best')
plot_learning_curve(dsr, "Decision_Tree_Regressor")
plot_learning_curve(rfr, "Random_Forest_Regressor") | true | true |
f7fa990ffb5ad2f3cde5b220ac889d31b9c7bd82 | 209 | py | Python | pyWAPOR/Collect/Landsat/PrepareLandsat.py | hectornieto/wapor-et-look | bc0b98be58d1680de2f82909809ac876f78f2d85 | [
"Apache-2.0"
] | 1 | 2021-05-24T08:12:03.000Z | 2021-05-24T08:12:03.000Z | pyWAPOR/Collect/Landsat/PrepareLandsat.py | hectornieto/wapor-et-look | bc0b98be58d1680de2f82909809ac876f78f2d85 | [
"Apache-2.0"
] | 2 | 2020-06-25T08:27:55.000Z | 2020-08-28T07:38:17.000Z | pyWAPOR/Collect/Landsat/PrepareLandsat.py | DHI-GRAS/wapor-et-look | e05b8f24616af8fc99ac1d646c878b353cb35aef | [
"Apache-2.0"
] | 4 | 2020-09-23T09:51:59.000Z | 2021-08-10T08:59:14.000Z | import sys
from pyWAPOR.Collect.Landsat.PreprocessLandsat import PreprocessLandsat
def main(data_dir, output_dir):
PreprocessLandsat(data_dir, output_dir)
if __name__ == '__main__':
main(sys.argv)
| 19 | 71 | 0.779904 | import sys
from pyWAPOR.Collect.Landsat.PreprocessLandsat import PreprocessLandsat
def main(data_dir, output_dir):
PreprocessLandsat(data_dir, output_dir)
if __name__ == '__main__':
main(sys.argv)
| true | true |
f7fa9a3a0085d2c9c70e32daa8ce6853068eb974 | 4,239 | py | Python | flexbe_core/flexbe_core/proxy/proxy_publisher.py | FlexBE/flexbe_behavior_engine | 735a3b68dfbe817db9383e53fef63afd6868219d | [
"BSD-3-Clause"
] | 1 | 2022-03-11T04:56:31.000Z | 2022-03-11T04:56:31.000Z | flexbe_core/flexbe_core/proxy/proxy_publisher.py | FlexBE/flexbe_behavior_engine | 735a3b68dfbe817db9383e53fef63afd6868219d | [
"BSD-3-Clause"
] | null | null | null | flexbe_core/flexbe_core/proxy/proxy_publisher.py | FlexBE/flexbe_behavior_engine | 735a3b68dfbe817db9383e53fef63afd6868219d | [
"BSD-3-Clause"
] | null | null | null | import rclpy
import time
from threading import Timer
from flexbe_core.logger import Logger
from flexbe_core.proxy.qos import QOS_DEFAULT
class ProxyPublisher(object):
    """
    A proxy for publishing topics.

    Publishers are kept in a class-level registry, so they are shared by
    every ProxyPublisher instance for the lifetime of the process: at most
    one rclpy publisher exists per topic name.
    """
    # Shared rclpy node used to create publishers; set once via _initialize().
    _node = None
    # Class-level registry mapping topic name -> rclpy publisher.
    _topics = {}
    # NOTE(review): invoked as ProxyPublisher._initialize(node); this works as
    # an implicit static method in Python 3 but lacks @staticmethod.
    def _initialize(node):
        ProxyPublisher._node = node
        Logger.initialize(node)
    # NOTE(review): the mutable default ``topics={}`` is shared across calls;
    # harmless here because it is only iterated, never mutated.
    def __init__(self, topics={}, qos=None, **kwargs):
        """
        Initializes the proxy with optionally a given set of topics.
        Automatically creates a publisher for sending status messages.
        @type topics: dictionary string - message class
        @param topics: A dictionary containing a collection of topic - message type pairs.
        @type _latch: bool
        @param _latch: Defines if messages on the given topics should be latched (deprecated, ignored).
        @type _queue_size: int
        @param _queue_size: Defines the queue size of the new publishers (deprecated, ignored).
        """
        for topic, msg_type in topics.items():
            self.createPublisher(topic, msg_type, qos, **kwargs)
    def createPublisher(self, topic, msg_type, qos=None, **kwargs):
        """
        Adds a new publisher to the proxy.
        @type topic: string
        @param topic: The topic to publish on.
        @type msg_type: a message class
        @param msg_type: The type of messages of this topic.
        """
        # Legacy ROS 1 style keyword arguments are accepted but ignored.
        if '_latch' in kwargs or '_queue_size' in kwargs:
            Logger.warning('DEPRECATED use of arguments in publisher')
        # Only one publisher per topic: later registrations reuse the first
        # one, so the qos of the first registration wins.
        if topic not in ProxyPublisher._topics:
            qos = qos or QOS_DEFAULT
            ProxyPublisher._topics[topic] = ProxyPublisher._node.create_publisher(msg_type, topic, qos)
    def is_available(self, topic):
        """
        Checks if the publisher on the given topic is available.
        @type topic: string
        @param topic: The topic of interest.
        """
        return topic in ProxyPublisher._topics
    def publish(self, topic, msg):
        """
        Publishes a message on the specified topic.
        @type topic: string
        @param topic: The topic to publish on.
        @type msg: message class (defined when created publisher)
        @param msg: The message to publish.
        """
        if topic not in ProxyPublisher._topics:
            Logger.warning('ProxyPublisher: topic %s not yet registered!' % topic)
            return
        # Publishing failures are logged, not raised, so a broken topic does
        # not take down the caller.
        try:
            ProxyPublisher._topics[topic].publish(msg)
        except Exception as e:
            Logger.warning('Something went wrong when publishing to %s!\n%s' % (topic, str(e)))
    def wait_for_any(self, topic, timeout=5.0):
        """
        Blocks until there are any subscribers to the given topic.
        @type topic: string
        @param topic: The topic to publish on.
        @type timeout: float
        @param timeout: How many seconds should be the maximum blocked time.
        """
        pub = ProxyPublisher._topics.get(topic)
        if pub is None:
            Logger.error("Publisher %s not yet registered, need to add it first!" % topic)
            return False
        # Emit a "still waiting" warning if the wait exceeds 0.5 seconds.
        t = Timer(.5, self._print_wait_warning, [topic])
        t.start()
        available = self._wait_for_subscribers(pub, timeout)
        warning_sent = False
        try:
            t.cancel()
        except Exception:
            # already printed the warning
            warning_sent = True
        # NOTE(review): threading.Timer.cancel() does not raise once the timer
        # has fired, so ``warning_sent`` can never become True through the
        # except branch above and the "Finally found subscriber" info message
        # below appears to be unreachable — confirm intended behavior.
        if not available:
            Logger.error("Waiting for subscribers on %s timed out!" % topic)
            return False
        else:
            if warning_sent:
                Logger.info("Finally found subscriber on %s..." % (topic))
            return True
    def _print_wait_warning(self, topic):
        # Timer callback for wait_for_any(); runs on the Timer's thread.
        Logger.warning("Waiting for subscribers on %s..." % (topic))
    def _wait_for_subscribers(self, pub, timeout=5.0):
        # Poll the subscription count at 100 Hz until a subscriber appears or
        # ``timeout`` seconds (wall clock of the node) have elapsed.
        starting_time = ProxyPublisher._node.get_clock().now()
        rate = ProxyPublisher._node.create_rate(100, ProxyPublisher._node.get_clock())
        while (ProxyPublisher._node.get_clock().now() - starting_time).nanoseconds * 10 ** -9 < timeout:
            if pub.get_subscription_count() > 0:
                return True
            rate.sleep()
        return False
| 32.860465 | 104 | 0.615004 | import rclpy
import time
from threading import Timer
from flexbe_core.logger import Logger
from flexbe_core.proxy.qos import QOS_DEFAULT
class ProxyPublisher(object):
_node = None
_topics = {}
def _initialize(node):
ProxyPublisher._node = node
Logger.initialize(node)
def __init__(self, topics={}, qos=None, **kwargs):
for topic, msg_type in topics.items():
self.createPublisher(topic, msg_type, qos, **kwargs)
def createPublisher(self, topic, msg_type, qos=None, **kwargs):
if '_latch' in kwargs or '_queue_size' in kwargs:
Logger.warning('DEPRECATED use of arguments in publisher')
if topic not in ProxyPublisher._topics:
qos = qos or QOS_DEFAULT
ProxyPublisher._topics[topic] = ProxyPublisher._node.create_publisher(msg_type, topic, qos)
def is_available(self, topic):
return topic in ProxyPublisher._topics
def publish(self, topic, msg):
if topic not in ProxyPublisher._topics:
Logger.warning('ProxyPublisher: topic %s not yet registered!' % topic)
return
try:
ProxyPublisher._topics[topic].publish(msg)
except Exception as e:
Logger.warning('Something went wrong when publishing to %s!\n%s' % (topic, str(e)))
def wait_for_any(self, topic, timeout=5.0):
pub = ProxyPublisher._topics.get(topic)
if pub is None:
Logger.error("Publisher %s not yet registered, need to add it first!" % topic)
return False
t = Timer(.5, self._print_wait_warning, [topic])
t.start()
available = self._wait_for_subscribers(pub, timeout)
warning_sent = False
try:
t.cancel()
except Exception:
warning_sent = True
if not available:
Logger.error("Waiting for subscribers on %s timed out!" % topic)
return False
else:
if warning_sent:
Logger.info("Finally found subscriber on %s..." % (topic))
return True
def _print_wait_warning(self, topic):
Logger.warning("Waiting for subscribers on %s..." % (topic))
def _wait_for_subscribers(self, pub, timeout=5.0):
starting_time = ProxyPublisher._node.get_clock().now()
rate = ProxyPublisher._node.create_rate(100, ProxyPublisher._node.get_clock())
while (ProxyPublisher._node.get_clock().now() - starting_time).nanoseconds * 10 ** -9 < timeout:
if pub.get_subscription_count() > 0:
return True
rate.sleep()
return False
| true | true |
f7fa9b8365284acd185fed91ce8bdc1ac798ad0d | 2,091 | py | Python | composer/models/huggingface.py | growlix/composer | 27418a3c65dca26d90ac09c6ae67cbd5d0202ccf | [
"Apache-2.0"
] | 945 | 2021-10-13T16:24:20.000Z | 2022-03-31T21:21:54.000Z | composer/models/huggingface.py | growlix/composer | 27418a3c65dca26d90ac09c6ae67cbd5d0202ccf | [
"Apache-2.0"
] | 544 | 2021-10-13T20:23:27.000Z | 2022-03-31T02:47:54.000Z | composer/models/huggingface.py | growlix/composer | 27418a3c65dca26d90ac09c6ae67cbd5d0202ccf | [
"Apache-2.0"
] | 39 | 2021-10-13T14:33:33.000Z | 2022-03-31T11:13:19.000Z | # Copyright 2022 MosaicML Composer authors
# SPDX-License-Identifier: Apache-2.0
"""A wrapper class that converts 🤗 Transformers models to composer models"""
from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional
from torchmetrics import Metric
from torchmetrics.collections import MetricCollection
from composer.models.base import ComposerModel
if TYPE_CHECKING:
import transformers
__all__ = ["HuggingFaceModel"]
class HuggingFaceModel(ComposerModel):
    """
    A wrapper class that converts 🤗 Transformers models to composer models.

    Args:
        model (transformers.PreTrainedModel): A 🤗 Transformers model.
        metrics (list[Metric], optional): list of torchmetrics to apply to the output of `validate`. Default: ``None``.

    .. warning:: This wrapper is designed to work with 🤗 datasets that define a `labels` column.

    Example:

    .. testcode::

        import transformers
        from composer.models import HuggingFaceModel

        hf_model = transformers.AutoModelForSequenceClassification.from_pretrained('bert-base-uncased', num_labels=2)
        model = HuggingFaceModel(hf_model)
    """

    def __init__(self, model: transformers.PreTrainedModel, metrics: Optional[List[Metric]] = None) -> None:
        super().__init__()
        self.model = model
        # Keep independent train/val clones so metric state never leaks
        # between the two phases.
        if metrics:
            collection = MetricCollection(metrics)
            self.train_metrics = collection.clone(prefix='train_')
            self.valid_metrics = collection.clone(prefix='val_')
        else:
            self.train_metrics = None
            self.valid_metrics = None

    def forward(self, batch):
        # The batch dict is expanded into the HF model's keyword arguments.
        return self.model(**batch)  # type: ignore (thirdparty)

    def loss(self, outputs, batch):
        # HF models compute their own loss when a 'labels' key is present.
        return outputs['loss']

    def validate(self, batch):
        # Pull the labels out so the model only sees the input features.
        labels = batch.pop('labels')
        logits = self.forward(batch)['logits']
        return logits, labels

    def metrics(self, train: bool = False):
        # Select the metric collection matching the current phase.
        if train:
            return self.train_metrics
        return self.valid_metrics
| 30.304348 | 119 | 0.693926 |
from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional
from torchmetrics import Metric
from torchmetrics.collections import MetricCollection
from composer.models.base import ComposerModel
if TYPE_CHECKING:
import transformers
__all__ = ["HuggingFaceModel"]
class HuggingFaceModel(ComposerModel):
def __init__(self, model: transformers.PreTrainedModel, metrics: Optional[List[Metric]] = None) -> None:
super().__init__()
self.model = model
self.train_metrics = None
self.valid_metrics = None
if metrics:
metric_collection = MetricCollection(metrics)
self.train_metrics = metric_collection.clone(prefix='train_')
self.valid_metrics = metric_collection.clone(prefix='val_')
def forward(self, batch):
output = self.model(**batch)
return output
def loss(self, outputs, batch):
return outputs['loss']
def validate(self, batch):
labels = batch.pop('labels')
output = self.forward(batch)
output = output['logits']
return output, labels
def metrics(self, train: bool = False):
return self.train_metrics if train else self.valid_metrics
| true | true |
f7fa9babcc7fac5e2b1da9a07e4660107e1120b6 | 197 | py | Python | web/apps/__init__.py | maxis1314/pyutils | 7e0666c650209155b3da186d09c54cf14825df1e | [
"Apache-2.0"
] | 2 | 2017-06-12T07:58:51.000Z | 2018-03-08T09:43:35.000Z | web/apps/__init__.py | maxis1314/pyutils | 7e0666c650209155b3da186d09c54cf14825df1e | [
"Apache-2.0"
] | 1 | 2017-06-10T02:05:52.000Z | 2017-07-04T03:57:28.000Z | web/apps/__init__.py | maxis1314/pyutils | 7e0666c650209155b3da186d09c54cf14825df1e | [
"Apache-2.0"
] | null | null | null |
# Module-level Flask application shared by the whole package (no app
# factory); importing this module creates and configures the app.
from flask import Flask, jsonify, abort, make_response, render_template
app = Flask(__name__)
# Load settings from the project-level ``config`` module.
app.config.from_object("config")
# Templates and static assets live one directory above this package.
# NOTE(review): relative paths here are resolved by Flask against the
# application root — confirm this layout holds in deployment.
app.template_folder = "../templates"
app.static_folder = "../static"
from flask import Flask, jsonify, abort, make_response, render_template
app = Flask(__name__)
app.config.from_object("config")
app.template_folder = "../templates"
app.static_folder = "../static" | true | true |
f7fa9c78cb43be16736ccdab579c0f2223e3a457 | 2,887 | py | Python | 2019/Day3-CrossedWires/main.py | limited/AdventOfCode | 36f92b9c42d7b81f23f4e14d07657a03a9a2beb3 | [
"Apache-2.0"
] | null | null | null | 2019/Day3-CrossedWires/main.py | limited/AdventOfCode | 36f92b9c42d7b81f23f4e14d07657a03a9a2beb3 | [
"Apache-2.0"
] | null | null | null | 2019/Day3-CrossedWires/main.py | limited/AdventOfCode | 36f92b9c42d7b81f23f4e14d07657a03a9a2beb3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# UGH, needed a hint from reddit about handle to handle negative numbers. The grid doesn't wrap around, its a 4 quadrant coordinate space
# Also picked up using a dict as a sparse representation from reddit.
from collections import defaultdict
def read_input(filename):
    """Parse the puzzle input: one comma-separated wire path per line.

    Returns a list of wires, each a list of instruction strings like 'R8'.
    """
    parsed = []
    with open(filename, 'r') as handle:
        for raw_line in handle:
            parsed.append(raw_line.rstrip().split(','))
    return parsed
def update_grid(grid, cur_y, cur_x, steps):
    """Record the step count of the first visit to (cur_y, cur_x)."""
    # setdefault only stores when the key is absent, so revisits never
    # overwrite the earliest (smallest) step count.
    grid.setdefault((cur_y, cur_x), steps)
def fill_grid(grid, wires):
    """Trace both wires into sparse grids and report the best intersection.

    Each wire gets its own dict mapping (y, x) -> step count of the first
    visit; afterwards check_intersection_steps() prints the minimal combined
    step count over all points both wires touch.
    """
    # The incoming ``grid`` argument is immediately shadowed (kept only for
    # call-signature compatibility); each wire gets its own sparse dict.
    grid = [{}, {}]
    # (dy, dx) unit step per direction letter — replaces four copy-pasted
    # per-direction loops in the original.
    deltas = {'U': (1, 0), 'D': (-1, 0), 'R': (0, 1), 'L': (0, -1)}
    for idx, wire in enumerate(wires):
        val = 1 << idx
        cur_x = 0
        cur_y = 0
        steps = 0
        print("new wire", idx, val)
        for inst in wire:
            # Instructions look like "U123": direction letter + step count.
            dir = inst[0]
            count = int(inst[1:])
            print(dir, count)
            if dir not in deltas:
                raise RuntimeError("Unknown Direction: " + dir)
            dy, dx = deltas[dir]
            # Walk one cell at a time so every visited point is recorded
            # with its first-visit step count.
            for _ in range(count):
                steps += 1
                cur_y += dy
                cur_x += dx
                update_grid(grid[idx], cur_y, cur_x, steps)
    check_intersection_steps(grid)
def check_intersection_manhattan(grid, cur_y, cur_x, min_distance):
    """Return the smaller of min_distance and the Manhattan distance of
    (cur_y, cur_x) from the origin, when both wires cover that cell."""
    # A cell value of 3 means both wire bitmasks (1 | 2) were recorded there.
    if grid[(cur_y, cur_x)] != 3:
        return min_distance
    manhattan = abs(cur_y) + abs(cur_x)
    print(f"Intersection at {cur_y},{cur_x}, {manhattan}")
    if manhattan < min_distance:
        return manhattan
    return min_distance
def check_intersection_steps(grid):
    """Print and return the minimal combined step count over all points
    visited by both wires.

    grid: pair of dicts, each mapping (y, x) -> first-visit step count.
    Returns None (printing nothing) when the wires never cross — the
    original raised ValueError from min() on an empty sequence.
    """
    intersections = grid[0].keys() & grid[1].keys()
    if not intersections:
        return None
    best = min(grid[0][point] + grid[1][point] for point in intersections)
    print(best)
    return best
def main():
    """Read the puzzle input file and report the best wire intersection."""
    # fill_grid replaces this dict internally; it is kept only to match the
    # function's signature.
    grid = defaultdict(int)
    wires = read_input('game_input')
    print(wires)
    fill_grid(grid, wires)
    # 3884 too high  (previously submitted answer, kept as a breadcrumb)
if __name__ == '__main__':
    main()
| 29.762887 | 137 | 0.489435 |
# Also picked up using a dict as a sparse representation from reddit.
from collections import defaultdict
def read_input(filename):
with open(filename, 'r') as f:
lines = [l.rstrip().split(',') for l in f]
return lines
def update_grid(grid, cur_y, cur_x, steps):
if (cur_y, cur_x) not in grid:
grid[(cur_y,cur_x)] = steps
def fill_grid(grid, wires):
min_distance = 5000000
grid = [{},{}]
for (idx, wire) in enumerate(wires):
val = 1 << idx
cur_x = 0
cur_y = 0
steps = 0
print("new wire", idx, val)
for inst in wire:
dir = inst[0]
count = int(inst[1:])
print(dir,count)
if dir == 'U':
for i in range(0,count):
steps += 1
cur_y += 1
update_grid(grid[idx], cur_y, cur_x, steps)
elif dir == 'R':
for i in range(0,count):
steps += 1
cur_x += 1
update_grid(grid[idx], cur_y, cur_x, steps)
elif dir == 'L':
for i in range(0,count):
steps += 1
cur_x -= 1
update_grid(grid[idx], cur_y, cur_x, steps)
elif dir == 'D':
for i in range(0,count):
steps += 1
cur_y -= 1
update_grid(grid[idx], cur_y, cur_x, steps)
else:
raise RuntimeError("Unknown Direction: "+dir)
#print(grid[idx])
check_intersection_steps(grid)
#for v in grid.values():
# if v == 3:
# print("FOUND A 3")
#print(f"Min Distance: {min_distance}")
def check_intersection_manhattan(grid, cur_y, cur_x, min_distance):
if grid[(cur_y,cur_x)] == 3:
print(f"Intersection at {cur_y},{cur_x}, {abs(cur_y)+abs(cur_x)}")
min_distance = min(min_distance, abs(cur_x)+abs(cur_y))
return min_distance
def check_intersection_steps(grid):
intersections = set(grid[0].keys()) & set(grid[1].keys())
# print(intersections)
# for i in intersections:
# print(f"{i} {grid[0][i]} {grid[1][i]} ")
distances = [grid[0][i]+grid[1][i] for i in intersections]
print(min(distances))
def main():
grid = defaultdict(int)
wires = read_input('game_input')
print(wires)
fill_grid(grid, wires)
# 3884 too high
if __name__ == '__main__':
main()
| true | true |
f7fa9d885e89165fa59218cd66ef4b684ac40a2f | 7,135 | py | Python | empire/server/common/credentials.py | awsmhacks/Empire | 6a6f0881798ce92a54ce9896d2ffe4855855872d | [
"BSD-3-Clause"
] | null | null | null | empire/server/common/credentials.py | awsmhacks/Empire | 6a6f0881798ce92a54ce9896d2ffe4855855872d | [
"BSD-3-Clause"
] | null | null | null | empire/server/common/credentials.py | awsmhacks/Empire | 6a6f0881798ce92a54ce9896d2ffe4855855872d | [
"BSD-3-Clause"
] | null | null | null | """
Credential handling functionality for Empire.
"""
from __future__ import absolute_import, print_function
import os
from builtins import input, object, str
from sqlalchemy import and_, or_
from empire.server.database import models
from empire.server.database.base import Session
from . import helpers
class Credentials(object):
    """
    Class that handles interaction with the backend credential model
    (adding creds, displaying, etc.).
    """

    def __init__(self, MainMenu, args=None):
        # pull out the controller objects
        self.mainMenu = MainMenu
        self.installPath = self.mainMenu.installPath
        self.args = args

    # credential database schema:
    #   (ID, credtype, domain, username, password, host, OS, notes, sid)
    # credtype = hash or plaintext
    # sid is stored for krbtgt

    def is_credential_valid(self, credentialID):
        """
        Check if this credential ID is valid (i.e. exists in the database).
        """
        results = (
            Session()
            .query(models.Credential)
            .filter(models.Credential.id == credentialID)
            .all()
        )
        return len(results) > 0

    def get_credentials(self, filter_term=None, credtype=None, note=None, os=None):
        """
        Return credentials from the database.

        'filter_term' may be a credential ID (returns that single credential)
        or a substring matched against domain/username/host/password, with
        '*' acting as a wildcard. It takes precedence over the other filters.
        'credtype' can be specified to return creds of a specific type.
        Values are: hash, plaintext, and token.
        'note'/'os' match substrings of the note/OS columns.
        """
        # if we're returning a single credential by ID
        if self.is_credential_valid(filter_term):
            results = (
                Session()
                .query(models.Credential)
                .filter(models.Credential.id == filter_term)
                .first()
            )
        # if we're filtering by host/username/domain/password substring
        elif filter_term and filter_term != "":
            filter_term = filter_term.replace("*", "%")
            search = "%{}%".format(filter_term)
            results = (
                Session()
                .query(models.Credential)
                .filter(
                    or_(
                        models.Credential.domain.like(search),
                        models.Credential.username.like(search),
                        models.Credential.host.like(search),
                        models.Credential.password.like(search),
                    )
                )
                .all()
            )
        # if we're filtering by credential type (hash, plaintext, token)
        elif credtype and credtype != "":
            # BUG FIX: was ilike(f"%credtype%") — an f-string without braces
            # is a literal, so the filter never matched the requested type.
            results = (
                Session()
                .query(models.Credential)
                .filter(models.Credential.credtype.ilike(credtype))
                .all()
            )
        # if we're filtering by content in the note field
        elif note and note != "":
            search = "%{}%".format(note)
            # BUG FIX: was ilike(f"%search%") — a literal string; use the
            # constructed wildcard pattern.
            results = (
                Session()
                .query(models.Credential)
                .filter(models.Credential.note.ilike(search))
                .all()
            )
        # if we're filtering by content in the OS field
        elif os and os != "":
            search = "%{}%".format(os)
            # BUG FIX: was ilike("%search%") — a literal string; use the
            # constructed wildcard pattern.
            results = (
                Session()
                .query(models.Credential)
                .filter(models.Credential.os.ilike(search))
                .all()
            )
        # otherwise return all credentials
        else:
            results = Session().query(models.Credential).all()
        return results

    def get_krbtgt(self):
        """
        Return all krbtgt credentials from the database.
        """
        # BUG FIX: the keyword was misspelled ``filterTerm``, which raised
        # TypeError (get_credentials has no such parameter).
        return self.get_credentials(credtype="hash", filter_term="krbtgt")

    def add_credential(
        self, credtype, domain, username, password, host, os="", sid="", notes=""
    ):
        """
        Add a credential with the specified information to the database.

        Returns the new models.Credential, or None when an identical
        (credtype, domain, username, password) entry already exists.
        """
        results = (
            Session()
            .query(models.Credential)
            .filter(
                and_(
                    models.Credential.credtype.like(credtype),
                    models.Credential.domain.like(domain),
                    models.Credential.username.like(username),
                    models.Credential.password.like(password),
                )
            )
            .all()
        )
        if not results:
            credential = models.Credential(
                credtype=credtype,
                domain=domain,
                username=username,
                password=password,
                host=host,
                os=os,
                sid=sid,
                notes=notes,
            )
            Session().add(credential)
            Session().commit()
            return credential

    def add_credential_note(self, credential_id, note):
        """
        Update the note attached to a credential in the database.
        """
        # BUG FIX: previously queried models.Agent while filtering on
        # models.Credential.id, targeting the wrong model entirely; query
        # the Credential model directly.
        credential = (
            Session()
            .query(models.Credential)
            .filter(models.Credential.id == credential_id)
            .first()
        )
        credential.notes = note
        Session().commit()

    def remove_credentials(self, credIDs):
        """
        Removes a list of credential IDs from the database.
        """
        for credID in credIDs:
            cred_entry = (
                Session()
                .query(models.Credential)
                .filter(models.Credential.id == credID)
                .first()
            )
            Session().delete(cred_entry)
            Session().commit()

    def remove_all_credentials(self):
        """
        Remove all credentials from the database.
        """
        creds = Session().query(models.Credential).all()
        for cred in creds:
            Session().delete(cred)
        Session().commit()

    def export_credentials(self, export_path=""):
        """
        Export the credentials in the database to an output file
        (".csv" is appended to the given path).
        """
        if export_path == "":
            print(helpers.color("[!] Export path cannot be ''"))
            # BUG FIX: previously fell through and wrote to the file ".csv".
            return
        export_path += ".csv"
        if os.path.exists(export_path):
            try:
                choice = input(
                    helpers.color(
                        "[>] File %s already exists, overwrite? [y/N] " % (export_path),
                        "red",
                    )
                )
                if choice.lower() != "" and choice.lower()[0] == "y":
                    pass
                else:
                    return
            except KeyboardInterrupt:
                return
        creds = self.get_credentials()
        if len(creds) == 0:
            print(helpers.color("[!] No credentials in the database."))
            return
        with open(export_path, "w") as output_file:
            output_file.write(
                "CredID,CredType,Domain,Username,Password,Host,OS,SID,Notes\n"
            )
            for cred in creds:
                # NOTE(review): this iterates the model instance directly and
                # relies on models.Credential being iterable — confirm.
                output_file.write('"%s"\n' % ('","'.join([str(x) for x in cred])))
        print(
            "\n" + helpers.color("[*] Credentials exported to %s\n" % (export_path))
        )
| 30.105485 | 88 | 0.502453 | from __future__ import absolute_import, print_function
import os
from builtins import input, object, str
from sqlalchemy import and_, or_
from empire.server.database import models
from empire.server.database.base import Session
from . import helpers
class Credentials(object):
def __init__(self, MainMenu, args=None):
self.mainMenu = MainMenu
self.installPath = self.mainMenu.installPath
self.args = args
def is_credential_valid(self, credentialID):
results = (
Session()
.query(models.Credential)
.filter(models.Credential.id == credentialID)
.all()
)
return len(results) > 0
def get_credentials(self, filter_term=None, credtype=None, note=None, os=None):
if self.is_credential_valid(filter_term):
results = (
Session()
.query(models.Credential)
.filter(models.Credential.id == filter_term)
.first()
)
# if we're filtering by host/username
elif filter_term and filter_term != "":
filter_term = filter_term.replace("*", "%")
search = "%{}%".format(filter_term)
results = (
Session()
.query(models.Credential)
.filter(
or_(
models.Credential.domain.like(search),
models.Credential.username.like(search),
models.Credential.host.like(search),
models.Credential.password.like(search),
)
)
.all()
)
elif credtype and credtype != "":
results = (
Session()
.query(models.Credential)
.filter(models.Credential.credtype.ilike(f"%credtype%"))
.all()
)
# if we're filtering by content in the note field
elif note and note != "":
search = "%{}%".format(note)
results = (
Session()
.query(models.Credential)
.filter(models.Credential.note.ilike(f"%search%"))
.all()
)
elif os and os != "":
search = "%{}%".format(os)
results = (
Session()
.query(models.Credential)
.filter(models.Credential.os.ilike("%search%"))
.all()
)
# otherwise return all credentials
else:
results = Session().query(models.Credential).all()
return results
def get_krbtgt(self):
return self.get_credentials(credtype="hash", filterTerm="krbtgt")
def add_credential(
self, credtype, domain, username, password, host, os="", sid="", notes=""
):
results = (
Session()
.query(models.Credential)
.filter(
and_(
models.Credential.credtype.like(credtype),
models.Credential.domain.like(domain),
models.Credential.username.like(username),
models.Credential.password.like(password),
)
)
.all()
)
if len(results) == 0:
credential = models.Credential(
credtype=credtype,
domain=domain,
username=username,
password=password,
host=host,
os=os,
sid=sid,
notes=notes,
)
Session().add(credential)
Session().commit()
return credential
def add_credential_note(self, credential_id, note):
results = (
Session()
.query(models.Agent)
.filter(models.Credential.id == credential_id)
.first()
)
results.notes = note
Session().commit()
def remove_credentials(self, credIDs):
for credID in credIDs:
cred_entry = (
Session()
.query(models.Credential)
.filter(models.Credential.id == credID)
.first()
)
Session().delete(cred_entry)
Session().commit()
def remove_all_credentials(self):
creds = Session().query(models.Credential).all()
for cred in creds:
Session().delete(cred)
Session().commit()
def export_credentials(self, export_path=""):
if export_path == "":
print(helpers.color("[!] Export path cannot be ''"))
export_path += ".csv"
if os.path.exists(export_path):
try:
choice = input(
helpers.color(
"[>] File %s already exists, overwrite? [y/N] " % (export_path),
"red",
)
)
if choice.lower() != "" and choice.lower()[0] == "y":
pass
else:
return
except KeyboardInterrupt:
return
creds = self.get_credentials()
if len(creds) == 0:
print(helpers.color("[!] No credentials in the database."))
return
with open(export_path, "w") as output_file:
output_file.write(
"CredID,CredType,Domain,Username,Password,Host,OS,SID,Notes\n"
)
for cred in creds:
output_file.write('"%s"\n' % ('","'.join([str(x) for x in cred])))
print(
"\n" + helpers.color("[*] Credentials exported to %s\n" % (export_path))
)
| true | true |
f7fa9deffa07fd6b088b4a38645b7fc6de88dcd6 | 5,044 | py | Python | homeassistant/components/everlights/light.py | itewk/home-assistant | 769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4 | [
"Apache-2.0"
] | 23 | 2017-11-15T21:03:53.000Z | 2021-03-29T21:33:48.000Z | homeassistant/components/everlights/light.py | itewk/home-assistant | 769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4 | [
"Apache-2.0"
] | 9 | 2022-01-27T06:32:10.000Z | 2022-03-31T07:07:51.000Z | homeassistant/components/everlights/light.py | itewk/home-assistant | 769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4 | [
"Apache-2.0"
] | 10 | 2018-01-01T00:12:51.000Z | 2021-12-21T23:08:05.000Z | """Support for EverLights lights."""
from datetime import timedelta
import logging
from typing import Tuple
import pyeverlights
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_EFFECT,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_EFFECT,
Light,
)
from homeassistant.const import CONF_HOSTS
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
SUPPORT_EVERLIGHTS = SUPPORT_EFFECT | SUPPORT_BRIGHTNESS | SUPPORT_COLOR
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_HOSTS): vol.All(cv.ensure_list, [cv.string])}
)
NAME_FORMAT = "EverLights {} Zone {}"
def color_rgb_to_int(red: int, green: int, blue: int) -> int:
    """Pack an (R, G, B) triple into a single 24-bit integer."""
    # Horner form of red*256**2 + green*256 + blue.
    return blue + 256 * (green + 256 * red)
def color_int_to_rgb(value: int) -> Tuple[int, int, int]:
    """Unpack a 24-bit integer into an (R, G, B) triple."""
    # Peel the channels off least-significant first; for non-negative
    # values this is exactly (value >> 16, (value >> 8) & 0xFF, value & 0xFF).
    upper, blue = divmod(value, 256)
    red, green = divmod(upper, 256)
    return (red, green, blue)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the EverLights lights from configuration.yaml."""
    lights = []
    for ipaddr in config[CONF_HOSTS]:
        # One client per configured control box, sharing HA's aiohttp session.
        api = pyeverlights.EverLights(ipaddr, async_get_clientsession(hass))
        try:
            # Probe the box: both are network calls to the device.
            status = await api.get_status()
            effects = await api.get_all_patterns()
        except pyeverlights.ConnectionError:
            # Home Assistant retries platform setup later.
            raise PlatformNotReady
        else:
            # Each control box exposes two independently controllable zones.
            lights.append(EverLightsLight(api, pyeverlights.ZONE_1, status, effects))
            lights.append(EverLightsLight(api, pyeverlights.ZONE_2, status, effects))
    async_add_entities(lights)
class EverLightsLight(Light):
    """Representation of one zone of an EverLights light controller."""
    def __init__(self, api, channel, status, effects):
        """Initialize the light.

        api: pyeverlights client bound to one control box.
        channel: zone constant (pyeverlights.ZONE_1 or ZONE_2).
        status: status dict from the box; must contain the 'mac' key.
        effects: pattern ids exposed as the effect list.
        """
        self._api = api
        self._channel = channel
        self._status = status
        self._effects = effects
        self._mac = status["mac"]
        self._error_reported = False
        # The box does not report colour state back, so the last commanded
        # colour/brightness are cached locally.
        self._hs_color = [255, 255]
        self._brightness = 255
        self._effect = None
        self._available = True
    @property
    def unique_id(self) -> str:
        """Return a unique ID (MAC address plus zone number)."""
        return f"{self._mac}-{self._channel}"
    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._available
    @property
    def name(self):
        """Return the name of the device."""
        return NAME_FORMAT.format(self._mac, self._channel)
    @property
    def is_on(self):
        """Return true if device is on."""
        # The status payload exposes one active flag per zone, e.g. 'ch1Active'.
        return self._status[f"ch{self._channel}Active"] == 1
    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._brightness
    @property
    def hs_color(self):
        """Return the hue/saturation color of the light."""
        return self._hs_color
    @property
    def effect(self):
        """Return the currently active effect (pattern id), if any."""
        return self._effect
    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_EVERLIGHTS
    @property
    def effect_list(self):
        """Return the list of supported effects."""
        return self._effects
    async def async_turn_on(self, **kwargs):
        """Turn the light on."""
        hs_color = kwargs.get(ATTR_HS_COLOR, self._hs_color)
        brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
        effect = kwargs.get(ATTR_EFFECT)
        if effect is not None:
            # Activating a stored pattern: the box returns the pattern's
            # colours; mirror the first one into the cached hs/brightness.
            colors = await self._api.set_pattern_by_id(self._channel, effect)
            rgb = color_int_to_rgb(colors[0])
            hsv = color_util.color_RGB_to_hsv(*rgb)
            hs_color = hsv[:2]
            # hsv value component is 0..100; scale to HA's 0..255 range.
            brightness = hsv[2] / 100 * 255
        else:
            # Plain colour: send a single-colour pattern built from the
            # requested hue/saturation and brightness.
            rgb = color_util.color_hsv_to_RGB(*hs_color, brightness / 255 * 100)
            colors = [color_rgb_to_int(*rgb)]
            await self._api.set_pattern(self._channel, colors)
        self._hs_color = hs_color
        self._brightness = brightness
        self._effect = effect
    async def async_turn_off(self, **kwargs):
        """Turn the light off."""
        await self._api.clear_pattern(self._channel)
    async def async_update(self):
        """Synchronize state with control box."""
        try:
            self._status = await self._api.get_status()
        except pyeverlights.ConnectionError:
            # Log the transition to unavailable only once.
            if self._available:
                _LOGGER.warning("EverLights control box connection lost.")
            self._available = False
        else:
            # Log the transition back to available only once.
            if not self._available:
                _LOGGER.warning("EverLights control box connection restored.")
            self._available = True
| 29.497076 | 86 | 0.648295 | from datetime import timedelta
import logging
from typing import Tuple
import pyeverlights
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_EFFECT,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_EFFECT,
Light,
)
from homeassistant.const import CONF_HOSTS
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
SUPPORT_EVERLIGHTS = SUPPORT_EFFECT | SUPPORT_BRIGHTNESS | SUPPORT_COLOR
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_HOSTS): vol.All(cv.ensure_list, [cv.string])}
)
NAME_FORMAT = "EverLights {} Zone {}"
def color_rgb_to_int(red: int, green: int, blue: int) -> int:
return red * 256 * 256 + green * 256 + blue
def color_int_to_rgb(value: int) -> Tuple[int, int, int]:
return (value >> 16, (value >> 8) & 0xFF, value & 0xFF)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
lights = []
for ipaddr in config[CONF_HOSTS]:
api = pyeverlights.EverLights(ipaddr, async_get_clientsession(hass))
try:
status = await api.get_status()
effects = await api.get_all_patterns()
except pyeverlights.ConnectionError:
raise PlatformNotReady
else:
lights.append(EverLightsLight(api, pyeverlights.ZONE_1, status, effects))
lights.append(EverLightsLight(api, pyeverlights.ZONE_2, status, effects))
async_add_entities(lights)
class EverLightsLight(Light):
    """Home Assistant light entity for one zone of an EverLights control box."""

    def __init__(self, api, channel, status, effects):
        """Initialize the zone.

        api: pyeverlights client for the control box.
        channel: zone identifier (pyeverlights.ZONE_1 or ZONE_2).
        status: initial status dict from api.get_status().
        effects: pattern list from api.get_all_patterns().
        """
        self._api = api
        self._channel = channel
        self._status = status
        self._effects = effects
        self._mac = status["mac"]
        # NOTE(review): _error_reported is set here but never read in this
        # class -- appears to be dead state; confirm before removing.
        self._error_reported = False
        # Optimistic local state: the box does not report color/brightness
        # back, so we remember what we last sent.
        self._hs_color = [255, 255]
        self._brightness = 255
        self._effect = None
        self._available = True

    @property
    def unique_id(self) -> str:
        """Return a unique ID built from the box MAC and the zone channel."""
        return f"{self._mac}-{self._channel}"

    @property
    def available(self) -> bool:
        """Return True while the control box is reachable."""
        return self._available

    @property
    def name(self):
        """Return the display name ('EverLights <mac> Zone <channel>')."""
        return NAME_FORMAT.format(self._mac, self._channel)

    @property
    def is_on(self):
        """Return True if this zone is currently active on the box."""
        return self._status[f"ch{self._channel}Active"] == 1

    @property
    def brightness(self):
        """Return the last brightness sent locally (0-255)."""
        return self._brightness

    @property
    def hs_color(self):
        """Return the last hue/saturation sent locally."""
        return self._hs_color

    @property
    def effect(self):
        """Return the currently selected effect name, if any."""
        return self._effect

    @property
    def supported_features(self):
        """Return supported feature flags (effect, brightness, color)."""
        return SUPPORT_EVERLIGHTS

    @property
    def effect_list(self):
        """Return the patterns stored on the control box."""
        return self._effects

    async def async_turn_on(self, **kwargs):
        """Turn the zone on, applying an effect or an explicit color/brightness."""
        hs_color = kwargs.get(ATTR_HS_COLOR, self._hs_color)
        brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
        effect = kwargs.get(ATTR_EFFECT)
        if effect is not None:
            # Stored pattern: derive color/brightness for the UI from the
            # pattern's first color.
            colors = await self._api.set_pattern_by_id(self._channel, effect)
            rgb = color_int_to_rgb(colors[0])
            hsv = color_util.color_RGB_to_hsv(*rgb)
            hs_color = hsv[:2]
            # hsv[2] is 0-100; scale to HA's 0-255 brightness (may be float).
            brightness = hsv[2] / 100 * 255
        else:
            rgb = color_util.color_hsv_to_RGB(*hs_color, brightness / 255 * 100)
            colors = [color_rgb_to_int(*rgb)]
            await self._api.set_pattern(self._channel, colors)
        # Only update local state after the API call succeeded.
        self._hs_color = hs_color
        self._brightness = brightness
        self._effect = effect

    async def async_turn_off(self, **kwargs):
        """Turn the zone off by clearing its pattern."""
        await self._api.clear_pattern(self._channel)

    async def async_update(self):
        """Poll the control box, logging availability transitions once."""
        try:
            self._status = await self._api.get_status()
        except pyeverlights.ConnectionError:
            if self._available:
                _LOGGER.warning("EverLights control box connection lost.")
            self._available = False
        else:
            if not self._available:
                _LOGGER.warning("EverLights control box connection restored.")
            self._available = True
| true | true |
f7fa9e7684e40eb97e693344e27ca77458e7dd13 | 77,071 | py | Python | ingress/ambassador/ambassador/ambassador/config.py | smthkissinger/docker-images | 35e868295d04fa780325ada4168381f1e80e8fe4 | [
"BSD-3-Clause"
] | 63 | 2018-02-04T03:31:22.000Z | 2022-03-07T08:27:39.000Z | ingress/ambassador/ambassador/ambassador/config.py | smthkissinger/docker-images | 35e868295d04fa780325ada4168381f1e80e8fe4 | [
"BSD-3-Clause"
] | 3 | 2021-03-26T00:22:50.000Z | 2021-03-26T00:26:07.000Z | ingress/ambassador/ambassador/ambassador/config.py | smthkissinger/docker-images | 35e868295d04fa780325ada4168381f1e80e8fe4 | [
"BSD-3-Clause"
] | 40 | 2018-01-22T16:31:16.000Z | 2022-03-08T04:40:42.000Z | # Copyright 2018 Datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import sys
import collections
import datetime
import json
import logging
import os
import re
from urllib.parse import urlparse
import jsonschema
import semantic_version
import yaml
from pkg_resources import Requirement, resource_filename
from jinja2 import Environment, FileSystemLoader
from .utils import RichStatus, SourcedDict, read_cert_secret, save_cert, TLSPaths, kube_v1, check_cert_file
from .mapping import Mapping
from scout import Scout
from .VERSION import Version
#############################################################################
## config.py -- the main configuration parser for Ambassador
##
## Ambassador configures itself by creating a new Config object, which calls
## Config.__init__().
##
## __init__() sets up all the defaults for everything, then walks over all the
## YAML it can find and calls self.load_yaml() to load each YAML file. After
## everything is loaded, it calls self.process_all_objects() to build the
## config objects.
##
## load_yaml() does the heavy lifting around YAML parsing and such, including
## managing K8s annotations if so requested. Every object in every YAML file is
## parsed and saved before any object is processed.
##
## process_all_objects() walks all the saved objects and creates an internal
## representation of the Ambassador config in the data structures initialized
## by __init__(). Each object is processed with self.process_object(). This
## internal representation is called the intermediate config.
##
## process_object() handles a single parsed object from YAML. It uses
## self.validate_object() to make sure of a schema match; assuming that's
## good, most of the heavy lifting is done by a handler method. The handler
## method for a given type is named handle_kind(), with kind in lowercase,
## so e.g. the Mapping object is processed using the handle_mapping() method.
##
## After all of that, the actual Envoy config is generated from the intermediate
## config using generate_envoy_config().
##
## The diag service also uses generate_intermediate_for() to extract the
## intermediate config for a given mapping or service.
def get_semver(what, version_string):
    """Parse version_string into a semantic_version.Version, or None if invalid.

    'what' labels which version is being parsed (e.g. "current", "latest");
    it is part of the call sites' vocabulary and unused here.
    """
    try:
        return semantic_version.Version(version_string)
    except ValueError:
        return None
class Config (object):
# Weird stuff. The build version looks like
#
# 0.12.0 for a prod build, or
# 0.12.1-b2.da5d895.DIRTY for a dev build (in this case made from a dirty true)
#
# Now:
# - Scout needs a build number (semver "+something") to flag a non-prod release;
# but
# - DockerHub cannot use a build number at all; but
# - 0.12.1-b2 comes _before_ 0.12.1+b2 in SemVer land.
#
# FFS.
#
# We cope with this by transforming e.g.
#
# 0.12.1-b2.da5d895.DIRTY into 0.12.1-b2+da5d895.DIRTY
#
# for Scout.
    # Version string reported to Scout (see the explanation above).
    scout_version = Version

    if '-' in scout_version:
        # TODO(plombardi): This version code needs to be rewritten. We should only report RC and GA versions.
        #
        # As of the time when we moved to streamlined branch, merge and release model the way versions in development
        # land are rendered has changed. A development version no longer has any <MAJOR>.<MINOR>.<PATCH> information and
        # is instead rendered as <BRANCH_NAME>-<GIT_SHORT_HASH>[-dirty] where [-dirty] is only appended for modified
        # source trees.
        #
        # Long term we are planning to remove the version report for development branches anyways so all of this
        # formatting for versions
        scout_version = "0.0.0-" + Version.split("-")[1]  # middle part is commit hash

        # Dev build!
        # v, p = scout_version.split('-')
        # p, b = p.split('.', 1) if ('.' in p) else (0, p)
        #
        # scout_version = "%s-%s+%s" % (v, p, b)

    # Use scout_version here, not __version__, because the version
    # coming back from Scout will use build numbers for dev builds, but
    # __version__ won't, and we need to be consistent for comparison.
    current_semver = get_semver("current", scout_version)

    # When using multiple Ambassadors in one cluster, use AMBASSADOR_ID to distinguish them.
    ambassador_id = os.environ.get('AMBASSADOR_ID', 'default')

    runtime = "kubernetes" if os.environ.get('KUBERNETES_SERVICE_HOST', None) else "docker"
    namespace = os.environ.get('AMBASSADOR_NAMESPACE', 'default')

    # Default to using the Nil UUID unless the environment variable is set explicitly
    scout_install_id = os.environ.get('AMBASSADOR_SCOUT_ID', "00000000-0000-0000-0000-000000000000")

    # Scout client construction happens at class-definition time; failures
    # are remembered in scout_error and surfaced later by __init__.
    try:
        scout = Scout(app="ambassador", version=scout_version, install_id=scout_install_id)
        scout_error = None
    except OSError as e:
        scout_error = e

    scout_latest_version = None
    scout_latest_semver = None
    scout_notices = []

    # NOTE(review): scout_report() reads/writes Config.scout_last_result, but
    # this attribute is named scout_last_response -- likely a typo. It only
    # works because the first report always takes the fresh-fetch path
    # (scout_last_update starts 24h in the past). Confirm before renaming.
    scout_last_response = None
    scout_last_update = datetime.datetime.now() - datetime.timedelta(hours=24)
    scout_update_frequency = datetime.timedelta(hours=4)
    @classmethod
    def scout_report(klass, force_result=None, **kwargs):
        """Report to the Scout phone-home service and collect notices.

        Results are cached for scout_update_frequency. AMBASSADOR_SCOUT_RESULT
        in the environment (JSON) forces a canned result. Updates class-level
        version/notice state as a side effect and returns the result dict.
        """
        _notices = []

        # An environment-supplied result always wins (useful for testing).
        env_result = os.environ.get("AMBASSADOR_SCOUT_RESULT", None)
        if env_result:
            force_result = json.loads(env_result)

        result = force_result
        result_timestamp = None
        result_was_cached = False

        if not result:
            if Config.scout:
                if 'runtime' not in kwargs:
                    kwargs['runtime'] = Config.runtime

                # How long since the last Scout update? If it's been more than
                # scout_update_frequency, check Scout again.

                now = datetime.datetime.now()

                if (now - Config.scout_last_update) > Config.scout_update_frequency:
                    result = Config.scout.report(**kwargs)

                    Config.scout_last_update = now
                    Config.scout_last_result = dict(**result)
                else:
                    # Too soon; serve a copy of the cached result.
                    result = dict(**Config.scout_last_result)
                    result_was_cached = True

                result_timestamp = Config.scout_last_update
            else:
                # No Scout client could be constructed at class setup time.
                result = { "scout": "unavailable" }
                result_timestamp = datetime.datetime.now()
        else:
            _notices.append({ "level": "debug", "message": "Returning forced result" })
            result_timestamp = datetime.datetime.now()

        if not Config.current_semver:
            _notices.append({
                "level": "warning",
                "message": "Ambassador has bad version '%s'??!" % Config.scout_version
            })

        result['cached'] = result_was_cached
        result['timestamp'] = result_timestamp.timestamp()

        # Do version & notices stuff.
        if 'latest_version' in result:
            latest_version = result['latest_version']
            latest_semver = get_semver("latest", latest_version)

            if latest_semver:
                Config.scout_latest_version = latest_version
                Config.scout_latest_semver = latest_semver
            else:
                _notices.append({
                    "level": "warning",
                    "message": "Scout returned bad version '%s'??!" % latest_version
                })

        # Offer an upgrade notice when Scout knows a newer version.
        if (Config.scout_latest_semver and
            ((not Config.current_semver) or
             (Config.scout_latest_semver > Config.current_semver))):
            _notices.append({
                "level": "info",
                "message": "Upgrade available! to Ambassador version %s" % Config.scout_latest_semver
            })

        if 'notices' in result:
            _notices.extend(result['notices'])

        Config.scout_notices = _notices

        return result
def __init__(self, config_dir_path, k8s=False, schema_dir_path=None, template_dir_path=None):
self.config_dir_path = config_dir_path
if not template_dir_path:
template_dir_path = resource_filename(Requirement.parse("ambassador"),"templates")
if not schema_dir_path:
schema_dir_path = resource_filename(Requirement.parse("ambassador"),"schemas")
self.schema_dir_path = schema_dir_path
self.template_dir_path = template_dir_path
self.namespace = os.environ.get('AMBASSADOR_NAMESPACE', 'default')
self.logger = logging.getLogger("ambassador.config")
self.logger.debug("Scout version %s" % Config.scout_version)
self.logger.debug("Runtime %s" % Config.runtime)
self.logger.debug("CONFIG DIR %s" % os.path.abspath(self.config_dir_path))
self.logger.debug("TEMPLATE DIR %s" % os.path.abspath(self.template_dir_path))
self.logger.debug("SCHEMA DIR %s" % os.path.abspath(self.schema_dir_path))
if Config.scout_error:
self.logger.warning("Couldn't do version check: %s" % str(Config.scout_error))
self.schemas = {}
self.config = {}
self.tls_contexts = {}
self.envoy_config = {}
self.envoy_clusters = {}
self.envoy_routes = {}
self.sources = {
"--internal--": {
"_source": "--internal--",
"kind": "Internal",
"version": "v0",
"name": "Ambassador Internals",
"filename": "--internal--",
"index": 0,
"description": "The '--internal--' source marks objects created by Ambassador's internal logic."
},
"--diagnostics--": {
"_source": "--diagnostics--",
"kind": "diagnostics",
"version": "v0",
"name": "Ambassador Diagnostics",
"filename": "--diagnostics--",
"index": 0,
"description": "The '--diagnostics--' source marks objects created by Ambassador to assist with diagnostic output."
}
}
self.source_map = {
'--internal--': { '--internal--': True }
}
self.source_overrides = {}
self.default_liveness_probe = {
"enabled": True,
"prefix": "/ambassador/v0/check_alive",
"rewrite": "/ambassador/v0/check_alive",
# "service" gets added later
}
self.default_readiness_probe = {
"enabled": True,
"prefix": "/ambassador/v0/check_ready",
"rewrite": "/ambassador/v0/check_ready",
# "service" gets added later
}
self.default_diagnostics = {
"enabled": True,
"prefix": "/ambassador/v0/",
"rewrite": "/ambassador/v0/",
# "service" gets added later
}
# 'server' and 'client' are special contexts. Others
# use cert_chain_file defaulting to context.crt,
# private_key_file (context.key), and cacert_chain_file
# (context.pem).
self.default_tls_config = {
"server": {},
"client": {},
}
if os.path.isfile(TLSPaths.mount_tls_crt.value):
self.default_tls_config["server"]["cert_chain_file"] = TLSPaths.mount_tls_crt.value
if os.path.isfile(TLSPaths.mount_tls_key.value):
self.default_tls_config["server"]["private_key_file"] = TLSPaths.mount_tls_key.value
if os.path.isfile(TLSPaths.client_mount_crt.value):
self.default_tls_config["client"]["cacert_chain_file"] = TLSPaths.client_mount_crt.value
self.tls_config = None
self.errors = {}
self.fatal_errors = 0
self.object_errors = 0
self.objects_to_process = []
if not os.path.isdir(self.config_dir_path):
raise Exception("ERROR ERROR ERROR configuration directory %s does not exist; exiting" % self.config_dir_path)
for dirpath, dirnames, filenames in os.walk(self.config_dir_path, topdown=True):
# Modify dirnames in-place (dirs[:]) to remove any weird directories
# whose names start with '.' -- why? because my GKE cluster mounts my
# ConfigMap with a self-referential directory named
# /etc/ambassador-config/..9989_25_09_15_43_06.922818753, and if we don't
# ignore that, we end up trying to read the same config files twice, which
# triggers the collision checks. Sigh.
dirnames[:] = sorted([ d for d in dirnames if not d.startswith('.') ])
# self.logger.debug("WALK %s: dirs %s, files %s" % (dirpath, dirnames, filenames))
for filename in sorted([ x for x in filenames if x.endswith(".yaml") ]):
filepath = os.path.join(dirpath, filename)
self.load_yaml(filepath, filename, open(filepath, "r").read(), ocount=1, k8s=k8s)
self.process_all_objects()
if self.fatal_errors:
# Kaboom.
raise Exception("ERROR ERROR ERROR Unparseable configuration; exiting")
if self.errors:
self.logger.error("ERROR ERROR ERROR Starting with configuration errors")
self.generate_intermediate_config()
    def load_yaml(self, filepath, filename, serialization, resource_identifier=None, ocount=1, k8s=False):
        """Parse a YAML serialization and queue its documents for processing.

        Each document is appended to self.objects_to_process; with k8s=True
        the documents are routed through prep_k8s() to unwrap annotations.
        Parse errors are posted (not raised) so one bad document doesn't
        take down the gateway. Returns the updated object count.
        """
        try:
            # XXX This is a bit of a hack -- yaml.safe_load_all returns a
            # generator, and if we don't use list() here, any exception
            # dealing with the actual object gets deferred
            for obj in yaml.safe_load_all(serialization):
                if k8s:
                    ocount = self.prep_k8s(filepath, filename, ocount, obj)
                else:
                    # k8s objects will have an identifier, for other objects use filepath
                    object_unique_id = resource_identifier or filepath
                    self.objects_to_process.append((object_unique_id, filename, ocount, obj))
                    ocount += 1
        except Exception as e:
            # No sense letting one attribute with bad YAML take down the whole
            # gateway, so post the error but keep any objects we were able to
            # parse before hitting the error.
            self.resource_identifier = resource_identifier or filepath
            self.filename = filename
            self.ocount = ocount

            self.post_error(RichStatus.fromError("%s: could not parse YAML" % filepath))

        return ocount
def prep_k8s(self, filepath, filename, ocount, obj):
kind = obj.get('kind', None)
if kind != "Service":
self.logger.debug("%s/%s: ignoring K8s %s object" %
(filepath, ocount, kind))
return ocount
metadata = obj.get('metadata', None)
if not metadata:
self.logger.debug("%s/%s: ignoring unannotated K8s %s" %
(filepath, ocount, kind))
return ocount
# Use metadata to build an unique resource identifier
resource_name = metadata.get('name')
# This should never happen as the name field is required in metadata for Service
if not resource_name:
self.logger.debug("%s/%s: ignoring unnamed K8s %s" %
(filepath, ocount, kind))
return ocount
resource_namespace = metadata.get('namespace', 'default')
# This resource identifier is useful for log output since filenames can be duplicated (multiple subdirectories)
resource_identifier = '{name}.{namespace}'.format(namespace=resource_namespace, name=resource_name)
annotations = metadata.get('annotations', None)
if annotations:
annotations = annotations.get('getambassador.io/config', None)
# self.logger.debug("annotations %s" % annotations)
if not annotations:
self.logger.debug("%s/%s: ignoring K8s %s without Ambassador annotation" %
(filepath, ocount, kind))
return ocount
return self.load_yaml(filepath, filename + ":annotation", annotations, ocount=ocount, resource_identifier=resource_identifier)
def process_all_objects(self):
for resource_identifier, filename, ocount, obj in sorted(self.objects_to_process):
# resource_identifier is either a filepath or <name>.<namespace>
self.resource_identifier = resource_identifier
# This fallback prevents issues for internal/diagnostics objects
self.filename = filename
self.ocount = ocount
if self.filename in self.source_overrides:
# Let Pragma objects override source information for this filename.
override = self.source_overrides[self.filename]
self.source = override.get('source', self.filename)
self.ocount += override.get('ocount_delta', 0)
else:
# No pragma involved here; just default to the filename.
self.source = self.filename
# Is the object empty?
if obj == None :
self.logger.debug("Annotation has empty config")
return
# Is an ambassador_id present in this object?
allowed_ids = obj.get('ambassador_id', 'default')
if allowed_ids:
# Make sure it's a list. Yes, this is Draconian,
# but the jsonschema will allow only a string or a list,
# and guess what? Strings are iterables.
if type(allowed_ids) != list:
allowed_ids = [ allowed_ids ]
if Config.ambassador_id not in allowed_ids:
self.logger.debug("PROCESS: skip %s.%d; id %s not in %s" %
(self.resource_identifier, self.ocount, Config.ambassador_id, allowed_ids))
continue
self.logger.debug("PROCESS: %s.%d => %s" % (self.resource_identifier, self.ocount, self.source))
rc = self.process_object(obj)
if not rc:
# Object error. Not good but we'll allow the system to start.
self.post_error(rc)
    def clean_and_copy(self, d):
        """Return copies of d's values in sorted-key order, stripping the
        internal '_source'/'_referenced_by' bookkeeping from the stored
        originals.

        NOTE(review): the del()s target the ORIGINAL dicts while the returned
        copies keep the metadata keys. If the intent was to return cleaned
        copies, these should delete from 'copy' instead -- confirm against
        callers before changing; current behavior matches upstream.
        """
        out = []

        for key in sorted(d.keys()):
            original = d[key]
            copy = dict(**original)

            if '_source' in original:
                del(original['_source'])

            if '_referenced_by' in original:
                del(original['_referenced_by'])

            out.append(copy)

        return out
def current_source_key(self):
return("%s.%d" % (self.filename, self.ocount))
    def post_error(self, rc, key=None):
        """Record an error (a RichStatus) against a source key.

        Creates a synthetic source record if the key is unknown, then
        appends the error to both the source record and self.errors,
        and logs it. key defaults to the current source key.
        """
        if not key:
            key = self.current_source_key()

        # Yuck. Strip the trailing '.<index>' to recover the filename.
        filename = re.sub(r'\.\d+$', '', key)

        # Fetch the relevant source info. If it doesn't exist, stuff
        # in a fake record.
        source_info = self.sources.setdefault(key, {
            'kind': 'error',
            'version': 'error',
            'name': 'error',
            'filename': filename,
            'index': self.ocount,
            'yaml': 'error'
        })

        source_info.setdefault('errors', [])
        source_info['errors'].append(rc.toDict())

        source_map = self.source_map.setdefault(filename, {})
        source_map[key] = True

        errors = self.errors.setdefault(key, [])
        errors.append(rc.toDict())
        self.logger.error("%s (%s): %s" % (key, filename, rc))
    def process_object(self, obj):
        """Validate one parsed object and dispatch it to its kind handler.

        Pragma objects are handled inline. Everything else is schema-checked
        via validate_object() and routed to handle_<kind>() -- falling back
        to save_object() when no handler exists. Returns a RichStatus.
        """
        # Cache the source key first thing...
        source_key = self.current_source_key()

        # This should be impossible.
        if not obj:
            return RichStatus.fromError("undefined object???")

        try:
            obj_version = obj['apiVersion']
            obj_kind = obj['kind']
        except KeyError:
            return RichStatus.fromError("need apiVersion, kind")

        # Is this a pragma object?
        if obj_kind == 'Pragma':
            # Yes. Handle this inline and be done.
            return self.handle_pragma(source_key, obj)

        # Not a pragma. It needs a name...
        if 'name' not in obj:
            return RichStatus.fromError("need name")

        obj_name = obj['name']

        # ...and off we go. Save the source info...
        self.sources[source_key] = {
            'kind': obj_kind,
            'version': obj_version,
            'name': obj_name,
            'filename': self.filename,
            'index': self.ocount,
            'yaml': yaml.safe_dump(obj, default_flow_style=False)
        }

        # ...and figure out if this thing is OK.
        rc = self.validate_object(obj)

        if not rc:
            # Well that's no good.
            return rc

        # Make sure it has a source: use what's in the object if present,
        # otherwise use self.source.
        self.sources[source_key]['_source'] = obj.get('source', self.source)

        source_map = self.source_map.setdefault(self.filename, {})
        source_map[source_key] = True

        # OK, so far so good. Grab the handler for this object type.
        handler_name = "handle_%s" % obj_kind.lower()
        handler = getattr(self, handler_name, None)

        if not handler:
            handler = self.save_object
            self.logger.warning("%s[%d]: no handler for %s, just saving" %
                                (self.resource_identifier, self.ocount, obj_kind))

        try:
            handler(source_key, obj, obj_name, obj_kind, obj_version)
        except Exception as e:
            # Bzzzt. Handler blew up; report rather than crash.
            return RichStatus.fromError("could not process %s object: %s" % (obj_kind, e))

        # OK, all's well.
        return RichStatus.OK(msg="%s object processed successfully" % obj_kind)
def validate_object(self, obj):
# Each object must be a dict, and must include "apiVersion"
# and "type" at toplevel.
if not isinstance(obj, collections.Mapping):
return RichStatus.fromError("not a dictionary")
if not (("apiVersion" in obj) and ("kind" in obj) and ("name" in obj)):
return RichStatus.fromError("must have apiVersion, kind, and name")
obj_version = obj['apiVersion']
obj_kind = obj['kind']
obj_name = obj['name']
if obj_version.startswith("ambassador/"):
obj_version = obj_version.split('/')[1]
else:
return RichStatus.fromError("apiVersion %s unsupported" % obj_version)
schema_key = "%s-%s" % (obj_version, obj_kind)
schema = self.schemas.get(schema_key, None)
if not schema:
schema_path = os.path.join(self.schema_dir_path, obj_version,
"%s.schema" % obj_kind)
try:
schema = json.load(open(schema_path, "r"))
except OSError:
self.logger.debug("no schema at %s, skipping" % schema_path)
except json.decoder.JSONDecodeError as e:
self.logger.warning("corrupt schema at %s, skipping (%s)" %
(schema_path, e))
if schema:
self.schemas[schema_key] = schema
try:
jsonschema.validate(obj, schema)
except jsonschema.exceptions.ValidationError as e:
return RichStatus.fromError("not a valid %s: %s" % (obj_kind, e))
return RichStatus.OK(msg="valid %s" % obj_kind,
details=(obj_kind, obj_version, obj_name))
def safe_store(self, source_key, storage_name, obj_name, obj_kind, value, allow_log=True):
storage = self.config.setdefault(storage_name, {})
if obj_name in storage:
# Oooops.
raise Exception("%s[%d] defines %s %s, which is already present" %
(self.resource_identifier, self.ocount, obj_kind, obj_name))
if allow_log:
self.logger.debug("%s[%d]: saving %s %s" %
(self.resource_identifier, self.ocount, obj_kind, obj_name))
storage[obj_name] = value
return storage[obj_name]
def save_object(self, source_key, obj, obj_name, obj_kind, obj_version):
return self.safe_store(source_key, obj_kind, obj_name, obj_kind,
SourcedDict(_source=source_key, **obj))
def handle_pragma(self, source_key, obj):
keylist = sorted([x for x in sorted(obj.keys()) if ((x != 'apiVersion') and (x != 'kind'))])
# self.logger.debug("PRAGMA: %s" % keylist)
for key in keylist:
if key == 'source':
override = self.source_overrides.setdefault(self.filename, {})
override['source'] = obj['source']
self.logger.debug("PRAGMA: override %s to %s" %
(self.resource_identifier, self.source_overrides[self.filename]['source']))
elif key == 'autogenerated':
override = self.source_overrides.setdefault(self.filename, {})
override['ocount_delta'] = -1
# self.logger.debug("PRAGMA: autogenerated, setting ocount_delta to -1")
# else:
# self.logger.debug("PRAGMA: skip %s" % key)
return RichStatus.OK(msg="handled pragma object")
def handle_module(self, source_key, obj, obj_name, obj_kind, obj_version):
return self.safe_store(source_key, "modules", obj_name, obj_kind,
SourcedDict(_source=source_key, **obj['config']))
def handle_ratelimitservice(self, source_key, obj, obj_name, obj_kind, obj_version):
return self.safe_store(source_key, "ratelimit_configs", obj_name, obj_kind,
SourcedDict(_source=source_key, **obj))
def handle_tracingservice(self, source_key, obj, obj_name, obj_kind, obj_version):
return self.safe_store(source_key, "tracing_configs", obj_name, obj_kind,
SourcedDict(_source=source_key, **obj))
def handle_authservice(self, source_key, obj, obj_name, obj_kind, obj_version):
return self.safe_store(source_key, "auth_configs", obj_name, obj_kind,
SourcedDict(_source=source_key, **obj))
def handle_mapping(self, source_key, obj, obj_name, obj_kind, obj_version):
mapping = Mapping(source_key, **obj)
return self.safe_store(source_key, "mappings", obj_name, obj_kind, mapping)
def diag_port(self):
modules = self.config.get("modules", {})
amod = modules.get("ambassador", {})
return amod.get("diag_port", 8877)
def diag_service(self):
return "127.0.0.1:%d" % self.diag_port()
    def add_intermediate_cluster(self, _source, name, _service, urls,
                                 type="strict_dns", lb_type="round_robin",
                                 cb_name=None, od_name=None, originate_tls=None,
                                 grpc=False, host_rewrite=None, ssl_context=None):
        """Create (or re-reference) an Envoy cluster in self.envoy_clusters.

        A new cluster records its source, service, discovery/LB types, and
        URLs, plus optional circuit-breaker, outlier-detection, TLS
        origination, SNI, and gRPC settings. If the cluster already exists,
        _source is just added to its referencers.

        NOTE(review): self.breakers / self.outliers are not initialized in
        the visible __init__ -- presumably populated elsewhere before this
        runs; confirm.
        """
        if name not in self.envoy_clusters:
            self.logger.debug("CLUSTER %s: new from %s" % (name, _source))

            cluster = SourcedDict(
                _source=_source,
                _referenced_by=[ _source ],
                _service=_service,
                name=name,
                type=type,
                lb_type=lb_type,
                urls=urls
            )

            if cb_name and (cb_name in self.breakers):
                cluster['circuit_breakers'] = self.breakers[cb_name]
                self.breakers[cb_name]._mark_referenced_by(_source)

            if od_name and (od_name in self.outliers):
                cluster['outlier_detection'] = self.outliers[od_name]
                self.outliers[od_name]._mark_referenced_by(_source)

            if originate_tls == True:
                # Bare TLS origination with no named context.
                cluster['tls_context'] = { '_ambassador_enabled': True }
                cluster['tls_array'] = []
            elif (originate_tls and (originate_tls in self.tls_contexts)):
                # Named TLS context: flatten its non-internal keys into tls_array.
                cluster['tls_context'] = self.tls_contexts[originate_tls]
                self.tls_contexts[originate_tls]._mark_referenced_by(_source)

                tls_array = []

                for key, value in cluster['tls_context'].items():
                    if key.startswith('_'):
                        continue

                    tls_array.append({ 'key': key, 'value': value })

                cluster['tls_array'] = sorted(tls_array, key=lambda x: x['key'])
            elif ssl_context:
                # Explicit SSL context supplied by the caller.
                cluster['tls_context'] = ssl_context

                tls_array = []
                for key, value in ssl_context.items():
                    tls_array.append({ 'key': key, 'value': value })
                    cluster['tls_array'] = sorted(tls_array, key=lambda x: x['key'])

            if host_rewrite and originate_tls:
                # Use the rewritten host as the SNI name.
                cluster['tls_array'].append({'key': 'sni', 'value': host_rewrite })

            if grpc:
                cluster['features'] = 'http2'

            self.envoy_clusters[name] = cluster
        else:
            self.logger.debug("CLUSTER %s: referenced by %s" % (name, _source))

            self.envoy_clusters[name]._mark_referenced_by(_source)
# XXX This is a silly API. We should have a Cluster object that can carry what kind
# of cluster it is (this is a target cluster of weight 50%, this is a shadow cluster,
# whatever) and the API should be "add this cluster to this Mapping".
    def add_intermediate_route(self, _source, mapping, svc, cluster_name, shadow=False):
        """Attach a cluster to the route group for a Mapping.

        If the group exists, the cluster is merged in (as a weighted member
        or the shadow target; duplicate host_redirects are rejected).
        Otherwise a new route is created from the Mapping.

        NOTE(review): the 'shadow' parameter is immediately overwritten from
        the mapping below, so the argument is effectively unused.
        """
        route = self.envoy_routes.get(mapping.group_id, None)
        host_redirect = mapping.get('host_redirect', False)
        shadow = mapping.get('shadow', False)

        if route:
            # Is this a host_redirect? If so, that's an error.
            if host_redirect:
                self.logger.error("ignoring non-unique host_redirect mapping %s (see also %s)" %
                                  (mapping['name'], route['_source']))

            # Is this a shadow? If so, is there already a shadow marked?
            elif shadow:
                extant_shadow = route.get('shadow', None)

                if extant_shadow:
                    shadow_name = extant_shadow.get('name', None)

                    if shadow_name != cluster_name:
                        self.logger.error("mapping %s defines multiple shadows! Ignoring %s" %
                                          (mapping['name'], cluster_name))
                else:
                    # XXX CODE DUPLICATION with mapping.py!!
                    # We're going to need to support shadow weighting later, so use a dict here.
                    route['shadow'] = {
                        'name': cluster_name
                    }
                    route.setdefault('clusters', [])
            else:
                # Take the easy way out -- just add a new entry to this
                # route's set of weighted clusters.
                route["clusters"].append( { "name": cluster_name,
                                            "weight": mapping.attrs.get("weight", None) } )

            route._mark_referenced_by(_source)

            return

        # OK, if here, we don't have an extent route group for this Mapping. Make a
        # new one.
        route = mapping.new_route(svc, cluster_name)
        self.envoy_routes[mapping.group_id] = route
    def service_tls_check(self, svc, context, host_rewrite):
        """Normalize a service string and determine TLS origination.

        Strips any http:// or https:// scheme, decides whether to originate
        TLS (https scheme, context=True, or a named TLS context), and builds
        a tcp:// URL with a default port (443 with TLS, else 80) when the
        service has no explicit port.

        Returns (svc, svc_url, originate_tls, context_name) where
        originate_tls is False, True, or the named-context string, and
        context_name is the name-mangling suffix (or None).
        """
        originate_tls = False
        name_fields = None

        if svc.lower().startswith("http://"):
            originate_tls = False
            svc = svc[len("http://"):]
        elif svc.lower().startswith("https://"):
            originate_tls = True
            name_fields = [ 'otls' ]
            svc = svc[len("https://"):]
        elif context == True:
            originate_tls = True
            name_fields = [ 'otls' ]

        # Separate if here because you need to be able to specify a context
        # even after you say "https://" for the service.

        if context and (context != True):
            if context in self.tls_contexts:
                name_fields = [ 'otls', context ]
                originate_tls = context
            else:
                self.logger.error("Originate-TLS context %s is not defined" % context)

        if originate_tls and host_rewrite:
            # Host rewrite participates in the cluster name so differing
            # SNI targets get distinct clusters.
            name_fields.append("hr-%s" % host_rewrite)

        port = 443 if originate_tls else 80

        context_name = "_".join(name_fields) if name_fields else None

        svc_url = 'tcp://%s' % svc

        if ':' not in svc:
            svc_url = '%s:%d' % (svc_url, port)

        return (svc, svc_url, originate_tls, context_name)
    def add_clusters_for_mapping(self, mapping):
        """Create the Envoy cluster a Mapping needs.

        Returns (svc, cluster_name). cluster_name is None for host_redirect
        mappings, which don't need a cluster at all.
        """
        svc = mapping['service']
        tls_context = mapping.get('tls', None)
        grpc = mapping.get('grpc', False)
        host_rewrite = mapping.get('host_rewrite', None)

        # Given the service and the TLS context, first initialize the cluster name for the
        # main service with the incoming service string...

        cluster_name_fields = [ svc ]

        host_redirect = mapping.get('host_redirect', False)
        shadow = mapping.get('shadow', False)

        if host_redirect:
            if shadow:
                # Not allowed.
                errstr = "At most one of host_redirect and shadow may be set; ignoring host_redirect"
                self.post_error(RichStatus.fromError(errstr), key=mapping['_source'])

                host_redirect = False
            else:
                # Short-circuit. You needn't actually create a cluster for a
                # host_redirect mapping.
                return svc, None

        if shadow:
            cluster_name_fields.insert(0, "shadow")

        # ...then do whatever normalization we need for the name and the URL. This can
        # change the service name (e.g. "http://foo" will turn into "foo"), so we set
        # up cluster_name_fields above in order to preserve compatibility with older
        # versions of Ambassador. (This isn't a functional issue, just a matter of
        # trying not to confuse people on upgrades.)

        (svc, url, originate_tls, otls_name) = self.service_tls_check(svc, tls_context, host_rewrite)

        # Build up the common name stuff that we'll need for the service and
        # the shadow service.

        aux_name_fields = []

        cb_name = mapping.get('circuit_breaker', None)

        if cb_name:
            if cb_name in self.breakers:
                aux_name_fields.append("cb_%s" % cb_name)
            else:
                self.logger.error("CircuitBreaker %s is not defined (mapping %s)" %
                                  (cb_name, mapping.name))

        od_name = mapping.get('outlier_detection', None)

        if od_name:
            if od_name in self.outliers:
                aux_name_fields.append("od_%s" % od_name)
            else:
                self.logger.error("OutlierDetection %s is not defined (mapping %s)" %
                                  (od_name, mapping.name))

        # OK. Use the main service stuff to build up the main cluster.

        if otls_name:
            cluster_name_fields.append(otls_name)

        cluster_name_fields.extend(aux_name_fields)

        # Mangle the assembled fields into a valid Envoy cluster name.
        cluster_name = 'cluster_%s' % "_".join(cluster_name_fields)
        cluster_name = re.sub(r'[^0-9A-Za-z_]', '_', cluster_name)

        self.logger.debug("%s: svc %s -> cluster %s" % (mapping.name, svc, cluster_name))

        self.add_intermediate_cluster(mapping['_source'], cluster_name,
                                      svc, [ url ],
                                      cb_name=cb_name, od_name=od_name, grpc=grpc,
                                      originate_tls=originate_tls, host_rewrite=host_rewrite)

        return svc, cluster_name
def merge_tmods(self, tls_module, generated_module, key):
"""
Merge TLS module configuration for a particular key. In the event of conflicts, the
tls_module element wins, and an error is posted so that the diagnostics service can
show it.
Returns a TLS module with a correctly-merged config element. This will be the
tls_module (possibly modified) unless no tls_module is present, in which case
the generated_module will be promoted. If any changes were made to the module, it
will be marked as referenced by the generated_module.
:param tls_module: the `tls` module; may be None
:param generated_module: the `tls-from-ambassador-certs` module; may be None
:param key: the key in the module config to merge
:return: TLS module object; see above.
"""
# First up, the easy cases. If either module is missing, return the other.
# (The other might be None too, of course.)
if generated_module is None:
return tls_module
elif tls_module is None:
return generated_module
else:
self.logger.debug("tls_module %s" % json.dumps(tls_module, indent=4))
self.logger.debug("generated_module %s" % json.dumps(generated_module, indent=4))
# OK, no easy cases. We know that both modules exist: grab the config dicts.
tls_source = tls_module['_source']
tls_config = tls_module.get(key, {})
gen_source = generated_module['_source']
gen_config = generated_module.get(key, {})
# Now walk over the tls_config and copy anything needed.
any_changes = False
for ckey in gen_config:
if ckey in tls_config:
# ckey exists in both modules. Do they have the same value?
if tls_config[ckey] != gen_config[ckey]:
# No -- post an error, but let the version from the TLS module win.
errfmt = "CONFLICT in TLS config for {}.{}: using {} from TLS module in {}"
errstr = errfmt.format(key, ckey, tls_config[ckey], tls_source)
self.post_error(RichStatus.fromError(errstr))
else:
# They have the same value. Worth mentioning in debug.
self.logger.debug("merge_tmods: {}.{} duplicated with same value".format(key, ckey))
else:
# ckey only exists in gen_config. Copy it over.
self.logger.debug("merge_tmods: copy {}.{} from gen_config".format(key, ckey))
tls_config[ckey] = gen_config[ckey]
any_changes = True
# If we had changes...
if any_changes:
# ...then mark the tls_module as referenced by the generated_module's
# source..
tls_module._mark_referenced_by(gen_source)
# ...and copy the tls_config back in (in case the key wasn't in the tls_module
# config at all originally).
tls_module[key] = tls_config
# Finally, return the tls_module.
return tls_module
    def generate_intermediate_config(self):
        """
        Build the intermediate Envoy configuration (self.envoy_config) from the
        parsed Ambassador config.

        Populates, roughly in order: the Ambassador module defaults, TLS merge,
        tracing, the (order-dependent!) filter chain, breakers/outliers, module
        handlers, the admin block, listeners, probe mappings, clusters and
        routes (with name mangling and weight normalization). No return value;
        everything lands in self.envoy_config and related attributes, ready for
        to_json() to render through the envoy.j2 template.
        """
        # First things first. The "Ambassador" module always exists; create it with
        # default values now.
        self.ambassador_module = SourcedDict(
            service_port = 80,
            admin_port = 8001,
            diag_port = 8877,
            auth_enabled = None,
            liveness_probe = { "enabled": True },
            readiness_probe = { "enabled": True },
            diagnostics = { "enabled": True },
            tls_config = None,
            use_proxy_proto = False,
            x_forwarded_proto_redirect = False,
        )

        # Next up: let's define initial clusters, routes, and filters.
        #
        # Our set of clusters starts out empty; we use add_intermediate_cluster()
        # to build it up while making sure that all the source-tracking stuff
        # works out.
        #
        # Note that we use a map for clusters, not a list -- the reason is that
        # multiple mappings can use the same service, and we don't want multiple
        # clusters.
        self.envoy_clusters = {}

        # Our initial set of routes is empty...
        self.envoy_routes = {}

        # Our initial list of grpc_services is empty...
        self.envoy_config['grpc_services'] = []

        # Now we look at user-defined modules from our config...
        modules = self.config.get('modules', {})

        # ...most notably the 'ambassador' and 'tls' modules, which are handled first.
        amod = modules.get('ambassador', None)
        tls_module = modules.get('tls', None)

        # Part of handling the 'tls' module is folding in the 'tls-from-ambassador-certs'
        # module, so grab that too...
        generated_module = modules.get('tls-from-ambassador-certs', None)

        # ...and merge the 'server' and 'client' config elements.
        tls_module = self.merge_tmods(tls_module, generated_module, 'server')
        tls_module = self.merge_tmods(tls_module, generated_module, 'client')

        # OK, done. Make sure we have _something_ for the TLS module going forward.
        tmod = tls_module or {}
        self.logger.debug("TLS module after merge: %s" % json.dumps(tmod, indent=4))

        if amod or tmod:
            self.module_config_ambassador("ambassador", amod, tmod)

        router_config = {}

        tracing_configs = self.config.get('tracing_configs', None)
        self.module_config_tracing(tracing_configs)

        # module_config_tracing() only adds a 'tracing' element when a
        # TracingService was configured; in that case the router filter must
        # start child spans.
        if 'tracing' in self.envoy_config:
            router_config['start_child_span'] = True

        # !!!! WARNING WARNING WARNING !!!! Filters are actually ORDER-DEPENDENT.
        self.envoy_config['filters'] = []

        # Start with authentication filter
        auth_mod = modules.get('authentication', None)
        auth_configs = self.config.get('auth_configs', None)
        auth_filter = self.module_config_authentication("authentication", amod, auth_mod, auth_configs)
        if auth_filter:
            self.envoy_config['filters'].append(auth_filter)

        # Then append the rate-limit filter, because we might rate-limit based on auth headers
        ratelimit_configs = self.config.get('ratelimit_configs', None)
        (ratelimit_filter, ratelimit_grpc_service) = self.module_config_ratelimit(ratelimit_configs)
        if ratelimit_filter and ratelimit_grpc_service:
            self.envoy_config['filters'].append(ratelimit_filter)
            self.envoy_config['grpc_services'].append(ratelimit_grpc_service)

        # Then append non-configurable cors and decoder filters
        self.envoy_config['filters'].append(SourcedDict(name="cors", config={}))
        self.envoy_config['filters'].append(SourcedDict(type="decoder", name="router", config=router_config))

        # For mappings, start with empty sets for everything.
        mappings = self.config.get("mappings", {})

        # Reset _referenced_by bookkeeping on breakers and outliers; clusters
        # that use them will repopulate these lists.
        self.breakers = self.config.get("CircuitBreaker", {})

        for key, breaker in self.breakers.items():
            breaker['_referenced_by'] = []

        self.outliers = self.config.get("OutlierDetection", {})

        for key, outlier in self.outliers.items():
            outlier['_referenced_by'] = []

        # OK. Given those initial sets, let's look over our global modules.
        for module_name in modules.keys():
            # Skip the modules that were already handled explicitly above.
            if ((module_name == 'ambassador') or
                (module_name == 'tls') or
                (module_name == 'authentication') or
                (module_name == 'tls-from-ambassador-certs')):
                continue

            # Any other module is dispatched by name to a module_config_<name>
            # method, if one exists.
            handler_name = "module_config_%s" % module_name
            handler = getattr(self, handler_name, None)

            if not handler:
                self.logger.error("module %s: no configuration generator, skipping" % module_name)
                continue

            handler(module_name, modules[module_name])

        # Once modules are handled, we can set up our admin config...
        self.envoy_config['admin'] = SourcedDict(
            _from=self.ambassador_module,
            admin_port=self.ambassador_module["admin_port"]
        )

        # ...and our listeners.
        primary_listener = SourcedDict(
            _from=self.ambassador_module,
            service_port=self.ambassador_module["service_port"],
            require_tls=False,
            use_proxy_proto=self.ambassador_module['use_proxy_proto']
        )

        if 'use_remote_address' in self.ambassador_module:
            primary_listener['use_remote_address'] = self.ambassador_module['use_remote_address']

        # If x_forwarded_proto_redirect is set, then we enable require_tls in primary listener, which in turn sets
        # require_ssl to true in envoy config. Once set, then all requests that contain X-FORWARDED-PROTO set to
        # https, are processes normally by envoy. In all the other cases, including X-FORWARDED-PROTO set to http,
        # a 301 redirect response to https://host is sent
        if self.ambassador_module.get('x_forwarded_proto_redirect', False):
            primary_listener['require_tls'] = True
            self.logger.debug("x_forwarded_proto_redirect is set to true, enabling 'require_tls' in listener")

        redirect_cleartext_from = None
        tmod = self.ambassador_module.get('tls_config', None)

        # ...TLS config, if necessary...
        if tmod:
            # self.logger.debug("USING TLS")
            primary_listener['tls'] = tmod

            # Only advertise an ssl_context when at least one cert file is
            # actually configured in the TLS module.
            if self.tmod_certs_exist(primary_listener['tls']) > 0:
                primary_listener['tls']['ssl_context'] = True
            redirect_cleartext_from = tmod.get('redirect_cleartext_from')

        self.envoy_config['listeners'] = [ primary_listener ]

        if redirect_cleartext_from:
            # We only want to set `require_tls` on the primary listener when certs are present on the pod
            if self.tmod_certs_exist(primary_listener['tls']) > 0:
                primary_listener['require_tls'] = True

            new_listener = SourcedDict(
                _from=self.ambassador_module,
                service_port=redirect_cleartext_from,
                require_tls=True,
                # Note: no TLS context here, this is a cleartext listener.
                # We can set require_tls True because we can let the upstream
                # tell us about that.
                use_proxy_proto=self.ambassador_module['use_proxy_proto']
            )

            if 'use_remote_address' in self.ambassador_module:
                new_listener['use_remote_address'] = self.ambassador_module['use_remote_address']

            self.envoy_config['listeners'].append(new_listener)

        self.default_liveness_probe['service'] = self.diag_service()
        self.default_readiness_probe['service'] = self.diag_service()
        self.default_diagnostics['service'] = self.diag_service()

        # NOTE: the loop variable `name` is deliberately rebound inside the
        # loop body to the generated probe-mapping name.
        for name, cur, dflt in [
            ("liveness", self.ambassador_module['liveness_probe'],
             self.default_liveness_probe),
            ("readiness", self.ambassador_module['readiness_probe'],
             self.default_readiness_probe),
            ("diagnostics", self.ambassador_module['diagnostics'],
             self.default_diagnostics)
        ]:
            if cur and cur.get("enabled", False):
                prefix = cur.get("prefix", dflt['prefix'])
                rewrite = cur.get("rewrite", dflt['rewrite'])
                service = cur.get("service", dflt['service'])

                # Push a fake mapping to handle this.
                name = "internal_%s_probe_mapping" % name

                mappings[name] = Mapping(
                    _from=self.ambassador_module,
                    kind='Mapping',
                    name=name,
                    prefix=prefix,
                    rewrite=rewrite,
                    service=service
                )

                # self.logger.debug("PROBE %s: %s -> %s%s" % (name, prefix, service, rewrite))

        # OK! We have all the mappings we need. Process them (don't worry about sorting
        # yet, we'll do that on routes).
        for mapping_name in sorted(mappings.keys()):
            mapping = mappings[mapping_name]

            # OK. Set up clusters for this service...
            svc, cluster_name = self.add_clusters_for_mapping(mapping)

            # ...and route.
            self.add_intermediate_route(mapping['_source'], mapping, svc, cluster_name)

        # OK. Walk the set of clusters and normalize names...
        collisions = {}
        mangled = {}

        for name in sorted(self.envoy_clusters.keys()):
            if len(name) > 60:
                # Too long. Gather truncated names so we can disambiguate below.
                short_name = name[0:40]

                collision_list = collisions.setdefault(short_name, [])
                collision_list.append(name)

        for short_name in sorted(collisions.keys()):
            name_list = collisions[short_name]
            i = 0

            # Disambiguate with a numeric suffix; sorted order keeps the
            # mangling deterministic across runs.
            for name in sorted(name_list):
                mangled_name = "%s-%d" % (short_name, i)
                i += 1

                self.logger.info("%s => %s" % (name, mangled_name))

                mangled[name] = mangled_name
                self.envoy_clusters[name]['name'] = mangled_name

        # We need to default any unspecified weights and renormalize to 100
        for group_id, route in self.envoy_routes.items():
            clusters = route["clusters"]

            total = 0.0
            unspecified = 0

            # If this is a websocket route, it will support only one cluster right now.
            if route.get('use_websocket', False):
                if len(clusters) > 1:
                    errmsg = "Only one cluster is supported for websockets; using %s" % clusters[0]['name']
                    self.post_error(RichStatus.fromError(errmsg))

            for c in clusters:
                # Mangle the name, if need be.
                c_name = c["name"]

                if c_name in mangled:
                    c["name"] = mangled[c_name]
                    # self.logger.info("%s: mangling cluster %s to %s" % (group_id, c_name, c["name"]))

                if c["weight"] is None:
                    unspecified += 1
                else:
                    total += c["weight"]

            if unspecified:
                # Split the remaining weight evenly across clusters that
                # didn't specify one.
                for c in clusters:
                    if c["weight"] is None:
                        c["weight"] = (100.0 - total)/unspecified
            elif total != 100.0:
                # All weights specified but they don't sum to 100: rescale.
                for c in clusters:
                    c["weight"] *= 100.0/total

        # OK. When all is said and done, sort the list of routes by route weight...
        self.envoy_config['routes'] = sorted([
            route for group_id, route in self.envoy_routes.items()
        ], reverse=True, key=Mapping.route_weight)

        # ...then map clusters back into a list...
        self.envoy_config['clusters'] = [
            self.envoy_clusters[cluster_key] for cluster_key in sorted(self.envoy_clusters.keys())
        ]

        # ...and finally repeat for breakers and outliers, but copy them in the process so we
        # can mess with the originals.
        #
        # What's going on here is that circuit-breaker and outlier-detection configs aren't
        # included as independent objects in envoy.json, but we want to be able to discuss
        # them in diag. We also don't need to keep the _source and _referenced_by elements
        # in their real Envoy appearances.
        self.envoy_config['breakers'] = self.clean_and_copy(self.breakers)
        self.envoy_config['outliers'] = self.clean_and_copy(self.outliers)
@staticmethod
def tmod_certs_exist(tmod):
"""
Returns the number of certs that are defined in the supplied tmod
:param tmod: The TLS module configuration
:return: number of certs in tmod
:rtype: int
"""
cert_count = 0
if tmod.get('cert_chain_file') is not None:
cert_count += 1
if tmod.get('private_key_file') is not None:
cert_count += 1
if tmod.get('cacert_chain_file') is not None:
cert_count += 1
return cert_count
def _get_intermediate_for(self, element_list, source_keys, value):
if not isinstance(value, dict):
return
good = True
if '_source' in value:
good = False
value_source = value.get("_source", None)
value_referenced_by = value.get("_referenced_by", [])
if ((value_source in source_keys) or
(source_keys & set(value_referenced_by))):
good = True
if good:
element_list.append(value)
def get_intermediate_for(self, source_key):
source_keys = []
if source_key.startswith("grp-"):
group_id = source_key[4:]
for route in self.envoy_config['routes']:
if route['_group_id'] == group_id:
source_keys.append(route['_source'])
for reference_key in route['_referenced_by']:
source_keys.append(reference_key)
if not source_keys:
return {
"error": "No group matches %s" % group_id
}
else:
if source_key in self.source_map:
# Exact match for a file in the source map: include all the objects
# in the file.
source_keys = self.source_map[source_key]
elif source_key in self.sources:
# Exact match for an object in a file: include only that object.
source_keys.append(source_key)
else:
# No match at all. Weird.
return {
"error": "No source matches %s" % source_key
}
source_keys = set(source_keys)
# self.logger.debug("get_intermediate_for: source_keys %s" % source_keys)
# self.logger.debug("get_intermediate_for: errors %s" % self.errors)
sources = []
for key in source_keys:
source_dict = dict(self.sources[key])
source_dict['errors'] = [
{
'summary': error['error'].split('\n', 1)[0],
'text': error['error']
}
for error in self.errors.get(key, [])
]
source_dict['source_key'] = key
sources.append(source_dict)
result = {
"sources": sources
}
# self.logger.debug("get_intermediate_for: initial result %s" % result)
for key in self.envoy_config.keys():
result[key] = []
value = self.envoy_config[key]
if isinstance(value, list):
for v2 in value:
self._get_intermediate_for(result[key], source_keys, v2)
else:
self._get_intermediate_for(result[key], source_keys, value)
return result
def generate_envoy_config(self, template=None, template_dir=None, **kwargs):
# Finally! Render the template to JSON...
envoy_json = self.to_json(template=template, template_dir=template_dir)
# We used to use the JSON parser as a final sanity check here. That caused
# Forge some issues, so it's turned off for now.
# rc = RichStatus.fromError("impossible")
# # ...and use the JSON parser as a final sanity check.
# try:
# obj = json.loads(envoy_json)
# rc = RichStatus.OK(msg="Envoy configuration OK", envoy_config=obj)
# except json.decoder.JSONDecodeError as e:
# rc = RichStatus.fromError("Invalid Envoy configuration: %s" % str(e),
# raw=envoy_json, exception=e)
# Go ahead and report that we generated an Envoy config, if we can.
scout_result = Config.scout_report(action="config", result=True, generated=True, **kwargs)
rc = RichStatus.OK(envoy_config=envoy_json, scout_result=scout_result)
# self.logger.debug("Scout reports %s" % json.dumps(rc.scout_result))
return rc
def set_config_ambassador(self, module, key, value, merge=False):
if not merge:
self.ambassador_module[key] = value
else:
self.ambassador_module[key].update(value)
# XXX This is actually wrong sometimes. If, for example, you have an
# ambassador module that defines the admin_port, sure, bringing in its
# source makes sense. On the other hand, if you have a TLS module
# created by a secret, that source shouldn't really take over the
# admin document. This will take enough unraveling that I'm going to
# leave it for now, though.
self.ambassador_module['_source'] = module['_source']
def update_config_ambassador(self, module, key, value):
self.set_config_ambassador(module, key, value, merge=True)
    def tls_config_helper(self, name, amod, tmod):
        """
        Fold the TLS contexts in tmod into the Ambassador module's tls_config.

        'server' enables TLS termination (and flips service_port to 443);
        'client' enables client-cert handling; any other context name is saved
        into self.tls_contexts. For both server and client, a 'secret' entry
        triggers loading cert material from a Kubernetes secret unless cert
        files are already present on disk.

        Returns True iff server and/or client TLS ended up enabled.
        """
        tmp_config = SourcedDict(_from=amod)
        some_enabled = False

        for context_name in tmod.keys():
            # Skip internal bookkeeping keys such as '_source'.
            if context_name.startswith('_'):
                continue

            context = tmod[context_name]

            # self.logger.debug("context %s -- %s" % (context_name, json.dumps(context)))

            if context.get('enabled', True):
                if context_name == 'server':
                    # Server-side TLS is enabled.
                    self.logger.debug("TLS termination enabled!")
                    some_enabled = True

                    # Switch to port 443 by default...
                    self.set_config_ambassador(amod, 'service_port', 443)

                    # ...and merge in the server-side defaults.
                    tmp_config.update(self.default_tls_config['server'])
                    tmp_config.update(tmod['server'])

                    # Check if secrets are supplied for TLS termination and/or TLS auth
                    secret = context.get('secret')
                    if secret is not None:
                        self.logger.debug("config.server.secret is {}".format(secret))
                        # If /{etc,ambassador}/certs/tls.crt does not exist, then load the secrets
                        if check_cert_file(TLSPaths.mount_tls_crt.value):
                            self.logger.debug("Secret already exists, taking no action for secret {}".format(secret))
                        elif check_cert_file(TLSPaths.tls_crt.value):
                            # Cert material is already on disk; just point at it.
                            tmp_config['cert_chain_file'] = TLSPaths.tls_crt.value
                            tmp_config['private_key_file'] = TLSPaths.tls_key.value
                        else:
                            # Pull the cert from the Kubernetes secret and save it
                            # to disk before pointing the config at it.
                            (server_cert, server_key, server_data) = read_cert_secret(kube_v1(), secret, self.namespace)
                            if server_cert and server_key:
                                self.logger.debug("saving contents of secret {} to {}".format(
                                    secret, TLSPaths.cert_dir.value))
                                save_cert(server_cert, server_key, TLSPaths.cert_dir.value)
                                tmp_config['cert_chain_file'] = TLSPaths.tls_crt.value
                                tmp_config['private_key_file'] = TLSPaths.tls_key.value
                elif context_name == 'client':
                    # Client-side TLS is enabled.
                    self.logger.debug("TLS client certs enabled!")
                    some_enabled = True

                    # Merge in the client-side defaults.
                    tmp_config.update(self.default_tls_config['client'])
                    tmp_config.update(tmod['client'])

                    secret = context.get('secret')
                    if secret is not None:
                        self.logger.debug("config.client.secret is {}".format(secret))
                        if check_cert_file(TLSPaths.client_mount_crt.value):
                            self.logger.debug("Secret already exists, taking no action for secret {}".format(secret))
                        elif check_cert_file(TLSPaths.client_tls_crt.value):
                            tmp_config['cacert_chain_file'] = TLSPaths.client_tls_crt.value
                        else:
                            # Only the cert (no key) is needed for client-cert
                            # verification.
                            (client_cert, _, _) = read_cert_secret(kube_v1(), secret, self.namespace)
                            if client_cert:
                                self.logger.debug("saving contents of secret {} to {}".format(
                                    secret, TLSPaths.client_cert_dir.value))
                                save_cert(client_cert, None, TLSPaths.client_cert_dir.value)
                                tmp_config['cacert_chain_file'] = TLSPaths.client_tls_crt.value
                else:
                    # This is a wholly new thing.
                    self.tls_contexts[context_name] = SourcedDict(
                        _from=tmod,
                        **context
                    )

        if some_enabled:
            # 'enabled' is a control flag, not part of the Envoy TLS config.
            if 'enabled' in tmp_config:
                del(tmp_config['enabled'])

            # Save the TLS config...
            self.set_config_ambassador(amod, 'tls_config', tmp_config)

        self.logger.debug("TLS config: %s" % json.dumps(self.ambassador_module['tls_config'], indent=4))
        self.logger.debug("TLS contexts: %s" % json.dumps(self.tls_contexts, indent=4))

        return some_enabled
def module_config_ambassador(self, name, amod, tmod):
# Toplevel Ambassador configuration. First up, check out TLS.
have_amod_tls = False
if amod and ('tls' in amod):
have_amod_tls = self.tls_config_helper(name, amod, amod['tls'])
if not have_amod_tls and tmod:
self.tls_config_helper(name, tmod, tmod)
if amod and ('cors' in amod):
self.parse_and_save_default_cors(amod)
# After that, check for port definitions, probes, etc., and copy them in
# as we find them.
for key in [ 'service_port', 'admin_port', 'diag_port',
'liveness_probe', 'readiness_probe', 'auth_enabled',
'use_proxy_proto', 'use_remote_address', 'diagnostics', 'x_forwarded_proto_redirect' ]:
if amod and (key in amod):
# Yes. It overrides the default.
self.set_config_ambassador(amod, key, amod[key])
def parse_and_save_default_cors(self, amod):
cors_default_temp = {'enabled': True}
cors = amod['cors']
origins = cors.get('origins')
if origins is not None:
if type(origins) is list:
cors_default_temp['allow_origin'] = origins
elif type(origins) is str:
cors_default_temp['allow_origin'] = origins.split(',')
else:
print("invalid cors configuration supplied - {}".format(origins))
return
self.save_cors_default_element("max_age", "max_age", cors_default_temp, cors)
self.save_cors_default_element("credentials", "allow_credentials", cors_default_temp, cors)
self.save_cors_default_element("methods", "allow_methods", cors_default_temp, cors)
self.save_cors_default_element("headers", "allow_headers", cors_default_temp, cors)
self.save_cors_default_element("exposed_headers", "expose_headers", cors_default_temp, cors)
self.envoy_config['cors_default'] = cors_default_temp
def save_cors_default_element(self, cors_key, route_key, cors_dest, cors_source):
if cors_source.get(cors_key) is not None:
if type(cors_source.get(cors_key)) is list:
cors_dest[route_key] = ", ".join(cors_source.get(cors_key))
else:
cors_dest[route_key] = cors_source.get(cors_key)
    def module_config_ratelimit(self, ratelimit_config):
        """
        Build the rate-limit filter and its gRPC service from any
        RateLimitService configs.

        Returns (filter, grpc_service), or (None, None) when no usable
        RateLimitService is configured. Also creates the
        'cluster_ext_ratelimit' cluster if it doesn't exist yet.
        """
        cluster_hosts = None
        sources = []

        if ratelimit_config:
            # NOTE: with multiple RateLimitService configs, the last one wins
            # for 'service' (and, below, 'host_rewrite' -- the loop variable
            # is deliberately reused after the loop).
            for config in ratelimit_config.values():
                sources.append(config['_source'])
                cluster_hosts = config.get("service", None)

        if not cluster_hosts or not sources:
            return (None, None)

        host_rewrite = config.get("host_rewrite", None)

        cluster_name = "cluster_ext_ratelimit"

        filter_config = {
            "domain": "ambassador",
            "request_type": "both",
            "timeout_ms": 20
        }

        grpc_service = SourcedDict(
            name="rate_limit_service",
            cluster_name=cluster_name
        )

        # The first source owns the filter; the rest are recorded as
        # references below.
        first_source = sources.pop(0)

        filter = SourcedDict(
            _source=first_source,
            type="decoder",
            name="rate_limit",
            config=filter_config
        )

        if cluster_name not in self.envoy_clusters:
            (svc, url, originate_tls, otls_name) = self.service_tls_check(cluster_hosts, None, host_rewrite)

            self.add_intermediate_cluster(first_source, cluster_name,
                                          'extratelimit', [url],
                                          type="strict_dns", lb_type="round_robin",
                                          grpc=True, host_rewrite=host_rewrite)

        for source in sources:
            filter._mark_referenced_by(source)
            self.envoy_clusters[cluster_name]._mark_referenced_by(source)

        return (filter, grpc_service)
    def module_config_tracing(self, tracing_config):
        """
        Build the 'tracing' element of the intermediate config from any
        TracingService configs, creating the 'cluster_ext_tracing' cluster if
        needed. No-op (no 'tracing' element) when nothing is configured.
        """
        cluster_hosts = None
        driver = None
        driver_config = None
        tag_headers = None
        host_rewrite = None

        sources = []

        if tracing_config:
            # NOTE: with multiple TracingService configs, the last one wins
            # for all of these settings.
            for config in tracing_config.values():
                sources.append(config['_source'])
                cluster_hosts = config.get("service", None)
                driver = config.get("driver", None)
                driver_config = config.get("config", {})
                tag_headers = config.get("tag_headers", [])
                host_rewrite = config.get("host_rewrite", None)

        if not cluster_hosts or not sources:
            return

        cluster_name = "cluster_ext_tracing"

        first_source = sources.pop(0)

        if cluster_name not in self.envoy_clusters:
            (svc, url, originate_tls, otls_name) = self.service_tls_check(cluster_hosts, None, host_rewrite)

            grpc = False
            ssl_context = None

            # The lightstep driver talks gRPC over TLS, so it needs an
            # ssl_context pinned to the collector's hostname.
            if driver == "lightstep":
                grpc = True
                parsed_url = urlparse(url)
                ssl_context = {
                    "ca_cert_file": "/etc/ssl/certs/ca-certificates.crt",
                    "verify_subject_alt_name": [parsed_url.hostname]
                }

            self.add_intermediate_cluster(first_source, cluster_name,
                                          'exttracing', [url],
                                          type="strict_dns", lb_type="round_robin",
                                          host_rewrite=host_rewrite, grpc=grpc, ssl_context=ssl_context)

        driver_config['collector_cluster'] = cluster_name

        tracing = SourcedDict(
            _source=first_source,
            driver=driver,
            config=driver_config,
            tag_headers=tag_headers,
            cluster_name=cluster_name
        )

        self.envoy_config['tracing'] = tracing
def auth_helper(self, sources, config, cluster_hosts, module):
sources.append(module['_source'])
for key in [ 'path_prefix', 'timeout_ms', 'cluster' ]:
value = module.get(key, None)
if value != None:
previous = config.get(key, None)
if previous and (previous != value):
errstr = (
"AuthService cannot support multiple %s values; using %s" %
(key, previous)
)
self.post_error(RichStatus.fromError(errstr), key=module['_source'])
else:
config[key] = value
headers = module.get('allowed_headers', None)
if headers:
allowed_headers = config.get('allowed_headers', [])
for hdr in headers:
if hdr not in allowed_headers:
allowed_headers.append(hdr)
config['allowed_headers'] = allowed_headers
auth_service = module.get("auth_service", None)
# weight = module.get("weight", 100)
weight = 100 # Can't support arbitrary weights right now.
if auth_service:
cluster_hosts[auth_service] = ( weight, module.get('tls', None) )
    def module_config_authentication(self, name, amod, auth_mod, auth_configs):
        """
        Build the extauth decoder filter from the authentication module and/or
        AuthService configs, creating its cluster if needed.

        Returns the filter SourcedDict, or None when nothing is configured.
        Raises if the configured auth services mix HTTP and HTTPS.
        """
        filter_config = {
            "cluster": "cluster_ext_auth",
            "timeout_ms": 5000
        }

        cluster_hosts = {}
        sources = []

        if auth_mod:
            self.auth_helper(sources, filter_config, cluster_hosts, auth_mod)

        if auth_configs:
            # self.logger.debug("auth_configs: %s" % auth_configs)
            for config in auth_configs.values():
                self.auth_helper(sources, filter_config, cluster_hosts, config)

        if not sources:
            return None

        # The first source owns the filter; the rest are recorded as
        # references below.
        first_source = sources.pop(0)

        filter = SourcedDict(
            _source=first_source,
            _services=sorted(cluster_hosts.keys()),
            type="decoder",
            name="extauth",
            config=filter_config
        )

        cluster_name = filter_config['cluster']
        # NOTE(review): auth_helper only copies path_prefix/timeout_ms/cluster,
        # so 'host_rewrite' appears to always be absent here -- confirm.
        host_rewrite = filter_config.get('host_rewrite', None)

        if cluster_name not in self.envoy_clusters:
            # No auth_service given anywhere: fall back to a local sidecar.
            if not cluster_hosts:
                cluster_hosts = { '127.0.0.1:5000': ( 100, None ) }

            urls = []
            protocols = {}

            for svc in sorted(cluster_hosts.keys()):
                weight, tls_context = cluster_hosts[svc]

                (svc, url, originate_tls, otls_name) = self.service_tls_check(svc, tls_context, host_rewrite)

                if originate_tls:
                    protocols['https'] = True
                else:
                    protocols['http'] = True

                # A named TLS origination context gets folded into the cluster
                # name.
                if otls_name:
                    filter_config['cluster'] = cluster_name + "_" + otls_name
                    cluster_name = filter_config['cluster']

                urls.append(url)

            if len(protocols.keys()) != 1:
                raise Exception("auth config cannot try to use both HTTP and HTTPS")

            self.add_intermediate_cluster(first_source, cluster_name,
                                          'extauth', urls,
                                          type="strict_dns", lb_type="round_robin",
                                          originate_tls=originate_tls, host_rewrite=host_rewrite)

        for source in sources:
            filter._mark_referenced_by(source)
            self.envoy_clusters[cluster_name]._mark_referenced_by(source)

        return filter
### DIAGNOSTICS
    def diagnostic_overview(self):
        """
        Build the overview dict for the diagnostics service: per-file source
        summaries (with error counts), routes annotated with their group IDs,
        the rest of the intermediate config, and synthetic entries for the
        Auth/Tracing/RateLimit service clusters.
        """
        # Build a set of source _files_ rather than source _objects_.
        source_files = {}

        for filename, source_keys in self.source_map.items():
            # self.logger.debug("overview -- filename %s, source_keys %d" %
            #                   (filename, len(source_keys)))

            # # Skip '--internal--' etc.
            # if filename.startswith('--'):
            #     continue

            source_dict = source_files.setdefault(
                filename,
                {
                    'filename': filename,
                    'objects': {},
                    'count': 0,
                    'plural': "objects",
                    'error_count': 0,
                    'error_plural': "errors"
                }
            )

            for source_key in source_keys:
                # self.logger.debug("overview --- source_key %s" % source_key)

                source = self.sources[source_key]

                # The file inherits the raw source text of its first object
                # that has one.
                if ('source' in source) and not ('source' in source_dict):
                    source_dict['source'] = source['source']

                raw_errors = self.errors.get(source_key, [])

                errors = []
                for error in raw_errors:
                    source_dict['error_count'] += 1
                    errors.append({
                        'summary': error['error'].split('\n', 1)[0],
                        'text': error['error']
                    })

                source_dict['error_plural'] = "error" if (source_dict['error_count'] == 1) else "errors"

                source_dict['count'] += 1
                source_dict['plural'] = "object" if (source_dict['count'] == 1) else "objects"

                object_dict = source_dict['objects']
                object_dict[source_key] = {
                    'key': source_key,
                    'kind': source['kind'],
                    'errors': errors
                }

        # Annotate routes (other than our own diagnostics routes) with their
        # group IDs. NOTE: this mutates the routes in self.envoy_config.
        routes = []

        for route in self.envoy_config['routes']:
            if route['_source'] != "--diagnostics--":
                route['_group_id'] = Mapping.group_id(route.get('method', 'GET'),
                                                      route['prefix'] if 'prefix' in route else route['regex'],
                                                      route.get('headers', []))
                routes.append(route)

        configuration = { key: self.envoy_config[key] for key in self.envoy_config.keys()
                          if key != "routes" }

        # Map our well-known internal clusters back to the service kinds that
        # created them, so the UI can show them as services.
        cluster_to_service_mapping = {
            "cluster_ext_auth": "AuthService",
            "cluster_ext_tracing": "TracingService",
            "cluster_ext_ratelimit": "RateLimitService"
        }

        ambassador_services = []

        for cluster in configuration.get('clusters', []):
            maps_to_service = cluster_to_service_mapping.get(cluster['name'])
            if maps_to_service:
                # (sic: 'weigth' is a long-standing local-variable typo.)
                service_weigth = 100.0 / len(cluster['urls'])
                for url in cluster['urls']:
                    ambassador_services.append(SourcedDict(
                        _from=cluster,
                        type=maps_to_service,
                        name=url,
                        cluster=cluster['name'],
                        _service_weight=service_weigth
                    ))

        overview = dict(sources=sorted(source_files.values(), key=lambda x: x['filename']),
                        routes=routes,
                        **configuration)

        if len(ambassador_services) > 0:
            overview['ambassador_services'] = ambassador_services

        # self.logger.debug("overview result %s" % json.dumps(overview, indent=4, sort_keys=True))

        return overview
def pretty(self, obj, out=sys.stdout):
out.write(obj)
# json.dump(obj, out, indent=4, separators=(',',':'), sort_keys=True)
# out.write("\n")
def to_json(self, template=None, template_dir=None):
template_paths = [ self.config_dir_path, self.template_dir_path ]
if template_dir:
template_paths.insert(0, template_dir)
if not template:
env = Environment(loader=FileSystemLoader(template_paths))
template = env.get_template("envoy.j2")
return(template.render(**self.envoy_config))
def dump(self):
print("==== config")
self.pretty(self.config)
print("==== envoy_config")
self.pretty(self.envoy_config)
if __name__ == '__main__':
    # CLI entry point: build a Config from the directory named on the command
    # line and print its diagnostic overview as pretty-printed JSON.
    aconf = Config(sys.argv[1])
    print(json.dumps(aconf.diagnostic_overview(), indent=4, sort_keys=True))
| 40.3091 | 134 | 0.577584 |
import sys
import collections
import datetime
import json
import logging
import os
import re
from urllib.parse import urlparse
import jsonschema
import semantic_version
import yaml
from pkg_resources import Requirement, resource_filename
from jinja2 import Environment, FileSystemLoader
from .utils import RichStatus, SourcedDict, read_cert_secret, save_cert, TLSPaths, kube_v1, check_cert_file
from .mapping import Mapping
from scout import Scout
from .VERSION import Version
> Config.scout_update_frequency:
result = Config.scout.report(**kwargs)
Config.scout_last_update = now
Config.scout_last_result = dict(**result)
else:
# _notices.append({ "level": "debug", "message": "Returning cached result" })
result = dict(**Config.scout_last_result)
result_was_cached = True
result_timestamp = Config.scout_last_update
else:
result = { "scout": "unavailable" }
result_timestamp = datetime.datetime.now()
else:
_notices.append({ "level": "debug", "message": "Returning forced result" })
result_timestamp = datetime.datetime.now()
if not Config.current_semver:
_notices.append({
"level": "warning",
"message": "Ambassador has bad version '%s'??!" % Config.scout_version
})
result['cached'] = result_was_cached
result['timestamp'] = result_timestamp.timestamp()
# Do version & notices stuff.
if 'latest_version' in result:
latest_version = result['latest_version']
latest_semver = get_semver("latest", latest_version)
if latest_semver:
Config.scout_latest_version = latest_version
Config.scout_latest_semver = latest_semver
else:
_notices.append({
"level": "warning",
"message": "Scout returned bad version '%s'??!" % latest_version
})
if (Config.scout_latest_semver and
((not Config.current_semver) or
(Config.scout_latest_semver > Config.current_semver))):
_notices.append({
"level": "info",
"message": "Upgrade available! to Ambassador version %s" % Config.scout_latest_semver
})
if 'notices' in result:
_notices.extend(result['notices'])
Config.scout_notices = _notices
return result
def __init__(self, config_dir_path, k8s=False, schema_dir_path=None, template_dir_path=None):
    """Load, validate, and process the Ambassador config in config_dir_path.

    :param config_dir_path: directory tree to walk for ``*.yaml`` input files
    :param k8s: if True, inputs are Kubernetes manifests whose Ambassador
        config lives in ``getambassador.io/config`` annotations
    :param schema_dir_path: override for the JSON-schema directory (defaults
        to the installed ``ambassador`` package's resources)
    :param template_dir_path: override for the Jinja2 template directory
    :raises Exception: if config_dir_path isn't a directory, or if any input
        is unparseable
    """
    self.config_dir_path = config_dir_path

    # Default template/schema locations from the installed package resources.
    if not template_dir_path:
        template_dir_path = resource_filename(Requirement.parse("ambassador"),"templates")

    if not schema_dir_path:
        schema_dir_path = resource_filename(Requirement.parse("ambassador"),"schemas")

    self.schema_dir_path = schema_dir_path
    self.template_dir_path = template_dir_path

    self.namespace = os.environ.get('AMBASSADOR_NAMESPACE', 'default')

    self.logger = logging.getLogger("ambassador.config")

    self.logger.debug("Scout version %s" % Config.scout_version)
    self.logger.debug("Runtime %s" % Config.runtime)

    self.logger.debug("CONFIG DIR %s" % os.path.abspath(self.config_dir_path))
    self.logger.debug("TEMPLATE DIR %s" % os.path.abspath(self.template_dir_path))
    self.logger.debug("SCHEMA DIR %s" % os.path.abspath(self.schema_dir_path))

    if Config.scout_error:
        self.logger.warning("Couldn't do version check: %s" % str(Config.scout_error))

    # Accumulators for parsed objects and generated intermediate config.
    self.schemas = {}
    self.config = {}
    self.tls_contexts = {}

    self.envoy_config = {}
    self.envoy_clusters = {}
    self.envoy_routes = {}

    # Source tracking: every object is attributed to a source so the
    # diagnostics service can point back at where it came from.
    self.sources = {
        "--internal--": {
            "_source": "--internal--",
            "kind": "Internal",
            "version": "v0",
            "name": "Ambassador Internals",
            "filename": "--internal--",
            "index": 0,
            "description": "The '--internal--' source marks objects created by Ambassador's internal logic."
        },
        "--diagnostics--": {
            "_source": "--diagnostics--",
            "kind": "diagnostics",
            "version": "v0",
            "name": "Ambassador Diagnostics",
            "filename": "--diagnostics--",
            "index": 0,
            "description": "The '--diagnostics--' source marks objects created by Ambassador to assist with diagnostic output."
        }
    }

    self.source_map = {
        '--internal--': { '--internal--': True }
    }

    self.source_overrides = {}

    # Defaults for the probe/diagnostics mappings; "service" is filled in
    # later from diag_service().
    self.default_liveness_probe = {
        "enabled": True,
        "prefix": "/ambassador/v0/check_alive",
        "rewrite": "/ambassador/v0/check_alive",
        # "service" gets added later
    }

    self.default_readiness_probe = {
        "enabled": True,
        "prefix": "/ambassador/v0/check_ready",
        "rewrite": "/ambassador/v0/check_ready",
        # "service" gets added later
    }

    self.default_diagnostics = {
        "enabled": True,
        "prefix": "/ambassador/v0/",
        "rewrite": "/ambassador/v0/",
        # "service" gets added later
    }

    # 'server' and 'client' are special contexts. Others
    # use cert_chain_file defaulting to context.crt,
    # private_key_file (context.key), and cacert_chain_file
    # (context.pem).
    self.default_tls_config = {
        "server": {},
        "client": {},
    }

    if os.path.isfile(TLSPaths.mount_tls_crt.value):
        self.default_tls_config["server"]["cert_chain_file"] = TLSPaths.mount_tls_crt.value

    if os.path.isfile(TLSPaths.mount_tls_key.value):
        self.default_tls_config["server"]["private_key_file"] = TLSPaths.mount_tls_key.value

    if os.path.isfile(TLSPaths.client_mount_crt.value):
        self.default_tls_config["client"]["cacert_chain_file"] = TLSPaths.client_mount_crt.value

    self.tls_config = None

    self.errors = {}
    self.fatal_errors = 0
    self.object_errors = 0

    self.objects_to_process = []

    if not os.path.isdir(self.config_dir_path):
        raise Exception("ERROR ERROR ERROR configuration directory %s does not exist; exiting" % self.config_dir_path)

    for dirpath, dirnames, filenames in os.walk(self.config_dir_path, topdown=True):
        # Modify dirnames in-place (dirs[:]) to remove any weird directories
        # whose names start with '.' -- why? because my GKE cluster mounts my
        # ConfigMap with a self-referential directory named
        # /etc/ambassador-config/..9989_25_09_15_43_06.922818753, and if we don't
        # skip it we'd walk it too.
        dirnames[:] = sorted([ d for d in dirnames if not d.startswith('.') ])

        for filename in sorted([ x for x in filenames if x.endswith(".yaml") ]):
            filepath = os.path.join(dirpath, filename)

            # BUG FIX: read via a context manager so each config file's
            # handle is closed promptly instead of leaking one per file.
            with open(filepath, "r") as yaml_file:
                serialization = yaml_file.read()

            self.load_yaml(filepath, filename, serialization, ocount=1, k8s=k8s)

    self.process_all_objects()

    if self.fatal_errors:
        raise Exception("ERROR ERROR ERROR Unparseable configuration; exiting")

    if self.errors:
        self.logger.error("ERROR ERROR ERROR Starting with configuration errors")

    self.generate_intermediate_config()
def load_yaml(self, filepath, filename, serialization, resource_identifier=None, ocount=1, k8s=False):
    """Parse a YAML serialization and queue every document it contains.

    Returns the object count after consuming this serialization. K8s
    manifests are routed through prep_k8s(); everything else is appended
    to objects_to_process for later handling.
    """
    try:
        # Dealing with the actual object gets deferred; here we only queue.
        for parsed in yaml.safe_load_all(serialization):
            if k8s:
                ocount = self.prep_k8s(filepath, filename, ocount, parsed)
            else:
                # k8s objects will have an identifier; for other objects
                # the filepath serves as the unique id.
                object_unique_id = resource_identifier or filepath
                self.objects_to_process.append((object_unique_id, filename, ocount, parsed))
                ocount += 1
    except Exception:
        # One attribute with bad YAML shouldn't take the whole gateway
        # down: report the error, but keep any objects that parsed before
        # the failure.
        self.resource_identifier = resource_identifier or filepath
        self.filename = filename
        self.ocount = ocount

        self.post_error(RichStatus.fromError("%s: could not parse YAML" % filepath))

    return ocount
def prep_k8s(self, filepath, filename, ocount, obj):
kind = obj.get('kind', None)
if kind != "Service":
self.logger.debug("%s/%s: ignoring K8s %s object" %
(filepath, ocount, kind))
return ocount
metadata = obj.get('metadata', None)
if not metadata:
self.logger.debug("%s/%s: ignoring unannotated K8s %s" %
(filepath, ocount, kind))
return ocount
# Use metadata to build an unique resource identifier
resource_name = metadata.get('name')
# This should never happen as the name field is required in metadata for Service
if not resource_name:
self.logger.debug("%s/%s: ignoring unnamed K8s %s" %
(filepath, ocount, kind))
return ocount
resource_namespace = metadata.get('namespace', 'default')
# This resource identifier is useful for log output since filenames can be duplicated (multiple subdirectories)
resource_identifier = '{name}.{namespace}'.format(namespace=resource_namespace, name=resource_name)
annotations = metadata.get('annotations', None)
if annotations:
annotations = annotations.get('getambassador.io/config', None)
# self.logger.debug("annotations %s" % annotations)
if not annotations:
self.logger.debug("%s/%s: ignoring K8s %s without Ambassador annotation" %
(filepath, ocount, kind))
return ocount
return self.load_yaml(filepath, filename + ":annotation", annotations, ocount=ocount, resource_identifier=resource_identifier)
def process_all_objects(self):
for resource_identifier, filename, ocount, obj in sorted(self.objects_to_process):
# resource_identifier is either a filepath or <name>.<namespace>
self.resource_identifier = resource_identifier
# This fallback prevents issues for internal/diagnostics objects
self.filename = filename
self.ocount = ocount
if self.filename in self.source_overrides:
# Let Pragma objects override source information for this filename.
override = self.source_overrides[self.filename]
self.source = override.get('source', self.filename)
self.ocount += override.get('ocount_delta', 0)
else:
# No pragma involved here; just default to the filename.
self.source = self.filename
# Is the object empty?
if obj == None :
self.logger.debug("Annotation has empty config")
return
# Is an ambassador_id present in this object?
allowed_ids = obj.get('ambassador_id', 'default')
if allowed_ids:
# Make sure it's a list. Yes, this is Draconian,
if type(allowed_ids) != list:
allowed_ids = [ allowed_ids ]
if Config.ambassador_id not in allowed_ids:
self.logger.debug("PROCESS: skip %s.%d; id %s not in %s" %
(self.resource_identifier, self.ocount, Config.ambassador_id, allowed_ids))
continue
self.logger.debug("PROCESS: %s.%d => %s" % (self.resource_identifier, self.ocount, self.source))
rc = self.process_object(obj)
if not rc:
self.post_error(rc)
def clean_and_copy(self, d):
out = []
for key in sorted(d.keys()):
original = d[key]
copy = dict(**original)
if '_source' in original:
del(original['_source'])
if '_referenced_by' in original:
del(original['_referenced_by'])
out.append(copy)
return out
def current_source_key(self):
return("%s.%d" % (self.filename, self.ocount))
def post_error(self, rc, key=None):
    """Record an error (a RichStatus) against a source key.

    Creates a placeholder source entry if the key has never been seen,
    so the error always has somewhere to live in the diagnostics output.

    :param rc: RichStatus describing the error
    :param key: source key ("<filename>.<ocount>"); defaults to the
                object currently being processed
    """
    if not key:
        key = self.current_source_key()

    # Yuck. Recover the filename by stripping the trailing ".<ocount>".
    filename = re.sub(r'\.\d+$', '', key)

    # Fetch the relevant source info. If it doesn't exist, stuff
    # in a synthetic "error" entry.
    source_info = self.sources.setdefault(key, {
        'kind': 'error',
        'version': 'error',
        'name': 'error',
        'filename': filename,
        'index': self.ocount,
        'yaml': 'error'
    })

    source_info.setdefault('errors', [])
    source_info['errors'].append(rc.toDict())

    # Make sure the filename maps to this key for diagnostics lookups.
    source_map = self.source_map.setdefault(filename, {})
    source_map[key] = True

    # Record the error under the key as well, and log it.
    errors = self.errors.setdefault(key, [])
    errors.append(rc.toDict())
    self.logger.error("%s (%s): %s" % (key, filename, rc))
def process_object(self, obj):
    """Validate one config object and dispatch it to its kind handler.

    Returns a RichStatus; failures are returned rather than raised so the
    caller decides how to report them.
    """
    source_key = self.current_source_key()

    if not obj:
        return RichStatus.fromError("undefined object???")

    try:
        obj_version = obj['apiVersion']
        obj_kind = obj['kind']
    except KeyError:
        return RichStatus.fromError("need apiVersion, kind")

    # Pragmas affect bookkeeping, not config; handle them specially.
    if obj_kind == 'Pragma':
        return self.handle_pragma(source_key, obj)

    if 'name' not in obj:
        return RichStatus.fromError("need name")

    obj_name = obj['name']

    # Remember where this object came from (including its raw YAML) for
    # the diagnostics service.
    self.sources[source_key] = {
        'kind': obj_kind,
        'version': obj_version,
        'name': obj_name,
        'filename': self.filename,
        'index': self.ocount,
        'yaml': yaml.safe_dump(obj, default_flow_style=False)
    }

    rc = self.validate_object(obj)

    if not rc:
        return rc

    # Make sure it has a source: use what's in the object if present,
    # otherwise fall back to the current source.
    self.sources[source_key]['_source'] = obj.get('source', self.source)

    source_map = self.source_map.setdefault(self.filename, {})
    source_map[source_key] = True

    # Dispatch on kind: handle_<kind>, falling back to save_object.
    handler_name = "handle_%s" % obj_kind.lower()
    handler = getattr(self, handler_name, None)

    if not handler:
        handler = self.save_object
        self.logger.warning("%s[%d]: no handler for %s, just saving" %
                            (self.resource_identifier, self.ocount, obj_kind))

    try:
        handler(source_key, obj, obj_name, obj_kind, obj_version)
    except Exception as e:
        return RichStatus.fromError("could not process %s object: %s" % (obj_kind, e))

    return RichStatus.OK(msg="%s object processed successfully" % obj_kind)
def validate_object(self, obj):
    """Validate one config object against its JSON schema, if one exists.

    Returns RichStatus.OK (with kind/version/name details) on success, or
    an error RichStatus describing what's wrong. Missing schemas are not
    an error; the object is accepted unvalidated.
    """
    # BUG FIX: collections.Mapping was a deprecated alias removed in
    # Python 3.10; use collections.abc.Mapping instead.
    from collections.abc import Mapping as _AbcMapping

    # Each object must be a dict, and must include "apiVersion"
    # and "type" at toplevel.
    if not isinstance(obj, _AbcMapping):
        return RichStatus.fromError("not a dictionary")

    if not (("apiVersion" in obj) and ("kind" in obj) and ("name" in obj)):
        return RichStatus.fromError("must have apiVersion, kind, and name")

    obj_version = obj['apiVersion']
    obj_kind = obj['kind']
    obj_name = obj['name']

    # Only "ambassador/vX" apiVersions are supported; strip the prefix.
    if obj_version.startswith("ambassador/"):
        obj_version = obj_version.split('/')[1]
    else:
        return RichStatus.fromError("apiVersion %s unsupported" % obj_version)

    schema_key = "%s-%s" % (obj_version, obj_kind)

    schema = self.schemas.get(schema_key, None)

    if not schema:
        schema_path = os.path.join(self.schema_dir_path, obj_version,
                                   "%s.schema" % obj_kind)

        try:
            # BUG FIX: use a context manager so the schema file handle is
            # closed instead of leaked.
            with open(schema_path, "r") as schema_file:
                schema = json.load(schema_file)
        except OSError:
            self.logger.debug("no schema at %s, skipping" % schema_path)
        except json.decoder.JSONDecodeError as e:
            self.logger.warning("corrupt schema at %s, skipping (%s)" %
                                (schema_path, e))

    if schema:
        # Cache the schema and validate against it.
        self.schemas[schema_key] = schema
        try:
            jsonschema.validate(obj, schema)
        except jsonschema.exceptions.ValidationError as e:
            return RichStatus.fromError("not a valid %s: %s" % (obj_kind, e))

    return RichStatus.OK(msg="valid %s" % obj_kind,
                         details=(obj_kind, obj_version, obj_name))
def safe_store(self, source_key, storage_name, obj_name, obj_kind, value, allow_log=True):
storage = self.config.setdefault(storage_name, {})
if obj_name in storage:
# Oooops.
raise Exception("%s[%d] defines %s %s, which is already present" %
(self.resource_identifier, self.ocount, obj_kind, obj_name))
if allow_log:
self.logger.debug("%s[%d]: saving %s %s" %
(self.resource_identifier, self.ocount, obj_kind, obj_name))
storage[obj_name] = value
return storage[obj_name]
def save_object(self, source_key, obj, obj_name, obj_kind, obj_version):
    """Fallback handler: store the object keyed by its kind, tagged with its source."""
    return self.safe_store(source_key, obj_kind, obj_name, obj_kind,
                           SourcedDict(_source=source_key, **obj))
def handle_pragma(self, source_key, obj):
    """Handle a Pragma object, which tweaks source tracking for this file.

    'source' overrides the source name reported for this filename;
    'autogenerated' shifts the object count back by one so generated files
    don't perturb numbering.
    """
    for key in sorted(k for k in sorted(obj.keys()) if k not in ('apiVersion', 'kind')):
        if key == 'source':
            override = self.source_overrides.setdefault(self.filename, {})
            override['source'] = obj['source']

            self.logger.debug("PRAGMA: override %s to %s" %
                              (self.resource_identifier, self.source_overrides[self.filename]['source']))
        elif key == 'autogenerated':
            self.source_overrides.setdefault(self.filename, {})['ocount_delta'] = -1

    return RichStatus.OK(msg="handled pragma object")
def handle_module(self, source_key, obj, obj_name, obj_kind, obj_version):
    """Store a Module's 'config' block under self.config['modules']."""
    module_config = SourcedDict(_source=source_key, **obj['config'])
    return self.safe_store(source_key, "modules", obj_name, obj_kind, module_config)
def handle_ratelimitservice(self, source_key, obj, obj_name, obj_kind, obj_version):
    """Store a RateLimitService definition for later filter generation."""
    tagged = SourcedDict(_source=source_key, **obj)
    return self.safe_store(source_key, "ratelimit_configs", obj_name, obj_kind, tagged)
def handle_tracingservice(self, source_key, obj, obj_name, obj_kind, obj_version):
    """Store a TracingService definition for later tracing-config generation."""
    tagged = SourcedDict(_source=source_key, **obj)
    return self.safe_store(source_key, "tracing_configs", obj_name, obj_kind, tagged)
def handle_authservice(self, source_key, obj, obj_name, obj_kind, obj_version):
    """Store an AuthService definition for later auth-filter generation."""
    tagged = SourcedDict(_source=source_key, **obj)
    return self.safe_store(source_key, "auth_configs", obj_name, obj_kind, tagged)
def handle_mapping(self, source_key, obj, obj_name, obj_kind, obj_version):
    """Wrap a Mapping object and store it under self.config['mappings']."""
    wrapped = Mapping(source_key, **obj)
    return self.safe_store(source_key, "mappings", obj_name, obj_kind, wrapped)
def diag_port(self):
modules = self.config.get("modules", {})
amod = modules.get("ambassador", {})
return amod.get("diag_port", 8877)
def diag_service(self):
    """Host:port string for the local diagnostics service."""
    port = self.diag_port()
    return "127.0.0.1:%d" % port
def add_intermediate_cluster(self, _source, name, _service, urls,
                             type="strict_dns", lb_type="round_robin",
                             cb_name=None, od_name=None, originate_tls=None,
                             grpc=False, host_rewrite=None, ssl_context=None):
    """Create (or re-reference) an Envoy cluster in self.envoy_clusters.

    If a cluster with this name already exists only its _referenced_by list
    is extended; otherwise a new cluster dict is built, wiring in circuit
    breakers, outlier detection, and TLS origination as requested.
    """
    if name not in self.envoy_clusters:
        self.logger.debug("CLUSTER %s: new from %s" % (name, _source))

        cluster = SourcedDict(
            _source=_source,
            _referenced_by=[ _source ],
            _service=_service,
            name=name,
            type=type,
            lb_type=lb_type,
            urls=urls
        )

        # Attach named circuit-breaker / outlier-detection configs if defined.
        if cb_name and (cb_name in self.breakers):
            cluster['circuit_breakers'] = self.breakers[cb_name]
            self.breakers[cb_name]._mark_referenced_by(_source)

        if od_name and (od_name in self.outliers):
            cluster['outlier_detection'] = self.outliers[od_name]
            self.outliers[od_name]._mark_referenced_by(_source)

        # originate_tls may be True (bare TLS), the name of a TLS context,
        # or falsy (cleartext). tls_array is the sorted key/value list that
        # the Envoy template consumes.
        if originate_tls == True:
            cluster['tls_context'] = { '_ambassador_enabled': True }
            cluster['tls_array'] = []
        elif (originate_tls and (originate_tls in self.tls_contexts)):
            cluster['tls_context'] = self.tls_contexts[originate_tls]
            self.tls_contexts[originate_tls]._mark_referenced_by(_source)

            tls_array = []

            for key, value in cluster['tls_context'].items():
                # Skip internal bookkeeping keys (leading underscore).
                if key.startswith('_'):
                    continue

                tls_array.append({ 'key': key, 'value': value })

            cluster['tls_array'] = sorted(tls_array, key=lambda x: x['key'])
        elif ssl_context:
            cluster['tls_context'] = ssl_context

            tls_array = []

            for key, value in ssl_context.items():
                tls_array.append({ 'key': key, 'value': value })

            cluster['tls_array'] = sorted(tls_array, key=lambda x: x['key'])

        # When rewriting the Host header over TLS, pass it as SNI too.
        # NOTE(review): if originate_tls names a context that isn't in
        # self.tls_contexts, 'tls_array' was never set and this would
        # raise KeyError -- confirm that callers pre-validate the context.
        if host_rewrite and originate_tls:
            cluster['tls_array'].append({'key': 'sni', 'value': host_rewrite })

        if grpc:
            # gRPC requires HTTP/2 upstream.
            cluster['features'] = 'http2'

        self.envoy_clusters[name] = cluster
    else:
        self.logger.debug("CLUSTER %s: referenced by %s" % (name, _source))

        self.envoy_clusters[name]._mark_referenced_by(_source)
# XXX This is a silly API. We should have a Cluster object that can carry what kind
# of cluster it is (this is a target cluster of weight 50%, this is a shadow cluster,
# whatever) and the API should be "add this cluster to this Mapping".
def add_intermediate_route(self, _source, mapping, svc, cluster_name, shadow=False):
    """Add (or extend) the Envoy route for a mapping's group.

    If a route already exists for mapping.group_id, the cluster is merged
    into it (as a shadow, or as another weighted cluster); otherwise a new
    route is created from the mapping.

    NOTE(review): the 'shadow' parameter is immediately overwritten by
    mapping.get('shadow', False) below, so callers can't actually override
    it -- confirm whether the parameter is vestigial.
    """
    route = self.envoy_routes.get(mapping.group_id, None)
    host_redirect = mapping.get('host_redirect', False)
    shadow = mapping.get('shadow', False)

    if route:
        # Is this a host_redirect? If so, that's an error: only one
        # host_redirect mapping may own a group.
        if host_redirect:
            self.logger.error("ignoring non-unique host_redirect mapping %s (see also %s)" %
                              (mapping['name'], route['_source']))
        elif shadow:
            # Shadows are singular: at most one shadow cluster per route.
            extant_shadow = route.get('shadow', None)

            if extant_shadow:
                shadow_name = extant_shadow.get('name', None)

                if shadow_name != cluster_name:
                    self.logger.error("mapping %s defines multiple shadows! Ignoring %s" %
                                      (mapping['name'], cluster_name))
            else:
                route['shadow'] = {
                    'name': cluster_name
                }
                route.setdefault('clusters', [])
        else:
            # Take the easy way out -- just add a new entry to this
            # route's set of weighted clusters.
            route["clusters"].append( { "name": cluster_name,
                                        "weight": mapping.attrs.get("weight", None) } )

        route._mark_referenced_by(_source)

        return

    # OK, nothing extant: synthesize a new route from the mapping.
    route = mapping.new_route(svc, cluster_name)
    self.envoy_routes[mapping.group_id] = route
def service_tls_check(self, svc, context, host_rewrite):
originate_tls = False
name_fields = None
if svc.lower().startswith("http://"):
originate_tls = False
svc = svc[len("http://"):]
elif svc.lower().startswith("https://"):
originate_tls = True
name_fields = [ 'otls' ]
svc = svc[len("https://"):]
elif context == True:
originate_tls = True
name_fields = [ 'otls' ]
# Separate if here because you need to be able to specify a context
# even after you say "https://" for the service.
if context and (context != True):
if context in self.tls_contexts:
name_fields = [ 'otls', context ]
originate_tls = context
else:
self.logger.error("Originate-TLS context %s is not defined" % context)
if originate_tls and host_rewrite:
name_fields.append("hr-%s" % host_rewrite)
port = 443 if originate_tls else 80
context_name = "_".join(name_fields) if name_fields else None
svc_url = 'tcp://%s' % svc
if ':' not in svc:
svc_url = '%s:%d' % (svc_url, port)
return (svc, svc_url, originate_tls, context_name)
def add_clusters_for_mapping(self, mapping):
    """Create the cluster(s) a mapping needs; return (svc, cluster_name).

    cluster_name is None for host_redirect mappings, which need no cluster.
    """
    svc = mapping['service']
    tls_context = mapping.get('tls', None)
    grpc = mapping.get('grpc', False)
    host_rewrite = mapping.get('host_rewrite', None)

    # Given the service and the TLS context, first initialize the cluster name for the
    # main service with the incoming service string...
    cluster_name_fields = [ svc ]

    host_redirect = mapping.get('host_redirect', False)
    shadow = mapping.get('shadow', False)

    if host_redirect:
        if shadow:
            # Not allowed.
            errstr = "At most one of host_redirect and shadow may be set; ignoring host_redirect"
            self.post_error(RichStatus.fromError(errstr), key=mapping['_source'])

            host_redirect = False
        else:
            # Short-circuit. You needn't actually create a cluster for a
            # host_redirect mapping.
            return svc, None

    if shadow:
        # Shadow clusters get a distinct name prefix. (Prefix, not suffix:
        # trying not to confuse people on upgrades.)
        cluster_name_fields.insert(0, "shadow")

    # Work out TLS origination and canonicalize the service/URL.
    (svc, url, originate_tls, otls_name) = self.service_tls_check(svc, tls_context, host_rewrite)

    # Build up the common name stuff that we'll need for the service and
    # its cluster: circuit breakers and outlier detection alter the name.
    aux_name_fields = []

    cb_name = mapping.get('circuit_breaker', None)

    if cb_name:
        if cb_name in self.breakers:
            aux_name_fields.append("cb_%s" % cb_name)
        else:
            self.logger.error("CircuitBreaker %s is not defined (mapping %s)" %
                              (cb_name, mapping.name))

    od_name = mapping.get('outlier_detection', None)

    if od_name:
        if od_name in self.outliers:
            aux_name_fields.append("od_%s" % od_name)
        else:
            self.logger.error("OutlierDetection %s is not defined (mapping %s)" %
                              (od_name, mapping.name))

    if otls_name:
        cluster_name_fields.append(otls_name)

    cluster_name_fields.extend(aux_name_fields)

    # Envoy cluster names must be conservative: mangle to [0-9A-Za-z_].
    cluster_name = 'cluster_%s' % "_".join(cluster_name_fields)
    cluster_name = re.sub(r'[^0-9A-Za-z_]', '_', cluster_name)

    self.logger.debug("%s: svc %s -> cluster %s" % (mapping.name, svc, cluster_name))

    self.add_intermediate_cluster(mapping['_source'], cluster_name,
                                  svc, [ url ],
                                  cb_name=cb_name, od_name=od_name, grpc=grpc,
                                  originate_tls=originate_tls, host_rewrite=host_rewrite)

    return svc, cluster_name
def merge_tmods(self, tls_module, generated_module, key):
    """Merge the 'key' section of a generated TLS module into the TLS module.

    Either argument may be None. Explicit TLS-module settings win over
    generated ones; a conflicting value is reported as an error (but the
    TLS module's value is kept). Returns the merged module, or whichever
    one existed.
    """
    if generated_module is None:
        return tls_module
    elif tls_module is None:
        return generated_module
    else:
        self.logger.debug("tls_module %s" % json.dumps(tls_module, indent=4))
        self.logger.debug("generated_module %s" % json.dumps(generated_module, indent=4))

        tls_source = tls_module['_source']
        tls_config = tls_module.get(key, {})

        gen_source = generated_module['_source']
        gen_config = generated_module.get(key, {})

        any_changes = False

        for ckey in gen_config:
            if ckey in tls_config:
                # Both define this key: the TLS module wins, but a differing
                # value is a conflict worth reporting.
                if tls_config[ckey] != gen_config[ckey]:
                    errfmt = "CONFLICT in TLS config for {}.{}: using {} from TLS module in {}"
                    errstr = errfmt.format(key, ckey, tls_config[ckey], tls_source)

                    self.post_error(RichStatus.fromError(errstr))
                else:
                    self.logger.debug("merge_tmods: {}.{} duplicated with same value".format(key, ckey))
            else:
                self.logger.debug("merge_tmods: copy {}.{} from gen_config".format(key, ckey))

                tls_config[ckey] = gen_config[ckey]
                any_changes = True

        if any_changes:
            # If we took anything from the generated module, mark it as a
            # source..
            tls_module._mark_referenced_by(gen_source)

            # ...and copy the tls_config back in (in case the key wasn't in the tls_module
            # to begin with).
            tls_module[key] = tls_config

        return tls_module
def generate_intermediate_config(self):
    """Build the intermediate (pre-template) Envoy config from parsed objects.

    Populates self.ambassador_module and self.envoy_config (filters,
    listeners, routes, clusters, breakers, outliers) from everything
    gathered in self.config.
    """
    # The "ambassador" module always exists, with these defaults.
    self.ambassador_module = SourcedDict(
        service_port = 80,
        admin_port = 8001,
        diag_port = 8877,
        auth_enabled = None,
        liveness_probe = { "enabled": True },
        readiness_probe = { "enabled": True },
        diagnostics = { "enabled": True },
        tls_config = None,
        use_proxy_proto = False,
        x_forwarded_proto_redirect = False,
    )

    #
    # Our set of clusters starts out empty; we use add_intermediate_cluster()
    # to build it up while making sure that all the source-tracking stuff
    # works out.
    #
    # Note that we use a map for clusters, not a list -- the reason is that
    # multiple mappings can use the same service, and we don't want multiple
    # clusters for one service.
    self.envoy_clusters = {}
    self.envoy_routes = {}

    self.envoy_config['grpc_services'] = []

    modules = self.config.get('modules', {})

    amod = modules.get('ambassador', None)
    tls_module = modules.get('tls', None)

    # Merge TLS config generated from mounted certs into any explicit
    # 'tls' module, for both server and client contexts.
    generated_module = modules.get('tls-from-ambassador-certs', None)

    tls_module = self.merge_tmods(tls_module, generated_module, 'server')
    tls_module = self.merge_tmods(tls_module, generated_module, 'client')

    tmod = tls_module or {}
    self.logger.debug("TLS module after merge: %s" % json.dumps(tmod, indent=4))

    if amod or tmod:
        self.module_config_ambassador("ambassador", amod, tmod)

    router_config = {}

    # Tracing (if configured) requires the router to start child spans.
    tracing_configs = self.config.get('tracing_configs', None)

    self.module_config_tracing(tracing_configs)

    if 'tracing' in self.envoy_config:
        router_config['start_child_span'] = True

    # Assemble the filter chain: auth, rate limiting, CORS, then the router.
    self.envoy_config['filters'] = []

    auth_mod = modules.get('authentication', None)
    auth_configs = self.config.get('auth_configs', None)

    auth_filter = self.module_config_authentication("authentication", amod, auth_mod, auth_configs)

    if auth_filter:
        self.envoy_config['filters'].append(auth_filter)

    ratelimit_configs = self.config.get('ratelimit_configs', None)

    (ratelimit_filter, ratelimit_grpc_service) = self.module_config_ratelimit(ratelimit_configs)

    if ratelimit_filter and ratelimit_grpc_service:
        self.envoy_config['filters'].append(ratelimit_filter)
        self.envoy_config['grpc_services'].append(ratelimit_grpc_service)

    self.envoy_config['filters'].append(SourcedDict(name="cors", config={}))
    self.envoy_config['filters'].append(SourcedDict(type="decoder", name="router", config=router_config))

    mappings = self.config.get("mappings", {})

    # Reset reference tracking on breakers and outliers; references are
    # re-established as clusters get built below.
    self.breakers = self.config.get("CircuitBreaker", {})

    for key, breaker in self.breakers.items():
        breaker['_referenced_by'] = []

    self.outliers = self.config.get("OutlierDetection", {})

    for key, outlier in self.outliers.items():
        outlier['_referenced_by'] = []

    # Dispatch every remaining module to its module_config_<name> handler.
    for module_name in modules.keys():
        if ((module_name == 'ambassador') or
            (module_name == 'tls') or
            (module_name == 'authentication') or
            (module_name == 'tls-from-ambassador-certs')):
            continue

        handler_name = "module_config_%s" % module_name
        handler = getattr(self, handler_name, None)

        if not handler:
            self.logger.error("module %s: no configuration generator, skipping" % module_name)
            continue

        handler(module_name, modules[module_name])

    # Once modules are handled, we can set up our admin config...
    self.envoy_config['admin'] = SourcedDict(
        _from=self.ambassador_module,
        admin_port=self.ambassador_module["admin_port"]
    )

    # ...and our listeners.
    primary_listener = SourcedDict(
        _from=self.ambassador_module,
        service_port=self.ambassador_module["service_port"],
        require_tls=False,
        use_proxy_proto=self.ambassador_module['use_proxy_proto']
    )

    if 'use_remote_address' in self.ambassador_module:
        primary_listener['use_remote_address'] = self.ambassador_module['use_remote_address']

    # If x_forwarded_proto_redirect is set, then we enable require_tls in primary listener, which in turn sets
    # require_ssl to true in envoy config. Once set, then all requests that contain X-FORWARDED-PROTO set to
    # https, are processes normally by envoy. In all the other cases, including X-FORWARDED-PROTO set to http,
    # a 301 redirect response to https://host is sent
    if self.ambassador_module.get('x_forwarded_proto_redirect', False):
        primary_listener['require_tls'] = True
        self.logger.debug("x_forwarded_proto_redirect is set to true, enabling 'require_tls' in listener")

    redirect_cleartext_from = None
    tmod = self.ambassador_module.get('tls_config', None)

    # ...TLS config, if necessary...
    if tmod:
        # self.logger.debug("USING TLS")
        primary_listener['tls'] = tmod

        if self.tmod_certs_exist(primary_listener['tls']) > 0:
            primary_listener['tls']['ssl_context'] = True

        redirect_cleartext_from = tmod.get('redirect_cleartext_from')

    self.envoy_config['listeners'] = [ primary_listener ]

    if redirect_cleartext_from:
        # We only want to set `require_tls` on the primary listener when certs are present on the pod
        if self.tmod_certs_exist(primary_listener['tls']) > 0:
            primary_listener['require_tls'] = True

        new_listener = SourcedDict(
            _from=self.ambassador_module,
            service_port=redirect_cleartext_from,
            require_tls=True,
            # Note: no TLS context here, this is a cleartext listener.
            # We can set require_tls True because we can let the upstream
            # tell us about that.
            use_proxy_proto=self.ambassador_module['use_proxy_proto']
        )

        if 'use_remote_address' in self.ambassador_module:
            new_listener['use_remote_address'] = self.ambassador_module['use_remote_address']

        self.envoy_config['listeners'].append(new_listener)

    self.default_liveness_probe['service'] = self.diag_service()
    self.default_readiness_probe['service'] = self.diag_service()
    self.default_diagnostics['service'] = self.diag_service()

    # Probes and diagnostics are implemented as internal mappings.
    for name, cur, dflt in [
        ("liveness", self.ambassador_module['liveness_probe'],
         self.default_liveness_probe),
        ("readiness", self.ambassador_module['readiness_probe'],
         self.default_readiness_probe),
        ("diagnostics", self.ambassador_module['diagnostics'],
         self.default_diagnostics)
    ]:
        if cur and cur.get("enabled", False):
            prefix = cur.get("prefix", dflt['prefix'])
            rewrite = cur.get("rewrite", dflt['rewrite'])
            service = cur.get("service", dflt['service'])

            # Push a fake mapping to handle this.
            name = "internal_%s_probe_mapping" % name

            mappings[name] = Mapping(
                _from=self.ambassador_module,
                kind='Mapping',
                name=name,
                prefix=prefix,
                rewrite=rewrite,
                service=service
            )

            # self.logger.debug("PROBE %s: %s -> %s%s" % (name, prefix, service, rewrite))

    # OK! We have all the mappings we need. Process them (don't worry about sorting
    # yet; routes get sorted by weight at the end).
    for mapping_name in sorted(mappings.keys()):
        mapping = mappings[mapping_name]

        # OK. Set up clusters for this service...
        svc, cluster_name = self.add_clusters_for_mapping(mapping)

        # ...and route.
        self.add_intermediate_route(mapping['_source'], mapping, svc, cluster_name)

    # OK. Walk the set of clusters and normalize names: anything over 60
    # characters is truncated to 40 and, on collision, numerically suffixed.
    collisions = {}
    mangled = {}

    for name in sorted(self.envoy_clusters.keys()):
        if len(name) > 60:
            # Too long.
            short_name = name[0:40]

            collision_list = collisions.setdefault(short_name, [])
            collision_list.append(name)

    for short_name in sorted(collisions.keys()):
        name_list = collisions[short_name]

        i = 0

        for name in sorted(name_list):
            mangled_name = "%s-%d" % (short_name, i)
            i += 1

            self.logger.info("%s => %s" % (name, mangled_name))

            mangled[name] = mangled_name
            self.envoy_clusters[name]['name'] = mangled_name

    # We need to default any unspecified weights and renormalize to 100
    for group_id, route in self.envoy_routes.items():
        clusters = route["clusters"]

        total = 0.0
        unspecified = 0

        # If this is a websocket route, it will support only one cluster right now.
        if route.get('use_websocket', False):
            if len(clusters) > 1:
                errmsg = "Only one cluster is supported for websockets; using %s" % clusters[0]['name']
                self.post_error(RichStatus.fromError(errmsg))

        for c in clusters:
            # Mangle the name, if need be.
            c_name = c["name"]

            if c_name in mangled:
                c["name"] = mangled[c_name]
                # self.logger.info("%s: mangling cluster %s to %s" % (group_id, c_name, c["name"]))

            if c["weight"] is None:
                unspecified += 1
            else:
                total += c["weight"]

        if unspecified:
            # Split the remaining weight evenly among unweighted clusters.
            for c in clusters:
                if c["weight"] is None:
                    c["weight"] = (100.0 - total)/unspecified
        elif total != 100.0:
            # Renormalize explicit weights so they sum to 100.
            for c in clusters:
                c["weight"] *= 100.0/total

    # OK. When all is said and done, sort the list of routes by route weight...
    self.envoy_config['routes'] = sorted([
        route for group_id, route in self.envoy_routes.items()
    ], reverse=True, key=Mapping.route_weight)

    # ...then map clusters back into a list...
    self.envoy_config['clusters'] = [
        self.envoy_clusters[cluster_key] for cluster_key in sorted(self.envoy_clusters.keys())
    ]

    # ...and finally repeat for breakers and outliers, but copy them in the process so we
    # can mess with the originals.
    #
    # What's going on here is that circuit-breaker and outlier-detection configs aren't
    # included as independent objects in envoy.json, but we want to be able to discuss
    # them in diag. We also don't need to keep the _source and _referenced_by elements
    # in the originals.
    self.envoy_config['breakers'] = self.clean_and_copy(self.breakers)
    self.envoy_config['outliers'] = self.clean_and_copy(self.outliers)
@staticmethod
def tmod_certs_exist(tmod):
cert_count = 0
if tmod.get('cert_chain_file') is not None:
cert_count += 1
if tmod.get('private_key_file') is not None:
cert_count += 1
if tmod.get('cacert_chain_file') is not None:
cert_count += 1
return cert_count
def _get_intermediate_for(self, element_list, source_keys, value):
if not isinstance(value, dict):
return
good = True
if '_source' in value:
good = False
value_source = value.get("_source", None)
value_referenced_by = value.get("_referenced_by", [])
if ((value_source in source_keys) or
(source_keys & set(value_referenced_by))):
good = True
if good:
element_list.append(value)
def get_intermediate_for(self, source_key):
    """Return the slice of intermediate config attributable to source_key.

    source_key may be "grp-<group_id>" (a route group), a filename (which
    maps to many object keys), or a single source key. The result carries
    the matching sources (with their errors) plus every envoy_config
    element they produced; unknown keys yield an {"error": ...} dict.
    """
    source_keys = []

    if source_key.startswith("grp-"):
        group_id = source_key[4:]

        # Collect the source (and all referencers) of every route in group.
        for route in self.envoy_config['routes']:
            if route['_group_id'] == group_id:
                source_keys.append(route['_source'])

                for reference_key in route['_referenced_by']:
                    source_keys.append(reference_key)

        if not source_keys:
            return {
                "error": "No group matches %s" % group_id
            }
    else:
        if source_key in self.source_map:
            # The key is a filename; expand to all its object keys.
            source_keys = self.source_map[source_key]
        elif source_key in self.sources:
            source_keys.append(source_key)
        else:
            return {
                "error": "No source matches %s" % source_key
            }

    source_keys = set(source_keys)

    # Assemble source descriptions, folding in any recorded errors.
    sources = []

    for key in source_keys:
        source_dict = dict(self.sources[key])
        source_dict['errors'] = [
            {
                'summary': error['error'].split('\n', 1)[0],
                'text': error['error']
            }
            for error in self.errors.get(key, [])
        ]
        source_dict['source_key'] = key

        sources.append(source_dict)

    result = {
        "sources": sources
    }

    # Filter every envoy_config section down to elements owned by (or
    # referencing) these source keys.
    for key in self.envoy_config.keys():
        result[key] = []

        value = self.envoy_config[key]

        if isinstance(value, list):
            for v2 in value:
                self._get_intermediate_for(result[key], source_keys, v2)
        else:
            self._get_intermediate_for(result[key], source_keys, value)

    return result
def generate_envoy_config(self, template=None, template_dir=None, **kwargs):
    """Render the final Envoy config text and report the event to Scout.

    Extra kwargs are forwarded to the Scout report. Returns a RichStatus
    whose envoy_config is the rendered output.
    """
    envoy_json = self.to_json(template=template, template_dir=template_dir)

    # rc = RichStatus.fromError("impossible")

    # # ...and use the JSON parser as a final sanity check.
    # try:
    #     obj = json.loads(envoy_json)
    #     rc = RichStatus.OK(msg="Envoy configuration OK", envoy_config=obj)
    # except json.decoder.JSONDecodeError as e:
    #     rc = RichStatus.fromError("Invalid Envoy configuration: %s" % str(e),
    #                               raw=envoy_json, exception=e)

    # Go ahead and report that we generated an Envoy config, if we can.
    scout_result = Config.scout_report(action="config", result=True, generated=True, **kwargs)

    rc = RichStatus.OK(envoy_config=envoy_json, scout_result=scout_result)

    # self.logger.debug("Scout reports %s" % json.dumps(rc.scout_result))

    return rc
def set_config_ambassador(self, module, key, value, merge=False):
    """Store *value* under *key* in the ambassador module config.

    When *merge* is true, the existing mapping at *key* is updated in place
    instead of being replaced.  The module's ``_source`` is then recorded as
    the source of the ambassador config.
    """
    if merge:
        self.ambassador_module[key].update(value)
    else:
        self.ambassador_module[key] = value

    # XXX This is actually wrong sometimes: e.g. a TLS module created by a
    # secret probably shouldn't take over as the source. Leave it for now.
    self.ambassador_module['_source'] = module['_source']
def update_config_ambassador(self, module, key, value):
    """Merge *value* into the existing ambassador config entry at *key*."""
    self.set_config_ambassador(module, key, value, merge=True)
def tls_config_helper(self, name, amod, tmod):
    """Merge the TLS contexts in *tmod* into the ambassador TLS config.

    The well-known 'server' and 'client' contexts (when enabled) are merged
    into a single ``tls_config`` entry on the ambassador module; any other
    context is stored verbatim in ``self.tls_contexts``.  Returns True when
    either well-known context was enabled.

    NOTE(review): indentation was reconstructed from stripped source; the
    final debug lines are placed inside ``if some_enabled`` because
    ``ambassador_module['tls_config']`` only exists on that path.
    """
    tmp_config = SourcedDict(_from=amod)
    some_enabled = False

    for context_name in tmod.keys():
        if context_name.startswith('_'):
            # Bookkeeping keys (_source etc.) are not TLS contexts.
            continue

        context = tmod[context_name]

        # self.logger.debug("context %s -- %s" % (context_name, json.dumps(context)))

        if context.get('enabled', True):
            if context_name == 'server':
                # Server-side TLS is enabled.
                self.logger.debug("TLS termination enabled!")
                some_enabled = True

                # Switch to port 443 by default...
                self.set_config_ambassador(amod, 'service_port', 443)

                # ...and merge in the server-side defaults.
                tmp_config.update(self.default_tls_config['server'])
                tmp_config.update(tmod['server'])

                # Check if secrets are supplied for TLS termination and/or TLS auth
                secret = context.get('secret')
                if secret is not None:
                    self.logger.debug("config.server.secret is {}".format(secret))
                    # If /{etc,ambassador}/certs/tls.crt does not exist, then load the secrets
                    if check_cert_file(TLSPaths.mount_tls_crt.value):
                        self.logger.debug("Secret already exists, taking no action for secret {}".format(secret))
                    elif check_cert_file(TLSPaths.tls_crt.value):
                        # Certs already saved locally from a previous run.
                        tmp_config['cert_chain_file'] = TLSPaths.tls_crt.value
                        tmp_config['private_key_file'] = TLSPaths.tls_key.value
                    else:
                        # Pull the secret from Kubernetes and persist it.
                        (server_cert, server_key, server_data) = read_cert_secret(kube_v1(), secret, self.namespace)
                        if server_cert and server_key:
                            self.logger.debug("saving contents of secret {} to {}".format(
                                secret, TLSPaths.cert_dir.value))
                            save_cert(server_cert, server_key, TLSPaths.cert_dir.value)
                            tmp_config['cert_chain_file'] = TLSPaths.tls_crt.value
                            tmp_config['private_key_file'] = TLSPaths.tls_key.value
            elif context_name == 'client':
                # Client-side TLS is enabled.
                self.logger.debug("TLS client certs enabled!")
                some_enabled = True

                # Merge in the client-side defaults.
                tmp_config.update(self.default_tls_config['client'])
                tmp_config.update(tmod['client'])

                secret = context.get('secret')
                if secret is not None:
                    self.logger.debug("config.client.secret is {}".format(secret))
                    if check_cert_file(TLSPaths.client_mount_crt.value):
                        self.logger.debug("Secret already exists, taking no action for secret {}".format(secret))
                    elif check_cert_file(TLSPaths.client_tls_crt.value):
                        tmp_config['cacert_chain_file'] = TLSPaths.client_tls_crt.value
                    else:
                        # Only the CA cert matters for client-cert validation.
                        (client_cert, _, _) = read_cert_secret(kube_v1(), secret, self.namespace)
                        if client_cert:
                            self.logger.debug("saving contents of secret {} to {}".format(
                                secret, TLSPaths.client_cert_dir.value))
                            save_cert(client_cert, None, TLSPaths.client_cert_dir.value)
                            tmp_config['cacert_chain_file'] = TLSPaths.client_tls_crt.value
            else:
                # This is a wholly new thing.
                self.tls_contexts[context_name] = SourcedDict(
                    _from=tmod,
                    **context
                )

    if some_enabled:
        if 'enabled' in tmp_config:
            del(tmp_config['enabled'])

        # Save the TLS config...
        self.set_config_ambassador(amod, 'tls_config', tmp_config)

        self.logger.debug("TLS config: %s" % json.dumps(self.ambassador_module['tls_config'], indent=4))
        self.logger.debug("TLS contexts: %s" % json.dumps(self.tls_contexts, indent=4))

    return some_enabled
def module_config_ambassador(self, name, amod, tmod):
    """Process the toplevel Ambassador module configuration.

    TLS is handled first, preferring the ambassador module's own ``tls``
    block over the standalone TLS module *tmod*; then default CORS; then
    simple scalar settings are copied into the ambassador config.
    """
    handled_tls = False

    if amod and 'tls' in amod:
        handled_tls = self.tls_config_helper(name, amod, amod['tls'])

    # Fall back to the standalone TLS module only if the ambassador
    # module didn't already enable TLS.
    if tmod and not handled_tls:
        self.tls_config_helper(name, tmod, tmod)

    if amod and 'cors' in amod:
        self.parse_and_save_default_cors(amod)

    # Copy straightforward overrides (ports, probes, flags) verbatim.
    overridable = ('service_port', 'admin_port', 'diag_port',
                   'liveness_probe', 'readiness_probe', 'auth_enabled',
                   'use_proxy_proto', 'use_remote_address', 'diagnostics',
                   'x_forwarded_proto_redirect')

    if amod:
        for key in overridable:
            if key in amod:
                # Yes. It overrides the default.
                self.set_config_ambassador(amod, key, amod[key])
def parse_and_save_default_cors(self, amod):
    """Parse the ambassador module's ``cors`` block into ``envoy_config['cors_default']``.

    ``origins`` may be given as a list or as a comma-separated string; any
    other type is rejected (with a diagnostic) and the whole CORS default
    is discarded.  The remaining CORS elements are copied via
    ``save_cors_default_element``.

    Fix: use ``isinstance`` for the type checks instead of ``type(x) is T``
    (the idiomatic check, and it accepts subclasses).
    """
    cors_default_temp = {'enabled': True}
    cors = amod['cors']

    origins = cors.get('origins')
    if origins is not None:
        if isinstance(origins, list):
            cors_default_temp['allow_origin'] = origins
        elif isinstance(origins, str):
            # Comma-separated string form.
            cors_default_temp['allow_origin'] = origins.split(',')
        else:
            print("invalid cors configuration supplied - {}".format(origins))
            return

    self.save_cors_default_element("max_age", "max_age", cors_default_temp, cors)
    self.save_cors_default_element("credentials", "allow_credentials", cors_default_temp, cors)
    self.save_cors_default_element("methods", "allow_methods", cors_default_temp, cors)
    self.save_cors_default_element("headers", "allow_headers", cors_default_temp, cors)
    self.save_cors_default_element("exposed_headers", "expose_headers", cors_default_temp, cors)

    self.envoy_config['cors_default'] = cors_default_temp
def save_cors_default_element(self, cors_key, route_key, cors_dest, cors_source):
    """Copy *cors_key* from *cors_source* into *cors_dest* under *route_key*.

    List values are flattened to a comma-separated string; absent keys are
    skipped.

    Fix: the original looked up ``cors_source.get(cors_key)`` three separate
    times; the value is now fetched once.
    """
    value = cors_source.get(cors_key)

    if value is not None:
        if isinstance(value, list):
            cors_dest[route_key] = ", ".join(value)
        else:
            cors_dest[route_key] = value
def module_config_ratelimit(self, ratelimit_config):
    """Build the rate_limit decoder filter and its gRPC service stanza.

    Returns ``(filter, grpc_service)``, or ``(None, None)`` when no
    RateLimitService is configured.  Creates the ``cluster_ext_ratelimit``
    cluster on first use and marks every additional source as referencing
    both the filter and the cluster.
    """
    cluster_hosts = None
    sources = []

    if ratelimit_config:
        # Last config wins for the service host; every source is recorded.
        for config in ratelimit_config.values():
            sources.append(config['_source'])
            cluster_hosts = config.get("service", None)

    if not cluster_hosts or not sources:
        return (None, None)

    # NOTE: 'config' is the loop variable from above; the early return
    # guarantees the loop ran at least once.
    host_rewrite = config.get("host_rewrite", None)

    cluster_name = "cluster_ext_ratelimit"
    filter_config = {
        "domain": "ambassador",
        "request_type": "both",
        "timeout_ms": 20
    }
    grpc_service = SourcedDict(
        name="rate_limit_service",
        cluster_name=cluster_name
    )

    first_source = sources.pop(0)

    # NOTE: 'filter' shadows the builtin; kept as-is for compatibility.
    filter = SourcedDict(
        _source=first_source,
        type="decoder",
        name="rate_limit",
        config=filter_config
    )

    if cluster_name not in self.envoy_clusters:
        (svc, url, originate_tls, otls_name) = self.service_tls_check(cluster_hosts, None, host_rewrite)

        self.add_intermediate_cluster(first_source, cluster_name,
                                      'extratelimit', [url],
                                      type="strict_dns", lb_type="round_robin",
                                      grpc=True, host_rewrite=host_rewrite)

    for source in sources:
        filter._mark_referenced_by(source)
        self.envoy_clusters[cluster_name]._mark_referenced_by(source)

    return (filter, grpc_service)
def module_config_tracing(self, tracing_config):
    """Translate TracingService config into an Envoy tracing stanza.

    Builds (or reuses) the ``cluster_ext_tracing`` cluster and stores the
    driver configuration in ``self.envoy_config['tracing']``.  No-op when
    no tracing service is configured.
    """
    cluster_hosts = None
    driver = None
    driver_config = None
    tag_headers = None
    host_rewrite = None

    sources = []

    if tracing_config:
        # Last config wins for scalar settings; every source is recorded.
        for config in tracing_config.values():
            sources.append(config['_source'])
            cluster_hosts = config.get("service", None)
            driver = config.get("driver", None)
            driver_config = config.get("config", {})
            tag_headers = config.get("tag_headers", [])
            host_rewrite = config.get("host_rewrite", None)

    if not cluster_hosts or not sources:
        return

    cluster_name = "cluster_ext_tracing"

    first_source = sources.pop(0)

    if cluster_name not in self.envoy_clusters:
        (svc, url, originate_tls, otls_name) = self.service_tls_check(cluster_hosts, None, host_rewrite)

        grpc = False
        ssl_context = None

        if driver == "lightstep":
            # Lightstep's collector speaks gRPC over TLS; pin the
            # collector hostname as the expected SAN.
            grpc = True
            parsed_url = urlparse(url)
            ssl_context = {
                "ca_cert_file": "/etc/ssl/certs/ca-certificates.crt",
                "verify_subject_alt_name": [parsed_url.hostname]
            }

        self.add_intermediate_cluster(first_source, cluster_name,
                                      'exttracing', [url],
                                      type="strict_dns", lb_type="round_robin",
                                      host_rewrite=host_rewrite, grpc=grpc, ssl_context=ssl_context)

    driver_config['collector_cluster'] = cluster_name

    tracing = SourcedDict(
        _source=first_source,
        driver=driver,
        config=driver_config,
        tag_headers=tag_headers,
        cluster_name=cluster_name
    )

    self.envoy_config['tracing'] = tracing
def auth_helper(self, sources, config, cluster_hosts, module):
    """Fold one AuthService *module* into the accumulated auth state.

    Appends the module's source to *sources*, merges scalar settings into
    *config* (posting an error on conflicting values), accumulates
    ``allowed_headers`` without duplicates, and registers the module's
    ``auth_service`` in *cluster_hosts* with its weight and TLS context.

    Fix: ``value != None`` replaced with the correct identity check
    ``value is not None``.
    """
    sources.append(module['_source'])

    for key in ['path_prefix', 'timeout_ms', 'cluster']:
        value = module.get(key, None)

        if value is not None:
            previous = config.get(key, None)

            if previous and (previous != value):
                # Conflicting value from another AuthService: keep the
                # first one and report the conflict.
                errstr = (
                    "AuthService cannot support multiple %s values; using %s" %
                    (key, previous)
                )
                self.post_error(RichStatus.fromError(errstr), key=module['_source'])
            else:
                config[key] = value

    headers = module.get('allowed_headers', None)

    if headers:
        allowed_headers = config.get('allowed_headers', [])

        for hdr in headers:
            if hdr not in allowed_headers:
                allowed_headers.append(hdr)

        config['allowed_headers'] = allowed_headers

    auth_service = module.get("auth_service", None)
    # weight = module.get("weight", 100)
    weight = 100  # Can't support arbitrary weights right now.

    if auth_service:
        cluster_hosts[auth_service] = (weight, module.get('tls', None))
def module_config_authentication(self, name, amod, auth_mod, auth_configs):
    """Build the extauth decoder filter from AuthService modules/configs.

    Accumulates auth settings via ``auth_helper``, creates (if needed) the
    external-auth cluster from the collected services — defaulting to
    ``127.0.0.1:5000`` — and returns the filter SourcedDict, or None when
    no auth configuration exists at all.
    """
    filter_config = {
        "cluster": "cluster_ext_auth",
        "timeout_ms": 5000
    }

    cluster_hosts = {}
    sources = []

    if auth_mod:
        self.auth_helper(sources, filter_config, cluster_hosts, auth_mod)

    if auth_configs:
        for config in auth_configs.values():
            self.auth_helper(sources, filter_config, cluster_hosts, config)

    if not sources:
        return None

    first_source = sources.pop(0)

    filter = SourcedDict(
        _source=first_source,
        _services=sorted(cluster_hosts.keys()),
        type="decoder",
        name="extauth",
        config=filter_config
    )

    cluster_name = filter_config['cluster']
    host_rewrite = filter_config.get('host_rewrite', None)

    if cluster_name not in self.envoy_clusters:
        if not cluster_hosts:
            # No explicit auth service: assume a local sidecar.
            cluster_hosts = { '127.0.0.1:5000': ( 100, None ) }

        urls = []
        protocols = {}

        for svc in sorted(cluster_hosts.keys()):
            weight, tls_context = cluster_hosts[svc]

            (svc, url, originate_tls, otls_name) = self.service_tls_check(svc, tls_context, host_rewrite)

            if originate_tls:
                protocols['https'] = True
            else:
                protocols['http'] = True

            if otls_name:
                # A named TLS context forks the cluster name.
                filter_config['cluster'] = cluster_name + "_" + otls_name
                cluster_name = filter_config['cluster']

            urls.append(url)

        # All auth services must agree on a single scheme.
        if len(protocols.keys()) != 1:
            raise Exception("auth config cannot try to use both HTTP and HTTPS")

        self.add_intermediate_cluster(first_source, cluster_name,
                                      'extauth', urls,
                                      type="strict_dns", lb_type="round_robin",
                                      originate_tls=originate_tls, host_rewrite=host_rewrite)

    for source in sources:
        filter._mark_referenced_by(source)
        self.envoy_clusters[cluster_name]._mark_referenced_by(source)

    return filter
def diagnostic_overview(self):
    """Assemble the diagnostics overview.

    Builds per-file source summaries (object and error counts), tags
    non-diagnostic routes with their group IDs, copies the remaining
    envoy_config sections, and synthesizes ``ambassador_services`` entries
    for the well-known auth/tracing/ratelimit clusters.

    Fixes: the ``def`` line was truncated to ``w(self):`` in this copy —
    the name is restored from the ``__main__`` caller at the bottom of the
    file; also corrects the ``service_weigth`` local-variable typo.
    """
    source_files = {}

    for filename, source_keys in self.source_map.items():
        source_dict = source_files.setdefault(
            filename,
            {
                'filename': filename,
                'objects': {},
                'count': 0,
                'plural': "objects",
                'error_count': 0,
                'error_plural': "errors"
            }
        )

        for source_key in source_keys:
            source = self.sources[source_key]

            if ('source' in source) and not ('source' in source_dict):
                source_dict['source'] = source['source']

            raw_errors = self.errors.get(source_key, [])

            errors = []

            for error in raw_errors:
                source_dict['error_count'] += 1

                errors.append({
                    'summary': error['error'].split('\n', 1)[0],
                    'text': error['error']
                })

            source_dict['error_plural'] = "error" if (source_dict['error_count'] == 1) else "errors"

            source_dict['count'] += 1
            source_dict['plural'] = "object" if (source_dict['count'] == 1) else "objects"

            object_dict = source_dict['objects']
            object_dict[source_key] = {
                'key': source_key,
                'kind': source['kind'],
                'errors': errors
            }

    # Tag non-diagnostic routes with their group ID.
    routes = []

    for route in self.envoy_config['routes']:
        if route['_source'] != "--diagnostics--":
            route['_group_id'] = Mapping.group_id(route.get('method', 'GET'),
                                                 route['prefix'] if 'prefix' in route else route['regex'],
                                                 route.get('headers', []))

            routes.append(route)

    configuration = { key: self.envoy_config[key] for key in self.envoy_config.keys()
                      if key != "routes" }

    # Map well-known clusters back to the Ambassador service that created them.
    cluster_to_service_mapping = {
        "cluster_ext_auth": "AuthService",
        "cluster_ext_tracing": "TracingService",
        "cluster_ext_ratelimit": "RateLimitService"
    }

    ambassador_services = []

    for cluster in configuration.get('clusters', []):
        maps_to_service = cluster_to_service_mapping.get(cluster['name'])

        if maps_to_service:
            # Spread the service weight evenly across the cluster's URLs.
            service_weight = 100.0 / len(cluster['urls'])

            for url in cluster['urls']:
                ambassador_services.append(SourcedDict(
                    _from=cluster,
                    type=maps_to_service,
                    name=url,
                    cluster=cluster['name'],
                    _service_weight=service_weight
                ))

    overview = dict(sources=sorted(source_files.values(), key=lambda x: x['filename']),
                    routes=routes,
                    **configuration)

    if len(ambassador_services) > 0:
        overview['ambassador_services'] = ambassador_services

    return overview
def pretty(self, obj, out=None):
    """Write *obj* to *out* (default: the current ``sys.stdout``).

    Fix: the original default ``out=sys.stdout`` bound the stream object at
    definition time, so later redirection of ``sys.stdout`` was ignored.
    Resolving the default at call time is backward compatible.
    """
    if out is None:
        out = sys.stdout
    out.write(obj)
def to_json(self, template=None, template_dir=None):
    """Render the Envoy config through the ``envoy.j2`` Jinja template.

    *template* may be supplied directly; otherwise it is loaded from
    *template_dir* (searched first, if given) or the configured config and
    template directories.
    """
    search_paths = [self.config_dir_path, self.template_dir_path]

    if template_dir:
        search_paths.insert(0, template_dir)

    if not template:
        loader = FileSystemLoader(search_paths)
        template = Environment(loader=loader).get_template("envoy.j2")

    return template.render(**self.envoy_config)
def dump(self):
    """Dump the raw and generated configurations for debugging."""
    for label, cfg in (("config", self.config), ("envoy_config", self.envoy_config)):
        print("==== %s" % label)
        self.pretty(cfg)
if __name__ == '__main__':
    # CLI entry point: load a config from the directory named on the
    # command line and dump the diagnostic overview as sorted JSON.
    aconf = Config(sys.argv[1])
    print(json.dumps(aconf.diagnostic_overview(), indent=4, sort_keys=True))
| true | true |
f7fa9ec1636705253fec23dbf2339ee3acbc644b | 7,168 | py | Python | text_sentiment/tests/test_app.py | ChrisCintron/text-sentiment | 47510824ebf045d183825b7dc8feba0876982752 | [
"Unlicense"
] | null | null | null | text_sentiment/tests/test_app.py | ChrisCintron/text-sentiment | 47510824ebf045d183825b7dc8feba0876982752 | [
"Unlicense"
] | null | null | null | text_sentiment/tests/test_app.py | ChrisCintron/text-sentiment | 47510824ebf045d183825b7dc8feba0876982752 | [
"Unlicense"
] | null | null | null | import pytest
import types
from context import app #context file adjust our working directory in order to access app
from app import *
from fixtures.constants import *
class Test_Filters:
    """Unit tests for the Filters text-normalization pipeline."""

    @pytest.fixture(autouse=True)
    def filterobj(self):
        # Fresh Filters per test. (The original also assigned an unused
        # ``self.line`` sample string here; every test builds its own
        # input, so that assignment has been removed.)
        yield Filters()

    def test_init(self, filterobj):
        """Split has to be the last function to be called"""
        assert filterobj.order.pop() == 'split'

    def test_remove_badchars(self, filterobj):
        line = 'Cont1ain filt2ers us1ed to filter th103e con*tents from textfile'
        expected_output = 'Contain filters used to filter the contents from textfile'
        output = filterobj.remove_badchars(line)
        assert output == expected_output

    def test_make_lowercase(self, filterobj):
        line = 'ContAiN fIlterS uSeD to fiLTer thE contenTS fRom texTFILE'
        expected_output = 'contain filters used to filter the contents from textfile'
        output = filterobj.make_lowercase(line)
        assert output == expected_output

    def test_split(self, filterobj):
        line = 'Contain filters used to filter the contents'
        expected_output = ['Contain', 'filters', 'used', 'to', 'filter', 'the', 'contents']
        output = filterobj.split(line)
        assert output == expected_output

    def test_filter(self, filterobj):
        # End-to-end: all filters applied in order.
        line = 'Cont1*7ain f@#9ILTErs us9]ed TO fIltER the+ ContenTS'
        expected_output = ['contain', 'filters', 'used', 'to', 'filter', 'the', 'contents']
        output = filterobj.filter(line)
        assert output == expected_output
@pytest.fixture()
def database():
    # Fresh Database handle per test, backed by the fixture DB at DB_PATH.
    yield Database(db_path=DB_PATH)
class Test_Database:
    """Tests for Database table reflection and single-word queries."""

    def test_init(self, database):
        # Both sentiment tables must be reflected in the metadata.
        expected_tables = {'Warriner-English', 'labMTwords-English'}
        found = {table for table in database.metadata.tables}
        assert expected_tables.issubset(found)

    def test_query(self, database):
        # (word, expected value) per table; unknown words score 0.
        cases = {
            'Warriner-English': [
                ('love', 8), ('rest', 7.86), ('grave', 2.4),
                ('132s', 0), ('x2', 0), ('yelack', 0),
            ],
            'labMTwords-English': [
                ('love', 8.42), ('rest', 7.18), ('grave', 2.56),
                ('132s', 0), ('x2', 0), ('yelack', 0),
            ],
        }
        for table, expectations in cases.items():
            for word, value in expectations:
                assert database.query(table=table, word=word) == (table, word, value)
@pytest.fixture()
def ts():
    # TextSentiment wired to the test document and fixture database.
    yield TextSentiment(file_path=TEST_DOC,db_path=DB_PATH)
class Test_TextSentiment:
    """End-to-end tests for TextSentiment: file reading, filtering,
    querying, word counting, and sentiment aggregation.

    Fixes: removed commented-out code in test_process and the unused
    sample string in test_main.
    """

    def test_openfile(self, ts):
        file_path = TEST_DOC
        output_1_correct = 'My second line.'
        output_2_correct = 'Third line is the charm.'
        output_3_correct = 'Fourth one fits the bill.'
        output = ts._openfile(file_path)
        assert next(output) == output_1_correct
        assert next(output) == output_2_correct
        assert next(output) == output_3_correct

    def test_filter(self, ts):
        content = ['My Second Line']
        expected_output = ['my', 'second', 'line']
        output = ts._filter(content)
        assert next(output) == expected_output

    def test_query(self, ts):
        """Test single word query on both tables"""
        tables = ['Warriner-English', 'labMTwords-English']
        word = 'love'
        expected_output = (tables[0], 'love', 8)
        output = ts._query(table=tables[0], word=word)
        assert output == expected_output
        expected_output = (tables[1], 'love', 8.42)
        output = ts._query(table=tables[1], word=word)
        assert output == expected_output

    def populate_datalabels(self, ts):
        # NOTE(review): not collected by pytest (no test_ prefix) and the
        # bare attribute access below is a no-op -- confirm intent.
        ts.populate_datalabels

    def test_wordcount(self, ts):
        """Tests specific wordcount and actually returned data structure"""
        word = 'is'
        value = 1
        content = [['my', 'second', 'line',
                    'third', 'line', 'is', 'the', 'charm',
                    'fourth', 'one', 'fits', 'the', 'bill', ]]
        expected_output = value
        output = ts._wordcount(content)[word]
        assert output == expected_output
        content = [['one', 'two', 'two', 'three', 'three', 'three']]
        expected_output = Counter({'one': 1, 'two': 2, 'three': 3})
        output = ts._wordcount(content)
        assert output == expected_output

    def test_process(self, ts):
        tables = ts.tables
        wordset = Counter(['one', 'two', 'two', 'three', 'three', 'three'])
        output = ts.process(wordset=wordset, tables=tables)
        assert output['table_metrics']['Warriner-English']['sentimentvalue'] == 5.83
        assert round(output['table_metrics']['Warriner-English']['total_db_value'], 2) == 34.98
        assert output['table_metrics']['Warriner-English']['total_frequency'] == 6
        assert output['table_metrics']['labMTwords-English']['sentimentvalue'] == 5.56
        assert output['table_metrics']['labMTwords-English']['total_db_value'] == 33.36
        assert output['table_metrics']['labMTwords-English']['total_frequency'] == 6
        assert output['words']['one']['frequency'] == 1
        assert output['words']['one']['table_value']['Warriner-English'] == 6.09
        assert output['words']['one']['table_value']['labMTwords-English'] == 5.4
        assert output['words']['two']['frequency'] == 2
        assert output['words']['two']['table_value']['Warriner-English'] == 6.3
        assert output['words']['two']['table_value']['labMTwords-English'] == 5.4
        assert output['words']['three']['frequency'] == 3
        assert output['words']['three']['table_value']['Warriner-English'] == 5.43
        assert output['words']['three']['table_value']['labMTwords-English'] == 5.72

    def test_updatetotalvalues(self, ts):
        table = 'Warriner-English'
        frequency, table_value = 1, 6.04
        output = ts._updatetotalvalues(frequency, table, table_value)
        expected_output = 6.04
        assert output == expected_output
        # Second call folds the new value into a running average.
        frequency, table_value = 2, 3
        output = ts._updatetotalvalues(frequency, table, table_value)
        expected_output = 4.01
        assert output == expected_output

    def test_main(self, ts):
        """Test overall functionality"""
        ts = TextSentiment(file_path=TEST_DOC)
        data = ts.process(formatter='json')
        print(data)
| 44.521739 | 166 | 0.641044 | import pytest
import types
from context import app
from app import *
from fixtures.constants import *
class Test_Filters:
@pytest.fixture(autouse=True)
def filterobj(self):
self.line = 'Cont1ain filt2ers us1ed to filter th103e con*tents from textfile'
yield Filters()
def test_init(self,filterobj):
assert filterobj.order.pop() == 'split'
def test_remove_badchars(self,filterobj):
line = 'Cont1ain filt2ers us1ed to filter th103e con*tents from textfile'
expected_output = 'Contain filters used to filter the contents from textfile'
output = filterobj.remove_badchars(line)
assert output == expected_output
def test_make_lowercase(self,filterobj):
line = 'ContAiN fIlterS uSeD to fiLTer thE contenTS fRom texTFILE'
expected_output = 'contain filters used to filter the contents from textfile'
output = filterobj.make_lowercase(line)
assert output == expected_output
def test_split(self,filterobj):
line = 'Contain filters used to filter the contents'
expected_output = ['Contain','filters','used','to','filter','the','contents']
output = filterobj.split(line)
assert output == expected_output
def test_filter(self,filterobj):
line = 'Cont1*7ain f@#9ILTErs us9]ed TO fIltER the+ ContenTS'
expected_output = ['contain','filters','used','to','filter','the','contents']
output = filterobj.filter(line)
assert output == expected_output
@pytest.fixture()
def database():
yield Database(db_path=DB_PATH)
class Test_Database:
def test_init(self,database):
expected_output = ['Warriner-English','labMTwords-English']
output = [table for table in database.metadata.tables]
assert set(expected_output).issubset(output)
def test_query(self,database):
table = 'Warriner-English'
assert database.query(table=table, word='love') == (table, 'love', 8)
assert database.query(table=table, word='rest') == (table, 'rest', 7.86)
assert database.query(table=table, word='grave') == (table, 'grave', 2.4)
assert database.query(table=table, word='132s') == (table, '132s', 0)
assert database.query(table=table, word='x2') == (table, 'x2', 0)
assert database.query(table=table, word='yelack') == (table, 'yelack', 0)
table = 'labMTwords-English'
assert database.query(table=table, word='love') == (table, 'love', 8.42)
assert database.query(table=table, word='rest') == (table, 'rest', 7.18)
assert database.query(table=table, word='grave') == (table, 'grave', 2.56)
assert database.query(table=table, word='132s') == (table, '132s', 0)
assert database.query(table=table, word='x2') == (table, 'x2', 0)
assert database.query(table=table, word='yelack') == (table, 'yelack', 0)
@pytest.fixture()
def ts():
yield TextSentiment(file_path=TEST_DOC,db_path=DB_PATH)
class Test_TextSentiment:
def test_openfile(self,ts):
file_path = TEST_DOC
output_1_correct = 'My second line.'
output_2_correct = 'Third line is the charm.'
output_3_correct = 'Fourth one fits the bill.'
output = ts._openfile(file_path)
assert next(output) == output_1_correct
assert next(output) == output_2_correct
assert next(output) == output_3_correct
def test_filter(self,ts):
content = ['My Second Line']
expected_output = ['my','second','line']
output = ts._filter(content)
assert next(output) == expected_output
def test_query(self,ts):
tables = ['Warriner-English','labMTwords-English']
word = 'love'
expected_output = (tables[0], 'love', 8)
output = ts._query(table=tables[0], word=word)
assert output == expected_output
expected_output = (tables[1], 'love', 8.42)
output = ts._query(table=tables[1], word=word)
assert output == expected_output
def populate_datalabels(self,ts):
ts.populate_datalabels
def test_wordcount(self,ts):
word = 'is'
value = 1
content = [['my','second','line',
'third','line','is','the','charm',
'fourth','one','fits','the','bill',]]
expected_output = value
output = ts._wordcount(content)[word]
assert output == expected_output
content = [['one','two','two','three','three','three']]
expected_output = Counter({'one':1,'two':2,'three':3})
output = ts._wordcount(content)
assert output == expected_output
def test_process(self,ts):
tables = ts.tables
wordset = Counter(['one','two','two','three','three','three'])
output = ts.process(wordset=wordset, tables=tables)
assert output['table_metrics']['Warriner-English']['sentimentvalue'] == 5.83
assert round(output['table_metrics']['Warriner-English']['total_db_value'],2) == 34.98
assert output['table_metrics']['Warriner-English']['total_frequency'] == 6
assert output['table_metrics']['labMTwords-English']['sentimentvalue'] == 5.56
assert output['table_metrics']['labMTwords-English']['total_db_value'] == 33.36
assert output['table_metrics']['labMTwords-English']['total_frequency'] == 6
assert output['words']['one']['frequency'] == 1
assert output['words']['one']['table_value']['Warriner-English'] == 6.09
assert output['words']['one']['table_value']['labMTwords-English'] == 5.4
assert output['words']['two']['frequency'] == 2
assert output['words']['two']['table_value']['Warriner-English'] == 6.3
assert output['words']['two']['table_value']['labMTwords-English'] == 5.4
assert output['words']['three']['frequency'] == 3
assert output['words']['three']['table_value']['Warriner-English'] == 5.43
assert output['words']['three']['table_value']['labMTwords-English'] == 5.72
def test_updatetotalvalues(self,ts):
table = 'Warriner-English'
frequency,table_value = 1, 6.04
output = ts._updatetotalvalues(frequency,table,table_value)
expected_output = 6.04
assert output == expected_output
frequency,table_value = 2, 3
output = ts._updatetotalvalues(frequency,table,table_value)
expected_output = 4.01
assert output == expected_output
def test_main(self,ts):
string = "What can you tell me about the brother that went missing last night? I always wondered what hapens to folks that can't swim underwater! #Bananaface"
ts = TextSentiment(file_path=TEST_DOC)
data = ts.process(formatter='json')
print(data)
| true | true |
f7fa9ef0a1bbff2b2dd8065c638bfe3a89080438 | 79,555 | py | Python | sympy/solvers/tests/test_solvers.py | nikkigabbard/sympy | 1819379bdcca733eabc635ca4b3c9ae3deff1205 | [
"BSD-3-Clause"
] | 2 | 2019-10-18T12:45:34.000Z | 2020-08-10T08:27:59.000Z | sympy/solvers/tests/test_solvers.py | nikkigabbard/sympy | 1819379bdcca733eabc635ca4b3c9ae3deff1205 | [
"BSD-3-Clause"
] | null | null | null | sympy/solvers/tests/test_solvers.py | nikkigabbard/sympy | 1819379bdcca733eabc635ca4b3c9ae3deff1205 | [
"BSD-3-Clause"
] | 1 | 2019-10-18T12:39:41.000Z | 2019-10-18T12:39:41.000Z | from sympy import (
Abs, And, Derivative, Dummy, Eq, Float, Function, Gt, I, Integral,
LambertW, Lt, Matrix, Or, Poly, Q, Rational, S, Symbol, Ne,
Wild, acos, asin, atan, atanh, cos, cosh, diff, erf, erfinv, erfc,
erfcinv, exp, im, log, pi, re, sec, sin,
sinh, solve, solve_linear, sqrt, sstr, symbols, sympify, tan, tanh,
root, simplify, atan2, arg, Mul, SparseMatrix, ask, Tuple, nsolve, oo,
E, cbrt, denom, Add)
from sympy.core.compatibility import range
from sympy.core.function import nfloat
from sympy.solvers import solve_linear_system, solve_linear_system_LU, \
solve_undetermined_coeffs
from sympy.solvers.solvers import _invert, unrad, checksol, posify, _ispow, \
det_quick, det_perm, det_minor, _simple_dens, check_assumptions, denoms, \
failing_assumptions
from sympy.physics.units import cm
from sympy.polys.rootoftools import CRootOf
from sympy.utilities.pytest import slow, XFAIL, SKIP, raises, skip, ON_TRAVIS
from sympy.utilities.randtest import verify_numerically as tn
from sympy.abc import a, b, c, d, k, h, p, x, y, z, t, q, m
def NS(e, n=15, **options):
    """Numerically evaluate *e* to *n* digits and return its full-precision string."""
    evaluated = sympify(e).evalf(n, **options)
    return sstr(evaluated, full_prec=True)
def test_swap_back():
    """solve() with applied (undefined) functions as the solve targets."""
    f, g = map(Function, 'fg')
    fx, gx = f(x), g(x)
    assert solve([fx + y - 2, fx - gx - 5], fx, y, gx) == \
        {fx: gx + 5, y: -gx - 3}
    assert solve(fx + gx*x - 2, [fx, gx], dict=True)[0] == {fx: 2, gx: 0}
    assert solve(fx + gx**2*x - y, [fx, gx], dict=True) == [{fx: y - gx**2*x}]
    assert solve([f(1) - 2, x + 2], dict=True) == [{x: -2, f(1): 2}]
def guess_solve_strategy(eq, symbol):
    """Return True when solve() accepts (eq, symbol) without raising."""
    try:
        solve(eq, symbol)
    except (TypeError, NotImplementedError):
        return False
    else:
        return True
def test_guess_poly():
    """Equations solvable as plain polynomials."""
    # polynomial equations
    assert guess_solve_strategy( S(4), x ) # == GS_POLY
    assert guess_solve_strategy( x, x ) # == GS_POLY
    assert guess_solve_strategy( x + a, x ) # == GS_POLY
    assert guess_solve_strategy( 2*x, x ) # == GS_POLY
    assert guess_solve_strategy( x + sqrt(2), x) # == GS_POLY
    assert guess_solve_strategy( x + 2**Rational(1, 4), x) # == GS_POLY
    assert guess_solve_strategy( x**2 + 1, x ) # == GS_POLY
    assert guess_solve_strategy( x**2 - 1, x ) # == GS_POLY
    assert guess_solve_strategy( x*y + y, x ) # == GS_POLY
    assert guess_solve_strategy( x*exp(y) + y, x) # == GS_POLY
    assert guess_solve_strategy(
        (x - y**3)/(y**2*sqrt(1 - y**2)), x) # == GS_POLY
def test_guess_poly_cv():
    """Equations polynomial after a change of variable."""
    # polynomial equations via a change of variable
    assert guess_solve_strategy( sqrt(x) + 1, x ) # == GS_POLY_CV_1
    assert guess_solve_strategy(
        x**Rational(1, 3) + sqrt(x) + 1, x ) # == GS_POLY_CV_1
    assert guess_solve_strategy( 4*x*(1 - sqrt(x)), x ) # == GS_POLY_CV_1

    # polynomial equation multiplying both sides by x**n
    assert guess_solve_strategy( x + 1/x + y, x ) # == GS_POLY_CV_2
def test_guess_rational_cv():
    """Rational equations, directly and via the change of variable y -> x**n.

    Fix: the final assert ended in a backslash continuation whose next line
    was comment-only, fusing the assert with the comment; the statement now
    stands alone with the strategy marker inline.
    """
    # rational functions
    assert guess_solve_strategy( (x + 1)/(x**2 + 2), x)  # == GS_RATIONAL
    assert guess_solve_strategy(
        (x - y**3)/(y**2*sqrt(1 - y**2)), y)  # == GS_RATIONAL_CV_1

    # rational functions via the change of variable y -> x**n
    assert guess_solve_strategy(
        (sqrt(x) + 1)/(x**Rational(1, 3) + sqrt(x) + 1), x)  # == GS_RATIONAL_CV_1
def test_guess_transcendental():
    """Transcendental equations (exp, cos, general powers)."""
    #transcendental functions
    assert guess_solve_strategy( exp(x) + 1, x ) # == GS_TRANSCENDENTAL
    assert guess_solve_strategy( 2*cos(x) - y, x ) # == GS_TRANSCENDENTAL
    assert guess_solve_strategy(
        exp(x) + exp(-x) - y, x ) # == GS_TRANSCENDENTAL
    assert guess_solve_strategy(3**x - 10, x) # == GS_TRANSCENDENTAL
    assert guess_solve_strategy(-3**x + 10, x) # == GS_TRANSCENDENTAL
    assert guess_solve_strategy(a*x**b - y, x) # == GS_TRANSCENDENTAL
def test_solve_args():
    """Exercise solve()'s argument handling: containers, implicit symbols,
    duplicate/unordered symbols, undetermined-coefficient systems, failing
    systems, and Boolean arguments."""
    # equation container, issue 5113
    ans = {x: -3, y: 1}
    eqs = (x + 5*y - 2, -3*x + 6*y - 15)
    assert all(solve(container(eqs), x, y) == ans for container in
        (tuple, list, set, frozenset))
    assert solve(Tuple(*eqs), x, y) == ans
    # implicit symbol to solve for
    assert set(solve(x**2 - 4)) == set([S(2), -S(2)])
    assert solve([x + y - 3, x - y - 5]) == {x: 4, y: -1}
    assert solve(x - exp(x), x, implicit=True) == [exp(x)]
    # no symbol to solve for
    assert solve(42) == solve(42, x) == []
    assert solve([1, 2]) == []
    # duplicate symbols removed
    assert solve((x - 3, y + 2), x, y, x) == {x: 3, y: -2}
    # unordered symbols
    # only 1
    assert solve(y - 3, set([y])) == [3]
    # more than 1
    assert solve(y - 3, set([x, y])) == [{y: 3}]
    # multiple symbols: take the first linear solution+
    # - return as tuple with values for all requested symbols
    assert solve(x + y - 3, [x, y]) == [(3 - y, y)]
    # - unless dict is True
    assert solve(x + y - 3, [x, y], dict=True) == [{x: 3 - y}]
    # - or no symbols are given
    assert solve(x + y - 3) == [{x: 3 - y}]
    # multiple symbols might represent an undetermined coefficients system
    assert solve(a + b*x - 2, [a, b]) == {a: 2, b: 0}
    args = (a + b)*x - b**2 + 2, a, b
    assert solve(*args) == \
        [(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))]
    assert solve(*args, set=True) == \
        ([a, b], set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))]))
    assert solve(*args, dict=True) == \
        [{b: sqrt(2), a: -sqrt(2)}, {b: -sqrt(2), a: sqrt(2)}]
    eq = a*x**2 + b*x + c - ((x - h)**2 + 4*p*k)/4/p
    flags = dict(dict=True)
    assert solve(eq, [h, p, k], exclude=[a, b, c], **flags) == \
        [{k: c - b**2/(4*a), h: -b/(2*a), p: 1/(4*a)}]
    flags.update(dict(simplify=False))
    assert solve(eq, [h, p, k], exclude=[a, b, c], **flags) == \
        [{k: (4*a*c - b**2)/(4*a), h: -b/(2*a), p: 1/(4*a)}]
    # failing undetermined system
    assert solve(a*x + b**2/(x + 4) - 3*x - 4/x, a, b, dict=True) == \
        [{a: (-b**2*x + 3*x**3 + 12*x**2 + 4*x + 16)/(x**2*(x + 4))}]
    # failed single equation
    assert solve(1/(1/x - y + exp(y))) == []
    raises(
        NotImplementedError, lambda: solve(exp(x) + sin(x) + exp(y) + sin(y)))
    # failed system
    # -- when no symbols given, 1 fails
    assert solve([y, exp(x) + x]) == [{x: -LambertW(1), y: 0}]
    # both fail
    assert solve(
        (exp(x) - x, exp(y) - y)) == [{x: -LambertW(-1), y: -LambertW(-1)}]
    # -- when symbols given
    # NOTE(review): this comparison's result is discarded -- it looks like a
    # missing `assert`; confirm the equality holds before adding it.
    solve([y, exp(x) + x], x, y) == [(-LambertW(1), 0)]
    # symbol is a number
    assert solve(x**2 - pi, pi) == [x**2]
    # no equations
    assert solve([], [x]) == []
    # overdetermined system
    # - nonlinear
    assert solve([(x + y)**2 - 4, x + y - 2]) == [{x: -y + 2}]
    # - linear
    assert solve((x + y - 2, 2*x + 2*y - 4)) == {x: -y + 2}
    # When one or more args are Boolean
    assert solve([True, Eq(x, 0)], [x], dict=True) == [{x: 0}]
    assert solve([Eq(x, x), Eq(x, 0), Eq(x, x+1)], [x], dict=True) == []
    assert not solve([Eq(x, x+1), x < 2], x)
    assert solve([Eq(x, 0), x+1<2]) == Eq(x, 0)
    assert solve([Eq(x, x), Eq(x, x+1)], x) == []
    assert solve(True, x) == []
    assert solve([x-1, False], [x], set=True) == ([], set())
def test_solve_polynomial1():
    """Basic linear/polynomial solving: single equations, Eq() form,
    symbolic coefficients, and small linear systems."""
    # linear, both expression and Eq() forms
    assert solve(3*x - 2, x) == [Rational(2, 3)]
    assert solve(Eq(3*x, 2), x) == [Rational(2, 3)]
    # quadratics; sets used because root order is not guaranteed
    assert set(solve(x**2 - 1, x)) == set([-S(1), S(1)])
    assert set(solve(Eq(x**2, 1), x)) == set([-S(1), S(1)])
    assert solve(x - y**3, x) == [y**3]
    rx = root(x, 3)
    # solving for y gives the three cube roots of x
    assert solve(x - y**3, y) == [
        rx, -rx/2 - sqrt(3)*I*rx/2, -rx/2 + sqrt(3)*I*rx/2]
    a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
    # 2x2 linear system with symbolic coefficients -> Cramer's-rule form
    assert solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y) == \
        {
            x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
            y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
        }
    solution = {y: S.Zero, x: S.Zero}
    # symbols may be given positionally, as a tuple, or as a list
    assert solve((x - y, x + y), x, y ) == solution
    assert solve((x - y, x + y), (x, y)) == solution
    assert solve((x - y, x + y), [x, y]) == solution
    assert set(solve(x**3 - 15*x - 4, x)) == set([
        -2 + 3**Rational(1, 2),
        S(4),
        -2 - 3**Rational(1, 2)
    ])
    assert set(solve((x**2 - 1)**2 - a, x)) == \
        set([sqrt(1 + sqrt(a)), -sqrt(1 + sqrt(a)),
             sqrt(1 - sqrt(a)), -sqrt(1 - sqrt(a))])
def test_solve_polynomial2():
    """A nonzero constant equated to zero has no solutions in x."""
    assert solve(4, x) == []
def test_solve_polynomial_cv_1a():
    """
    Test for solving on equations that can be converted to a polynomial
    equation using the change of variable y -> x**Rational(p, q).
    """
    assert solve( sqrt(x) - 1, x) == [1]
    assert solve( sqrt(x) - 2, x) == [4]
    assert solve( x**Rational(1, 4) - 2, x) == [16]
    assert solve( x**Rational(1, 3) - 3, x) == [27]
    # mixed fractional powers still reduce to a polynomial in x**(1/12)
    assert solve(sqrt(x) + x**Rational(1, 3) + x**Rational(1, 4), x) == [0]
def test_solve_polynomial_cv_1b():
    """Radical equations with symbolic coefficients reduce to polynomials."""
    assert set(solve(4*x*(1 - a*sqrt(x)), x)) == set([S(0), 1/a**2])
    assert set(solve(x*(root(x, 3) - 3), x)) == set([S(0), S(27)])
def test_solve_polynomial_cv_2():
    """
    Test for solving on equations that can be converted to a polynomial
    equation by multiplying both sides of the equation by x**m.
    """
    # either ordering of the complex-conjugate roots is acceptable
    assert solve(x + 1/x - 1, x) in \
        [[ Rational(1, 2) + I*sqrt(3)/2, Rational(1, 2) - I*sqrt(3)/2],
         [ Rational(1, 2) - I*sqrt(3)/2, Rational(1, 2) + I*sqrt(3)/2]]
def test_quintics_1():
f = x**5 - 110*x**3 - 55*x**2 + 2310*x + 979
s = solve(f, check=False)
for root in s:
res = f.subs(x, root.n()).n()
assert tn(res, 0)
f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
s = solve(f)
for root in s:
assert root.func == CRootOf
# if one uses solve to get the roots of a polynomial that has a CRootOf
# solution, make sure that the use of nfloat during the solve process
# doesn't fail. Note: if you want numerical solutions to a polynomial
# it is *much* faster to use nroots to get them than to solve the
# equation only to get RootOf solutions which are then numerically
# evaluated. So for eq = x**5 + 3*x + 7 do Poly(eq).nroots() rather
# than [i.n() for i in solve(eq)] to get the numerical roots of eq.
assert nfloat(solve(x**5 + 3*x**3 + 7)[0], exponent=False) == \
CRootOf(x**5 + 3*x**3 + 7, 0).n()
def test_highorder_poly():
    """Degree-6 polynomial: all six roots are returned as CRootOf objects."""
    # just testing that the uniq generator is unpacked
    sol = solve(x**6 - 2*x + 2)
    assert all(isinstance(i, CRootOf) for i in sol) and len(sol) == 6
def test_quintics_2():
    """More quintics: numeric verification of radical roots; an
    unsolvable-by-radicals quintic must return CRootOf solutions."""
    f = x**5 + 15*x + 12
    s = solve(f, check=False)
    # loop variable renamed from `root` so the SymPy `root` function
    # imported at module level is not shadowed
    for rt in s:
        res = f.subs(x, rt.n()).n()
        assert tn(res, 0)

    f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
    s = solve(f)
    for rt in s:
        assert rt.func == CRootOf
def test_solve_rational():
    """Test solve for rational functions."""
    # the y-dependent denominator must not affect the solution in x
    assert solve( ( x - y**3 )/( (y**2)*sqrt(1 - y**2) ), x) == [y**3]
def test_solve_nonlinear():
    """Nonlinear equations in two symbols; the symbol ordering determines
    which variable is solved for first."""
    assert solve(x**2 - y**2, x, y, dict=True) == [{x: -y}, {x: y}]
    assert solve(x**2 - y**2/exp(x), x, y, dict=True) == [{x: 2*LambertW(y/2)}]
    assert solve(x**2 - y**2/exp(x), y, x, dict=True) == [{y: -x*sqrt(exp(x))},
                                                          {y: x*sqrt(exp(x))}]
def test_issue_8666():
    """Equations whose candidate roots all invalidate a denominator
    must return no solutions (issue 8666)."""
    x = symbols('x')
    assert solve(Eq(x**2 - 1/(x**2 - 4), 4 - 1/(x**2 - 4)), x) == []
    assert solve(Eq(x + 1/x, 1/x), x) == []
def test_issue_7228():
    """Exponential with quadratic exponent reduces to a quadratic (issue 7228)."""
    assert solve(4**(2*(x**2) + 2*x) - 8, x) == [-Rational(3, 2), S.Half]
def test_issue_7190():
    """Sum of logs: only the root satisfying both log domains is kept (issue 7190)."""
    assert solve(log(x-3) + log(x+3), x) == [sqrt(10)]
def test_linear_system():
    """Linear systems: over-determined inconsistent cases, a determined
    2x2 system, and parametric solutions from an augmented matrix."""
    x, y, z, t, n = symbols('x, y, z, t, n')
    # inconsistent over-determined systems -> no solution
    assert solve([x - 1, x - y, x - 2*y, y - 1], [x, y]) == []
    assert solve([x - 1, x - y, x - 2*y, x - 1], [x, y]) == []
    assert solve([x - 1, x - 1, x - y, x - 2*y], [x, y]) == []
    assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == {x: -3, y: 1}
    # augmented matrix form with a symbolic parameter n
    M = Matrix([[0, 0, n*(n + 1), (n + 1)**2, 0],
                [n + 1, n + 1, -2*n - 1, -(n + 1), 0],
                [-1, 0, 1, 0, 0]])
    assert solve_linear_system(M, x, y, z, t) == \
        {x: -t - t/n, z: -t - t/n, y: 0}
    # under-determined system: free variables remain in the solution
    assert solve([x + y + z + t, -z - t], x, y, z, t) == {x: -y, z: -t}
def test_linear_system_function():
    """Linear solving where the unknowns are applied functions, not Symbols."""
    a = Function('a')
    assert solve([a(0, 0) + a(0, 1) + a(1, 0) + a(1, 1), -a(1, 0) - a(1, 1)],
                 a(0, 0), a(0, 1), a(1, 0), a(1, 1)) == {a(1, 0): -a(1, 1), a(0, 0): -a(0, 1)}
def test_linear_systemLU():
    """LU-decomposition solver for a linear system with a symbolic parameter."""
    n = Symbol('n')
    M = Matrix([[1, 2, 0, 1], [1, 3, 2*n, 1], [4, -1, n**2, 1]])
    assert solve_linear_system_LU(M, [x, y, z]) == {z: -3/(n**2 + 18*n),
                                                    x: 1 - 12*n/(n**2 + 18*n),
                                                    y: 6*n/(n**2 + 18*n)}
# Note: multiple solutions exist for some of these equations, so the tests
# should be expected to break if the implementation of the solver changes
# in such a way that a different branch is chosen
@slow
def test_solve_transcendental():
    """Transcendental equations: exp/log/trig inversions, LambertW
    solutions, and various regression issues."""
    from sympy.abc import a, b
    assert solve(exp(x) - 3, x) == [log(3)]
    assert set(solve((a*x + b)*(exp(x) - 3), x)) == set([-b/a, log(3)])
    assert solve(cos(x) - y, x) == [-acos(y) + 2*pi, acos(y)]
    assert solve(2*cos(x) - y, x) == [-acos(y/2) + 2*pi, acos(y/2)]
    assert solve(Eq(cos(x), sin(x)), x) == [-3*pi/4, pi/4]
    # several equivalent forms of the answer are acceptable
    assert set(solve(exp(x) + exp(-x) - y, x)) in [set([
        log(y/2 - sqrt(y**2 - 4)/2),
        log(y/2 + sqrt(y**2 - 4)/2),
    ]), set([
        log(y - sqrt(y**2 - 4)) - log(2),
        log(y + sqrt(y**2 - 4)) - log(2)]),
        set([
        log(y/2 - sqrt((y - 2)*(y + 2))/2),
        log(y/2 + sqrt((y - 2)*(y + 2))/2)])]
    assert solve(exp(x) - 3, x) == [log(3)]
    assert solve(Eq(exp(x), 3), x) == [log(3)]
    assert solve(log(x) - 3, x) == [exp(3)]
    assert solve(sqrt(3*x) - 4, x) == [Rational(16, 3)]
    # a positive exponential is never zero
    assert solve(3**(x + 2), x) == []
    assert solve(3**(2 - x), x) == []
    assert solve(x + 2**x, x) == [-LambertW(log(2))/log(2)]
    # mixed linear/exponential equations -> LambertW solutions
    ans = solve(3*x + 5 + 2**(-5*x + 3), x)
    assert len(ans) == 1 and ans[0].expand() == \
        -Rational(5, 3) + LambertW(-10240*root(2, 3)*log(2)/3)/(5*log(2))
    assert solve(5*x - 1 + 3*exp(2 - 7*x), x) == \
        [Rational(1, 5) + LambertW(-21*exp(Rational(3, 5))/5)/7]
    assert solve(2*x + 5 + log(3*x - 2), x) == \
        [Rational(2, 3) + LambertW(2*exp(-Rational(19, 3))/3)/2]
    assert solve(3*x + log(4*x), x) == [LambertW(Rational(3, 4))/3]
    assert set(solve((2*x + 8)*(8 + exp(x)), x)) == set([S(-4), log(8) + pi*I])
    eq = 2*exp(3*x + 4) - 3
    ans = solve(eq, x)  # this generated a failure in flatten
    assert len(ans) == 3 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
    assert solve(2*log(3*x + 4) - 3, x) == [(exp(Rational(3, 2)) - 4)/3]
    assert solve(exp(x) + 1, x) == [pi*I]
    eq = 2*(3*x + 4)**5 - 6*7**(3*x + 9)
    result = solve(eq, x)
    ans = [(log(2401) + 5*LambertW(-log(7**(7*3**Rational(1, 5)/5))))/(3*log(7))/-1]
    assert result == ans
    # it works if expanded, too
    assert solve(eq.expand(), x) == result
    assert solve(z*cos(x) - y, x) == [-acos(y/z) + 2*pi, acos(y/z)]
    assert solve(z*cos(2*x) - y, x) == [-acos(y/z)/2 + pi, acos(y/z)/2]
    assert solve(z*cos(sin(x)) - y, x) == [
        pi - asin(acos(y/z)), asin(acos(y/z) - 2*pi) + pi,
        -asin(acos(y/z) - 2*pi), asin(acos(y/z))]
    assert solve(z*cos(x), x) == [pi/2, 3*pi/2]
    # issue 4508
    assert solve(y - b*x/(a + x), x) in [[-a*y/(y - b)], [a*y/(b - y)]]
    assert solve(y - b*exp(a/x), x) == [a/log(y/b)]
    # issue 4507
    assert solve(y - b/(1 + a*x), x) in [[(b - y)/(a*y)], [-((y - b)/(a*y))]]
    # issue 4506
    assert solve(y - a*x**b, x) == [(y/a)**(1/b)]
    # issue 4505
    assert solve(z**x - y, x) == [log(y)/log(z)]
    # issue 4504
    assert solve(2**x - 10, x) == [log(10)/log(2)]
    # issue 6744
    assert solve(x*y) == [{x: 0}, {y: 0}]
    assert solve([x*y]) == [{x: 0}, {y: 0}]
    assert solve(x**y - 1) == [{x: 1}, {y: 0}]
    assert solve([x**y - 1]) == [{x: 1}, {y: 0}]
    assert solve(x*y*(x**2 - y**2)) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
    assert solve([x*y*(x**2 - y**2)]) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
    # issue 4739
    assert solve(exp(log(5)*x) - 2**x, x) == [0]
    # issue 14791
    assert solve(exp(log(5)*x) - exp(log(2)*x), x) == [0]
    f = Function('f')
    assert solve(y*f(log(5)*x) - y*f(log(2)*x), x) == [0]
    assert solve(f(x) - f(0), x) == [0]
    assert solve(f(x) - f(2 - x), x) == [1]
    raises(NotImplementedError, lambda: solve(f(x, y) - f(1, 2), x))
    raises(NotImplementedError, lambda: solve(f(x, y) - f(2 - x, 2), x))
    raises(ValueError, lambda: solve(f(x, y) - f(1 - x), x))
    raises(ValueError, lambda: solve(f(x, y) - f(1), x))
    # misc
    # make sure that the right variables is picked up in tsolve
    # shouldn't generate a GeneratorsNeeded error in _tsolve when the NaN is generated
    # for eq_down. Actual answers, as determined numerically are approx. +/- 0.83
    raises(NotImplementedError, lambda:
           solve(sinh(x)*sinh(sinh(x)) + cosh(x)*cosh(sinh(x)) - 3))
    # watch out for recursive loop in tsolve
    raises(NotImplementedError, lambda: solve((x + 2)**y*x - 3, x))
    # issue 7245
    assert solve(sin(sqrt(x))) == [0, pi**2]
    # issue 7602
    a, b = symbols('a, b', real=True, negative=False)
    assert str(solve(Eq(a, 0.5 - cos(pi*b)/2), b)) == \
        '[2.0 - 0.318309886183791*acos(1.0 - 2.0*a), 0.318309886183791*acos(1.0 - 2.0*a)]'
    # issue 15325
    assert solve(y**(1/x) - z, x) == [log(y)/log(z)]
def test_solve_for_functions_derivatives():
    """Solve with applied functions and their derivatives as the unknowns."""
    t = Symbol('t')
    x = Function('x')(t)
    y = Function('y')(t)
    a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
    soln = solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y)
    assert soln == {
        x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
        y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
    }
    assert solve(x - 1, x) == [1]
    assert solve(3*x - 2, x) == [Rational(2, 3)]
    # same system but with the derivatives as the unknowns
    soln = solve([a11*x.diff(t) + a12*y.diff(t) - b1, a21*x.diff(t) +
                  a22*y.diff(t) - b2], x.diff(t), y.diff(t))
    assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
                     x.diff(t): (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
    assert solve(x.diff(t) - 1, x.diff(t)) == [1]
    assert solve(3*x.diff(t) - 2, x.diff(t)) == [Rational(2, 3)]
    # equations and symbols may be given as (unordered) sets
    eqns = set((3*x - 1, 2*y - 4))
    assert solve(eqns, set((x, y))) == { x: Rational(1, 3), y: 2 }
    x = Symbol('x')
    f = Function('f')
    F = x**2 + f(x)**2 - 4*x - 1
    assert solve(F.diff(x), diff(f(x), x)) == [(-x + 2)/f(x)]
    # Mixed cased with a Symbol and a Function
    x = Symbol('x')
    y = Function('y')(t)
    soln = solve([a11*x + a12*y.diff(t) - b1, a21*x +
                  a22*y.diff(t) - b2], x, y.diff(t))
    assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
                     x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
def test_issue_3725():
    """Implicit differentiation: solve for f'(x) from dF/dx (issue 3725)."""
    f = Function('f')
    F = x**2 + f(x)**2 - 4*x - 1
    e = F.diff(x)
    # either sign arrangement of the result is acceptable
    assert solve(e, f(x).diff(x)) in [[(2 - x)/f(x)], [-((x - 2)/f(x))]]
def test_issue_3870():
    """Matrix equations: solve entrywise for the unknown entries of A
    from products/commutators with fixed matrices (issue 3870)."""
    a, b, c, d = symbols('a b c d')
    A = Matrix(2, 2, [a, b, c, d])
    B = Matrix(2, 2, [0, 2, -3, 0])
    C = Matrix(2, 2, [1, 2, 3, 4])
    # A*B == C: matrix, list-of-matrix and Eq forms all work
    assert solve(A*B - C, [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
    assert solve([A*B - C], [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
    assert solve(Eq(A*B, C), [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
    # commutator conditions give parametric families
    assert solve([A*B - B*A], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
    assert solve([A*C - C*A], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
    assert solve([A*B - B*A, A*C - C*A], [a, b, c, d]) == {a: d, b: 0, c: 0}
    assert solve([Eq(A*B, B*A)], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
    assert solve([Eq(A*C, C*A)], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
    assert solve([Eq(A*B, B*A), Eq(A*C, C*A)], [a, b, c, d]) == {a: d, b: 0, c: 0}
def test_solve_linear():
    """solve_linear(): returns (symbol, solution), (0, 1) if no linear
    solution exists, or (0, 0) when the expression is identically zero
    after simplification."""
    w = Wild('w')
    assert solve_linear(x, x) == (0, 1)
    assert solve_linear(x, exclude=[x]) == (0, 1)
    assert solve_linear(x, symbols=[w]) == (0, 1)
    assert solve_linear(x, y - 2*x) in [(x, y/3), (y, 3*x)]
    assert solve_linear(x, y - 2*x, exclude=[x]) == (y, 3*x)
    assert solve_linear(3*x - y, 0) in [(x, y/3), (y, 3*x)]
    assert solve_linear(3*x - y, 0, [x]) == (x, y/3)
    assert solve_linear(3*x - y, 0, [y]) == (y, 3*x)
    # a symbol appearing nonlinearly can still be used to isolate another
    assert solve_linear(x**2/y, 1) == (y, x**2)
    assert solve_linear(w, x) in [(w, x), (x, w)]
    assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y) == \
        (y, -2 - cos(x)**2 - sin(x)**2)
    assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y, symbols=[x]) == (0, 1)
    assert solve_linear(Eq(x, 3)) == (x, 3)
    assert solve_linear(1/(1/x - 2)) == (0, 0)
    assert solve_linear((x + 1)*exp(-x), symbols=[x]) == (x, -1)
    assert solve_linear((x + 1)*exp(x), symbols=[x]) == ((x + 1)*exp(x), 1)
    assert solve_linear(x*exp(-x**2), symbols=[x]) == (x, 0)
    assert solve_linear(0**x - 1) == (0**x - 1, 1)
    assert solve_linear(1 + 1/(x - 1)) == (x, 0)
    eq = y*cos(x)**2 + y*sin(x)**2 - y  # = y*(1 - 1) = 0
    assert solve_linear(eq) == (0, 1)
    eq = cos(x)**2 + sin(x)**2  # = 1
    assert solve_linear(eq) == (0, 1)
    raises(ValueError, lambda: solve_linear(Eq(x, 3), 3))
def test_solve_undetermined_coeffs():
    """Undetermined coefficients: match coefficients of powers of x,
    including rational functions and common-factor cancellation."""
    assert solve_undetermined_coeffs(a*x**2 + b*x**2 + b*x + 2*c*x + c + 1, [a, b, c], x) == \
        {a: -2, b: 2, c: -1}
    # Test that rational functions work
    assert solve_undetermined_coeffs(a/x + b/(x + 1) - (2*x + 1)/(x**2 + x), [a, b], x) == \
        {a: 1, b: 1}
    # Test cancellation in rational functions
    assert solve_undetermined_coeffs(((c + 1)*a*x**2 + (c + 1)*b*x**2 +
                                      (c + 1)*b*x + (c + 1)*2*c*x + (c + 1)**2)/(c + 1), [a, b, c], x) == \
        {a: -2, b: 2, c: -1}
def test_solve_inequalities():
    """Inequalities and Boolean relational equations: results depend on
    whether the symbol is declared real."""
    x = Symbol('x')
    sol = And(S(0) < x, x < oo)
    assert solve(x + 1 > 1) == sol
    assert solve([x + 1 > 1]) == sol
    assert solve([x + 1 > 1], x) == sol
    assert solve([x + 1 > 1], [x]) == sol
    # generic symbol: an Eq(0, 0) (im(x) == 0) condition is attached
    system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
    assert solve(system) == \
        And(Or(And(Lt(-sqrt(2), x), Lt(x, -1)),
               And(Lt(1, x), Lt(x, sqrt(2)))), Eq(0, 0))
    # real symbol: clean interval answer
    x = Symbol('x', real=True)
    system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
    assert solve(system) == \
        Or(And(Lt(-sqrt(2), x), Lt(x, -1)), And(Lt(1, x), Lt(x, sqrt(2))))
    # issues 6627, 3448
    assert solve((x - 3)/(x - 2) < 0, x) == And(Lt(2, x), Lt(x, 3))
    assert solve(x/(x + 1) > 1, x) == And(Lt(-oo, x), Lt(x, -1))
    assert solve(sin(x) > S.Half) == And(pi/6 < x, x < 5*pi/6)
    # Eq with a Boolean side: solve the relational, possibly negated
    assert solve(Eq(False, x < 1)) == (S(1) <= x) & (x < oo)
    assert solve(Eq(True, x < 1)) == (-oo < x) & (x < 1)
    assert solve(Eq(x < 1, False)) == (S(1) <= x) & (x < oo)
    assert solve(Eq(x < 1, True)) == (-oo < x) & (x < 1)
    assert solve(Eq(False, x)) == False
    assert solve(Eq(True, x)) == True
    assert solve(Eq(False, ~x)) == True
    assert solve(Eq(True, ~x)) == False
    assert solve(Ne(True, x)) == False
def test_issue_4793():
    """Expressions that vanish nowhere (or only at excluded points),
    plus related radical/exponential regressions (issue 4793)."""
    assert solve(1/x) == []
    assert solve(x*(1 - 5/x)) == [5]
    assert solve(x + sqrt(x) - 2) == [1]
    assert solve(-(1 + x)/(2 + x)**2 + 1/(2 + x)) == []
    assert solve(-x**2 - 2*x + (x + 1)**2 - 1) == []
    assert solve((x/(x + 1) + 3)**(-2)) == []
    assert solve(x/sqrt(x**2 + 1), x) == [0]
    assert solve(exp(x) - y, x) == [log(y)]
    assert solve(exp(x)) == []
    assert solve(x**2 + x + sin(y)**2 + cos(y)**2 - 1, x) in [[0, -1], [-1, 0]]
    eq = 4*3**(5*x + 2) - 7
    ans = solve(eq, x)
    assert len(ans) == 5 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
    assert solve(log(x**2) - y**2/exp(x), x, y, set=True) == (
        [x, y],
        {(x, sqrt(exp(x) * log(x ** 2))), (x, -sqrt(exp(x) * log(x ** 2)))})
    assert solve(x**2*z**2 - z**2*y**2) == [{x: -y}, {x: y}, {z: 0}]
    assert solve((x - 1)/(1 + 1/(x - 1))) == []
    assert solve(x**(y*z) - x, x) == [1]
    raises(NotImplementedError, lambda: solve(log(x) - exp(x), x))
    raises(NotImplementedError, lambda: solve(2**x - exp(x) - 3))
def test_PR1964():
    """Radical and power equations from several historical issues
    (5171, 4462, 4486, 4496, 4695, 4497)."""
    # issue 5171
    assert solve(sqrt(x)) == solve(sqrt(x**3)) == [0]
    assert solve(sqrt(x - 1)) == [1]
    # issue 4462
    a = Symbol('a')
    assert solve(-3*a/sqrt(x), x) == []
    # issue 4486
    assert solve(2*x/(x + 2) - 1, x) == [2]
    # issue 4496
    assert set(solve((x**2/(7 - x)).diff(x))) == set([S(0), S(14)])
    # issue 4695
    f = Function('f')
    assert solve((3 - 5*x/f(x))*f(x), f(x)) == [5*x/3]
    # issue 4497
    assert solve(1/root(5 + x, 5) - 9, x) == [-295244/S(59049)]
    assert solve(sqrt(x) + sqrt(sqrt(x)) - 4) == [(-S.Half + sqrt(17)/2)**4]
    # equivalent forms of the log answers are acceptable
    assert set(solve(Poly(sqrt(exp(x)) + sqrt(exp(-x)) - 4))) in \
        [
            set([log((-sqrt(3) + 2)**2), log((sqrt(3) + 2)**2)]),
            set([2*log(-sqrt(3) + 2), 2*log(sqrt(3) + 2)]),
            set([log(-4*sqrt(3) + 7), log(4*sqrt(3) + 7)]),
        ]
    assert set(solve(Poly(exp(x) + exp(-x) - 4))) == \
        set([log(-sqrt(3) + 2), log(sqrt(3) + 2)])
    assert set(solve(x**y + x**(2*y) - 1, x)) == \
        set([(-S.Half + sqrt(5)/2)**(1/y), (-S.Half - sqrt(5)/2)**(1/y)])
    assert solve(exp(x/y)*exp(-z/y) - 2, y) == [(x - z)/log(2)]
    assert solve(
        x**z*y**z - 2, z) in [[log(2)/(log(x) + log(y))], [log(2)/(log(x*y))]]
    # if you do inversion too soon then multiple roots (as for the following)
    # will be missed, e.g. if exp(3*x) = exp(3) -> 3*x = 3
    E = S.Exp1
    assert solve(exp(3*x) - exp(3), x) in [
        [1, log(E*(-S.Half - sqrt(3)*I/2)), log(E*(-S.Half + sqrt(3)*I/2))],
        [1, log(-E/2 - sqrt(3)*E*I/2), log(-E/2 + sqrt(3)*E*I/2)],
    ]
    # coverage test
    p = Symbol('p', positive=True)
    assert solve((1/p + 1)**(p + 1)) == []
def test_issue_5197():
    """Symbol assumptions (real, positive, integer) filter out
    candidate solutions that violate them (issue 5197)."""
    x = Symbol('x', real=True)
    assert solve(x**2 + 1, x) == []
    n = Symbol('n', integer=True, positive=True)
    assert solve((n - 1)*(n + 2)*(2*n - 1), n) == [1]
    x = Symbol('x', positive=True)
    y = Symbol('y')
    assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == []
    # not {x: -3, y: 1} b/c x is positive
    # The solution following should not contain (-sqrt(2), sqrt(2))
    assert solve((x + y)*n - y**2 + 2, x, y) == [(sqrt(2), -sqrt(2))]
    y = Symbol('y', positive=True)
    # The solution following should not contain {y: -x*exp(x/2)}
    assert solve(x**2 - y**2/exp(x), y, x, dict=True) == [{y: x*exp(x/2)}]
    assert solve(x**2 - y**2/exp(x), x, y, dict=True) == [{x: 2*LambertW(y/2)}]
    x, y, z = symbols('x y z', positive=True)
    assert solve(z**2*x**2 - z**2*y**2/exp(x), y, x, z, dict=True) == [{y: x*exp(x/2)}]
def test_checking():
    """The check flag: candidate roots that zero a denominator are
    discarded when checking is enabled."""
    assert set(
        solve(x*(x - y/x), x, check=False)) == set([sqrt(y), S(0), -sqrt(y)])
    assert set(solve(x*(x - y/x), x, check=True)) == set([sqrt(y), -sqrt(y)])
    # {x: 0, y: 4} sets denominator to 0 in the following so system should return None
    assert solve((1/(1/x + 2), 1/(y - 3) - 1)) == []
    # 0 sets denominator of 1/x to zero so None is returned
    assert solve(1/(1/x + 2)) == []
def test_issue_4671_4463_4467():
    """Mixed radical/log/exp/hyperbolic regressions
    (issues 4671, 4463, 4467)."""
    assert solve((sqrt(x**2 - 1) - 2)) in ([sqrt(5), -sqrt(5)],
                                           [-sqrt(5), sqrt(5)])
    assert solve((2**exp(y**2/x) + 2)/(x**2 + 15), y) == [
        -sqrt(x*log(1 + I*pi/log(2))), sqrt(x*log(1 + I*pi/log(2)))]
    C1, C2 = symbols('C1 C2')
    f = Function('f')
    assert solve(C1 + C2/x**2 - exp(-f(x)), f(x)) == [log(x**2/(C1*x**2 + C2))]
    a = Symbol('a')
    E = S.Exp1
    # either root ordering is acceptable
    assert solve(1 - log(a + 4*x**2), x) in (
        [-sqrt(-a + E)/2, sqrt(-a + E)/2],
        [sqrt(-a + E)/2, -sqrt(-a + E)/2]
    )
    assert solve(log(a**(-3) - x**2)/a, x) in (
        [-sqrt(-1 + a**(-3)), sqrt(-1 + a**(-3))],
        [sqrt(-1 + a**(-3)), -sqrt(-1 + a**(-3))],)
    assert solve(1 - log(a + 4*x**2), x) in (
        [-sqrt(-a + E)/2, sqrt(-a + E)/2],
        [sqrt(-a + E)/2, -sqrt(-a + E)/2],)
    assert set(solve((
        a**2 + 1) * (sin(a*x) + cos(a*x)), x)) == set([-pi/(4*a), 3*pi/(4*a)])
    assert solve(3 - (sinh(a*x) + cosh(a*x)), x) == [log(3)/a]
    assert set(solve(3 - (sinh(a*x) + cosh(a*x)**2), x)) == \
        set([log(-2 + sqrt(5))/a, log(-sqrt(2) + 1)/a,
             log(-sqrt(5) - 2)/a, log(1 + sqrt(2))/a])
    assert solve(atan(x) - 1) == [tan(1)]
def test_issue_5132():
    """Systems mixing exp/sin/sqrt solved for different symbol subsets;
    exercises the set=True output format (issue 5132)."""
    r, t = symbols('r,t')
    assert set(solve([r - x**2 - y**2, tan(t) - y/x], [x, y])) == \
        set([(
            -sqrt(r*cos(t)**2), -1*sqrt(r*cos(t)**2)*tan(t)),
            (sqrt(r*cos(t)**2), sqrt(r*cos(t)**2)*tan(t))])
    assert solve([exp(x) - sin(y), 1/y - 3], [x, y]) == \
        [(log(sin(S(1)/3)), S(1)/3)]
    assert solve([exp(x) - sin(y), 1/exp(y) - 3], [x, y]) == \
        [(log(-sin(log(3))), -log(3))]
    assert set(solve([exp(x) - sin(y), y**2 - 4], [x, y])) == \
        set([(log(-sin(2)), -S(2)), (log(sin(2)), S(2))])
    eqs = [exp(x)**2 - sin(y) + z**2, 1/exp(y) - 3]
    # default symbol selection solves for (x, y)
    assert solve(eqs, set=True) == \
        ([x, y], set([
            (log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
            (log(-z**2 - sin(log(3)))/2, -log(3))]))
    assert solve(eqs, x, z, set=True) == (
        [x, z],
        {(log(-z**2 + sin(y))/2, z), (log(-sqrt(-z**2 + sin(y))), z)})
    assert set(solve(eqs, x, y)) == \
        set([
            (log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
            (log(-z**2 - sin(log(3)))/2, -log(3))])
    assert set(solve(eqs, y, z)) == \
        set([
            (-log(3), -sqrt(-exp(2*x) - sin(log(3)))),
            (-log(3), sqrt(-exp(2*x) - sin(log(3))))])
    # same shape of system but with z linear instead of squared
    eqs = [exp(x)**2 - sin(y) + z, 1/exp(y) - 3]
    assert solve(eqs, set=True) == ([x, y], set(
        [
            (log(-sqrt(-z - sin(log(3)))), -log(3)),
            (log(-z - sin(log(3)))/2, -log(3))]))
    assert solve(eqs, x, z, set=True) == (
        [x, z],
        {(log(-sqrt(-z + sin(y))), z), (log(-z + sin(y))/2, z)})
    assert set(solve(eqs, x, y)) == set(
        [
            (log(-sqrt(-z - sin(log(3)))), -log(3)),
            (log(-z - sin(log(3)))/2, -log(3))])
    assert solve(eqs, z, y) == \
        [(-exp(2*x) - sin(log(3)), -log(3))]
    assert solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), set=True) == (
        [x, y], set([(S(1), S(3)), (S(3), S(1))]))
    assert set(solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), x, y)) == \
        set([(S(1), S(3)), (S(3), S(1))])
def test_issue_5335():
    """Chemical-equilibrium-style system: manual and default solvers
    must both discard the two invalid solutions (issue 5335)."""
    lam, a0, conc = symbols('lam a0 conc')
    a = 0.005
    b = 0.743436700916726
    eqs = [lam + 2*y - a0*(1 - x/2)*x - a*x/2*x,
           a0*(1 - x/2)*x - 1*y - b*y,
           x + y - conc]
    sym = [x, y, a0]
    # there are 4 solutions obtained manually but only two are valid
    assert len(solve(eqs, sym, manual=True, minimal=True)) == 2
    assert len(solve(eqs, sym)) == 2  # cf below with rational=False
@SKIP("Hangs")
def _test_issue_5335_float():
    """Float variant of test_issue_5335 with rational=False; skipped
    because it currently hangs."""
    # gives ZeroDivisionError: polynomial division
    lam, a0, conc = symbols('lam a0 conc')
    a = 0.005
    b = 0.743436700916726
    eqs = [lam + 2*y - a0*(1 - x/2)*x - a*x/2*x,
           a0*(1 - x/2)*x - 1*y - b*y,
           x + y - conc]
    sym = [x, y, a0]
    assert len(solve(eqs, sym, rational=False)) == 2
def test_issue_5767():
    """Single equation in a list, solved for one symbol of two (issue 5767)."""
    assert set(solve([x**2 + y + 4], [x])) == \
        set([(-sqrt(-y - 4),), (sqrt(-y - 4),)])
def test_polysys():
    """Polynomial systems: solutions, inconsistency, and that the output
    tuple ordering follows the requested symbol order."""
    assert set(solve([x**2 + 2/y - 2, x + y - 3], [x, y])) == \
        set([(S(1), S(2)), (1 + sqrt(5), 2 - sqrt(5)),
             (1 - sqrt(5), 2 + sqrt(5))])
    assert solve([x**2 + y - 2, x**2 + y]) == []
    # the ordering should be whatever the user requested
    assert solve([x**2 + y - 3, x - y - 4], (x, y)) != solve([x**2 +
                                                             y - 3, x - y - 4], (y, x))
@slow
def test_unrad1():
    """unrad(): turn radical equations into (polynomial, cov) pairs.

    ``unrad`` returns ``(poly, [])`` when no change of variable was
    needed, or ``(poly_in_s, [s, s**n - expr])`` when a Dummy ``s`` was
    introduced for a radical.  The local ``check``/``s_check`` helpers
    compare results up to an overall sign (cf issue 5203).
    """
    raises(NotImplementedError, lambda:
           unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x)) + 3))
    raises(NotImplementedError, lambda:
           unrad(sqrt(x) + (x + 1)**Rational(1, 3) + 2*sqrt(y)))
    s = symbols('s', cls=Dummy)
    # checkers to deal with possibility of answer coming
    # back with a sign change (cf issue 5203)
    def check(rv, ans):
        # compare an unrad result `rv` against the expected `ans`,
        # delegating to s_check when a cov (change of variable) is expected
        assert bool(rv[1]) == bool(ans[1])
        if ans[1]:
            return s_check(rv, ans)
        e = rv[0].expand()
        a = ans[0].expand()
        return e in [a, -a] and rv[1] == ans[1]
    def s_check(rv, ans):
        # compare results that contain a Dummy substitution variable
        # get the dummy
        rv = list(rv)
        d = rv[0].atoms(Dummy)
        reps = list(zip(d, [s]*len(d)))
        # replace s with this dummy
        rv = (rv[0].subs(reps).expand(), [rv[1][0].subs(reps), rv[1][1].subs(reps)])
        ans = (ans[0].subs(reps).expand(), [ans[1][0].subs(reps), ans[1][1].subs(reps)])
        return str(rv[0]) in [str(ans[0]), str(-ans[0])] and \
            str(rv[1]) == str(ans[1])
    assert check(unrad(sqrt(x)),
                 (x, []))
    assert check(unrad(sqrt(x) + 1),
                 (x - 1, []))
    assert check(unrad(sqrt(x) + root(x, 3) + 2),
                 (s**3 + s**2 + 2, [s, s**6 - x]))
    assert check(unrad(sqrt(x)*root(x, 3) + 2),
                 (x**5 - 64, []))
    assert check(unrad(sqrt(x) + (x + 1)**Rational(1, 3)),
                 (x**3 - (x + 1)**2, []))
    assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(2*x)),
                 (-2*sqrt(2)*x - 2*x + 1, []))
    assert check(unrad(sqrt(x) + sqrt(x + 1) + 2),
                 (16*x - 9, []))
    assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - x)),
                 (5*x**2 - 4*x, []))
    assert check(unrad(a*sqrt(x) + b*sqrt(x) + c*sqrt(y) + d*sqrt(y)),
                 ((a*sqrt(x) + b*sqrt(x))**2 - (c*sqrt(y) + d*sqrt(y))**2, []))
    assert check(unrad(sqrt(x) + sqrt(1 - x)),
                 (2*x - 1, []))
    assert check(unrad(sqrt(x) + sqrt(1 - x) - 3),
                 (x**2 - x + 16, []))
    assert check(unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x)),
                 (5*x**2 - 2*x + 1, []))
    assert unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - 3) in [
        (25*x**4 + 376*x**3 + 1256*x**2 - 2272*x + 784, []),
        (25*x**8 - 476*x**6 + 2534*x**4 - 1468*x**2 + 169, [])]
    assert unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - sqrt(1 - 2*x)) == \
        (41*x**4 + 40*x**3 + 232*x**2 - 160*x + 16, [])  # orig root at 0.487
    assert check(unrad(sqrt(x) + sqrt(x + 1)), (S(1), []))
    eq = sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x))
    assert check(unrad(eq),
                 (16*x**2 - 9*x, []))
    assert set(solve(eq, check=False)) == set([S(0), S(9)/16])
    assert solve(eq) == []
    # but this one really does have those solutions
    assert set(solve(sqrt(x) - sqrt(x + 1) + sqrt(1 - sqrt(x)))) == \
        set([S.Zero, S(9)/16])
    assert check(unrad(sqrt(x) + root(x + 1, 3) + 2*sqrt(y), y),
                 (S('2*sqrt(x)*(x + 1)**(1/3) + x - 4*y + (x + 1)**(2/3)'), []))
    assert check(unrad(sqrt(x/(1 - x)) + (x + 1)**Rational(1, 3)),
                 (x**5 - x**4 - x**3 + 2*x**2 + x - 1, []))
    assert check(unrad(sqrt(x/(1 - x)) + 2*sqrt(y), y),
                 (4*x*y + x - 4*y, []))
    assert check(unrad(sqrt(x)*sqrt(1 - x) + 2, x),
                 (x**2 - x + 4, []))
    # http://tutorial.math.lamar.edu/
    # Classes/Alg/SolveRadicalEqns.aspx#Solve_Rad_Ex2_a
    assert solve(Eq(x, sqrt(x + 6))) == [3]
    assert solve(Eq(x + sqrt(x - 4), 4)) == [4]
    assert solve(Eq(1, x + sqrt(2*x - 3))) == []
    assert set(solve(Eq(sqrt(5*x + 6) - 2, x))) == set([-S(1), S(2)])
    assert set(solve(Eq(sqrt(2*x - 1) - sqrt(x - 4), 2))) == set([S(5), S(13)])
    assert solve(Eq(sqrt(x + 7) + 2, sqrt(3 - x))) == [-6]
    # http://www.purplemath.com/modules/solverad.htm
    assert solve((2*x - 5)**Rational(1, 3) - 3) == [16]
    assert set(solve(x + 1 - root(x**4 + 4*x**3 - x, 4))) == \
        set([-S(1)/2, -S(1)/3])
    assert set(solve(sqrt(2*x**2 - 7) - (3 - x))) == set([-S(8), S(2)])
    assert solve(sqrt(2*x + 9) - sqrt(x + 1) - sqrt(x + 4)) == [0]
    assert solve(sqrt(x + 4) + sqrt(2*x - 1) - 3*sqrt(x - 1)) == [5]
    assert solve(sqrt(x)*sqrt(x - 7) - 12) == [16]
    assert solve(sqrt(x - 3) + sqrt(x) - 3) == [4]
    assert solve(sqrt(9*x**2 + 4) - (3*x + 2)) == [0]
    assert solve(sqrt(x) - 2 - 5) == [49]
    assert solve(sqrt(x - 3) - sqrt(x) - 3) == []
    assert solve(sqrt(x - 1) - x + 7) == [10]
    assert solve(sqrt(x - 2) - 5) == [27]
    assert solve(sqrt(17*x - sqrt(x**2 - 5)) - 7) == [3]
    assert solve(sqrt(x) - sqrt(x - 1) + sqrt(sqrt(x))) == []
    # don't posify the expression in unrad and do use _mexpand
    z = sqrt(2*x + 1)/sqrt(x) - sqrt(2 + 1/x)
    p = posify(z)[0]
    assert solve(p) == []
    assert solve(z) == []
    assert solve(z + 6*I) == [-S(1)/11]
    assert solve(p + 6*I) == []
    # issue 8622
    assert unrad((root(x + 1, 5) - root(x, 3))) == (
        x**5 - x**3 - 3*x**2 - 3*x - 1, [])
    # issue #8679
    assert check(unrad(x + root(x, 3) + root(x, 3)**2 + sqrt(y), x),
                 (s**3 + s**2 + s + sqrt(y), [s, s**3 - x]))
    # for coverage
    assert check(unrad(sqrt(x) + root(x, 3) + y),
                 (s**3 + s**2 + y, [s, s**6 - x]))
    assert solve(sqrt(x) + root(x, 3) - 2) == [1]
    raises(NotImplementedError, lambda:
           solve(sqrt(x) + root(x, 3) + root(x + 1, 5) - 2))
    # fails through a different code path
    raises(NotImplementedError, lambda: solve(-sqrt(2) + cosh(x)/x))
    # unrad some
    assert solve(sqrt(x + root(x, 3))+root(x - y, 5), y) == [
        x + (x**(S(1)/3) + x)**(S(5)/2)]
    assert check(unrad(sqrt(x) - root(x + 1, 3)*sqrt(x + 2) + 2),
                 (s**10 + 8*s**8 + 24*s**6 - 12*s**5 - 22*s**4 - 160*s**3 - 212*s**2 -
                  192*s - 56, [s, s**2 - x]))
    e = root(x + 1, 3) + root(x, 3)
    assert unrad(e) == (2*x + 1, [])
    eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
    assert check(unrad(eq),
                 (15625*x**4 + 173000*x**3 + 355600*x**2 - 817920*x + 331776, []))
    assert check(unrad(root(x, 4) + root(x, 4)**3 - 1),
                 (s**3 + s - 1, [s, s**4 - x]))
    assert check(unrad(root(x, 2) + root(x, 2)**3 - 1),
                 (x**3 + 2*x**2 + x - 1, []))
    # Float exponents are not handled
    assert unrad(x**0.5) is None
    assert check(unrad(t + root(x + y, 5) + root(x + y, 5)**3),
                 (s**3 + s + t, [s, s**5 - x - y]))
    assert check(unrad(x + root(x + y, 5) + root(x + y, 5)**3, y),
                 (s**3 + s + x, [s, s**5 - x - y]))
    assert check(unrad(x + root(x + y, 5) + root(x + y, 5)**3, x),
                 (s**5 + s**3 + s - y, [s, s**5 - x - y]))
    assert check(unrad(root(x - 1, 3) + root(x + 1, 5) + root(2, 5)),
                 (s**5 + 5*2**(S(1)/5)*s**4 + s**3 + 10*2**(S(2)/5)*s**3 +
                  10*2**(S(3)/5)*s**2 + 5*2**(S(4)/5)*s + 4, [s, s**3 - x + 1]))
    raises(NotImplementedError, lambda:
           unrad((root(x, 2) + root(x, 3) + root(x, 4)).subs(x, x**5 - x + 1)))
    # the simplify flag should be reset to False for unrad results;
    # if it's not then this next test will take a long time
    assert solve(root(x, 3) + root(x, 5) - 2) == [1]
    eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
    assert check(unrad(eq),
                 ((5*x - 4)*(3125*x**3 + 37100*x**2 + 100800*x - 82944), []))
    ans = S('''
[4/5, -1484/375 + 172564/(140625*(114*sqrt(12657)/78125 +
12459439/52734375)**(1/3)) +
4*(114*sqrt(12657)/78125 + 12459439/52734375)**(1/3)]''')
    assert solve(eq) == ans
    # duplicate radical handling
    assert check(unrad(sqrt(x + root(x + 1, 3)) - root(x + 1, 3) - 2),
                 (s**3 - s**2 - 3*s - 5, [s, s**3 - x - 1]))
    # cov post-processing
    e = root(x**2 + 1, 3) - root(x**2 - 1, 5) - 2
    assert check(unrad(e),
                 (s**5 - 10*s**4 + 39*s**3 - 80*s**2 + 80*s - 30,
                  [s, s**3 - x**2 - 1]))
    e = sqrt(x + root(x + 1, 2)) - root(x + 1, 3) - 2
    assert check(unrad(e),
                 (s**6 - 2*s**5 - 7*s**4 - 3*s**3 + 26*s**2 + 40*s + 25,
                  [s, s**3 - x - 1]))
    assert check(unrad(e, _reverse=True),
                 (s**6 - 14*s**5 + 73*s**4 - 187*s**3 + 276*s**2 - 228*s + 89,
                  [s, s**2 - x - sqrt(x + 1)]))
    # this one needs r0, r1 reversal to work
    assert check(unrad(sqrt(x + sqrt(root(x, 3) - 1)) - root(x, 6) - 2),
                 (s**12 - 2*s**8 - 8*s**7 - 8*s**6 + s**4 + 8*s**3 + 23*s**2 +
                  32*s + 17, [s, s**6 - x]))
    # is this needed?
    #assert unrad(root(cosh(x), 3)/x*root(x + 1, 5) - 1) == (
    #    x**15 - x**3*cosh(x)**5 - 3*x**2*cosh(x)**5 - 3*x*cosh(x)**5 - cosh(x)**5, [])
    raises(NotImplementedError, lambda:
           unrad(sqrt(cosh(x)/x) + root(x + 1,3)*sqrt(x) - 1))
    assert unrad(S('(x+y)**(2*y/3) + (x+y)**(1/3) + 1')) is None
    assert check(unrad(S('(x+y)**(2*y/3) + (x+y)**(1/3) + 1'), x),
                 (s**(2*y) + s + 1, [s, s**3 - x - y]))
    # This tests two things: that if full unrad is attempted and fails
    # the solution should still be found; also it tests that the use of
    # composite
    assert len(solve(sqrt(y)*x + x**3 - 1, x)) == 3
    assert len(solve(-512*y**3 + 1344*(x + 2)**(S(1)/3)*y**2 -
                     1176*(x + 2)**(S(2)/3)*y - 169*x + 686, y, _unrad=False)) == 3
    # watch out for when the cov doesn't involve the symbol of interest
    eq = S('-x + (7*y/8 - (27*x/2 + 27*sqrt(x**2)/2)**(1/3)/3)**3 - 1')
    assert solve(eq, y) == [
        4*2**(S(2)/3)*(27*x + 27*sqrt(x**2))**(S(1)/3)/21 - (-S(1)/2 -
        sqrt(3)*I/2)*(-6912*x/343 + sqrt((-13824*x/343 - S(13824)/343)**2)/2 -
        S(6912)/343)**(S(1)/3)/3, 4*2**(S(2)/3)*(27*x + 27*sqrt(x**2))**(S(1)/3)/21 -
        (-S(1)/2 + sqrt(3)*I/2)*(-6912*x/343 + sqrt((-13824*x/343 -
        S(13824)/343)**2)/2 - S(6912)/343)**(S(1)/3)/3, 4*2**(S(2)/3)*(27*x +
        27*sqrt(x**2))**(S(1)/3)/21 - (-6912*x/343 + sqrt((-13824*x/343 -
        S(13824)/343)**2)/2 - S(6912)/343)**(S(1)/3)/3]
    eq = root(x + 1, 3) - (root(x, 3) + root(x, 5))
    assert check(unrad(eq),
                 (3*s**13 + 3*s**11 + s**9 - 1, [s, s**15 - x]))
    assert check(unrad(eq - 2),
                 (3*s**13 + 3*s**11 + 6*s**10 + s**9 + 12*s**8 + 6*s**6 + 12*s**5 +
                  12*s**3 + 7, [s, s**15 - x]))
    assert check(unrad(root(x, 3) - root(x + 1, 4)/2 + root(x + 2, 3)),
                 (4096*s**13 + 960*s**12 + 48*s**11 - s**10 - 1728*s**4,
                  [s, s**4 - x - 1]))  # orig expr has two real roots: -1, -.389
    assert check(unrad(root(x, 3) + root(x + 1, 4) - root(x + 2, 3)/2),
                 (343*s**13 + 2904*s**12 + 1344*s**11 + 512*s**10 - 1323*s**9 -
                  3024*s**8 - 1728*s**7 + 1701*s**5 + 216*s**4 - 729*s, [s, s**4 - x -
                  1]))  # orig expr has one real root: -0.048
    assert check(unrad(root(x, 3)/2 - root(x + 1, 4) + root(x + 2, 3)),
                 (729*s**13 - 216*s**12 + 1728*s**11 - 512*s**10 + 1701*s**9 -
                  3024*s**8 + 1344*s**7 + 1323*s**5 - 2904*s**4 + 343*s, [s, s**4 - x -
                  1]))  # orig expr has 2 real roots: -0.91, -0.15
    assert check(unrad(root(x, 3)/2 - root(x + 1, 4) + root(x + 2, 3) - 2),
                 (729*s**13 + 1242*s**12 + 18496*s**10 + 129701*s**9 + 388602*s**8 +
                  453312*s**7 - 612864*s**6 - 3337173*s**5 - 6332418*s**4 - 7134912*s**3
                  - 5064768*s**2 - 2111913*s - 398034, [s, s**4 - x - 1]))
    # orig expr has 1 real root: 19.53
    ans = solve(sqrt(x) + sqrt(x + 1) -
                sqrt(1 - x) - sqrt(2 + x))
    assert len(ans) == 1 and NS(ans[0])[:4] == '0.73'
    # the fence optimization problem
    # https://github.com/sympy/sympy/issues/4793#issuecomment-36994519
    F = Symbol('F')
    eq = F - (2*x + 2*y + sqrt(x**2 + y**2))
    ans = 2*F/7 - sqrt(2)*F/14
    X = solve(eq, x, check=False)
    for xi in reversed(X):  # reverse since currently, ans is the 2nd one
        Y = solve((x*y).subs(x, xi).diff(y), y, simplify=False, check=False)
        if any((a - ans).expand().is_zero for a in Y):
            break
    else:
        assert None  # no answer was found
    assert solve(sqrt(x + 1) + root(x, 3) - 2) == S('''
[(-11/(9*(47/54 + sqrt(93)/6)**(1/3)) + 1/3 + (47/54 +
sqrt(93)/6)**(1/3))**3]''')
    assert solve(sqrt(sqrt(x + 1)) + x**Rational(1, 3) - 2) == S('''
[(-sqrt(-2*(-1/16 + sqrt(6913)/16)**(1/3) + 6/(-1/16 +
sqrt(6913)/16)**(1/3) + 17/2 + 121/(4*sqrt(-6/(-1/16 +
sqrt(6913)/16)**(1/3) + 2*(-1/16 + sqrt(6913)/16)**(1/3) + 17/4)))/2 +
sqrt(-6/(-1/16 + sqrt(6913)/16)**(1/3) + 2*(-1/16 +
sqrt(6913)/16)**(1/3) + 17/4)/2 + 9/4)**3]''')
    assert solve(sqrt(x) + root(sqrt(x) + 1, 3) - 2) == S('''
[(-(81/2 + 3*sqrt(741)/2)**(1/3)/3 + (81/2 + 3*sqrt(741)/2)**(-1/3) +
2)**2]''')
    eq = S('''
-x + (1/2 - sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3
+ x*(3*x**2 - 34) + 90)**2/4 - 39304/27) - 45)**(1/3) + 34/(3*(1/2 -
sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3 + x*(3*x**2
- 34) + 90)**2/4 - 39304/27) - 45)**(1/3))''')
    assert check(unrad(eq),
                 (-s*(-s**6 + sqrt(3)*s**6*I - 153*2**(S(2)/3)*3**(S(1)/3)*s**4 +
                  51*12**(S(1)/3)*s**4 - 102*2**(S(2)/3)*3**(S(5)/6)*s**4*I - 1620*s**3 +
                  1620*sqrt(3)*s**3*I + 13872*18**(S(1)/3)*s**2 - 471648 +
                  471648*sqrt(3)*I), [s, s**3 - 306*x - sqrt(3)*sqrt(31212*x**2 -
                  165240*x + 61484) + 810]))
    assert solve(eq) == []  # not other code errors
@slow
def test_unrad_slow():
    """Unradicalization of an equation whose roots have multiplicity > 1."""
    # this has roots with multiplicity > 1; there should be no
    # repeats in roots obtained, however
    eq = (sqrt(1 + sqrt(1 - 4*x**2)) - x*((1 + sqrt(1 + 2*sqrt(1 - 4*x**2)))))
    assert solve(eq) == [S.Half]
@XFAIL
def test_unrad_fail():
    """Known-failing radical equations (checksol cannot use real_root here)."""
    # this only works if we check real_root(eq.subs(x, S(1)/3))
    # but checksol doesn't work like that
    assert solve(root(x**3 - 3*x**2, 3) + 1 - x) == [S(1)/3]
    assert solve(root(x + 1, 3) + root(x**2 - 2, 5) + 1) == [
        -1, -1 + CRootOf(x**5 + x**4 + 5*x**3 + 8*x**2 + 10*x + 5, 0)**3]
def test_checksol():
    """Verify ``checksol`` on dict solutions, relational equations and input validation."""
    x, y, r, t = symbols('x, y, r, t')
    eq = r - x**2 - y**2
    dict_var_soln = {y: - sqrt(r) / sqrt(tan(t)**2 + 1),
        x: -sqrt(r)*tan(t)/sqrt(tan(t)**2 + 1)}
    assert checksol(eq, dict_var_soln) == True
    # Eq/Ne/inequality forms must be checked consistently with their truth value
    assert checksol(Eq(x, False), {x: False}) is True
    assert checksol(Ne(x, False), {x: False}) is False
    assert checksol(Eq(x < 1, True), {x: 0}) is True
    assert checksol(Eq(x < 1, True), {x: 1}) is False
    assert checksol(Eq(x < 1, False), {x: 1}) is True
    assert checksol(Eq(x < 1, False), {x: 0}) is False
    assert checksol(Eq(x + 1, x**2 + 1), {x: 1}) is True
    # lists of equations: all must be satisfied
    assert checksol([x - 1, x**2 - 1], x, 1) is True
    assert checksol([x - 1, x**2 - 2], x, 1) is False
    assert checksol(Poly(x**2 - 1), x, 1) is True
    # malformed calls raise rather than returning a bogus answer
    raises(ValueError, lambda: checksol(x, 1))
    raises(ValueError, lambda: checksol([], x, 1))
def test__invert():
    """``_invert`` should return (independent part, dependent part) pairs."""
    assert _invert(x - 2) == (2, x)
    assert _invert(2) == (2, 0)
    assert _invert(exp(1/x) - 3, x) == (1/log(3), x)
    assert _invert(exp(1/x + a/x) - 3, x) == ((a + 1)/log(3), x)
    assert _invert(a, x) == (a, 0)
def test_issue_4463():
    """Equations mixing powers and logs that resolve via LambertW (issue 4463)."""
    assert solve(-a*x + 2*x*log(x), x) == [exp(a/2)]
    assert solve(a/x + exp(x/2), x) == [2*LambertW(-a/2)]
    assert solve(x**x) == []
    assert solve(x**x - 2) == [exp(LambertW(log(2)))]
    assert solve(((x - 3)*(x - 2))**((x - 3)*(x - 4))) == [2]
    assert solve(
        (a/x + exp(x/2)).diff(x), x) == [4*LambertW(sqrt(2)*sqrt(a)/4)]
@slow
def test_issue_5114_solvers():
    """Large sparse linear system from issue 5114 must yield a single solution."""
    a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('a:r')
    # there is no 'a' in the equation set but this is how the
    # problem was originally posed
    syms = a, b, c, f, h, k, n
    eqs = [b + r/d - c/d,
        c*(1/d + 1/e + 1/g) - f/g - r/d,
        f*(1/g + 1/i + 1/j) - c/g - h/i,
        h*(1/i + 1/l + 1/m) - f/i - k/m,
        k*(1/m + 1/o + 1/p) - h/m - n/p,
        n*(1/p + 1/q) - k/p]
    assert len(solve(eqs, syms, manual=True, check=False, simplify=False)) == 1
def test_issue_5849():
    """Circuit-equation system solved manually (issue 5849); see the matrix variant below."""
    I1, I2, I3, I4, I5, I6 = symbols('I1:7')
    dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
    e = (
        I1 - I2 - I3,
        I3 - I4 - I5,
        I4 + I5 - I6,
        -I1 + I2 + I6,
        -2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
        -I4 + dQ4,
        -I2 + dQ2,
        2*I3 + 2*I5 + 3*I6 - Q2,
        I4 - 2*I5 + 2*Q4 + dI4
    )
    ans = [{
        dQ4: I3 - I5,
        dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
        I4: I3 - I5,
        dQ2: I2,
        Q2: 2*I3 + 2*I5 + 3*I6,
        I1: I2 + I3,
        Q4: -I3/2 + 3*I5/2 - dI4/2}]
    v = I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4
    assert solve(e, *v, manual=True, check=False, dict=True) == ans
    # with checking enabled the unchecked solution is rejected
    assert solve(e, *v, manual=True) == []
    # the matrix solver (tested below) doesn't like this because it produces
    # a zero row in the matrix. Is this related to issue 4551?
    assert [ei.subs(
        ans[0]) for ei in e] == [0, 0, I3 - I6, -I3 + I6, 0, 0, 0, 0, 0]
def test_issue_5849_matrix():
    '''Same system as test_issue_5849 but solved with the matrix solver.'''
    I1, I2, I3, I4, I5, I6 = symbols('I1:7')
    dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
    e = (
        I1 - I2 - I3,
        I3 - I4 - I5,
        I4 + I5 - I6,
        -I1 + I2 + I6,
        -2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
        -I4 + dQ4,
        -I2 + dQ2,
        2*I3 + 2*I5 + 3*I6 - Q2,
        I4 - 2*I5 + 2*Q4 + dI4
    )
    assert solve(e, I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4) == {
        dI4: -I3 + 3*I5 - 2*Q4,
        dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
        dQ2: I2,
        I1: I2 + I3,
        Q2: 2*I3 + 2*I5 + 3*I6,
        dQ4: I3 - I5,
        I4: I3 - I5}
def test_issue_5901():
    """Solving for applied functions f(x), g(x) and solve_linear with Derivative/Integral."""
    f, g, h = map(Function, 'fgh')
    a = Symbol('a')
    D = Derivative(f(x), x)
    G = Derivative(g(a), a)
    # f(x) can be treated as the unknown
    assert solve(f(x) + f(x).diff(x), f(x)) == \
        [-D]
    assert solve(f(x) - 3, f(x)) == \
        [3]
    assert solve(f(x) - 3*f(x).diff(x), f(x)) == \
        [3*D]
    assert solve([f(x) - 3*f(x).diff(x)], f(x)) == \
        {f(x): 3*D}
    assert solve([f(x) - 3*f(x).diff(x), f(x)**2 - y + 4], f(x), y) == \
        [{f(x): 3*D, y: 9*D**2 + 4}]
    assert solve(-f(a)**2*g(a)**2 + f(a)**2*h(a)**2 + g(a).diff(a),
            h(a), g(a), set=True) == \
        ([g(a)], set([
        (-sqrt(h(a)**2*f(a)**2 + G)/f(a),),
        (sqrt(h(a)**2*f(a)**2+ G)/f(a),)]))
    args = [f(x).diff(x, 2)*(f(x) + g(x)) - g(x)**2 + 2, f(x), g(x)]
    assert set(solve(*args)) == \
        set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))])
    eqs = [f(x)**2 + g(x) - 2*f(x).diff(x), g(x)**2 - 4]
    assert solve(eqs, f(x), g(x), set=True) == \
        ([f(x), g(x)], set([
        (-sqrt(2*D - 2), S(2)),
        (sqrt(2*D - 2), S(2)),
        (-sqrt(2*D + 2), -S(2)),
        (sqrt(2*D + 2), -S(2))]))

    # the underlying problem was in solve_linear that was not masking off
    # anything but a Mul or Add; it now raises an error if it gets anything
    # but a symbol and solve handles the substitutions necessary so solve_linear
    # won't make this error
    raises(
        ValueError, lambda: solve_linear(f(x) + f(x).diff(x), symbols=[f(x)]))
    assert solve_linear(f(x) + f(x).diff(x), symbols=[x]) == \
        (f(x) + Derivative(f(x), x), 1)
    assert solve_linear(f(x) + Integral(x, (x, y)), symbols=[x]) == \
        (f(x) + Integral(x, (x, y)), 1)
    assert solve_linear(f(x) + Integral(x, (x, y)) + x, symbols=[x]) == \
        (x + f(x) + Integral(x, (x, y)), 1)
    assert solve_linear(f(y) + Integral(x, (x, y)) + x, symbols=[x]) == \
        (x, -f(y) - Integral(x, (x, y)))
    assert solve_linear(x - f(x)/a + (f(x) - 1)/a, symbols=[x]) == \
        (x, 1/a)
    assert solve_linear(x + Derivative(2*x, x)) == \
        (x, -2)
    assert solve_linear(x + Integral(x, y), symbols=[x]) == \
        (x, 0)
    assert solve_linear(x + Integral(x, y) - 2, symbols=[x]) == \
        (x, 2/(y + 1))

    # implicit solutions express the variable in terms of itself
    assert set(solve(x + exp(x)**2, exp(x))) == \
        set([-sqrt(-x), sqrt(-x)])
    assert solve(x + exp(x), x, implicit=True) == \
        [-exp(x)]
    assert solve(cos(x) - sin(x), x, implicit=True) == []
    assert solve(x - sin(x), x, implicit=True) == \
        [sin(x)]
    assert solve(x**2 + x - 3, x, implicit=True) == \
        [-x**2 + 3]
    assert solve(x**2 + x - 3, x**2, implicit=True) == \
        [-x + 3]
def test_issue_5912():
    """``rational=True`` gives exact roots; otherwise numbers come back numeric."""
    assert set(solve(x**2 - x - 0.1, rational=True)) == \
        set([S(1)/2 + sqrt(35)/10, -sqrt(35)/10 + S(1)/2])
    ans = solve(x**2 - x - 0.1, rational=False)
    assert len(ans) == 2 and all(a.is_Number for a in ans)
    ans = solve(x**2 - x - 0.1)
    assert len(ans) == 2 and all(a.is_Number for a in ans)
def test_float_handling():
    """``nfloat`` and the ``rational`` flag must preserve/convert Floats consistently."""
    def test(e1, e2):
        # both expressions contain the same number of Float atoms
        return len(e1.atoms(Float)) == len(e2.atoms(Float))
    assert solve(x - 0.5, rational=True)[0].is_Rational
    assert solve(x - 0.5, rational=False)[0].is_Float
    assert solve(x - S.Half, rational=False)[0].is_Rational
    assert solve(x - 0.5, rational=None)[0].is_Float
    assert solve(x - S.Half, rational=None)[0].is_Rational
    assert test(nfloat(1 + 2*x), 1.0 + 2.0*x)
    # nfloat recurses into common containers
    for contain in [list, tuple, set]:
        ans = nfloat(contain([1 + 2*x]))
        assert type(ans) is contain and test(list(ans)[0], 1.0 + 2.0*x)
    k, v = list(nfloat({2*x: [1 + 2*x]}).items())[0]
    assert test(k, 2*x) and test(v[0], 1.0 + 2.0*x)
    assert test(nfloat(cos(2*x)), cos(2.0*x))
    assert test(nfloat(3*x**2), 3.0*x**2)
    assert test(nfloat(3*x**2, exponent=True), 3.0*x**2.0)
    assert test(nfloat(exp(2*x)), exp(2.0*x))
    assert test(nfloat(x/3), x/3.0)
    assert test(nfloat(x**4 + 2*x + cos(S(1)/3) + 1),
            x**4 + 2.0*x + 1.94495694631474)
    # don't call nfloat if there is no solution
    tot = 100 + c + z + t
    assert solve(((.7 + c)/tot - .6, (.2 + z)/tot - .3, t/tot - .1)) == []
def test_check_assumptions():
    """Solutions are filtered by symbol assumptions; bad calls to check_assumptions raise."""
    x = symbols('x', positive=True)
    assert solve(x**2 - 1) == [1]
    assert check_assumptions(1, x) == True
    raises(AssertionError, lambda: check_assumptions(2*x, x, positive=True))
    raises(TypeError, lambda: check_assumptions(1, 1))
def test_failing_assumptions():
    """``failing_assumptions`` reports undecidable assumptions as None."""
    x = Symbol('x', real=True, positive=True)
    y = Symbol('y')
    assert failing_assumptions(6*x + y, **x.assumptions0) == \
    {'real': None, 'imaginary': None, 'complex': None, 'hermitian': None,
    'positive': None, 'nonpositive': None, 'nonnegative': None, 'nonzero': None,
    'negative': None, 'zero': None}
def test_issue_6056():
    """Products of tanh factors: empty solution vs. complex log solutions (issue 6056)."""
    assert solve(tanh(x + 3)*tanh(x - 3) - 1) == []
    assert set([simplify(w) for w in solve(tanh(x - 1)*tanh(x + 1) + 1)]) == set([
            -log(2)/2 + log(1 - I),
            -log(2)/2 + log(-1 - I),
            -log(2)/2 + log(1 + I),
            -log(2)/2 + log(-1 + I),])
    assert set([simplify(w) for w in solve((tanh(x + 3)*tanh(x - 3) + 1)**2)]) == set([
            -log(2)/2 + log(1 - I),
            -log(2)/2 + log(-1 - I),
            -log(2)/2 + log(1 + I),
            -log(2)/2 + log(-1 + I),])
def test_issue_5673():
    """Numerical vs. symbolic checksol on a LambertW identity (issue 5673)."""
    eq = -x + exp(exp(LambertW(log(x)))*LambertW(log(x)))
    assert checksol(eq, x, 2) is True
    # without numerical evaluation the check is inconclusive
    assert checksol(eq, x, 2, numerical=False) is None
def test_exclude():
    """The ``exclude`` keyword keeps chosen symbols out of the solved-for set."""
    R, C, Ri, Vout, V1, Vminus, Vplus, s = \
        symbols('R, C, Ri, Vout, V1, Vminus, Vplus, s')
    Rf = symbols('Rf', positive=True)  # to eliminate Rf = 0 soln
    eqs = [C*V1*s + Vplus*(-2*C*s - 1/R),
           Vminus*(-1/Ri - 1/Rf) + Vout/Rf,
           C*Vplus*s + V1*(-C*s - 1/R) + Vout/R,
           -Vminus + Vplus]
    assert solve(eqs, exclude=s*C*R) == [
        {
            Rf: Ri*(C*R*s + 1)**2/(C*R*s),
            Vminus: Vplus,
            V1: 2*Vplus + Vplus/(C*R*s),
            Vout: C*R*Vplus*s + 3*Vplus + Vplus/(C*R*s)},
        {
            Vplus: 0,
            Vminus: 0,
            V1: 0,
            Vout: 0},
    ]

    # TODO: Investigate why currently solution [0] is preferred over [1].
    assert solve(eqs, exclude=[Vplus, s, C]) in [[{
        Vminus: Vplus,
        V1: Vout/2 + Vplus/2 + sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
        R: (Vout - 3*Vplus - sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
        Rf: Ri*(Vout - Vplus)/Vplus,
    }, {
        Vminus: Vplus,
        V1: Vout/2 + Vplus/2 - sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
        R: (Vout - 3*Vplus + sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
        Rf: Ri*(Vout - Vplus)/Vplus,
    }], [{
        Vminus: Vplus,
        Vout: (V1**2 - V1*Vplus - Vplus**2)/(V1 - 2*Vplus),
        Rf: Ri*(V1 - Vplus)**2/(Vplus*(V1 - 2*Vplus)),
        R: Vplus/(C*s*(V1 - 2*Vplus)),
    }]]
def test_high_order_roots():
    """Quintic roots from solve agree with Poly.all_roots."""
    s = x**5 + 4*x**3 + 3*x**2 + S(7)/4
    assert set(solve(s)) == set(Poly(s*4, domain='ZZ').all_roots())
def test_minsolve_linear_system():
    """``particular=True`` prefers solutions with many zero components; ``quick`` trades quality for speed."""
    def count(dic):
        # number of solved symbols set to zero
        return len([x for x in dic.values() if x == 0])
    assert count(solve([x + y + z, y + z + a + t], particular=True, quick=True)) \
        == 3
    assert count(solve([x + y + z, y + z + a + t], particular=True, quick=False)) \
        == 3
    assert count(solve([x + y + z, y + z + a], particular=True, quick=True)) == 1
    assert count(solve([x + y + z, y + z + a], particular=True, quick=False)) == 2
def test_real_roots():
    """Only real roots are returned for a real symbol (cf. issue 6650)."""
    # cf. issue 6650
    x = Symbol('x', real=True)
    assert len(solve(x**5 + x**3 + 1)) == 1
def test_issue_6528():
    """Large bivariate quadratic system should solve without expensive simplification."""
    eqs = [
        327600995*x**2 - 37869137*x + 1809975124*y**2 - 9998905626,
        895613949*x**2 - 273830224*x*y + 530506983*y**2 - 10000000000]
    # two expressions encountered are > 1400 ops long so if this hangs
    # it is likely because simplification is being done
    assert len(solve(eqs, y, x, check=False)) == 4
def test_overdetermined():
    """Overdetermined Abs system: checking removes the spurious candidate."""
    x = symbols('x', real=True)
    eqs = [Abs(4*x - 7) - 5, Abs(3 - 8*x) - 1]
    assert solve(eqs, x) == [(S.Half,)]
    assert solve(eqs, x, manual=True) == [(S.Half,)]
    # with check=False the extraneous candidate x = 3 survives
    assert solve(eqs, x, manual=True, check=False) == [(S.Half,), (S(3),)]
def test_issue_6605():
    """Exponential equations with different bases (issue 6605)."""
    x = symbols('x')
    assert solve(4**(x/2) - 2**(x/3)) == [0, 3*I*pi/log(2)]
    # while the first one passed, this one failed
    x = symbols('x', real=True)
    assert solve(5**(x/2) - 2**(x/3)) == [0]
    b = sqrt(6)*sqrt(log(2))/sqrt(log(5))
    assert solve(5**(x/2) - 2**(3/x)) == [-b, b]
def test__ispow():
    """``_ispow`` recognizes only genuine Pow instances."""
    assert _ispow(x**2)
    assert not _ispow(x)
    assert not _ispow(True)
def test_issue_6644():
    """Nested-radical equation in q should yield five unchecked solutions (issue 6644)."""
    eq = -sqrt((m - q)**2 + (-m/(2*q) + S(1)/2)**2) + sqrt((-m**2/2 - sqrt(
        4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2 + (m**2/2 - m - sqrt(
        4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2)
    sol = solve(eq, q, simplify=False, check=False)
    assert len(sol) == 5
def test_issue_6752():
    """Small polynomial systems return all solution tuples (issue 6752)."""
    assert solve([a**2 + a, a - b], [a, b]) == [(-1, -1), (0, 0)]
    assert solve([a**2 + a*c, a - b], [a, b]) == [(0, 0), (-c, -c)]
def test_issue_6792():
    """Mixed rational/CRootOf roots of a factored polynomial (issue 6792)."""
    assert solve(x*(x - 1)**2*(x + 1)*(x**6 - x + 1)) == [
        -1, 0, 1, CRootOf(x**6 - x + 1, 0), CRootOf(x**6 - x + 1, 1),
        CRootOf(x**6 - x + 1, 2), CRootOf(x**6 - x + 1, 3),
        CRootOf(x**6 - x + 1, 4), CRootOf(x**6 - x + 1, 5)]
def test_issues_6819_6820_6821_6248_8692():
    """Abs/arg/re/im handling across several related issues."""
    # issue 6821
    x, y = symbols('x y', real=True)
    assert solve(abs(x + 3) - 2*abs(x - 3)) == [1, 9]
    assert solve([abs(x) - 2, arg(x) - pi], x) == [(-2,), (2,)]
    assert set(solve(abs(x - 7) - 8)) == set([-S(1), S(15)])

    # issue 8692
    assert solve(Eq(Abs(x + 1) + Abs(x**2 - 7), 9), x) == [
        -S(1)/2 + sqrt(61)/2, -sqrt(69)/2 + S(1)/2]

    # issue 7145
    assert solve(2*abs(x) - abs(x - 1)) == [-1, Rational(1, 3)]

    x = symbols('x')
    assert solve([re(x) - 1, im(x) - 2], x) == [
        {re(x): 1, x: 1 + 2*I, im(x): 2}]

    # check for 'dict' handling of solution
    eq = sqrt(re(x)**2 + im(x)**2) - 3
    assert solve(eq) == solve(eq, x)

    i = symbols('i', imaginary=True)
    assert solve(abs(i) - 3) == [-3*I, 3*I]
    raises(NotImplementedError, lambda: solve(abs(x) - 3))

    w = symbols('w', integer=True)
    assert solve(2*x**w - 4*y**w, w) == solve((x/y)**w - 2, w)

    x, y = symbols('x y', real=True)
    assert solve(x + y*I + 3) == {y: 0, x: -3}
    # issue 2642
    assert solve(x*(1 + I)) == [0]

    x, y = symbols('x y', imaginary=True)
    assert solve(x + y*I + 3 + 2*I) == {x: -2*I, y: 3*I}

    x = symbols('x', real=True)
    assert solve(x + y + 3 + 2*I) == {x: -3, y: -2*I}

    # issue 6248
    f = Function('f')
    assert solve(f(x + 1) - f(2*x - 1)) == [2]
    assert solve(log(x + 1) - log(2*x - 1)) == [2]

    x = symbols('x')
    assert solve(2**x + 4**x) == [I*pi/log(2)]
def test_issue_14607():
    """PID-tuning coefficient match (issue 14607): solve for K_C, tau_I, tau_D."""
    # issue 14607
    s, tau_c, tau_1, tau_2, phi, K = symbols(
        's, tau_c, tau_1, tau_2, phi, K')

    target = (s**2*tau_1*tau_2 + s*tau_1 + s*tau_2 + 1)/(K*s*(-phi + tau_c))

    K_C, tau_I, tau_D = symbols('K_C, tau_I, tau_D',
                                positive=True, nonzero=True)
    PID = K_C*(1 + 1/(tau_I*s) + tau_D*s)

    # equate coefficients of the two transfer functions in s
    eq = (target - PID).together()
    eq *= denom(eq).simplify()
    eq = Poly(eq, s)
    c = eq.coeffs()

    vars = [K_C, tau_I, tau_D]
    s = solve(c, vars, dict=True)
    assert len(s) == 1

    knownsolution = {K_C: -(tau_1 + tau_2)/(K*(phi - tau_c)),
                     tau_I: tau_1 + tau_2,
                     tau_D: tau_1*tau_2/(tau_1 + tau_2)}

    for var in vars:
        assert s[0][var].simplify() == knownsolution[var].simplify()
@slow
def test_lambert_multivariate():
    """LambertW-based solutions from the bivariate solver helpers."""
    from sympy.abc import a, x, y
    from sympy.solvers.bivariate import _filtered_gens, _lambert, _solve_lambert

    assert _filtered_gens(Poly(x + 1/x + exp(x) + y), x) == set([x, exp(x)])
    assert _lambert(x, x) == []
    assert solve((x**2 - 2*x + 1).subs(x, log(x) + 3*x)) == [LambertW(3*S.Exp1)/3]
    assert solve((x**2 - 2*x + 1).subs(x, (log(x) + 3*x)**2 - 1)) == \
          [LambertW(3*exp(-sqrt(2)))/3, LambertW(3*exp(sqrt(2)))/3]
    assert solve((x**2 - 2*x - 2).subs(x, log(x) + 3*x)) == \
          [LambertW(3*exp(1 - sqrt(3)))/3, LambertW(3*exp(1 + sqrt(3)))/3]
    assert solve(x*log(x) + 3*x + 1, x) == [exp(-3 + LambertW(-exp(3)))]
    eq = (x*exp(x) - 3).subs(x, x*exp(x))
    assert solve(eq) == [LambertW(3*exp(-LambertW(3)))]
    # coverage test
    raises(NotImplementedError, lambda: solve(x - sin(x)*log(y - x), x))

    # abbreviations used in the expected three cube roots below
    x0 = 1/log(a)
    x1 = LambertW(S(1)/3)
    x2 = a**(-5)
    x3 = 3**(S(1)/3)
    x4 = 3**(S(5)/6)*I
    x5 = x1**(S(1)/3)*x2**(S(1)/3)/2
    ans = solve(3*log(a**(3*x + 5)) + a**(3*x + 5), x)
    assert ans == [
        x0*log(3*x1*x2)/3, x0*log(-x5*(x3 - x4)), x0*log(-x5*(x3 + x4))]

    # check collection
    K = ((b + 3)*LambertW(1/(b + 3))/a**5)**(S(1)/3)
    assert solve(
            3*log(a**(3*x + 5)) + b*log(a**(3*x + 5)) + a**(3*x + 5),
            x) == [
        log(K*(1 - sqrt(3)*I)/-2)/log(a),
        log(K*(1 + sqrt(3)*I)/-2)/log(a),
        log((b + 3)*LambertW(1/(b + 3))/a**5)/(3*log(a))]

    p = symbols('p', positive=True)
    eq = 4*2**(2*p + 3) - 2*p - 3
    assert _solve_lambert(eq, p, _filtered_gens(Poly(eq), p)) == [
        -S(3)/2 - LambertW(-4*log(2))/(2*log(2))]

    # issue 4271
    assert solve((a/x + exp(x/2)).diff(x, 2), x) == [
        6*LambertW(root(-1, 3)*root(a, 3)/3)]

    assert solve((log(x) + x).subs(x, x**2 + 1)) == [
        -I*sqrt(-LambertW(1) + 1), sqrt(-1 + LambertW(1))]

    assert solve(x**3 - 3**x, x) == [3, -3*LambertW(-log(3)/3)/log(3)]
    assert solve(x**2 - 2**x, x) == [2, 4]
    assert solve(-x**2 + 2**x, x) == [2, 4]
    assert solve(3**cos(x) - cos(x)**3) == [acos(3), acos(-3*LambertW(-log(3)/3)/log(3))]
    assert set(solve(3*log(x) - x*log(3))) == set(  # 2.478... and 3
        [3, -3*LambertW(-log(3)/3)/log(3)])
    assert solve(LambertW(2*x) - y, x) == [y*exp(y)/2]
@XFAIL
def test_other_lambert():
    """Known-failing Lambert-type equations."""
    from sympy.abc import x
    assert solve(3*sin(x) - x*sin(3), x) == [3]
    a = S(6)/5
    assert set(solve(x**a - a**x)) == set(
        [a, -a*LambertW(-log(a)/a)/log(a)])
    assert set(solve(3**cos(x) - cos(x)**3)) == set(
        [acos(3), acos(-3*LambertW(-log(3)/3)/log(3))])
def test_rewrite_trig():
    """Trig equations solved via rewriting (sec, tanh, mixed sin/cos)."""
    assert solve(sin(x) + tan(x)) == [0, -pi, pi, 2*pi]
    assert solve(sin(x) + sec(x)) == [
        -2*atan(-S.Half + sqrt(2)*sqrt(1 - sqrt(3)*I)/2 + sqrt(3)*I/2),
        2*atan(S.Half - sqrt(2)*sqrt(1 + sqrt(3)*I)/2 + sqrt(3)*I/2), 2*atan(S.Half
        + sqrt(2)*sqrt(1 + sqrt(3)*I)/2 + sqrt(3)*I/2), 2*atan(S.Half -
        sqrt(3)*I/2 + sqrt(2)*sqrt(1 - sqrt(3)*I)/2)]
    assert solve(sinh(x) + tanh(x)) == [0, I*pi]

    # issue 6157
    assert solve(2*sin(x) - cos(x), x) == [-2*atan(2 - sqrt(5)),
                                           -2*atan(2 + sqrt(5))]
@XFAIL
def test_rewrite_trigh():
    """Known-failing hyperbolic rewrite (sinh + sech)."""
    # if this import passes then the test below should also pass
    from sympy import sech
    assert solve(sinh(x) + sech(x)) == [
        2*atanh(-S.Half + sqrt(5)/2 - sqrt(-2*sqrt(5) + 2)/2),
        2*atanh(-S.Half + sqrt(5)/2 + sqrt(-2*sqrt(5) + 2)/2),
        2*atanh(-sqrt(5)/2 - S.Half + sqrt(2 + 2*sqrt(5))/2),
        2*atanh(-sqrt(2 + 2*sqrt(5))/2 - sqrt(5)/2 - S.Half)]
def test_uselogcombine():
    """Log equations solved by combining logarithms (force=True)."""
    eq = z - log(x) + log(y/(x*(-1 + y**2/x**2)))
    assert solve(eq, x, force=True) == [-sqrt(y*(y - exp(z))), sqrt(y*(y - exp(z)))]
    assert solve(log(x + 3) + log(1 + 3/x) - 3) in [
        [-3 + sqrt(-12 + exp(3))*exp(S(3)/2)/2 + exp(3)/2,
        -sqrt(-12 + exp(3))*exp(S(3)/2)/2 - 3 + exp(3)/2],
        [-3 + sqrt(-36 + (-exp(3) + 6)**2)/2 + exp(3)/2,
        -3 - sqrt(-36 + (-exp(3) + 6)**2)/2 + exp(3)/2],
        ]
    assert solve(log(exp(2*x) + 1) + log(-tanh(x) + 1) - log(2)) == []
def test_atan2():
    """atan2 equation inversion."""
    assert solve(atan2(x, 2) - pi/3, x) == [2*sqrt(3)]
def test_errorinverses():
    """erf/erfc and their inverses invert each other under solve."""
    assert solve(erf(x) - y, x) == [erfinv(y)]
    assert solve(erfinv(x) - y, x) == [erf(y)]
    assert solve(erfc(x) - y, x) == [erfcinv(y)]
    assert solve(erfcinv(x) - y, x) == [erfc(y)]
def test_issue_2725():
    """Radical equation in R yields a pair of cubic-root solutions (issue 2725)."""
    R = Symbol('R')
    eq = sqrt(2)*R*sqrt(1/(R + 1)) + (R + 1)*(sqrt(2)*sqrt(1/(R + 1)) - 1)
    sol = solve(eq, R, set=True)[1]
    assert sol == set([(S(5)/3 + (-S(1)/2 - sqrt(3)*I/2)*(S(251)/27 +
        sqrt(111)*I/9)**(S(1)/3) + 40/(9*((-S(1)/2 - sqrt(3)*I/2)*(S(251)/27 +
        sqrt(111)*I/9)**(S(1)/3))),), (S(5)/3 + 40/(9*(S(251)/27 +
        sqrt(111)*I/9)**(S(1)/3)) + (S(251)/27 + sqrt(111)*I/9)**(S(1)/3),)])
def test_issue_5114_6611():
    """Performance guard for large linear system (issues 5114/6611)."""
    # See that it doesn't hang; this solves in about 2 seconds.
    # Also check that the solution is relatively small.
    # Note: the system in issue 6611 solves in about 5 seconds and has
    # an op-count of 138336 (with simplify=False).
    b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('b:r')
    eqs = Matrix([
        [b - c/d + r/d], [c*(1/g + 1/e + 1/d) - f/g - r/d],
        [-c/g + f*(1/j + 1/i + 1/g) - h/i], [-f/i + h*(1/m + 1/l + 1/i) - k/m],
        [-h/m + k*(1/p + 1/o + 1/m) - n/p], [-k/p + n*(1/q + 1/p)]])
    v = Matrix([f, h, k, n, b, c])
    ans = solve(list(eqs), list(v), simplify=False)
    # If time is taken to simplify then the 2617 below becomes
    # 1168 and the time is about 50 seconds instead of 2.
    assert sum([s.count_ops() for s in ans.values()]) <= 2617
def test_det_quick():
    """``det_quick`` agrees with Matrix.det on its three dispatch paths."""
    m = Matrix(3, 3, symbols('a:9'))
    assert m.det() == det_quick(m)  # calls det_perm
    m[0, 0] = 1
    assert m.det() == det_quick(m)  # calls det_minor
    m = Matrix(3, 3, list(range(9)))
    assert m.det() == det_quick(m)  # defaults to .det()
    # make sure they work with Sparse
    s = SparseMatrix(2, 2, (1, 2, 1, 4))
    assert det_perm(s) == det_minor(s) == s.det()
def test_real_imag_splitting():
    """sqrt(a**2 + b**2) = 3 has real solutions only for real a, b."""
    a, b = symbols('a b', real=True)
    assert solve(sqrt(a**2 + b**2) - 3, a) == \
        [-sqrt(-b**2 + 9), sqrt(-b**2 + 9)]
    a, b = symbols('a b', imaginary=True)
    assert solve(sqrt(a**2 + b**2) - 3, a) == []
def test_issue_7110():
    """A real cubic must have at least one real root (issue 7110)."""
    y = -2*x**3 + 4*x**2 - 2*x + 5
    assert any(ask(Q.real(i)) for i in solve(y))
def test_units():
    """Physical units carry through solve."""
    assert solve(1/x - 1/(2*cm)) == [2*cm]
def test_issue_7547():
    """Numerical nsolve of a three-equation system (issue 7547)."""
    A, B, V = symbols('A,B,V')
    eq1 = Eq(630.26*(V - 39.0)*V*(V + 39) - A + B, 0)
    eq2 = Eq(B, 1.36*10**8*(V - 39))
    eq3 = Eq(A, 5.75*10**5*V*(V + 39.0))
    sol = Matrix(nsolve(Tuple(eq1, eq2, eq3), [A, B, V], (0, 0, 0)))
    assert str(sol) == str(Matrix(
        [['4442890172.68209'],
         ['4289299466.1432'],
         ['70.5389666628177']]))
def test_issue_7895():
    """sqrt of a real symbol inverts cleanly (issue 7895)."""
    r = symbols('r', real=True)
    assert solve(sqrt(r) - 2) == [4]
def test_issue_2777():
    """Intersection of two circles; a negative radius gives no checked solution."""
    # the equations represent two circles
    x, y = symbols('x y', real=True)
    e1, e2 = sqrt(x**2 + y**2) - 10, sqrt(y**2 + (-x + 10)**2) - 3
    a, b = 191/S(20), 3*sqrt(391)/20
    ans = [(a, -b), (a, b)]
    assert solve((e1, e2), (x, y)) == ans
    assert solve((e1, e2/(x - a)), (x, y)) == []
    # make the 2nd circle's radius be -3
    e2 += 6
    assert solve((e1, e2), (x, y)) == []
    assert solve((e1, e2), (x, y), check=False) == ans
def test_issue_7322():
    """Tiny floats round-trip exactly through solve (issue 7322)."""
    number = 5.62527e-35
    assert solve(x - number, x)[0] == number
def test_nsolve():
    """nsolve rejects malformed argument combinations."""
    raises(ValueError, lambda: nsolve(x, (-1, 1), method='bisect'))
    raises(TypeError, lambda: nsolve((x - y + 3,x + y,z - y),(x,y,z),(-50,50)))
    raises(TypeError, lambda: nsolve((x + y, x - y), (0, 1)))
@slow
def test_high_order_multivariate():
    """Parametric quintics: incomplete solutions allowed, denominators/CRootOf checked."""
    assert len(solve(a*x**3 - x + 1, x)) == 3
    assert len(solve(a*x**4 - x + 1, x)) == 4
    assert solve(a*x**5 - x + 1, x) == []  # incomplete solution allowed
    raises(NotImplementedError, lambda:
        solve(a*x**5 - x + 1, x, incomplete=False))

    # result checking must always consider the denominator and CRootOf
    # must be checked, too
    d = x**5 - x + 1
    assert solve(d*(1 + 1/d)) == [CRootOf(d + 1, i) for i in range(5)]
    d = x - 1
    assert solve(d*(2 + 1/d)) == [S.Half]
def test_base_0_exp_0():
    """0**0 conventions in solve."""
    assert solve(0**x - 1) == [0]
    assert solve(0**(x - 2) - 1) == [2]
    assert solve(S('x*(1/x**0 - x)', evaluate=False)) == \
        [0, 1]
def test__simple_dens():
    """``_simple_dens`` collects the denominators relevant for checking."""
    assert _simple_dens(1/x**0, [x]) == set()
    assert _simple_dens(1/x**y, [x]) == set([x**y])
    assert _simple_dens(1/root(x, 3), [x]) == set([x])
def test_issue_8755():
    """Fallback after a failed full unrad, and the ``_unrad`` keyword (issue 8755)."""
    # This tests two things: that if full unrad is attempted and fails
    # the solution should still be found; also it tests the use of
    # keyword `composite`.
    assert len(solve(sqrt(y)*x + x**3 - 1, x)) == 3
    assert len(solve(-512*y**3 + 1344*(x + 2)**(S(1)/3)*y**2 -
        1176*(x + 2)**(S(2)/3)*y - 169*x + 686, y, _unrad=False)) == 3
@slow
def test_issue_8828():
    """Three tangent-circle formulations must yield the same solutions (issue 8828)."""
    x1 = 0
    y1 = -620
    r1 = 920
    x2 = 126
    y2 = 276
    x3 = 51
    y3 = 205
    r3 = 104
    v = x, y, z

    # squared-distance formulation
    f1 = (x - x1)**2 + (y - y1)**2 - (r1 - z)**2
    f2 = (x2 - x)**2 + (y2 - y)**2 - z**2
    f3 = (x - x3)**2 + (y - y3)**2 - (r3 - z)**2
    F = f1,f2,f3

    # radical (sqrt) formulation of the same constraints
    g1 = sqrt((x - x1)**2 + (y - y1)**2) + z - r1
    g2 = f2
    g3 = sqrt((x - x3)**2 + (y - y3)**2) + z - r3
    G = g1,g2,g3

    A = solve(F, v)
    B = solve(G, v)
    C = solve(G, v, manual=True)

    # compare solutions numerically (2 digits) across all three routes
    p, q, r = [set([tuple(i.evalf(2) for i in j) for j in R]) for R in [A, B, C]]
    assert p == q == r
@slow
def test_issue_2840_8155():
    """Complete complex solution sets for sums of sines (issues 2840/8155)."""
    assert solve(sin(3*x) + sin(6*x)) == [
        0, -pi, pi, 14*pi/9, 16*pi/9, 2*pi, 2*I*(log(2) - log(-1 - sqrt(3)*I)),
        2*I*(log(2) - log(-1 + sqrt(3)*I)), 2*I*(log(2) - log(1 - sqrt(3)*I)),
        2*I*(log(2) - log(1 + sqrt(3)*I)), 2*I*(log(2) - log(-sqrt(3) - I)),
        2*I*(log(2) - log(-sqrt(3) + I)), 2*I*(log(2) - log(sqrt(3) - I)),
        2*I*(log(2) - log(sqrt(3) + I)), -2*I*log(-(-1)**(S(1)/9)), -2*I*log(
        -(-1)**(S(2)/9)), -2*I*log(-sin(pi/18) - I*cos(pi/18)), -2*I*log(-sin(
        pi/18) + I*cos(pi/18)), -2*I*log(sin(pi/18) - I*cos(pi/18)), -2*I*log(
        sin(pi/18) + I*cos(pi/18)), -2*I*log(exp(-2*I*pi/9)), -2*I*log(exp(
        -I*pi/9)), -2*I*log(exp(I*pi/9)), -2*I*log(exp(2*I*pi/9))]
    assert solve(2*sin(x) - 2*sin(2*x)) == [
        0, -pi, pi, 2*I*(log(2) - log(-sqrt(3) - I)), 2*I*(log(2) -
        log(-sqrt(3) + I)), 2*I*(log(2) - log(sqrt(3) - I)), 2*I*(log(2) -
        log(sqrt(3) + I))]
def test_issue_9567():
    """Rational equation with a removable pole (issue 9567)."""
    assert solve(1 + 1/(x - 1)) == [0]
def test_issue_11538():
    """Equations containing the constant E (issue 11538)."""
    assert solve(x + E) == [-E]
    assert solve(x**2 + E) == [-I*sqrt(E), I*sqrt(E)]
    assert solve(x**3 + 2*E) == [
        -cbrt(2*E),
        cbrt(2)*cbrt(E)/2 - cbrt(2)*sqrt(3)*I*cbrt(E)/2,
        cbrt(2)*cbrt(E)/2 + cbrt(2)*sqrt(3)*I*cbrt(E)/2]
    assert solve([x + 4, y + E], x, y) == {x: -4, y: -E}
    assert solve([x**2 + 4, y + E], x, y) == [
        (-2*I, -E), (2*I, -E)]

    e1 = x - y**3 + 4
    e2 = x + y + 4 + 4 * E
    assert len(solve([e1, e2], x, y)) == 3
@slow
def test_issue_12114():
    """Seven-symbol nonlinear system returns all six parametric solutions (issue 12114)."""
    a, b, c, d, e, f, g = symbols('a,b,c,d,e,f,g')
    terms = [1 + a*b + d*e, 1 + a*c + d*f, 1 + b*c + e*f,
             g - a**2 - d**2, g - b**2 - e**2, g - c**2 - f**2]
    s = solve(terms, [a, b, c, d, e, f, g], dict=True)
    assert s == [{a: -sqrt(-f**2 - 1), b: -sqrt(-f**2 - 1),
                  c: -sqrt(-f**2 - 1), d: f, e: f, g: -1},
                 {a: sqrt(-f**2 - 1), b: sqrt(-f**2 - 1),
                  c: sqrt(-f**2 - 1), d: f, e: f, g: -1},
                 {a: -sqrt(3)*f/2 - sqrt(-f**2 + 2)/2,
                  b: sqrt(3)*f/2 - sqrt(-f**2 + 2)/2, c: sqrt(-f**2 + 2),
                  d: -f/2 + sqrt(-3*f**2 + 6)/2,
                  e: -f/2 - sqrt(3)*sqrt(-f**2 + 2)/2, g: 2},
                 {a: -sqrt(3)*f/2 + sqrt(-f**2 + 2)/2,
                  b: sqrt(3)*f/2 + sqrt(-f**2 + 2)/2, c: -sqrt(-f**2 + 2),
                  d: -f/2 - sqrt(-3*f**2 + 6)/2,
                  e: -f/2 + sqrt(3)*sqrt(-f**2 + 2)/2, g: 2},
                 {a: sqrt(3)*f/2 - sqrt(-f**2 + 2)/2,
                  b: -sqrt(3)*f/2 - sqrt(-f**2 + 2)/2, c: sqrt(-f**2 + 2),
                  d: -f/2 - sqrt(-3*f**2 + 6)/2,
                  e: -f/2 + sqrt(3)*sqrt(-f**2 + 2)/2, g: 2},
                 {a: sqrt(3)*f/2 + sqrt(-f**2 + 2)/2,
                  b: -sqrt(3)*f/2 + sqrt(-f**2 + 2)/2, c: -sqrt(-f**2 + 2),
                  d: -f/2 + sqrt(-3*f**2 + 6)/2,
                  e: -f/2 - sqrt(3)*sqrt(-f**2 + 2)/2, g: 2}]
def test_inf():
    """Equations involving oo have no finite solutions."""
    assert solve(1 - oo*x) == []
    assert solve(oo*x, x) == []
    assert solve(oo*x - oo, x) == []
def test_issue_12448():
    """Solving with applied-function unknowns matches the plain-symbol result (issue 12448)."""
    f = Function('f')
    fun = [f(i) for i in range(15)]
    sym = symbols('x:15')
    reps = dict(zip(fun, sym))

    # symbol version of the linear system
    (x, y, z), c = sym[:3], sym[3:]
    ssym = solve([c[4*i]*x + c[4*i + 1]*y + c[4*i + 2]*z + c[4*i + 3]
        for i in range(3)], (x, y, z))

    # same system with f(i) playing the role of each symbol
    (x, y, z), c = fun[:3], fun[3:]
    sfun = solve([c[4*i]*x + c[4*i + 1]*y + c[4*i + 2]*z + c[4*i + 3]
        for i in range(3)], (x, y, z))

    assert sfun[fun[0]].xreplace(reps).count_ops() == \
        ssym[sym[0]].count_ops()
def test_denoms():
    """``denoms`` restricts returned denominators to the given symbols (any container)."""
    assert denoms(x/2 + 1/y) == set([2, y])
    assert denoms(x/2 + 1/y, y) == set([y])
    assert denoms(x/2 + 1/y, [y]) == set([y])
    assert denoms(1/x + 1/y + 1/z, [x, y]) == set([x, y])
    assert denoms(1/x + 1/y + 1/z, x, y) == set([x, y])
    assert denoms(1/x + 1/y + 1/z, set([x, y])) == set([x, y])
def test_issue_12476():
    """Large polynomial system with six discrete solutions (issue 12476)."""
    x0, x1, x2, x3, x4, x5 = symbols('x0 x1 x2 x3 x4 x5')
    eqns = [x0**2 - x0, x0*x1 - x1, x0*x2 - x2, x0*x3 - x3, x0*x4 - x4, x0*x5 - x5,
            x0*x1 - x1, -x0/3 + x1**2 - 2*x2/3, x1*x2 - x1/3 - x2/3 - x3/3,
            x1*x3 - x2/3 - x3/3 - x4/3, x1*x4 - 2*x3/3 - x5/3, x1*x5 - x4, x0*x2 - x2,
            x1*x2 - x1/3 - x2/3 - x3/3, -x0/6 - x1/6 + x2**2 - x2/6 - x3/3 - x4/6,
            -x1/6 + x2*x3 - x2/3 - x3/6 - x4/6 - x5/6, x2*x4 - x2/3 - x3/3 - x4/3,
            x2*x5 - x3, x0*x3 - x3, x1*x3 - x2/3 - x3/3 - x4/3,
            -x1/6 + x2*x3 - x2/3 - x3/6 - x4/6 - x5/6,
            -x0/6 - x1/6 - x2/6 + x3**2 - x3/3 - x4/6, -x1/3 - x2/3 + x3*x4 - x3/3,
            -x2 + x3*x5, x0*x4 - x4, x1*x4 - 2*x3/3 - x5/3, x2*x4 - x2/3 - x3/3 - x4/3,
            -x1/3 - x2/3 + x3*x4 - x3/3, -x0/3 - 2*x2/3 + x4**2, -x1 + x4*x5, x0*x5 - x5,
            x1*x5 - x4, x2*x5 - x3, -x2 + x3*x5, -x1 + x4*x5, -x0 + x5**2, x0 - 1]
    sols = [{x0: 1, x3: S(1)/6, x2: S(1)/6, x4: -S(2)/3, x1: -S(2)/3, x5: 1},
            {x0: 1, x3: S(1)/2, x2: -S(1)/2, x4: 0, x1: 0, x5: -1},
            {x0: 1, x3: -S(1)/3, x2: -S(1)/3, x4: S(1)/3, x1: S(1)/3, x5: 1},
            {x0: 1, x3: 1, x2: 1, x4: 1, x1: 1, x5: 1},
            {x0: 1, x3: -S(1)/3, x2: S(1)/3, x4: sqrt(5)/3, x1: -sqrt(5)/3, x5: -1},
            {x0: 1, x3: -S(1)/3, x2: S(1)/3, x4: -sqrt(5)/3, x1: sqrt(5)/3, x5: -1}]

    assert solve(eqns) == sols
def test_issue_13849():
    """Inconsistent tuple system returns no solutions (issue 13849)."""
    t = symbols('t')
    assert solve((t*(sqrt(5) + sqrt(2)) - sqrt(2), t), t) == []
def test_issue_14860():
    """Unit prefixes (kilo) survive solving (issue 14860)."""
    from sympy.physics.units import newton, kilo
    assert solve(8*kilo*newton + x + y, x) == [-8000*newton - y]
def test_issue_14721():
    """Parametric ellipse systems and an inconsistent pair (issue 14721)."""
    k, h, a, b = symbols(':4')
    assert solve([
        -1 + (-k + 1)**2/b**2 + (-h - 1)**2/a**2,
        -1 + (-k + 1)**2/b**2 + (-h + 1)**2/a**2,
        h, k + 2], h, k, a, b) == [
        (0, -2, -b*sqrt(1/(b**2 - 9)), b),
        (0, -2, b*sqrt(1/(b**2 - 9)), b)]
    assert solve([
        h, h/a + 1/b**2 - 2, -h/2 + 1/b**2 - 2], a, h, b) == [
        (a, 0, -sqrt(2)/2), (a, 0, sqrt(2)/2)]
    assert solve((a + b**2 - 1, a + b**2 - 2)) == []
def test_issue_14779():
    """Radical equation with Abs(x)/x sign factor (issue 14779)."""
    x = symbols('x', real=True)
    assert solve(sqrt(x**4 - 130*x**2 + 1089) + sqrt(x**4 - 130*x**2
                 + 3969) - 96*Abs(x)/x,x) == [sqrt(130)]
def test_issue_15307():
    """Unevaluated Mul/Add factors in systems (issue 15307)."""
    assert solve((y - 2, Mul(x + 3,x - 2, evaluate=False))) == \
        [{x: -3, y: 2}, {x: 2, y: 2}]
    assert solve((y - 2, Mul(3, x - 2, evaluate=False))) == \
        {x: 2, y: 2}
    assert solve((y - 2, Add(x + 4, x - 2, evaluate=False))) == \
        {x: -1, y: 2}
    eq1 = Eq(12513*x + 2*y - 219093, -5726*x - y)
    eq2 = Eq(-2*x + 8, 2*x - 40)
    assert solve([eq1, eq2]) == {x:12, y:75}
def test_issue_15415():
    """Single-equation lists return dicts; contradictory Eq gives [] (issue 15415)."""
    assert solve(x - 3, x) == [3]
    assert solve([x - 3], x) == {x:3}
    assert solve(Eq(y + 3*x**2/2, y + 3*x), y) == []
    assert solve([Eq(y + 3*x**2/2, y + 3*x)], y) == []
    assert solve([Eq(y + 3*x**2/2, y + 3*x), Eq(x, 1)], y) == []
@slow
def test_issue_15731():
# f(x)**g(x)=c
assert solve(Eq((x**2 - 7*x + 11)**(x**2 - 13*x + 42), 1)) == [2, 3, 4, 5, 6, 7]
assert solve((x)**(x + 4) - 4) == [-2]
assert solve((-x)**(-x + 4) - 4) == [2]
assert solve((x**2 - 6)**(x**2 - 2) - 4) == [-2, 2]
assert solve((x**2 - 2*x - 1)**(x**2 - 3) - 1/(1 - 2*sqrt(2))) == [sqrt(2)]
assert solve(x**(x + S.Half) - 4*sqrt(2)) == [S(2)]
assert solve((x**2 + 1)**x - 25) == [2]
assert solve(x**(2/x) - 2) == [2, 4]
assert solve((x/2)**(2/x) - sqrt(2)) == [4, 8]
assert solve(x**(x + S.Half) - S(9)/4) == [S(3)/2]
# a**g(x)=c
assert solve((-sqrt(sqrt(2)))**x - 2) == [4, log(2)/(log(2**(S(1)/4)) + I*pi)]
assert solve((sqrt(2))**x - sqrt(sqrt(2))) == [S(1)/2]
assert solve((-sqrt(2))**x + 2*(sqrt(2))) == [3,
(3*log(2)**2 + 4*pi**2 - 4*I*pi*log(2))/(log(2)**2 + 4*pi**2)]
assert solve((sqrt(2))**x - 2*(sqrt(2))) == [3]
assert solve(I**x + 1) == [2]
assert solve((1 + I)**x - 2*I) == [2]
assert solve((sqrt(2) + sqrt(3))**x - (2*sqrt(6) + 5)**(S(1)/3)) == [S(2)/3]
# bases of both sides are equal
b = Symbol('b')
assert solve(b**x - b**2, x) == [2]
assert solve(b**x - 1/b, x) == [-1]
assert solve(b**x - b, x) == [1]
b = Symbol('b', positive=True)
assert solve(b**x - b**2, x) == [2]
assert solve(b**x - 1/b, x) == [-1]
| 38.978442 | 94 | 0.482861 | from sympy import (
Abs, And, Derivative, Dummy, Eq, Float, Function, Gt, I, Integral,
LambertW, Lt, Matrix, Or, Poly, Q, Rational, S, Symbol, Ne,
Wild, acos, asin, atan, atanh, cos, cosh, diff, erf, erfinv, erfc,
erfcinv, exp, im, log, pi, re, sec, sin,
sinh, solve, solve_linear, sqrt, sstr, symbols, sympify, tan, tanh,
root, simplify, atan2, arg, Mul, SparseMatrix, ask, Tuple, nsolve, oo,
E, cbrt, denom, Add)
from sympy.core.compatibility import range
from sympy.core.function import nfloat
from sympy.solvers import solve_linear_system, solve_linear_system_LU, \
solve_undetermined_coeffs
from sympy.solvers.solvers import _invert, unrad, checksol, posify, _ispow, \
det_quick, det_perm, det_minor, _simple_dens, check_assumptions, denoms, \
failing_assumptions
from sympy.physics.units import cm
from sympy.polys.rootoftools import CRootOf
from sympy.utilities.pytest import slow, XFAIL, SKIP, raises, skip, ON_TRAVIS
from sympy.utilities.randtest import verify_numerically as tn
from sympy.abc import a, b, c, d, k, h, p, x, y, z, t, q, m
def NS(e, n=15, **options):
    """Sympify ``e``, evaluate it to ``n`` digits and return the full-precision string."""
    expr = sympify(e)
    value = expr.evalf(n, **options)
    return sstr(value, full_prec=True)
def test_swap_back():
    """Applied functions used as solve targets are swapped back correctly."""
    f, g = map(Function, 'fg')
    fx, gx = f(x), g(x)
    assert solve([fx + y - 2, fx - gx - 5], fx, y, gx) == \
        {fx: gx + 5, y: -gx - 3}
    assert solve(fx + gx*x - 2, [fx, gx], dict=True)[0] == {fx: 2, gx: 0}
    assert solve(fx + gx**2*x - y, [fx, gx], dict=True) == [{fx: y - gx**2*x}]
    assert solve([f(1) - 2, x + 2], dict=True) == [{x: -2, f(1): 2}]
def guess_solve_strategy(eq, symbol):
    """Return True when ``solve`` can handle ``eq`` for ``symbol``, False otherwise.

    ``solve`` raising TypeError or NotImplementedError is treated as
    "no strategy available"; any other exception propagates.
    """
    try:
        solve(eq, symbol)
    except (TypeError, NotImplementedError):
        return False
    return True
def test_guess_poly():
    """Polynomial-form equations are handled by solve."""
    assert guess_solve_strategy( S(4), x )
    assert guess_solve_strategy( x, x )
    assert guess_solve_strategy( x + a, x )
    assert guess_solve_strategy( 2*x, x )
    assert guess_solve_strategy( x + sqrt(2), x)
    assert guess_solve_strategy( x + 2**Rational(1, 4), x)
    assert guess_solve_strategy( x**2 + 1, x )
    assert guess_solve_strategy( x**2 - 1, x )
    assert guess_solve_strategy( x*y + y, x )
    assert guess_solve_strategy( x*exp(y) + y, x)
    assert guess_solve_strategy(
        (x - y**3)/(y**2*sqrt(1 - y**2)), x)
def test_guess_poly_cv():
    """Equations that become polynomial after a change of variable
    (fractional powers, 1/x terms) are accepted by solve."""
    assert guess_solve_strategy( sqrt(x) + 1, x )
    assert guess_solve_strategy(
        x**Rational(1, 3) + sqrt(x) + 1, x )
    assert guess_solve_strategy( 4*x*(1 - sqrt(x)), x )
    assert guess_solve_strategy( x + 1/x + y, x )
def test_guess_rational_cv():
    """Rational functions (possibly via a change of variable) are
    accepted by solve."""
    assert guess_solve_strategy( (x + 1)/(x**2 + 2), x)
    assert guess_solve_strategy(
        (x - y**3)/(y**2*sqrt(1 - y**2)), y)
    # NOTE(review): the original last line ended with a stray '\'
    # line-continuation (left over from a stripped trailing comment),
    # which joined it to the next 'def' and broke the file; removed.
    assert guess_solve_strategy( (sqrt(x) + 1)/(x**Rational(1, 3) + sqrt(x) + 1), x )
def test_guess_transcendental():
    """Transcendental equations (exp, cos, powers with symbolic exponent)
    are accepted by solve."""
    assert guess_solve_strategy( exp(x) + 1, x )
    assert guess_solve_strategy( 2*cos(x) - y, x )
    assert guess_solve_strategy(
        exp(x) + exp(-x) - y, x )
    assert guess_solve_strategy(3**x - 10, x)
    assert guess_solve_strategy(-3**x + 10, x)
    assert guess_solve_strategy(a*x**b - y, x)
def test_solve_args():
    """Calling conventions of solve(): iterable containers, implicit and
    explicit symbol selection, dict/set output flags, exclude=, and
    degenerate inputs (constants, booleans, empty systems)."""
    # equations may come in any iterable container
    ans = {x: -3, y: 1}
    eqs = (x + 5*y - 2, -3*x + 6*y - 15)
    assert all(solve(container(eqs), x, y) == ans for container in
        (tuple, list, set, frozenset))
    assert solve(Tuple(*eqs), x, y) == ans
    # implicit symbol to solve for
    assert set(solve(x**2 - 4)) == set([S(2), -S(2)])
    assert solve([x + y - 3, x - y - 5]) == {x: 4, y: -1}
    assert solve(x - exp(x), x, implicit=True) == [exp(x)]
    # no symbol to solve for
    assert solve(42) == solve(42, x) == []
    assert solve([1, 2]) == []
    # duplicate symbols are removed
    assert solve((x - 3, y + 2), x, y, x) == {x: 3, y: -2}
    # unordered symbol collections
    assert solve(y - 3, set([y])) == [3]
    assert solve(y - 3, set([x, y])) == [{y: 3}]
    # multiple symbols: tuple vs dict output
    assert solve(x + y - 3, [x, y]) == [(3 - y, y)]
    assert solve(x + y - 3, [x, y], dict=True) == [{x: 3 - y}]
    assert solve(x + y - 3) == [{x: 3 - y}]
    assert solve(a + b*x - 2, [a, b]) == {a: 2, b: 0}
    args = (a + b)*x - b**2 + 2, a, b
    assert solve(*args) == \
        [(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))]
    assert solve(*args, set=True) == \
        ([a, b], set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))]))
    assert solve(*args, dict=True) == \
        [{b: sqrt(2), a: -sqrt(2)}, {b: -sqrt(2), a: sqrt(2)}]
    # exclude= keeps the named symbols out of the solution
    eq = a*x**2 + b*x + c - ((x - h)**2 + 4*p*k)/4/p
    flags = dict(dict=True)
    assert solve(eq, [h, p, k], exclude=[a, b, c], **flags) == \
        [{k: c - b**2/(4*a), h: -b/(2*a), p: 1/(4*a)}]
    flags.update(dict(simplify=False))
    assert solve(eq, [h, p, k], exclude=[a, b, c], **flags) == \
        [{k: (4*a*c - b**2)/(4*a), h: -b/(2*a), p: 1/(4*a)}]
    assert solve(a*x + b**2/(x + 4) - 3*x - 4/x, a, b, dict=True) == \
        [{a: (-b**2*x + 3*x**3 + 12*x**2 + 4*x + 16)/(x**2*(x + 4))}]
    assert solve(1/(1/x - y + exp(y))) == []
    raises(
        NotImplementedError, lambda: solve(exp(x) + sin(x) + exp(y) + sin(y)))
    assert solve([y, exp(x) + x]) == [{x: -LambertW(1), y: 0}]
    assert solve(
        (exp(x) - x, exp(y) - y)) == [{x: -LambertW(-1), y: -LambertW(-1)}]
    # NOTE(review): 'assert' was missing here, making this a no-op
    # expression statement; restored so the comparison is checked.
    assert solve([y, exp(x) + x], x, y) == [(-LambertW(1), 0)]
    assert solve(x**2 - pi, pi) == [x**2]
    assert solve([], [x]) == []
    assert solve([(x + y)**2 - 4, x + y - 2]) == [{x: -y + 2}]
    assert solve((x + y - 2, 2*x + 2*y - 4)) == {x: -y + 2}
    # boolean / relational members of a system
    assert solve([True, Eq(x, 0)], [x], dict=True) == [{x: 0}]
    assert solve([Eq(x, x), Eq(x, 0), Eq(x, x+1)], [x], dict=True) == []
    assert not solve([Eq(x, x+1), x < 2], x)
    assert solve([Eq(x, 0), x+1<2]) == Eq(x, 0)
    assert solve([Eq(x, x), Eq(x, x+1)], x) == []
    assert solve(True, x) == []
    assert solve([x-1, False], [x], set=True) == ([], set())
def test_solve_polynomial1():
    """Basic polynomial solving: linear, quadratic, cubic roots and
    2x2 symbolic linear systems with various symbol containers."""
    assert solve(3*x - 2, x) == [Rational(2, 3)]
    assert solve(Eq(3*x, 2), x) == [Rational(2, 3)]
    assert set(solve(x**2 - 1, x)) == set([-S(1), S(1)])
    assert set(solve(Eq(x**2, 1), x)) == set([-S(1), S(1)])
    assert solve(x - y**3, x) == [y**3]
    rx = root(x, 3)
    assert solve(x - y**3, y) == [
        rx, -rx/2 - sqrt(3)*I*rx/2, -rx/2 + sqrt(3)*I*rx/2]
    a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
    assert solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y) == \
        {
            x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
            y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
        }
    solution = {y: S.Zero, x: S.Zero}
    assert solve((x - y, x + y), x, y ) == solution
    assert solve((x - y, x + y), (x, y)) == solution
    assert solve((x - y, x + y), [x, y]) == solution
    assert set(solve(x**3 - 15*x - 4, x)) == set([
        -2 + 3**Rational(1, 2),
        S(4),
        -2 - 3**Rational(1, 2)
    ])
    assert set(solve((x**2 - 1)**2 - a, x)) == \
        set([sqrt(1 + sqrt(a)), -sqrt(1 + sqrt(a)),
             sqrt(1 - sqrt(a)), -sqrt(1 - sqrt(a))])
def test_solve_polynomial2():
    """A nonzero constant equation has no solutions."""
    assert solve(4, x) == []
def test_solve_polynomial_cv_1a():
    """Fractional powers of x solved via the change of variable
    y -> x**Rational(p, q)."""
    assert solve( sqrt(x) - 1, x) == [1]
    assert solve( sqrt(x) - 2, x) == [4]
    assert solve( x**Rational(1, 4) - 2, x) == [16]
    assert solve( x**Rational(1, 3) - 3, x) == [27]
    assert solve(sqrt(x) + x**Rational(1, 3) + x**Rational(1, 4), x) == [0]
def test_solve_polynomial_cv_1b():
    """Products involving fractional powers keep the x = 0 root as
    well as the nontrivial one."""
    assert set(solve(4*x*(1 - a*sqrt(x)), x)) == set([S(0), 1/a**2])
    assert set(solve(x*(root(x, 3) - 3), x)) == set([S(0), S(27)])
def test_solve_polynomial_cv_2():
    """An equation with a 1/x term is cleared to a polynomial by
    multiplying through by x; root order may vary."""
    assert solve(x + 1/x - 1, x) in \
        [[ Rational(1, 2) + I*sqrt(3)/2, Rational(1, 2) - I*sqrt(3)/2],
         [ Rational(1, 2) - I*sqrt(3)/2, Rational(1, 2) + I*sqrt(3)/2]]
def test_quintics_1():
    """Quintics: a solvable one is verified numerically; a general one
    returns CRootOf instances."""
    f = x**5 - 110*x**3 - 55*x**2 + 2310*x + 979
    # one root is -2, but this is the simplest form of the solution
    s = solve(f, check=False)
    # NOTE(review): loop variable renamed from 'root' to 'rt' — the old
    # name shadowed sympy's root() imported at module level.
    for rt in s:
        res = f.subs(x, rt.n()).n()
        assert tn(res, 0)
    f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
    s = solve(f)
    for rt in s:
        assert rt.func == CRootOf
    # it is *much* faster to use nroots to get them than to solve the
    # equation only to get RootOf solutions which are then numerically
    # evaluated. So for eq = x**5 + 3*x + 7 do Poly(eq).nroots() rather
    # than [i.n() for i in solve(eq)] to get the numerical roots of eq.
    assert nfloat(solve(x**5 + 3*x**3 + 7)[0], exponent=False) == \
        CRootOf(x**5 + 3*x**3 + 7, 0).n()
def test_highorder_poly():
    """A degree-6 polynomial yields six CRootOf solutions."""
    # just testing that the uniq generator is unpacked
    sol = solve(x**6 - 2*x + 2)
    assert all(isinstance(i, CRootOf) for i in sol) and len(sol) == 6
def test_quintics_2():
    """Another solvable quintic checked numerically, and a general one
    returning CRootOf instances."""
    f = x**5 + 15*x + 12
    s = solve(f, check=False)
    # NOTE(review): loop variable renamed from 'root' to 'rt' — the old
    # name shadowed sympy's root() imported at module level.
    for rt in s:
        res = f.subs(x, rt.n()).n()
        assert tn(res, 0)
    f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
    s = solve(f)
    for rt in s:
        assert rt.func == CRootOf
def test_solve_rational():
    """Solve a rational expression: only the numerator's root survives."""
    assert solve( ( x - y**3 )/( (y**2)*sqrt(1 - y**2) ), x) == [y**3]
def test_solve_nonlinear():
    """Nonlinear equations solved for either symbol, dict output."""
    assert solve(x**2 - y**2, x, y, dict=True) == [{x: -y}, {x: y}]
    assert solve(x**2 - y**2/exp(x), x, y, dict=True) == [{x: 2*LambertW(y/2)}]
    assert solve(x**2 - y**2/exp(x), y, x, dict=True) == [{y: -x*sqrt(exp(x))},
                                                          {y: x*sqrt(exp(x))}]
def test_issue_8666():
    """Equations that reduce to contradictions after clearing a shared
    denominator must yield no solutions (sympy issue 8666)."""
    x = symbols('x')
    assert solve(Eq(x**2 - 1/(x**2 - 4), 4 - 1/(x**2 - 4)), x) == []
    assert solve(Eq(x + 1/x, 1/x), x) == []
def test_issue_7228():
    """Exponential equation with quadratic exponent (sympy issue 7228)."""
    assert solve(4**(2*(x**2) + 2*x) - 8, x) == [-Rational(3, 2), S.Half]
def test_issue_7190():
    """Sum of logs: only the root in both domains is kept (issue 7190)."""
    assert solve(log(x-3) + log(x+3), x) == [sqrt(10)]
def test_linear_system():
    """Overdetermined, inconsistent and parametric linear systems."""
    x, y, z, t, n = symbols('x, y, z, t, n')
    assert solve([x - 1, x - y, x - 2*y, y - 1], [x, y]) == []
    assert solve([x - 1, x - y, x - 2*y, x - 1], [x, y]) == []
    assert solve([x - 1, x - 1, x - y, x - 2*y], [x, y]) == []
    assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == {x: -3, y: 1}
    M = Matrix([[0, 0, n*(n + 1), (n + 1)**2, 0],
                [n + 1, n + 1, -2*n - 1, -(n + 1), 0],
                [-1, 0, 1, 0, 0]])
    assert solve_linear_system(M, x, y, z, t) == \
        {x: -t - t/n, z: -t - t/n, y: 0}
    assert solve([x + y + z + t, -z - t], x, y, z, t) == {x: -y, z: -t}
def test_linear_system_function():
    """Linear systems where the unknowns are applied functions."""
    a = Function('a')
    assert solve([a(0, 0) + a(0, 1) + a(1, 0) + a(1, 1), -a(1, 0) - a(1, 1)],
        a(0, 0), a(0, 1), a(1, 0), a(1, 1)) == {a(1, 0): -a(1, 1), a(0, 0): -a(0, 1)}
def test_linear_systemLU():
    """LU-decomposition based linear solver with a symbolic parameter."""
    n = Symbol('n')
    M = Matrix([[1, 2, 0, 1], [1, 3, 2*n, 1], [4, -1, n**2, 1]])
    assert solve_linear_system_LU(M, [x, y, z]) == {z: -3/(n**2 + 18*n),
                                                    x: 1 - 12*n/(n**2 + 18*n),
                                                    y: 6*n/(n**2 + 18*n)}
# Note: multiple solutions exist for some of these equations, so the tests
# should be expected to break if the implementation of the solver changes
# in such a way that a different branch is chosen
@slow
def test_solve_transcendental():
    """Transcendental solving: exp/log/trig equations and LambertW forms.

    Several asserts accept any member of a list of equivalent answer
    sets, since the exact returned form may vary between code paths.
    """
    from sympy.abc import a, b
    assert solve(exp(x) - 3, x) == [log(3)]
    assert set(solve((a*x + b)*(exp(x) - 3), x)) == set([-b/a, log(3)])
    assert solve(cos(x) - y, x) == [-acos(y) + 2*pi, acos(y)]
    assert solve(2*cos(x) - y, x) == [-acos(y/2) + 2*pi, acos(y/2)]
    assert solve(Eq(cos(x), sin(x)), x) == [-3*pi/4, pi/4]
    assert set(solve(exp(x) + exp(-x) - y, x)) in [set([
        log(y/2 - sqrt(y**2 - 4)/2),
        log(y/2 + sqrt(y**2 - 4)/2),
    ]), set([
        log(y - sqrt(y**2 - 4)) - log(2),
        log(y + sqrt(y**2 - 4)) - log(2)]),
        set([
        log(y/2 - sqrt((y - 2)*(y + 2))/2),
        log(y/2 + sqrt((y - 2)*(y + 2))/2)])]
    assert solve(exp(x) - 3, x) == [log(3)]
    assert solve(Eq(exp(x), 3), x) == [log(3)]
    assert solve(log(x) - 3, x) == [exp(3)]
    assert solve(sqrt(3*x) - 4, x) == [Rational(16, 3)]
    assert solve(3**(x + 2), x) == []
    assert solve(3**(2 - x), x) == []
    assert solve(x + 2**x, x) == [-LambertW(log(2))/log(2)]
    ans = solve(3*x + 5 + 2**(-5*x + 3), x)
    assert len(ans) == 1 and ans[0].expand() == \
        -Rational(5, 3) + LambertW(-10240*root(2, 3)*log(2)/3)/(5*log(2))
    assert solve(5*x - 1 + 3*exp(2 - 7*x), x) == \
        [Rational(1, 5) + LambertW(-21*exp(Rational(3, 5))/5)/7]
    assert solve(2*x + 5 + log(3*x - 2), x) == \
        [Rational(2, 3) + LambertW(2*exp(-Rational(19, 3))/3)/2]
    assert solve(3*x + log(4*x), x) == [LambertW(Rational(3, 4))/3]
    assert set(solve((2*x + 8)*(8 + exp(x)), x)) == set([S(-4), log(8) + pi*I])
    eq = 2*exp(3*x + 4) - 3
    ans = solve(eq, x)  # this generated a failure in flatten
    assert len(ans) == 3 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
    assert solve(2*log(3*x + 4) - 3, x) == [(exp(Rational(3, 2)) - 4)/3]
    assert solve(exp(x) + 1, x) == [pi*I]
    eq = 2*(3*x + 4)**5 - 6*7**(3*x + 9)
    result = solve(eq, x)
    ans = [(log(2401) + 5*LambertW(-log(7**(7*3**Rational(1, 5)/5))))/(3*log(7))/-1]
    assert result == ans
    # it works if expanded, too
    assert solve(eq.expand(), x) == result
    assert solve(z*cos(x) - y, x) == [-acos(y/z) + 2*pi, acos(y/z)]
    assert solve(z*cos(2*x) - y, x) == [-acos(y/z)/2 + pi, acos(y/z)/2]
    assert solve(z*cos(sin(x)) - y, x) == [
        pi - asin(acos(y/z)), asin(acos(y/z) - 2*pi) + pi,
        -asin(acos(y/z) - 2*pi), asin(acos(y/z))]
    assert solve(z*cos(x), x) == [pi/2, 3*pi/2]
    # issue 4508
    assert solve(y - b*x/(a + x), x) in [[-a*y/(y - b)], [a*y/(b - y)]]
    assert solve(y - b*exp(a/x), x) == [a/log(y/b)]
    # issue 4507
    assert solve(y - b/(1 + a*x), x) in [[(b - y)/(a*y)], [-((y - b)/(a*y))]]
    # issue 4506
    assert solve(y - a*x**b, x) == [(y/a)**(1/b)]
    # issue 4505
    assert solve(z**x - y, x) == [log(y)/log(z)]
    # issue 4504
    assert solve(2**x - 10, x) == [log(10)/log(2)]
    # issue 6744
    assert solve(x*y) == [{x: 0}, {y: 0}]
    assert solve([x*y]) == [{x: 0}, {y: 0}]
    assert solve(x**y - 1) == [{x: 1}, {y: 0}]
    assert solve([x**y - 1]) == [{x: 1}, {y: 0}]
    assert solve(x*y*(x**2 - y**2)) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
    assert solve([x*y*(x**2 - y**2)]) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
    # issue 4739
    assert solve(exp(log(5)*x) - 2**x, x) == [0]
    # issue 14791
    assert solve(exp(log(5)*x) - exp(log(2)*x), x) == [0]
    f = Function('f')
    assert solve(y*f(log(5)*x) - y*f(log(2)*x), x) == [0]
    assert solve(f(x) - f(0), x) == [0]
    assert solve(f(x) - f(2 - x), x) == [1]
    raises(NotImplementedError, lambda: solve(f(x, y) - f(1, 2), x))
    raises(NotImplementedError, lambda: solve(f(x, y) - f(2 - x, 2), x))
    raises(ValueError, lambda: solve(f(x, y) - f(1 - x), x))
    raises(ValueError, lambda: solve(f(x, y) - f(1), x))
    # misc
    # make sure that the right variables is picked up in tsolve
    # shouldn't generate a GeneratorsNeeded error in _tsolve when the NaN is generated
    raises(NotImplementedError, lambda:
        solve(sinh(x)*sinh(sinh(x)) + cosh(x)*cosh(sinh(x)) - 3))
    raises(NotImplementedError, lambda: solve((x + 2)**y*x - 3, x))
    assert solve(sin(sqrt(x))) == [0, pi**2]
    a, b = symbols('a, b', real=True, negative=False)
    assert str(solve(Eq(a, 0.5 - cos(pi*b)/2), b)) == \
        '[2.0 - 0.318309886183791*acos(1.0 - 2.0*a), 0.318309886183791*acos(1.0 - 2.0*a)]'
    assert solve(y**(1/x) - z, x) == [log(y)/log(z)]
assert solve(y**(1/x) - z, x) == [log(y)/log(z)]
def test_solve_for_functions_derivatives():
    """Applied functions and their derivatives can be the unknowns of
    (linear) systems, just like ordinary symbols."""
    t = Symbol('t')
    x = Function('x')(t)
    y = Function('y')(t)
    a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
    soln = solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y)
    assert soln == {
        x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
        y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
    }
    assert solve(x - 1, x) == [1]
    assert solve(3*x - 2, x) == [Rational(2, 3)]
    # solve for derivatives of the functions
    soln = solve([a11*x.diff(t) + a12*y.diff(t) - b1, a21*x.diff(t) +
        a22*y.diff(t) - b2], x.diff(t), y.diff(t))
    assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
        x.diff(t): (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
    assert solve(x.diff(t) - 1, x.diff(t)) == [1]
    assert solve(3*x.diff(t) - 2, x.diff(t)) == [Rational(2, 3)]
    eqns = set((3*x - 1, 2*y - 4))
    assert solve(eqns, set((x, y))) == { x: Rational(1, 3), y: 2 }
    x = Symbol('x')
    f = Function('f')
    F = x**2 + f(x)**2 - 4*x - 1
    assert solve(F.diff(x), diff(f(x), x)) == [(-x + 2)/f(x)]
    # mixed system: one symbol and one derivative
    x = Symbol('x')
    y = Function('y')(t)
    soln = solve([a11*x + a12*y.diff(t) - b1, a21*x +
        a22*y.diff(t) - b2], x, y.diff(t))
    assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
        x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
def test_issue_3725():
    """Solve an implicit-differentiation result for f'(x); either of two
    equivalent forms is acceptable (issue 3725)."""
    f = Function('f')
    F = x**2 + f(x)**2 - 4*x - 1
    e = F.diff(x)
    assert solve(e, f(x).diff(x)) in [[(2 - x)/f(x)], [-((x - 2)/f(x))]]
def test_issue_3870():
    """Matrix equations (A*B = C, commutators) solved for the entries
    of a symbolic 2x2 matrix (issue 3870)."""
    a, b, c, d = symbols('a b c d')
    A = Matrix(2, 2, [a, b, c, d])
    B = Matrix(2, 2, [0, 2, -3, 0])
    C = Matrix(2, 2, [1, 2, 3, 4])
    assert solve(A*B - C, [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
    assert solve([A*B - C], [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
    assert solve(Eq(A*B, C), [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
    assert solve([A*B - B*A], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
    assert solve([A*C - C*A], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
    assert solve([A*B - B*A, A*C - C*A], [a, b, c, d]) == {a: d, b: 0, c: 0}
    assert solve([Eq(A*B, B*A)], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
    assert solve([Eq(A*C, C*A)], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
    assert solve([Eq(A*B, B*A), Eq(A*C, C*A)], [a, b, c, d]) == {a: d, b: 0, c: 0}
def test_solve_linear():
    """solve_linear: returns (symbol, solution), (0, 1) when there is no
    linear solution, (0, 0) for an identically-zero numerator, or
    (expr, 1) when the expression cannot be inverted."""
    w = Wild('w')
    assert solve_linear(x, x) == (0, 1)
    assert solve_linear(x, exclude=[x]) == (0, 1)
    assert solve_linear(x, symbols=[w]) == (0, 1)
    assert solve_linear(x, y - 2*x) in [(x, y/3), (y, 3*x)]
    assert solve_linear(x, y - 2*x, exclude=[x]) == (y, 3*x)
    assert solve_linear(3*x - y, 0) in [(x, y/3), (y, 3*x)]
    assert solve_linear(3*x - y, 0, [x]) == (x, y/3)
    assert solve_linear(3*x - y, 0, [y]) == (y, 3*x)
    assert solve_linear(x**2/y, 1) == (y, x**2)
    assert solve_linear(w, x) in [(w, x), (x, w)]
    assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y) == \
        (y, -2 - cos(x)**2 - sin(x)**2)
    assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y, symbols=[x]) == (0, 1)
    assert solve_linear(Eq(x, 3)) == (x, 3)
    assert solve_linear(1/(1/x - 2)) == (0, 0)
    assert solve_linear((x + 1)*exp(-x), symbols=[x]) == (x, -1)
    assert solve_linear((x + 1)*exp(x), symbols=[x]) == ((x + 1)*exp(x), 1)
    assert solve_linear(x*exp(-x**2), symbols=[x]) == (x, 0)
    assert solve_linear(0**x - 1) == (0**x - 1, 1)
    assert solve_linear(1 + 1/(x - 1)) == (x, 0)
    eq = y*cos(x)**2 + y*sin(x)**2 - y
    assert solve_linear(eq) == (0, 1)
    eq = cos(x)**2 + sin(x)**2
    assert solve_linear(eq) == (0, 1)
    raises(ValueError, lambda: solve_linear(Eq(x, 3), 3))
def test_solve_undetermined_coeffs():
    """Undetermined-coefficient systems in x, including one where the
    rational coefficients must first be cleared."""
    assert solve_undetermined_coeffs(a*x**2 + b*x**2 + b*x + 2*c*x + c + 1, [a, b, c], x) == \
        {a: -2, b: 2, c: -1}
    assert solve_undetermined_coeffs(a/x + b/(x + 1) - (2*x + 1)/(x**2 + x), [a, b], x) == \
        {a: 1, b: 1}
    assert solve_undetermined_coeffs(((c + 1)*a*x**2 + (c + 1)*b*x**2 +
        (c + 1)*b*x + (c + 1)*2*c*x + (c + 1)**2)/(c + 1), [a, b, c], x) == \
        {a: -2, b: 2, c: -1}
def test_solve_inequalities():
    """Inequality and boolean-equation solving; the answer shape differs
    for a plain symbol vs. one declared real."""
    x = Symbol('x')
    sol = And(S(0) < x, x < oo)
    assert solve(x + 1 > 1) == sol
    assert solve([x + 1 > 1]) == sol
    assert solve([x + 1 > 1], x) == sol
    assert solve([x + 1 > 1], [x]) == sol
    system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
    assert solve(system) == \
        And(Or(And(Lt(-sqrt(2), x), Lt(x, -1)),
               And(Lt(1, x), Lt(x, sqrt(2)))), Eq(0, 0))
    x = Symbol('x', real=True)
    system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
    assert solve(system) == \
        Or(And(Lt(-sqrt(2), x), Lt(x, -1)), And(Lt(1, x), Lt(x, sqrt(2))))
    assert solve((x - 3)/(x - 2) < 0, x) == And(Lt(2, x), Lt(x, 3))
    assert solve(x/(x + 1) > 1, x) == And(Lt(-oo, x), Lt(x, -1))
    assert solve(sin(x) > S.Half) == And(pi/6 < x, x < 5*pi/6)
    assert solve(Eq(False, x < 1)) == (S(1) <= x) & (x < oo)
    assert solve(Eq(True, x < 1)) == (-oo < x) & (x < 1)
    assert solve(Eq(x < 1, False)) == (S(1) <= x) & (x < oo)
    assert solve(Eq(x < 1, True)) == (-oo < x) & (x < 1)
    assert solve(Eq(False, x)) == False
    assert solve(Eq(True, x)) == True
    assert solve(Eq(False, ~x)) == True
    assert solve(Eq(True, ~x)) == False
    assert solve(Ne(True, x)) == False
def test_issue_4793():
    """Assorted regressions: expressions whose zeros vanish after
    simplification, exponential towers, and set-form output."""
    assert solve(1/x) == []
    assert solve(x*(1 - 5/x)) == [5]
    assert solve(x + sqrt(x) - 2) == [1]
    assert solve(-(1 + x)/(2 + x)**2 + 1/(2 + x)) == []
    assert solve(-x**2 - 2*x + (x + 1)**2 - 1) == []
    assert solve((x/(x + 1) + 3)**(-2)) == []
    assert solve(x/sqrt(x**2 + 1), x) == [0]
    assert solve(exp(x) - y, x) == [log(y)]
    assert solve(exp(x)) == []
    assert solve(x**2 + x + sin(y)**2 + cos(y)**2 - 1, x) in [[0, -1], [-1, 0]]
    eq = 4*3**(5*x + 2) - 7
    ans = solve(eq, x)
    assert len(ans) == 5 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
    assert solve(log(x**2) - y**2/exp(x), x, y, set=True) == (
        [x, y],
        {(x, sqrt(exp(x) * log(x ** 2))), (x, -sqrt(exp(x) * log(x ** 2)))})
    assert solve(x**2*z**2 - z**2*y**2) == [{x: -y}, {x: y}, {z: 0}]
    assert solve((x - 1)/(1 + 1/(x - 1))) == []
    assert solve(x**(y*z) - x, x) == [1]
    raises(NotImplementedError, lambda: solve(log(x) - exp(x), x))
    raises(NotImplementedError, lambda: solve(2**x - exp(x) - 3))
def test_PR1964():
    """Radical and exponential equations from PR 1964; several asserts
    accept any of a few equivalent answer sets."""
    assert solve(sqrt(x)) == solve(sqrt(x**3)) == [0]
    assert solve(sqrt(x - 1)) == [1]
    a = Symbol('a')
    assert solve(-3*a/sqrt(x), x) == []
    assert solve(2*x/(x + 2) - 1, x) == [2]
    assert set(solve((x**2/(7 - x)).diff(x))) == set([S(0), S(14)])
    f = Function('f')
    assert solve((3 - 5*x/f(x))*f(x), f(x)) == [5*x/3]
    assert solve(1/root(5 + x, 5) - 9, x) == [-295244/S(59049)]
    assert solve(sqrt(x) + sqrt(sqrt(x)) - 4) == [(-S.Half + sqrt(17)/2)**4]
    assert set(solve(Poly(sqrt(exp(x)) + sqrt(exp(-x)) - 4))) in \
        [
            set([log((-sqrt(3) + 2)**2), log((sqrt(3) + 2)**2)]),
            set([2*log(-sqrt(3) + 2), 2*log(sqrt(3) + 2)]),
            set([log(-4*sqrt(3) + 7), log(4*sqrt(3) + 7)]),
        ]
    assert set(solve(Poly(exp(x) + exp(-x) - 4))) == \
        set([log(-sqrt(3) + 2), log(sqrt(3) + 2)])
    assert set(solve(x**y + x**(2*y) - 1, x)) == \
        set([(-S.Half + sqrt(5)/2)**(1/y), (-S.Half - sqrt(5)/2)**(1/y)])
    assert solve(exp(x/y)*exp(-z/y) - 2, y) == [(x - z)/log(2)]
    assert solve(
        x**z*y**z - 2, z) in [[log(2)/(log(x) + log(y))], [log(2)/(log(x*y))]]
    E = S.Exp1
    assert solve(exp(3*x) - exp(3), x) in [
        [1, log(E*(-S.Half - sqrt(3)*I/2)), log(E*(-S.Half + sqrt(3)*I/2))],
        [1, log(-E/2 - sqrt(3)*E*I/2), log(-E/2 + sqrt(3)*E*I/2)],
    ]
    p = Symbol('p', positive=True)
    assert solve((1/p + 1)**(p + 1)) == []
def test_issue_5197():
    """Assumptions on the symbols filter the returned solutions
    (real, integer/positive, positive) — issue 5197."""
    x = Symbol('x', real=True)
    assert solve(x**2 + 1, x) == []
    n = Symbol('n', integer=True, positive=True)
    assert solve((n - 1)*(n + 2)*(2*n - 1), n) == [1]
    x = Symbol('x', positive=True)
    y = Symbol('y')
    assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == []
    assert solve((x + y)*n - y**2 + 2, x, y) == [(sqrt(2), -sqrt(2))]
    y = Symbol('y', positive=True)
    assert solve(x**2 - y**2/exp(x), y, x, dict=True) == [{y: x*exp(x/2)}]
    assert solve(x**2 - y**2/exp(x), x, y, dict=True) == [{x: 2*LambertW(y/2)}]
    x, y, z = symbols('x y z', positive=True)
    assert solve(z**2*x**2 - z**2*y**2/exp(x), y, x, z, dict=True) == [{y: x*exp(x/2)}]
def test_checking():
    """check=True removes candidate solutions that invalidate the
    original expression (e.g. x = 0 in x - y/x)."""
    assert set(
        solve(x*(x - y/x), x, check=False)) == set([sqrt(y), S(0), -sqrt(y)])
    assert set(solve(x*(x - y/x), x, check=True)) == set([sqrt(y), -sqrt(y)])
    # {x: 0, y: 4} sets denominator to 0 in the following so system should return None
    assert solve((1/(1/x + 2), 1/(y - 3) - 1)) == []
    # 0 sets denominator of 1/x to zero so None is returned
    assert solve(1/(1/x + 2)) == []
def test_issue_4671_4463_4467():
    """Regression batch: radicals, exp/log with symbolic parameters,
    hyperbolic sums and inverse trig (issues 4671, 4463, 4467)."""
    assert solve((sqrt(x**2 - 1) - 2)) in ([sqrt(5), -sqrt(5)],
        [-sqrt(5), sqrt(5)])
    assert solve((2**exp(y**2/x) + 2)/(x**2 + 15), y) == [
        -sqrt(x*log(1 + I*pi/log(2))), sqrt(x*log(1 + I*pi/log(2)))]
    C1, C2 = symbols('C1 C2')
    f = Function('f')
    assert solve(C1 + C2/x**2 - exp(-f(x)), f(x)) == [log(x**2/(C1*x**2 + C2))]
    a = Symbol('a')
    E = S.Exp1
    assert solve(1 - log(a + 4*x**2), x) in (
        [-sqrt(-a + E)/2, sqrt(-a + E)/2],
        [sqrt(-a + E)/2, -sqrt(-a + E)/2]
    )
    assert solve(log(a**(-3) - x**2)/a, x) in (
        [-sqrt(-1 + a**(-3)), sqrt(-1 + a**(-3))],
        [sqrt(-1 + a**(-3)), -sqrt(-1 + a**(-3))],)
    assert solve(1 - log(a + 4*x**2), x) in (
        [-sqrt(-a + E)/2, sqrt(-a + E)/2],
        [sqrt(-a + E)/2, -sqrt(-a + E)/2],)
    assert set(solve((
        a**2 + 1) * (sin(a*x) + cos(a*x)), x)) == set([-pi/(4*a), 3*pi/(4*a)])
    assert solve(3 - (sinh(a*x) + cosh(a*x)), x) == [log(3)/a]
    assert set(solve(3 - (sinh(a*x) + cosh(a*x)**2), x)) == \
        set([log(-2 + sqrt(5))/a, log(-sqrt(2) + 1)/a,
             log(-sqrt(5) - 2)/a, log(1 + sqrt(2))/a])
    assert solve(atan(x) - 1) == [tan(1)]
def test_issue_5132():
    """Nonlinear systems mixing exp/sin/sqrt: check tuple ordering and
    set=True output for different symbol selections (issue 5132)."""
    r, t = symbols('r,t')
    assert set(solve([r - x**2 - y**2, tan(t) - y/x], [x, y])) == \
        set([(
            -sqrt(r*cos(t)**2), -1*sqrt(r*cos(t)**2)*tan(t)),
            (sqrt(r*cos(t)**2), sqrt(r*cos(t)**2)*tan(t))])
    assert solve([exp(x) - sin(y), 1/y - 3], [x, y]) == \
        [(log(sin(S(1)/3)), S(1)/3)]
    assert solve([exp(x) - sin(y), 1/exp(y) - 3], [x, y]) == \
        [(log(-sin(log(3))), -log(3))]
    assert set(solve([exp(x) - sin(y), y**2 - 4], [x, y])) == \
        set([(log(-sin(2)), -S(2)), (log(sin(2)), S(2))])
    eqs = [exp(x)**2 - sin(y) + z**2, 1/exp(y) - 3]
    assert solve(eqs, set=True) == \
        ([x, y], set([
        (log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
        (log(-z**2 - sin(log(3)))/2, -log(3))]))
    assert solve(eqs, x, z, set=True) == (
        [x, z],
        {(log(-z**2 + sin(y))/2, z), (log(-sqrt(-z**2 + sin(y))), z)})
    assert set(solve(eqs, x, y)) == \
        set([
            (log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
            (log(-z**2 - sin(log(3)))/2, -log(3))])
    assert set(solve(eqs, y, z)) == \
        set([
            (-log(3), -sqrt(-exp(2*x) - sin(log(3)))),
            (-log(3), sqrt(-exp(2*x) - sin(log(3))))])
    eqs = [exp(x)**2 - sin(y) + z, 1/exp(y) - 3]
    assert solve(eqs, set=True) == ([x, y], set(
        [
            (log(-sqrt(-z - sin(log(3)))), -log(3)),
            (log(-z - sin(log(3)))/2, -log(3))]))
    assert solve(eqs, x, z, set=True) == (
        [x, z],
        {(log(-sqrt(-z + sin(y))), z), (log(-z + sin(y))/2, z)})
    assert set(solve(eqs, x, y)) == set(
        [
            (log(-sqrt(-z - sin(log(3)))), -log(3)),
            (log(-z - sin(log(3)))/2, -log(3))])
    assert solve(eqs, z, y) == \
        [(-exp(2*x) - sin(log(3)), -log(3))]
    assert solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), set=True) == (
        [x, y], set([(S(1), S(3)), (S(3), S(1))]))
    assert set(solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), x, y)) == \
        set([(S(1), S(3)), (S(3), S(1))])
def test_issue_5335():
    """A nonlinear 3-equation system with float coefficients has exactly
    two solutions, both via manual and default paths (issue 5335)."""
    lam, a0, conc = symbols('lam a0 conc')
    a = 0.005
    b = 0.743436700916726
    eqs = [lam + 2*y - a0*(1 - x/2)*x - a*x/2*x,
           a0*(1 - x/2)*x - 1*y - b*y,
           x + y - conc]
    sym = [x, y, a0]
    assert len(solve(eqs, sym, manual=True, minimal=True)) == 2
    assert len(solve(eqs, sym)) == 2
@SKIP("Hangs")
def _test_issue_5335_float():
    """Same system as test_issue_5335 but with rational=False; skipped
    because it hangs."""
    lam, a0, conc = symbols('lam a0 conc')
    a = 0.005
    b = 0.743436700916726
    eqs = [lam + 2*y - a0*(1 - x/2)*x - a*x/2*x,
           a0*(1 - x/2)*x - 1*y - b*y,
           x + y - conc]
    sym = [x, y, a0]
    assert len(solve(eqs, sym, rational=False)) == 2
def test_issue_5767():
    """A single equation in a list solved for one symbol returns
    1-tuples (issue 5767)."""
    assert set(solve([x**2 + y + 4], [x])) == \
        set([(-sqrt(-y - 4),), (sqrt(-y - 4),)])
def test_polysys():
    """Polynomial systems: mixed rational/irrational solutions, empty
    solution sets, and symbol-order-dependent output."""
    assert set(solve([x**2 + 2/y - 2, x + y - 3], [x, y])) == \
        set([(S(1), S(2)), (1 + sqrt(5), 2 - sqrt(5)),
             (1 - sqrt(5), 2 + sqrt(5))])
    assert solve([x**2 + y - 2, x**2 + y]) == []
    # the ordering should be whatever the user requested
    assert solve([x**2 + y - 3, x - y - 4], (x, y)) != solve([x**2 +
        y - 3, x - y - 4], (y, x))
@slow
def test_unrad1():
    """unrad(): eliminate radicals, returning a radical-free expression
    and any auxiliary (dummy, cover) equations; also solve many radical
    equations that go through the unrad machinery."""
    raises(NotImplementedError, lambda:
        unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x)) + 3))
    raises(NotImplementedError, lambda:
        unrad(sqrt(x) + (x + 1)**Rational(1, 3) + 2*sqrt(y)))
    s = symbols('s', cls=Dummy)

    # checkers: 'check' allows the answer to come back with an overall
    # sign change; 's_check' additionally canonicalizes dummy symbols
    # before comparing (by string, since dummies never compare equal).
    def check(rv, ans):
        assert bool(rv[1]) == bool(ans[1])
        if ans[1]:
            return s_check(rv, ans)
        e = rv[0].expand()
        a = ans[0].expand()
        return e in [a, -a] and rv[1] == ans[1]

    def s_check(rv, ans):
        # replace the dummy symbols with a single canonical dummy
        rv = list(rv)
        d = rv[0].atoms(Dummy)
        reps = list(zip(d, [s]*len(d)))
        rv = (rv[0].subs(reps).expand(), [rv[1][0].subs(reps), rv[1][1].subs(reps)])
        ans = (ans[0].subs(reps).expand(), [ans[1][0].subs(reps), ans[1][1].subs(reps)])
        return str(rv[0]) in [str(ans[0]), str(-ans[0])] and \
            str(rv[1]) == str(ans[1])

    assert check(unrad(sqrt(x)),
        (x, []))
    assert check(unrad(sqrt(x) + 1),
        (x - 1, []))
    assert check(unrad(sqrt(x) + root(x, 3) + 2),
        (s**3 + s**2 + 2, [s, s**6 - x]))
    assert check(unrad(sqrt(x)*root(x, 3) + 2),
        (x**5 - 64, []))
    assert check(unrad(sqrt(x) + (x + 1)**Rational(1, 3)),
        (x**3 - (x + 1)**2, []))
    assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(2*x)),
        (-2*sqrt(2)*x - 2*x + 1, []))
    assert check(unrad(sqrt(x) + sqrt(x + 1) + 2),
        (16*x - 9, []))
    assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - x)),
        (5*x**2 - 4*x, []))
    assert check(unrad(a*sqrt(x) + b*sqrt(x) + c*sqrt(y) + d*sqrt(y)),
        ((a*sqrt(x) + b*sqrt(x))**2 - (c*sqrt(y) + d*sqrt(y))**2, []))
    assert check(unrad(sqrt(x) + sqrt(1 - x)),
        (2*x - 1, []))
    assert check(unrad(sqrt(x) + sqrt(1 - x) - 3),
        (x**2 - x + 16, []))
    assert check(unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x)),
        (5*x**2 - 2*x + 1, []))
    assert unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - 3) in [
        (25*x**4 + 376*x**3 + 1256*x**2 - 2272*x + 784, []),
        (25*x**8 - 476*x**6 + 2534*x**4 - 1468*x**2 + 169, [])]
    assert unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - sqrt(1 - 2*x)) == \
        (41*x**4 + 40*x**3 + 232*x**2 - 160*x + 16, [])
    assert check(unrad(sqrt(x) + sqrt(x + 1)), (S(1), []))
    eq = sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x))
    assert check(unrad(eq),
        (16*x**2 - 9*x, []))
    assert set(solve(eq, check=False)) == set([S(0), S(9)/16])
    assert solve(eq) == []
    assert set(solve(sqrt(x) - sqrt(x + 1) + sqrt(1 - sqrt(x)))) == \
        set([S.Zero, S(9)/16])
    assert check(unrad(sqrt(x) + root(x + 1, 3) + 2*sqrt(y), y),
        (S('2*sqrt(x)*(x + 1)**(1/3) + x - 4*y + (x + 1)**(2/3)'), []))
    assert check(unrad(sqrt(x/(1 - x)) + (x + 1)**Rational(1, 3)),
        (x**5 - x**4 - x**3 + 2*x**2 + x - 1, []))
    assert check(unrad(sqrt(x/(1 - x)) + 2*sqrt(y), y),
        (4*x*y + x - 4*y, []))
    assert check(unrad(sqrt(x)*sqrt(1 - x) + 2, x),
        (x**2 - x + 4, []))
    # NOTE(review): the next line had been garbled to
    # ``e(Eq(x, sqrt(x + 6))) == [3]`` (a no-op comparison on an
    # undefined name); restored to the assert form of its neighbors.
    assert solve(Eq(x, sqrt(x + 6))) == [3]
    assert solve(Eq(x + sqrt(x - 4), 4)) == [4]
    assert solve(Eq(1, x + sqrt(2*x - 3))) == []
    assert set(solve(Eq(sqrt(5*x + 6) - 2, x))) == set([-S(1), S(2)])
    assert set(solve(Eq(sqrt(2*x - 1) - sqrt(x - 4), 2))) == set([S(5), S(13)])
    assert solve(Eq(sqrt(x + 7) + 2, sqrt(3 - x))) == [-6]
    assert solve((2*x - 5)**Rational(1, 3) - 3) == [16]
    assert set(solve(x + 1 - root(x**4 + 4*x**3 - x, 4))) == \
        set([-S(1)/2, -S(1)/3])
    assert set(solve(sqrt(2*x**2 - 7) - (3 - x))) == set([-S(8), S(2)])
    assert solve(sqrt(2*x + 9) - sqrt(x + 1) - sqrt(x + 4)) == [0]
    assert solve(sqrt(x + 4) + sqrt(2*x - 1) - 3*sqrt(x - 1)) == [5]
    assert solve(sqrt(x)*sqrt(x - 7) - 12) == [16]
    assert solve(sqrt(x - 3) + sqrt(x) - 3) == [4]
    assert solve(sqrt(9*x**2 + 4) - (3*x + 2)) == [0]
    assert solve(sqrt(x) - 2 - 5) == [49]
    assert solve(sqrt(x - 3) - sqrt(x) - 3) == []
    assert solve(sqrt(x - 1) - x + 7) == [10]
    assert solve(sqrt(x - 2) - 5) == [27]
    assert solve(sqrt(17*x - sqrt(x**2 - 5)) - 7) == [3]
    assert solve(sqrt(x) - sqrt(x - 1) + sqrt(sqrt(x))) == []
    # posify-related: solving the positive-symbol version can differ
    z = sqrt(2*x + 1)/sqrt(x) - sqrt(2 + 1/x)
    p = posify(z)[0]
    assert solve(p) == []
    assert solve(z) == []
    assert solve(z + 6*I) == [-S(1)/11]
    assert solve(p + 6*I) == []
    # issue 8622
    assert unrad((root(x + 1, 5) - root(x, 3))) == (
        x**5 - x**3 - 3*x**2 - 3*x - 1, [])
    # issue #8679
    assert check(unrad(x + root(x, 3) + root(x, 3)**2 + sqrt(y), x),
        (s**3 + s**2 + s + sqrt(y), [s, s**3 - x]))
    # for coverage
    assert check(unrad(sqrt(x) + root(x, 3) + y),
        (s**3 + s**2 + y, [s, s**6 - x]))
    assert solve(sqrt(x) + root(x, 3) - 2) == [1]
    raises(NotImplementedError, lambda:
        solve(sqrt(x) + root(x, 3) + root(x + 1, 5) - 2))
    # fails through a different code path
    raises(NotImplementedError, lambda: solve(-sqrt(2) + cosh(x)/x))
    # unrad some
    assert solve(sqrt(x + root(x, 3))+root(x - y, 5), y) == [
        x + (x**(S(1)/3) + x)**(S(5)/2)]
    assert check(unrad(sqrt(x) - root(x + 1, 3)*sqrt(x + 2) + 2),
        (s**10 + 8*s**8 + 24*s**6 - 12*s**5 - 22*s**4 - 160*s**3 - 212*s**2 -
        192*s - 56, [s, s**2 - x]))
    e = root(x + 1, 3) + root(x, 3)
    assert unrad(e) == (2*x + 1, [])
    eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
    assert check(unrad(eq),
        (15625*x**4 + 173000*x**3 + 355600*x**2 - 817920*x + 331776, []))
    assert check(unrad(root(x, 4) + root(x, 4)**3 - 1),
        (s**3 + s - 1, [s, s**4 - x]))
    assert check(unrad(root(x, 2) + root(x, 2)**3 - 1),
        (x**3 + 2*x**2 + x - 1, []))
    assert unrad(x**0.5) is None
    assert check(unrad(t + root(x + y, 5) + root(x + y, 5)**3),
        (s**3 + s + t, [s, s**5 - x - y]))
    assert check(unrad(x + root(x + y, 5) + root(x + y, 5)**3, y),
        (s**3 + s + x, [s, s**5 - x - y]))
    assert check(unrad(x + root(x + y, 5) + root(x + y, 5)**3, x),
        (s**5 + s**3 + s - y, [s, s**5 - x - y]))
    assert check(unrad(root(x - 1, 3) + root(x + 1, 5) + root(2, 5)),
        (s**5 + 5*2**(S(1)/5)*s**4 + s**3 + 10*2**(S(2)/5)*s**3 +
        10*2**(S(3)/5)*s**2 + 5*2**(S(4)/5)*s + 4, [s, s**3 - x + 1]))
    raises(NotImplementedError, lambda:
        unrad((root(x, 2) + root(x, 3) + root(x, 4)).subs(x, x**5 - x + 1)))
    # the simplify flag should be reset to False for unrad results;
    # if it's not then this next test will take a long time
    assert solve(root(x, 3) + root(x, 5) - 2) == [1]
    eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
    assert check(unrad(eq),
        ((5*x - 4)*(3125*x**3 + 37100*x**2 + 100800*x - 82944), []))
    ans = S('''
        [4/5, -1484/375 + 172564/(140625*(114*sqrt(12657)/78125 +
        12459439/52734375)**(1/3)) +
        4*(114*sqrt(12657)/78125 + 12459439/52734375)**(1/3)]''')
    assert solve(eq) == ans
    assert check(unrad(sqrt(x + root(x + 1, 3)) - root(x + 1, 3) - 2),
        (s**3 - s**2 - 3*s - 5, [s, s**3 - x - 1]))
    e = root(x**2 + 1, 3) - root(x**2 - 1, 5) - 2
    assert check(unrad(e),
        (s**5 - 10*s**4 + 39*s**3 - 80*s**2 + 80*s - 30,
        [s, s**3 - x**2 - 1]))
    e = sqrt(x + root(x + 1, 2)) - root(x + 1, 3) - 2
    assert check(unrad(e),
        (s**6 - 2*s**5 - 7*s**4 - 3*s**3 + 26*s**2 + 40*s + 25,
        [s, s**3 - x - 1]))
    assert check(unrad(e, _reverse=True),
        (s**6 - 14*s**5 + 73*s**4 - 187*s**3 + 276*s**2 - 228*s + 89,
        [s, s**2 - x - sqrt(x + 1)]))
    assert check(unrad(sqrt(x + sqrt(root(x, 3) - 1)) - root(x, 6) - 2),
        (s**12 - 2*s**8 - 8*s**7 - 8*s**6 + s**4 + 8*s**3 + 23*s**2 +
        32*s + 17, [s, s**6 - x]))
    raises(NotImplementedError, lambda:
        unrad(sqrt(cosh(x)/x) + root(x + 1,3)*sqrt(x) - 1))
    assert unrad(S('(x+y)**(2*y/3) + (x+y)**(1/3) + 1')) is None
    assert check(unrad(S('(x+y)**(2*y/3) + (x+y)**(1/3) + 1'), x),
        (s**(2*y) + s + 1, [s, s**3 - x - y]))
    assert len(solve(sqrt(y)*x + x**3 - 1, x)) == 3
    assert len(solve(-512*y**3 + 1344*(x + 2)**(S(1)/3)*y**2 -
        1176*(x + 2)**(S(2)/3)*y - 169*x + 686, y, _unrad=False)) == 3
    eq = S('-x + (7*y/8 - (27*x/2 + 27*sqrt(x**2)/2)**(1/3)/3)**3 - 1')
    assert solve(eq, y) == [
        4*2**(S(2)/3)*(27*x + 27*sqrt(x**2))**(S(1)/3)/21 - (-S(1)/2 -
        sqrt(3)*I/2)*(-6912*x/343 + sqrt((-13824*x/343 - S(13824)/343)**2)/2 -
        S(6912)/343)**(S(1)/3)/3, 4*2**(S(2)/3)*(27*x + 27*sqrt(x**2))**(S(1)/3)/21 -
        (-S(1)/2 + sqrt(3)*I/2)*(-6912*x/343 + sqrt((-13824*x/343 -
        S(13824)/343)**2)/2 - S(6912)/343)**(S(1)/3)/3, 4*2**(S(2)/3)*(27*x +
        27*sqrt(x**2))**(S(1)/3)/21 - (-6912*x/343 + sqrt((-13824*x/343 -
        S(13824)/343)**2)/2 - S(6912)/343)**(S(1)/3)/3]
    eq = root(x + 1, 3) - (root(x, 3) + root(x, 5))
    assert check(unrad(eq),
        (3*s**13 + 3*s**11 + s**9 - 1, [s, s**15 - x]))
    assert check(unrad(eq - 2),
        (3*s**13 + 3*s**11 + 6*s**10 + s**9 + 12*s**8 + 6*s**6 + 12*s**5 +
        12*s**3 + 7, [s, s**15 - x]))
    assert check(unrad(root(x, 3) - root(x + 1, 4)/2 + root(x + 2, 3)),
        (4096*s**13 + 960*s**12 + 48*s**11 - s**10 - 1728*s**4,
        [s, s**4 - x - 1]))  # orig expr has two real roots: -1, -.389
    assert check(unrad(root(x, 3) + root(x + 1, 4) - root(x + 2, 3)/2),
        (343*s**13 + 2904*s**12 + 1344*s**11 + 512*s**10 - 1323*s**9 -
        3024*s**8 - 1728*s**7 + 1701*s**5 + 216*s**4 - 729*s, [s, s**4 - x -
        1]))  # orig expr has one real root: -0.048
    assert check(unrad(root(x, 3)/2 - root(x + 1, 4) + root(x + 2, 3)),
        (729*s**13 - 216*s**12 + 1728*s**11 - 512*s**10 + 1701*s**9 -
        3024*s**8 + 1344*s**7 + 1323*s**5 - 2904*s**4 + 343*s, [s, s**4 - x -
        1]))  # orig expr has 2 real roots: -0.91, -0.15
    assert check(unrad(root(x, 3)/2 - root(x + 1, 4) + root(x + 2, 3) - 2),
        (729*s**13 + 1242*s**12 + 18496*s**10 + 129701*s**9 + 388602*s**8 +
        453312*s**7 - 612864*s**6 - 3337173*s**5 - 6332418*s**4 - 7134912*s**3
        - 5064768*s**2 - 2111913*s - 398034, [s, s**4 - x - 1]))
        # orig expr has 1 real root: 19.53
    ans = solve(sqrt(x) + sqrt(x + 1) -
        sqrt(1 - x) - sqrt(2 + x))
    assert len(ans) == 1 and NS(ans[0])[:4] == '0.73'
    # the fence optimization problem
    # https://github.com/sympy/sympy/issues/4793#issuecomment-36994519
    F = Symbol('F')
    eq = F - (2*x + 2*y + sqrt(x**2 + y**2))
    ans = 2*F/7 - sqrt(2)*F/14
    X = solve(eq, x, check=False)
    for xi in reversed(X):  # reverse since currently, ans is the 2nd one
        Y = solve((x*y).subs(x, xi).diff(y), y, simplify=False, check=False)
        if any((a - ans).expand().is_zero for a in Y):
            break
    else:
        assert None  # no answer was found
    assert solve(sqrt(x + 1) + root(x, 3) - 2) == S('''
        [(-11/(9*(47/54 + sqrt(93)/6)**(1/3)) + 1/3 + (47/54 +
        sqrt(93)/6)**(1/3))**3]''')
    assert solve(sqrt(sqrt(x + 1)) + x**Rational(1, 3) - 2) == S('''
        [(-sqrt(-2*(-1/16 + sqrt(6913)/16)**(1/3) + 6/(-1/16 +
        sqrt(6913)/16)**(1/3) + 17/2 + 121/(4*sqrt(-6/(-1/16 +
        sqrt(6913)/16)**(1/3) + 2*(-1/16 + sqrt(6913)/16)**(1/3) + 17/4)))/2 +
        sqrt(-6/(-1/16 + sqrt(6913)/16)**(1/3) + 2*(-1/16 +
        sqrt(6913)/16)**(1/3) + 17/4)/2 + 9/4)**3]''')
    assert solve(sqrt(x) + root(sqrt(x) + 1, 3) - 2) == S('''
        [(-(81/2 + 3*sqrt(741)/2)**(1/3)/3 + (81/2 + 3*sqrt(741)/2)**(-1/3) +
        2)**2]''')
    eq = S('''
        -x + (1/2 - sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3
        + x*(3*x**2 - 34) + 90)**2/4 - 39304/27) - 45)**(1/3) + 34/(3*(1/2 -
        sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3 + x*(3*x**2
        - 34) + 90)**2/4 - 39304/27) - 45)**(1/3))''')
    assert check(unrad(eq),
        (-s*(-s**6 + sqrt(3)*s**6*I - 153*2**(S(2)/3)*3**(S(1)/3)*s**4 +
        51*12**(S(1)/3)*s**4 - 102*2**(S(2)/3)*3**(S(5)/6)*s**4*I - 1620*s**3 +
        1620*sqrt(3)*s**3*I + 13872*18**(S(1)/3)*s**2 - 471648 +
        471648*sqrt(3)*I), [s, s**3 - 306*x - sqrt(3)*sqrt(31212*x**2 -
        165240*x + 61484) + 810]))
    assert solve(eq) == []  # not other code errors
@slow
def test_unrad_slow():
    """A radical equation whose rationalized polynomial has roots with
    multiplicity > 1: solve() must not report duplicate roots."""
    # this has roots with multiplicity > 1; there should be no
    # repeats in roots obtained, however
    eq = (sqrt(1 + sqrt(1 - 4*x**2)) - x*((1 + sqrt(1 + 2*sqrt(1 - 4*x**2)))))
    assert solve(eq) == [S.Half]
@XFAIL
def test_unrad_fail():
    """Known-failing radical cases: checksol would need to use real_root
    when verifying the candidate solutions, which it currently does not."""
    # this only works if we check real_root(eq.subs(x, S(1)/3))
    # but checksol doesn't work like that
    assert solve(root(x**3 - 3*x**2, 3) + 1 - x) == [S(1)/3]
    assert solve(root(x + 1, 3) + root(x**2 - 2, 5) + 1) == [
        -1, -1 + CRootOf(x**5 + x**4 + 5*x**3 + 8*x**2 + 10*x + 5, 0)**3]
def test_checksol():
    """checksol: substitution-based verification of candidate solutions.

    Covers dict-valued solutions, Eq/Ne with boolean rhs, relational
    equalities, list-of-equations input, Poly input, and the ValueError
    raised for malformed arguments.
    """
    x, y, r, t = symbols('x, y, r, t')
    eq = r - x**2 - y**2
    # a parametric point on the circle of radius sqrt(r) satisfies eq
    dict_var_soln = {y: - sqrt(r) / sqrt(tan(t)**2 + 1),
                     x: -sqrt(r)*tan(t)/sqrt(tan(t)**2 + 1)}
    assert checksol(eq, dict_var_soln) == True
    # Eq/Ne against a boolean value
    assert checksol(Eq(x, False), {x: False}) is True
    assert checksol(Ne(x, False), {x: False}) is False
    # Eq(relational, True/False) — checksol evaluates the relational
    assert checksol(Eq(x < 1, True), {x: 0}) is True
    assert checksol(Eq(x < 1, True), {x: 1}) is False
    assert checksol(Eq(x < 1, False), {x: 1}) is True
    assert checksol(Eq(x < 1, False), {x: 0}) is False
    assert checksol(Eq(x + 1, x**2 + 1), {x: 1}) is True
    # a list of equations must *all* be satisfied
    assert checksol([x - 1, x**2 - 1], x, 1) is True
    assert checksol([x - 1, x**2 - 2], x, 1) is False
    assert checksol(Poly(x**2 - 1), x, 1) is True
    # invalid call signatures
    raises(ValueError, lambda: checksol(x, 1))
    raises(ValueError, lambda: checksol([], x, 1))
def test__invert():
    """_invert returns (value, inverted_expr) such that the equation
    expr = 0 is rewritten as inverted_expr = value."""
    assert _invert(x - 2) == (2, x)
    assert _invert(2) == (2, 0)
    # exponentials are peeled off via log
    assert _invert(exp(1/x) - 3, x) == (1/log(3), x)
    assert _invert(exp(1/x + a/x) - 3, x) == ((a + 1)/log(3), x)
    # expression free of the requested symbol is left as-is
    assert _invert(a, x) == (a, 0)
def test_issue_4463():
    """Equations mixing powers, logs and exponentials that resolve to
    LambertW solutions (issue 4463)."""
    assert solve(-a*x + 2*x*log(x), x) == [exp(a/2)]
    assert solve(a/x + exp(x/2), x) == [2*LambertW(-a/2)]
    # x**x is never zero, so no solutions
    assert solve(x**x) == []
    assert solve(x**x - 2) == [exp(LambertW(log(2)))]
    assert solve(((x - 3)*(x - 2))**((x - 3)*(x - 4))) == [2]
    assert solve(
        (a/x + exp(x/2)).diff(x), x) == [4*LambertW(sqrt(2)*sqrt(a)/4)]
@slow
def test_issue_5114_solvers():
a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('a:r')
syms = a, b, c, f, h, k, n
eqs = [b + r/d - c/d,
c*(1/d + 1/e + 1/g) - f/g - r/d,
f*(1/g + 1/i + 1/j) - c/g - h/i,
h*(1/i + 1/l + 1/m) - f/i - k/m,
k*(1/m + 1/o + 1/p) - h/m - n/p,
n*(1/p + 1/q) - k/p]
assert len(solve(eqs, syms, manual=True, check=False, simplify=False)) == 1
def test_issue_5849():
    """Underdetermined linear circuit system (issue 5849) solved with the
    manual solver: one parametric dict solution is expected; with checking
    enabled the parametric answer is discarded."""
    I1, I2, I3, I4, I5, I6 = symbols('I1:7')
    dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
    e = (
        I1 - I2 - I3,
        I3 - I4 - I5,
        I4 + I5 - I6,
        -I1 + I2 + I6,
        -2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
        -I4 + dQ4,
        -I2 + dQ2,
        2*I3 + 2*I5 + 3*I6 - Q2,
        I4 - 2*I5 + 2*Q4 + dI4
    )
    ans = [{
        dQ4: I3 - I5,
        dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
        I4: I3 - I5,
        dQ2: I2,
        Q2: 2*I3 + 2*I5 + 3*I6,
        I1: I2 + I3,
        Q4: -I3/2 + 3*I5/2 - dI4/2}]
    v = I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4
    assert solve(e, *v, manual=True, check=False, dict=True) == ans
    # with checking on, the parametric solution does not survive
    assert solve(e, *v, manual=True) == []
    # a zero row in the matrix. Is this related to issue 4551?
    # residuals after substitution are not all zero (underdetermined system)
    assert [ei.subs(
        ans[0]) for ei in e] == [0, 0, I3 - I6, -I3 + I6, 0, 0, 0, 0, 0]
def test_issue_5849_matrix():
    """Same circuit system as test_issue_5849 but solved through the
    matrix (linsolve-style) path; here dI4 is also solved for."""
    I1, I2, I3, I4, I5, I6 = symbols('I1:7')
    dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
    e = (
        I1 - I2 - I3,
        I3 - I4 - I5,
        I4 + I5 - I6,
        -I1 + I2 + I6,
        -2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
        -I4 + dQ4,
        -I2 + dQ2,
        2*I3 + 2*I5 + 3*I6 - Q2,
        I4 - 2*I5 + 2*Q4 + dI4
    )
    assert solve(e, I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4) == {
        dI4: -I3 + 3*I5 - 2*Q4,
        dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
        dQ2: I2,
        I1: I2 + I3,
        Q2: 2*I3 + 2*I5 + 3*I6,
        dQ4: I3 - I5,
        I4: I3 - I5}
def test_issue_5901():
f, g, h = map(Function, 'fgh')
a = Symbol('a')
D = Derivative(f(x), x)
G = Derivative(g(a), a)
assert solve(f(x) + f(x).diff(x), f(x)) == \
[-D]
assert solve(f(x) - 3, f(x)) == \
[3]
assert solve(f(x) - 3*f(x).diff(x), f(x)) == \
[3*D]
assert solve([f(x) - 3*f(x).diff(x)], f(x)) == \
{f(x): 3*D}
assert solve([f(x) - 3*f(x).diff(x), f(x)**2 - y + 4], f(x), y) == \
[{f(x): 3*D, y: 9*D**2 + 4}]
assert solve(-f(a)**2*g(a)**2 + f(a)**2*h(a)**2 + g(a).diff(a),
h(a), g(a), set=True) == \
([g(a)], set([
(-sqrt(h(a)**2*f(a)**2 + G)/f(a),),
(sqrt(h(a)**2*f(a)**2+ G)/f(a),)]))
args = [f(x).diff(x, 2)*(f(x) + g(x)) - g(x)**2 + 2, f(x), g(x)]
assert set(solve(*args)) == \
set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))])
eqs = [f(x)**2 + g(x) - 2*f(x).diff(x), g(x)**2 - 4]
assert solve(eqs, f(x), g(x), set=True) == \
([f(x), g(x)], set([
(-sqrt(2*D - 2), S(2)),
(sqrt(2*D - 2), S(2)),
(-sqrt(2*D + 2), -S(2)),
(sqrt(2*D + 2), -S(2))]))
# the underlying problem was in solve_linear that was not masking off
# anything but a Mul or Add; it now raises an error if it gets anything
# but a symbol and solve handles the substitutions necessary so solve_linear
# won't make this error
raises(
ValueError, lambda: solve_linear(f(x) + f(x).diff(x), symbols=[f(x)]))
assert solve_linear(f(x) + f(x).diff(x), symbols=[x]) == \
(f(x) + Derivative(f(x), x), 1)
assert solve_linear(f(x) + Integral(x, (x, y)), symbols=[x]) == \
(f(x) + Integral(x, (x, y)), 1)
assert solve_linear(f(x) + Integral(x, (x, y)) + x, symbols=[x]) == \
(x + f(x) + Integral(x, (x, y)), 1)
assert solve_linear(f(y) + Integral(x, (x, y)) + x, symbols=[x]) == \
(x, -f(y) - Integral(x, (x, y)))
assert solve_linear(x - f(x)/a + (f(x) - 1)/a, symbols=[x]) == \
(x, 1/a)
assert solve_linear(x + Derivative(2*x, x)) == \
(x, -2)
assert solve_linear(x + Integral(x, y), symbols=[x]) == \
(x, 0)
assert solve_linear(x + Integral(x, y) - 2, symbols=[x]) == \
(x, 2/(y + 1))
assert set(solve(x + exp(x)**2, exp(x))) == \
set([-sqrt(-x), sqrt(-x)])
assert solve(x + exp(x), x, implicit=True) == \
[-exp(x)]
assert solve(cos(x) - sin(x), x, implicit=True) == []
assert solve(x - sin(x), x, implicit=True) == \
[sin(x)]
assert solve(x**2 + x - 3, x, implicit=True) == \
[-x**2 + 3]
assert solve(x**2 + x - 3, x**2, implicit=True) == \
[-x + 3]
def test_issue_5912():
assert set(solve(x**2 - x - 0.1, rational=True)) == \
set([S(1)/2 + sqrt(35)/10, -sqrt(35)/10 + S(1)/2])
ans = solve(x**2 - x - 0.1, rational=False)
assert len(ans) == 2 and all(a.is_Number for a in ans)
ans = solve(x**2 - x - 0.1)
assert len(ans) == 2 and all(a.is_Number for a in ans)
def test_float_handling():
    """Behaviour of the ``rational`` flag in solve() and of nfloat():
    floats in, floats out (unless rational=True), and nfloat must recurse
    into containers, exponents and function arguments."""
    def test(e1, e2):
        # same number of Float atoms => same "floatness"
        return len(e1.atoms(Float)) == len(e2.atoms(Float))
    assert solve(x - 0.5, rational=True)[0].is_Rational
    assert solve(x - 0.5, rational=False)[0].is_Float
    assert solve(x - S.Half, rational=False)[0].is_Rational
    # rational=None: preserve the float/rational nature of the input
    assert solve(x - 0.5, rational=None)[0].is_Float
    assert solve(x - S.Half, rational=None)[0].is_Rational
    assert test(nfloat(1 + 2*x), 1.0 + 2.0*x)
    # nfloat recurses into list/tuple/set containers
    for contain in [list, tuple, set]:
        ans = nfloat(contain([1 + 2*x]))
        assert type(ans) is contain and test(list(ans)[0], 1.0 + 2.0*x)
    # dict keys are left alone; values are floated
    k, v = list(nfloat({2*x: [1 + 2*x]}).items())[0]
    assert test(k, 2*x) and test(v[0], 1.0 + 2.0*x)
    assert test(nfloat(cos(2*x)), cos(2.0*x))
    assert test(nfloat(3*x**2), 3.0*x**2)
    # exponents are only floated when exponent=True
    assert test(nfloat(3*x**2, exponent=True), 3.0*x**2.0)
    assert test(nfloat(exp(2*x)), exp(2.0*x))
    assert test(nfloat(x/3), x/3.0)
    assert test(nfloat(x**4 + 2*x + cos(S(1)/3) + 1),
                x**4 + 2.0*x + 1.94495694631474)
    # an inconsistent float system has no solution
    tot = 100 + c + z + t
    assert solve(((.7 + c)/tot - .6, (.2 + z)/tot - .3, t/tot - .1)) == []
def test_check_assumptions():
    """solve() filters roots against symbol assumptions, and
    check_assumptions validates a value/raises on bad input."""
    x = symbols('x', positive=True)
    # -1 is dropped because x is declared positive
    assert solve(x**2 - 1) == [1]
    assert check_assumptions(1, x) == True
    raises(AssertionError, lambda: check_assumptions(2*x, x, positive=True))
    raises(TypeError, lambda: check_assumptions(1, 1))
def test_failing_assumptions():
x = Symbol('x', real=True, positive=True)
y = Symbol('y')
assert failing_assumptions(6*x + y, **x.assumptions0) == \
{'real': None, 'imaginary': None, 'complex': None, 'hermitian': None,
'positive': None, 'nonpositive': None, 'nonnegative': None, 'nonzero': None,
'negative': None, 'zero': None}
def test_issue_6056():
assert solve(tanh(x + 3)*tanh(x - 3) - 1) == []
assert set([simplify(w) for w in solve(tanh(x - 1)*tanh(x + 1) + 1)]) == set([
-log(2)/2 + log(1 - I),
-log(2)/2 + log(-1 - I),
-log(2)/2 + log(1 + I),
-log(2)/2 + log(-1 + I),])
assert set([simplify(w) for w in solve((tanh(x + 3)*tanh(x - 3) + 1)**2)]) == set([
-log(2)/2 + log(1 - I),
-log(2)/2 + log(-1 - I),
-log(2)/2 + log(1 + I),
-log(2)/2 + log(-1 + I),])
def test_issue_5673():
eq = -x + exp(exp(LambertW(log(x)))*LambertW(log(x)))
assert checksol(eq, x, 2) is True
assert checksol(eq, x, 2, numerical=False) is None
def test_exclude():
R, C, Ri, Vout, V1, Vminus, Vplus, s = \
symbols('R, C, Ri, Vout, V1, Vminus, Vplus, s')
Rf = symbols('Rf', positive=True) # to eliminate Rf = 0 soln
eqs = [C*V1*s + Vplus*(-2*C*s - 1/R),
Vminus*(-1/Ri - 1/Rf) + Vout/Rf,
C*Vplus*s + V1*(-C*s - 1/R) + Vout/R,
-Vminus + Vplus]
assert solve(eqs, exclude=s*C*R) == [
{
Rf: Ri*(C*R*s + 1)**2/(C*R*s),
Vminus: Vplus,
V1: 2*Vplus + Vplus/(C*R*s),
Vout: C*R*Vplus*s + 3*Vplus + Vplus/(C*R*s)},
{
Vplus: 0,
Vminus: 0,
V1: 0,
Vout: 0},
]
# TODO: Investigate why currently solution [0] is preferred over [1].
assert solve(eqs, exclude=[Vplus, s, C]) in [[{
Vminus: Vplus,
V1: Vout/2 + Vplus/2 + sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
R: (Vout - 3*Vplus - sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
Rf: Ri*(Vout - Vplus)/Vplus,
}, {
Vminus: Vplus,
V1: Vout/2 + Vplus/2 - sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
R: (Vout - 3*Vplus + sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
Rf: Ri*(Vout - Vplus)/Vplus,
}], [{
Vminus: Vplus,
Vout: (V1**2 - V1*Vplus - Vplus**2)/(V1 - 2*Vplus),
Rf: Ri*(V1 - Vplus)**2/(Vplus*(V1 - 2*Vplus)),
R: Vplus/(C*s*(V1 - 2*Vplus)),
}]]
def test_high_order_roots():
    """Quintic with rational coefficients: solve() must agree with the
    roots of the integer-scaled polynomial over ZZ."""
    s = x**5 + 4*x**3 + 3*x**2 + S(7)/4
    assert set(solve(s)) == set(Poly(s*4, domain='ZZ').all_roots())
def test_minsolve_linear_system():
    """particular=True asks for a particular solution of an
    underdetermined linear system; quick=True may find one with fewer
    zero components than the exhaustive search."""
    def count(dic):
        # number of variables set to zero in the particular solution
        return len([x for x in dic.values() if x == 0])
    assert count(solve([x + y + z, y + z + a + t], particular=True, quick=True)) \
        == 3
    assert count(solve([x + y + z, y + z + a + t], particular=True, quick=False)) \
        == 3
    # quick and exhaustive modes can legitimately differ here
    assert count(solve([x + y + z, y + z + a], particular=True, quick=True)) == 1
    assert count(solve([x + y + z, y + z + a], particular=True, quick=False)) == 2
def test_real_roots():
    """With a real symbol, only the single real root of the quintic is
    returned (cf. issue 6650)."""
    # cf. issue 6650
    x = Symbol('x', real=True)
    assert len(solve(x**5 + x**3 + 1)) == 1
def test_issue_6528():
eqs = [
327600995*x**2 - 37869137*x + 1809975124*y**2 - 9998905626,
895613949*x**2 - 273830224*x*y + 530506983*y**2 - 10000000000]
# two expressions encountered are > 1400 ops long so if this hangs
# it is likely because simplification is being done
assert len(solve(eqs, y, x, check=False)) == 4
def test_overdetermined():
x = symbols('x', real=True)
eqs = [Abs(4*x - 7) - 5, Abs(3 - 8*x) - 1]
assert solve(eqs, x) == [(S.Half,)]
assert solve(eqs, x, manual=True) == [(S.Half,)]
assert solve(eqs, x, manual=True, check=False) == [(S.Half,), (S(3),)]
def test_issue_6605():
x = symbols('x')
assert solve(4**(x/2) - 2**(x/3)) == [0, 3*I*pi/log(2)]
# while the first one passed, this one failed
x = symbols('x', real=True)
assert solve(5**(x/2) - 2**(x/3)) == [0]
b = sqrt(6)*sqrt(log(2))/sqrt(log(5))
assert solve(5**(x/2) - 2**(3/x)) == [-b, b]
def test__ispow():
assert _ispow(x**2)
assert not _ispow(x)
assert not _ispow(True)
def test_issue_6644():
eq = -sqrt((m - q)**2 + (-m/(2*q) + S(1)/2)**2) + sqrt((-m**2/2 - sqrt(
4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2 + (m**2/2 - m - sqrt(
4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2)
sol = solve(eq, q, simplify=False, check=False)
assert len(sol) == 5
def test_issue_6752():
assert solve([a**2 + a, a - b], [a, b]) == [(-1, -1), (0, 0)]
assert solve([a**2 + a*c, a - b], [a, b]) == [(0, 0), (-c, -c)]
def test_issue_6792():
assert solve(x*(x - 1)**2*(x + 1)*(x**6 - x + 1)) == [
-1, 0, 1, CRootOf(x**6 - x + 1, 0), CRootOf(x**6 - x + 1, 1),
CRootOf(x**6 - x + 1, 2), CRootOf(x**6 - x + 1, 3),
CRootOf(x**6 - x + 1, 4), CRootOf(x**6 - x + 1, 5)]
def test_issues_6819_6820_6821_6248_8692():
# issue 6821
x, y = symbols('x y', real=True)
assert solve(abs(x + 3) - 2*abs(x - 3)) == [1, 9]
assert solve([abs(x) - 2, arg(x) - pi], x) == [(-2,), (2,)]
assert set(solve(abs(x - 7) - 8)) == set([-S(1), S(15)])
# issue 8692
assert solve(Eq(Abs(x + 1) + Abs(x**2 - 7), 9), x) == [
-S(1)/2 + sqrt(61)/2, -sqrt(69)/2 + S(1)/2]
# issue 7145
assert solve(2*abs(x) - abs(x - 1)) == [-1, Rational(1, 3)]
x = symbols('x')
assert solve([re(x) - 1, im(x) - 2], x) == [
{re(x): 1, x: 1 + 2*I, im(x): 2}]
# check for 'dict' handling of solution
eq = sqrt(re(x)**2 + im(x)**2) - 3
assert solve(eq) == solve(eq, x)
i = symbols('i', imaginary=True)
assert solve(abs(i) - 3) == [-3*I, 3*I]
raises(NotImplementedError, lambda: solve(abs(x) - 3))
w = symbols('w', integer=True)
assert solve(2*x**w - 4*y**w, w) == solve((x/y)**w - 2, w)
x, y = symbols('x y', real=True)
assert solve(x + y*I + 3) == {y: 0, x: -3}
# issue 2642
assert solve(x*(1 + I)) == [0]
x, y = symbols('x y', imaginary=True)
assert solve(x + y*I + 3 + 2*I) == {x: -2*I, y: 3*I}
x = symbols('x', real=True)
assert solve(x + y + 3 + 2*I) == {x: -3, y: -2*I}
# issue 6248
f = Function('f')
assert solve(f(x + 1) - f(2*x - 1)) == [2]
assert solve(log(x + 1) - log(2*x - 1)) == [2]
x = symbols('x')
assert solve(2**x + 4**x) == [I*pi/log(2)]
def test_issue_14607():
# issue 14607
s, tau_c, tau_1, tau_2, phi, K = symbols(
's, tau_c, tau_1, tau_2, phi, K')
target = (s**2*tau_1*tau_2 + s*tau_1 + s*tau_2 + 1)/(K*s*(-phi + tau_c))
K_C, tau_I, tau_D = symbols('K_C, tau_I, tau_D',
positive=True, nonzero=True)
PID = K_C*(1 + 1/(tau_I*s) + tau_D*s)
eq = (target - PID).together()
eq *= denom(eq).simplify()
eq = Poly(eq, s)
c = eq.coeffs()
vars = [K_C, tau_I, tau_D]
s = solve(c, vars, dict=True)
assert len(s) == 1
knownsolution = {K_C: -(tau_1 + tau_2)/(K*(phi - tau_c)),
tau_I: tau_1 + tau_2,
tau_D: tau_1*tau_2/(tau_1 + tau_2)}
for var in vars:
assert s[0][var].simplify() == knownsolution[var].simplify()
@slow
def test_lambert_multivariate():
from sympy.abc import a, x, y
from sympy.solvers.bivariate import _filtered_gens, _lambert, _solve_lambert
assert _filtered_gens(Poly(x + 1/x + exp(x) + y), x) == set([x, exp(x)])
assert _lambert(x, x) == []
assert solve((x**2 - 2*x + 1).subs(x, log(x) + 3*x)) == [LambertW(3*S.Exp1)/3]
assert solve((x**2 - 2*x + 1).subs(x, (log(x) + 3*x)**2 - 1)) == \
[LambertW(3*exp(-sqrt(2)))/3, LambertW(3*exp(sqrt(2)))/3]
assert solve((x**2 - 2*x - 2).subs(x, log(x) + 3*x)) == \
[LambertW(3*exp(1 - sqrt(3)))/3, LambertW(3*exp(1 + sqrt(3)))/3]
assert solve(x*log(x) + 3*x + 1, x) == [exp(-3 + LambertW(-exp(3)))]
eq = (x*exp(x) - 3).subs(x, x*exp(x))
assert solve(eq) == [LambertW(3*exp(-LambertW(3)))]
# coverage test
raises(NotImplementedError, lambda: solve(x - sin(x)*log(y - x), x))
x0 = 1/log(a)
x1 = LambertW(S(1)/3)
x2 = a**(-5)
x3 = 3**(S(1)/3)
x4 = 3**(S(5)/6)*I
x5 = x1**(S(1)/3)*x2**(S(1)/3)/2
ans = solve(3*log(a**(3*x + 5)) + a**(3*x + 5), x)
assert ans == [
x0*log(3*x1*x2)/3, x0*log(-x5*(x3 - x4)), x0*log(-x5*(x3 + x4))]
# check collection
K = ((b + 3)*LambertW(1/(b + 3))/a**5)**(S(1)/3)
assert solve(
3*log(a**(3*x + 5)) + b*log(a**(3*x + 5)) + a**(3*x + 5),
x) == [
log(K*(1 - sqrt(3)*I)/-2)/log(a),
log(K*(1 + sqrt(3)*I)/-2)/log(a),
log((b + 3)*LambertW(1/(b + 3))/a**5)/(3*log(a))]
p = symbols('p', positive=True)
eq = 4*2**(2*p + 3) - 2*p - 3
assert _solve_lambert(eq, p, _filtered_gens(Poly(eq), p)) == [
-S(3)/2 - LambertW(-4*log(2))/(2*log(2))]
# issue 4271
assert solve((a/x + exp(x/2)).diff(x, 2), x) == [
6*LambertW(root(-1, 3)*root(a, 3)/3)]
assert solve((log(x) + x).subs(x, x**2 + 1)) == [
-I*sqrt(-LambertW(1) + 1), sqrt(-1 + LambertW(1))]
assert solve(x**3 - 3**x, x) == [3, -3*LambertW(-log(3)/3)/log(3)]
assert solve(x**2 - 2**x, x) == [2, 4]
assert solve(-x**2 + 2**x, x) == [2, 4]
assert solve(3**cos(x) - cos(x)**3) == [acos(3), acos(-3*LambertW(-log(3)/3)/log(3))]
assert set(solve(3*log(x) - x*log(3))) == set( # 2.478... and 3
[3, -3*LambertW(-log(3)/3)/log(3)])
assert solve(LambertW(2*x) - y, x) == [y*exp(y)/2]
@XFAIL
def test_other_lambert():
from sympy.abc import x
assert solve(3*sin(x) - x*sin(3), x) == [3]
a = S(6)/5
assert set(solve(x**a - a**x)) == set(
[a, -a*LambertW(-log(a)/a)/log(a)])
assert set(solve(3**cos(x) - cos(x)**3)) == set(
[acos(3), acos(-3*LambertW(-log(3)/3)/log(3))])
def test_rewrite_trig():
assert solve(sin(x) + tan(x)) == [0, -pi, pi, 2*pi]
assert solve(sin(x) + sec(x)) == [
-2*atan(-S.Half + sqrt(2)*sqrt(1 - sqrt(3)*I)/2 + sqrt(3)*I/2),
2*atan(S.Half - sqrt(2)*sqrt(1 + sqrt(3)*I)/2 + sqrt(3)*I/2), 2*atan(S.Half
+ sqrt(2)*sqrt(1 + sqrt(3)*I)/2 + sqrt(3)*I/2), 2*atan(S.Half -
sqrt(3)*I/2 + sqrt(2)*sqrt(1 - sqrt(3)*I)/2)]
assert solve(sinh(x) + tanh(x)) == [0, I*pi]
# issue 6157
assert solve(2*sin(x) - cos(x), x) == [-2*atan(2 - sqrt(5)),
-2*atan(2 + sqrt(5))]
@XFAIL
def test_rewrite_trigh():
# if this import passes then the test below should also pass
from sympy import sech
assert solve(sinh(x) + sech(x)) == [
2*atanh(-S.Half + sqrt(5)/2 - sqrt(-2*sqrt(5) + 2)/2),
2*atanh(-S.Half + sqrt(5)/2 + sqrt(-2*sqrt(5) + 2)/2),
2*atanh(-sqrt(5)/2 - S.Half + sqrt(2 + 2*sqrt(5))/2),
2*atanh(-sqrt(2 + 2*sqrt(5))/2 - sqrt(5)/2 - S.Half)]
def test_uselogcombine():
eq = z - log(x) + log(y/(x*(-1 + y**2/x**2)))
assert solve(eq, x, force=True) == [-sqrt(y*(y - exp(z))), sqrt(y*(y - exp(z)))]
assert solve(log(x + 3) + log(1 + 3/x) - 3) in [
[-3 + sqrt(-12 + exp(3))*exp(S(3)/2)/2 + exp(3)/2,
-sqrt(-12 + exp(3))*exp(S(3)/2)/2 - 3 + exp(3)/2],
[-3 + sqrt(-36 + (-exp(3) + 6)**2)/2 + exp(3)/2,
-3 - sqrt(-36 + (-exp(3) + 6)**2)/2 + exp(3)/2],
]
assert solve(log(exp(2*x) + 1) + log(-tanh(x) + 1) - log(2)) == []
def test_atan2():
assert solve(atan2(x, 2) - pi/3, x) == [2*sqrt(3)]
def test_errorinverses():
assert solve(erf(x) - y, x) == [erfinv(y)]
assert solve(erfinv(x) - y, x) == [erf(y)]
assert solve(erfc(x) - y, x) == [erfcinv(y)]
assert solve(erfcinv(x) - y, x) == [erfc(y)]
def test_issue_2725():
R = Symbol('R')
eq = sqrt(2)*R*sqrt(1/(R + 1)) + (R + 1)*(sqrt(2)*sqrt(1/(R + 1)) - 1)
sol = solve(eq, R, set=True)[1]
assert sol == set([(S(5)/3 + (-S(1)/2 - sqrt(3)*I/2)*(S(251)/27 +
sqrt(111)*I/9)**(S(1)/3) + 40/(9*((-S(1)/2 - sqrt(3)*I/2)*(S(251)/27 +
sqrt(111)*I/9)**(S(1)/3))),), (S(5)/3 + 40/(9*(S(251)/27 +
sqrt(111)*I/9)**(S(1)/3)) + (S(251)/27 + sqrt(111)*I/9)**(S(1)/3),)])
def test_issue_5114_6611():
# See that it doesn't hang; this solves in about 2 seconds.
b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('b:r')
eqs = Matrix([
[b - c/d + r/d], [c*(1/g + 1/e + 1/d) - f/g - r/d],
[-c/g + f*(1/j + 1/i + 1/g) - h/i], [-f/i + h*(1/m + 1/l + 1/i) - k/m],
[-h/m + k*(1/p + 1/o + 1/m) - n/p], [-k/p + n*(1/q + 1/p)]])
v = Matrix([f, h, k, n, b, c])
ans = solve(list(eqs), list(v), simplify=False)
assert sum([s.count_ops() for s in ans.values()]) <= 2617
def test_det_quick():
m = Matrix(3, 3, symbols('a:9'))
assert m.det() == det_quick(m)
m[0, 0] = 1
assert m.det() == det_quick(m)
m = Matrix(3, 3, list(range(9)))
assert m.det() == det_quick(m)
s = SparseMatrix(2, 2, (1, 2, 1, 4))
assert det_perm(s) == det_minor(s) == s.det()
def test_real_imag_splitting():
a, b = symbols('a b', real=True)
assert solve(sqrt(a**2 + b**2) - 3, a) == \
[-sqrt(-b**2 + 9), sqrt(-b**2 + 9)]
a, b = symbols('a b', imaginary=True)
assert solve(sqrt(a**2 + b**2) - 3, a) == []
def test_issue_7110():
y = -2*x**3 + 4*x**2 - 2*x + 5
assert any(ask(Q.real(i)) for i in solve(y))
def test_units():
assert solve(1/x - 1/(2*cm)) == [2*cm]
def test_issue_7547():
A, B, V = symbols('A,B,V')
eq1 = Eq(630.26*(V - 39.0)*V*(V + 39) - A + B, 0)
eq2 = Eq(B, 1.36*10**8*(V - 39))
eq3 = Eq(A, 5.75*10**5*V*(V + 39.0))
sol = Matrix(nsolve(Tuple(eq1, eq2, eq3), [A, B, V], (0, 0, 0)))
assert str(sol) == str(Matrix(
[['4442890172.68209'],
['4289299466.1432'],
['70.5389666628177']]))
def test_issue_7895():
    """sqrt over a real symbol must still solve cleanly (issue 7895)."""
    r = symbols('r', real=True)
    assert solve(sqrt(r) - 2) == [4]
def test_issue_2777():
x, y = symbols('x y', real=True)
e1, e2 = sqrt(x**2 + y**2) - 10, sqrt(y**2 + (-x + 10)**2) - 3
a, b = 191/S(20), 3*sqrt(391)/20
ans = [(a, -b), (a, b)]
assert solve((e1, e2), (x, y)) == ans
assert solve((e1, e2/(x - a)), (x, y)) == []
e2 += 6
assert solve((e1, e2), (x, y)) == []
assert solve((e1, e2), (x, y), check=False) == ans
def test_issue_7322():
number = 5.62527e-35
assert solve(x - number, x)[0] == number
def test_nsolve():
raises(ValueError, lambda: nsolve(x, (-1, 1), method='bisect'))
raises(TypeError, lambda: nsolve((x - y + 3,x + y,z - y),(x,y,z),(-50,50)))
raises(TypeError, lambda: nsolve((x + y, x - y), (0, 1)))
@slow
def test_high_order_multivariate():
assert len(solve(a*x**3 - x + 1, x)) == 3
assert len(solve(a*x**4 - x + 1, x)) == 4
assert solve(a*x**5 - x + 1, x) == [] # incomplete solution allowed
raises(NotImplementedError, lambda:
solve(a*x**5 - x + 1, x, incomplete=False))
# result checking must always consider the denominator and CRootOf
# must be checked, too
d = x**5 - x + 1
assert solve(d*(1 + 1/d)) == [CRootOf(d + 1, i) for i in range(5)]
d = x - 1
assert solve(d*(2 + 1/d)) == [S.Half]
def test_base_0_exp_0():
assert solve(0**x - 1) == [0]
assert solve(0**(x - 2) - 1) == [2]
assert solve(S('x*(1/x**0 - x)', evaluate=False)) == \
[0, 1]
def test__simple_dens():
assert _simple_dens(1/x**0, [x]) == set()
assert _simple_dens(1/x**y, [x]) == set([x**y])
assert _simple_dens(1/root(x, 3), [x]) == set([x])
def test_issue_8755():
# This tests two things: that if full unrad is attempted and fails
# the solution should still be found; also it tests the use of
# keyword `composite`.
assert len(solve(sqrt(y)*x + x**3 - 1, x)) == 3
assert len(solve(-512*y**3 + 1344*(x + 2)**(S(1)/3)*y**2 -
1176*(x + 2)**(S(2)/3)*y - 169*x + 686, y, _unrad=False)) == 3
@slow
def test_issue_8828():
x1 = 0
y1 = -620
r1 = 920
x2 = 126
y2 = 276
x3 = 51
y3 = 205
r3 = 104
v = x, y, z
f1 = (x - x1)**2 + (y - y1)**2 - (r1 - z)**2
f2 = (x2 - x)**2 + (y2 - y)**2 - z**2
f3 = (x - x3)**2 + (y - y3)**2 - (r3 - z)**2
F = f1,f2,f3
g1 = sqrt((x - x1)**2 + (y - y1)**2) + z - r1
g2 = f2
g3 = sqrt((x - x3)**2 + (y - y3)**2) + z - r3
G = g1,g2,g3
A = solve(F, v)
B = solve(G, v)
C = solve(G, v, manual=True)
p, q, r = [set([tuple(i.evalf(2) for i in j) for j in R]) for R in [A, B, C]]
assert p == q == r
@slow
def test_issue_2840_8155():
assert solve(sin(3*x) + sin(6*x)) == [
0, -pi, pi, 14*pi/9, 16*pi/9, 2*pi, 2*I*(log(2) - log(-1 - sqrt(3)*I)),
2*I*(log(2) - log(-1 + sqrt(3)*I)), 2*I*(log(2) - log(1 - sqrt(3)*I)),
2*I*(log(2) - log(1 + sqrt(3)*I)), 2*I*(log(2) - log(-sqrt(3) - I)),
2*I*(log(2) - log(-sqrt(3) + I)), 2*I*(log(2) - log(sqrt(3) - I)),
2*I*(log(2) - log(sqrt(3) + I)), -2*I*log(-(-1)**(S(1)/9)), -2*I*log(
-(-1)**(S(2)/9)), -2*I*log(-sin(pi/18) - I*cos(pi/18)), -2*I*log(-sin(
pi/18) + I*cos(pi/18)), -2*I*log(sin(pi/18) - I*cos(pi/18)), -2*I*log(
sin(pi/18) + I*cos(pi/18)), -2*I*log(exp(-2*I*pi/9)), -2*I*log(exp(
-I*pi/9)), -2*I*log(exp(I*pi/9)), -2*I*log(exp(2*I*pi/9))]
assert solve(2*sin(x) - 2*sin(2*x)) == [
0, -pi, pi, 2*I*(log(2) - log(-sqrt(3) - I)), 2*I*(log(2) -
log(-sqrt(3) + I)), 2*I*(log(2) - log(sqrt(3) - I)), 2*I*(log(2) -
log(sqrt(3) + I))]
def test_issue_9567():
assert solve(1 + 1/(x - 1)) == [0]
def test_issue_11538():
assert solve(x + E) == [-E]
assert solve(x**2 + E) == [-I*sqrt(E), I*sqrt(E)]
assert solve(x**3 + 2*E) == [
-cbrt(2 * E),
cbrt(2)*cbrt(E)/2 - cbrt(2)*sqrt(3)*I*cbrt(E)/2,
cbrt(2)*cbrt(E)/2 + cbrt(2)*sqrt(3)*I*cbrt(E)/2]
assert solve([x + 4, y + E], x, y) == {x: -4, y: -E}
assert solve([x**2 + 4, y + E], x, y) == [
(-2*I, -E), (2*I, -E)]
e1 = x - y**3 + 4
e2 = x + y + 4 + 4 * E
assert len(solve([e1, e2], x, y)) == 3
@slow
def test_issue_12114():
a, b, c, d, e, f, g = symbols('a,b,c,d,e,f,g')
terms = [1 + a*b + d*e, 1 + a*c + d*f, 1 + b*c + e*f,
g - a**2 - d**2, g - b**2 - e**2, g - c**2 - f**2]
s = solve(terms, [a, b, c, d, e, f, g], dict=True)
assert s == [{a: -sqrt(-f**2 - 1), b: -sqrt(-f**2 - 1),
c: -sqrt(-f**2 - 1), d: f, e: f, g: -1},
{a: sqrt(-f**2 - 1), b: sqrt(-f**2 - 1),
c: sqrt(-f**2 - 1), d: f, e: f, g: -1},
{a: -sqrt(3)*f/2 - sqrt(-f**2 + 2)/2,
b: sqrt(3)*f/2 - sqrt(-f**2 + 2)/2, c: sqrt(-f**2 + 2),
d: -f/2 + sqrt(-3*f**2 + 6)/2,
e: -f/2 - sqrt(3)*sqrt(-f**2 + 2)/2, g: 2},
{a: -sqrt(3)*f/2 + sqrt(-f**2 + 2)/2,
b: sqrt(3)*f/2 + sqrt(-f**2 + 2)/2, c: -sqrt(-f**2 + 2),
d: -f/2 - sqrt(-3*f**2 + 6)/2,
e: -f/2 + sqrt(3)*sqrt(-f**2 + 2)/2, g: 2},
{a: sqrt(3)*f/2 - sqrt(-f**2 + 2)/2,
b: -sqrt(3)*f/2 - sqrt(-f**2 + 2)/2, c: sqrt(-f**2 + 2),
d: -f/2 - sqrt(-3*f**2 + 6)/2,
e: -f/2 + sqrt(3)*sqrt(-f**2 + 2)/2, g: 2},
{a: sqrt(3)*f/2 + sqrt(-f**2 + 2)/2,
b: -sqrt(3)*f/2 + sqrt(-f**2 + 2)/2, c: -sqrt(-f**2 + 2),
d: -f/2 + sqrt(-3*f**2 + 6)/2,
e: -f/2 - sqrt(3)*sqrt(-f**2 + 2)/2, g: 2}]
def test_inf():
    """Equations containing oo (infinity) yield no finite solutions."""
    assert solve(1 - oo*x) == []
    assert solve(oo*x, x) == []
    assert solve(oo*x - oo, x) == []
def test_issue_12448():
f = Function('f')
fun = [f(i) for i in range(15)]
sym = symbols('x:15')
reps = dict(zip(fun, sym))
(x, y, z), c = sym[:3], sym[3:]
ssym = solve([c[4*i]*x + c[4*i + 1]*y + c[4*i + 2]*z + c[4*i + 3]
for i in range(3)], (x, y, z))
(x, y, z), c = fun[:3], fun[3:]
sfun = solve([c[4*i]*x + c[4*i + 1]*y + c[4*i + 2]*z + c[4*i + 3]
for i in range(3)], (x, y, z))
assert sfun[fun[0]].xreplace(reps).count_ops() == \
ssym[sym[0]].count_ops()
def test_denoms():
    """denoms() collects denominators, optionally restricted to the
    symbols given positionally, as a list, or as a set."""
    assert denoms(x/2 + 1/y) == set([2, y])
    assert denoms(x/2 + 1/y, y) == set([y])
    assert denoms(x/2 + 1/y, [y]) == set([y])
    assert denoms(1/x + 1/y + 1/z, [x, y]) == set([x, y])
    assert denoms(1/x + 1/y + 1/z, x, y) == set([x, y])
    assert denoms(1/x + 1/y + 1/z, set([x, y])) == set([x, y])
def test_issue_12476():
x0, x1, x2, x3, x4, x5 = symbols('x0 x1 x2 x3 x4 x5')
eqns = [x0**2 - x0, x0*x1 - x1, x0*x2 - x2, x0*x3 - x3, x0*x4 - x4, x0*x5 - x5,
x0*x1 - x1, -x0/3 + x1**2 - 2*x2/3, x1*x2 - x1/3 - x2/3 - x3/3,
x1*x3 - x2/3 - x3/3 - x4/3, x1*x4 - 2*x3/3 - x5/3, x1*x5 - x4, x0*x2 - x2,
x1*x2 - x1/3 - x2/3 - x3/3, -x0/6 - x1/6 + x2**2 - x2/6 - x3/3 - x4/6,
-x1/6 + x2*x3 - x2/3 - x3/6 - x4/6 - x5/6, x2*x4 - x2/3 - x3/3 - x4/3,
x2*x5 - x3, x0*x3 - x3, x1*x3 - x2/3 - x3/3 - x4/3,
-x1/6 + x2*x3 - x2/3 - x3/6 - x4/6 - x5/6,
-x0/6 - x1/6 - x2/6 + x3**2 - x3/3 - x4/6, -x1/3 - x2/3 + x3*x4 - x3/3,
-x2 + x3*x5, x0*x4 - x4, x1*x4 - 2*x3/3 - x5/3, x2*x4 - x2/3 - x3/3 - x4/3,
-x1/3 - x2/3 + x3*x4 - x3/3, -x0/3 - 2*x2/3 + x4**2, -x1 + x4*x5, x0*x5 - x5,
x1*x5 - x4, x2*x5 - x3, -x2 + x3*x5, -x1 + x4*x5, -x0 + x5**2, x0 - 1]
sols = [{x0: 1, x3: S(1)/6, x2: S(1)/6, x4: -S(2)/3, x1: -S(2)/3, x5: 1},
{x0: 1, x3: S(1)/2, x2: -S(1)/2, x4: 0, x1: 0, x5: -1},
{x0: 1, x3: -S(1)/3, x2: -S(1)/3, x4: S(1)/3, x1: S(1)/3, x5: 1},
{x0: 1, x3: 1, x2: 1, x4: 1, x1: 1, x5: 1},
{x0: 1, x3: -S(1)/3, x2: S(1)/3, x4: sqrt(5)/3, x1: -sqrt(5)/3, x5: -1},
{x0: 1, x3: -S(1)/3, x2: S(1)/3, x4: -sqrt(5)/3, x1: sqrt(5)/3, x5: -1}]
assert solve(eqns) == sols
def test_issue_13849():
t = symbols('t')
assert solve((t*(sqrt(5) + sqrt(2)) - sqrt(2), t), t) == []
def test_issue_14860():
from sympy.physics.units import newton, kilo
assert solve(8*kilo*newton + x + y, x) == [-8000*newton - y]
def test_issue_14721():
k, h, a, b = symbols(':4')
assert solve([
-1 + (-k + 1)**2/b**2 + (-h - 1)**2/a**2,
-1 + (-k + 1)**2/b**2 + (-h + 1)**2/a**2,
h, k + 2], h, k, a, b) == [
(0, -2, -b*sqrt(1/(b**2 - 9)), b),
(0, -2, b*sqrt(1/(b**2 - 9)), b)]
assert solve([
h, h/a + 1/b**2 - 2, -h/2 + 1/b**2 - 2], a, h, b) == [
(a, 0, -sqrt(2)/2), (a, 0, sqrt(2)/2)]
assert solve((a + b**2 - 1, a + b**2 - 2)) == []
def test_issue_14779():
x = symbols('x', real=True)
assert solve(sqrt(x**4 - 130*x**2 + 1089) + sqrt(x**4 - 130*x**2
+ 3969) - 96*Abs(x)/x,x) == [sqrt(130)]
def test_issue_15307():
assert solve((y - 2, Mul(x + 3,x - 2, evaluate=False))) == \
[{x: -3, y: 2}, {x: 2, y: 2}]
assert solve((y - 2, Mul(3, x - 2, evaluate=False))) == \
{x: 2, y: 2}
assert solve((y - 2, Add(x + 4, x - 2, evaluate=False))) == \
{x: -1, y: 2}
eq1 = Eq(12513*x + 2*y - 219093, -5726*x - y)
eq2 = Eq(-2*x + 8, 2*x - 40)
assert solve([eq1, eq2]) == {x:12, y:75}
def test_issue_15415():
    """Solution-shape conventions (issue 15415): a bare equation gives a
    list, a list of equations gives a dict; equations without the target
    symbol yield no solutions."""
    assert solve(x - 3, x) == [3]
    assert solve([x - 3], x) == {x:3}
    # y cancels from both sides, so there is no solution for y
    assert solve(Eq(y + 3*x**2/2, y + 3*x), y) == []
    assert solve([Eq(y + 3*x**2/2, y + 3*x)], y) == []
    assert solve([Eq(y + 3*x**2/2, y + 3*x), Eq(x, 1)], y) == []
@slow
def test_issue_15731():
# f(x)**g(x)=c
assert solve(Eq((x**2 - 7*x + 11)**(x**2 - 13*x + 42), 1)) == [2, 3, 4, 5, 6, 7]
assert solve((x)**(x + 4) - 4) == [-2]
assert solve((-x)**(-x + 4) - 4) == [2]
assert solve((x**2 - 6)**(x**2 - 2) - 4) == [-2, 2]
assert solve((x**2 - 2*x - 1)**(x**2 - 3) - 1/(1 - 2*sqrt(2))) == [sqrt(2)]
assert solve(x**(x + S.Half) - 4*sqrt(2)) == [S(2)]
assert solve((x**2 + 1)**x - 25) == [2]
assert solve(x**(2/x) - 2) == [2, 4]
assert solve((x/2)**(2/x) - sqrt(2)) == [4, 8]
assert solve(x**(x + S.Half) - S(9)/4) == [S(3)/2]
# a**g(x)=c
assert solve((-sqrt(sqrt(2)))**x - 2) == [4, log(2)/(log(2**(S(1)/4)) + I*pi)]
assert solve((sqrt(2))**x - sqrt(sqrt(2))) == [S(1)/2]
assert solve((-sqrt(2))**x + 2*(sqrt(2))) == [3,
(3*log(2)**2 + 4*pi**2 - 4*I*pi*log(2))/(log(2)**2 + 4*pi**2)]
assert solve((sqrt(2))**x - 2*(sqrt(2))) == [3]
assert solve(I**x + 1) == [2]
assert solve((1 + I)**x - 2*I) == [2]
assert solve((sqrt(2) + sqrt(3))**x - (2*sqrt(6) + 5)**(S(1)/3)) == [S(2)/3]
# bases of both sides are equal
b = Symbol('b')
assert solve(b**x - b**2, x) == [2]
assert solve(b**x - 1/b, x) == [-1]
assert solve(b**x - b, x) == [1]
b = Symbol('b', positive=True)
assert solve(b**x - b**2, x) == [2]
assert solve(b**x - 1/b, x) == [-1]
| true | true |
f7fa9f0d72f77c263bb33fdb9957905626d997d8 | 3,114 | py | Python | mopidy/audio/dummy.py | tamland/mopidy | 7d5117c299ec33b66b67b906a9971bcbc77c3133 | [
"Apache-2.0"
] | 1 | 2018-05-12T21:28:49.000Z | 2018-05-12T21:28:49.000Z | mopidy/audio/dummy.py | tamland/mopidy | 7d5117c299ec33b66b67b906a9971bcbc77c3133 | [
"Apache-2.0"
] | null | null | null | mopidy/audio/dummy.py | tamland/mopidy | 7d5117c299ec33b66b67b906a9971bcbc77c3133 | [
"Apache-2.0"
] | null | null | null | """A dummy audio actor for use in tests.
This class implements the audio API in the simplest way possible. It is used in
tests of the core and backends.
"""
from __future__ import absolute_import, unicode_literals
import pykka
from .constants import PlaybackState
from .listener import AudioListener
class DummyAudio(pykka.ThreadingActor):
    """In-memory stand-in for the real audio actor.

    Implements the audio API surface with no GStreamer involvement: state
    transitions are tracked in plain attributes and the same
    ``AudioListener`` events are emitted as the real implementation would
    send, so core/backend tests can observe them.
    """
    def __init__(self, config=None, mixer=None):
        super(DummyAudio, self).__init__()
        # mirrors the real actor's playback state machine
        self.state = PlaybackState.STOPPED
        self._volume = 0
        self._position = 0
        # about-to-finish callback registered by the backend, if any
        self._callback = None
        # URI currently prepared for playback; None until set_uri()
        self._uri = None
        # flips to False via trigger_fake_playback_failure()
        self._state_change_result = True
    def set_uri(self, uri):
        # enforce the real API's contract: prepare_change() clears the URI
        # and must precede every set_uri() call
        assert self._uri is None, 'prepare change not called before set'
        self._uri = uri
    def set_appsrc(self, *args, **kwargs):
        # no-op: appsrc streaming is not simulated
        pass
    def emit_data(self, buffer_):
        # no-op: appsrc streaming is not simulated
        pass
    def emit_end_of_stream(self):
        # no-op: appsrc streaming is not simulated
        pass
    def get_position(self):
        # position only changes through set_position(); no playback clock
        return self._position
    def set_position(self, position):
        self._position = position
        AudioListener.send('position_changed', position=position)
        return True
    def start_playback(self):
        return self._change_state(PlaybackState.PLAYING)
    def pause_playback(self):
        return self._change_state(PlaybackState.PAUSED)
    def prepare_change(self):
        # clearing the URI is what allows the next set_uri() call
        self._uri = None
        return True
    def stop_playback(self):
        return self._change_state(PlaybackState.STOPPED)
    def get_volume(self):
        return self._volume
    def set_volume(self, volume):
        self._volume = volume
        return True
    def set_metadata(self, track):
        # no-op: metadata is not tracked by the dummy
        pass
    def set_about_to_finish_callback(self, callback):
        self._callback = callback
    def enable_sync_handler(self):
        # no-op: there is no GStreamer bus to sync with
        pass
    def wait_for_state_change(self):
        # no-op: state changes are synchronous in the dummy
        pass
    def _change_state(self, new_state):
        """Apply a state transition, emitting the same AudioListener
        events (and in the same order) as the real audio actor."""
        if not self._uri:
            # nothing prepared => transition is rejected
            return False
        if self.state == PlaybackState.STOPPED and self._uri:
            # leaving STOPPED with a URI looks like a new stream starting
            AudioListener.send('position_changed', position=0)
            AudioListener.send('stream_changed', uri=self._uri)
        if new_state == PlaybackState.STOPPED:
            # stopping forgets the URI and announces the None stream
            self._uri = None
            AudioListener.send('stream_changed', uri=self._uri)
        old_state, self.state = self.state, new_state
        AudioListener.send('state_changed', old_state=old_state,
                           new_state=new_state, target_state=None)
        # False when a fake playback failure has been triggered
        return self._state_change_result
    def trigger_fake_playback_failure(self):
        # makes every subsequent _change_state() report failure
        self._state_change_result = False
    def get_about_to_finish_callback(self):
        # This needs to be called from outside the actor or we lock up.
        def wrapper():
            if self._callback:
                self.prepare_change()
                self._callback()
            if not self._uri or not self._callback:
                # no gapless follow-up track => the stream really ended
                AudioListener.send('reached_end_of_stream')
            else:
                # callback queued a new URI: announce the next stream
                AudioListener.send('position_changed', position=0)
                AudioListener.send('stream_changed', uri=self._uri)
        return wrapper
| 27.557522 | 79 | 0.648041 |
from __future__ import absolute_import, unicode_literals
import pykka
from .constants import PlaybackState
from .listener import AudioListener
class DummyAudio(pykka.ThreadingActor):
    """Test double for the audio actor.

    Keeps playback state in simple attributes rather than a GStreamer
    pipeline, and sends the same :class:`AudioListener` notifications a
    real audio actor would produce.
    """

    def __init__(self, config=None, mixer=None):
        super(DummyAudio, self).__init__()
        self.state = PlaybackState.STOPPED
        self._volume = 0
        self._position = 0
        self._callback = None
        self._uri = None
        self._state_change_result = True

    def set_uri(self, uri):
        # A previous prepare_change() must have cleared the URI first.
        assert self._uri is None, 'prepare change not called before set'
        self._uri = uri

    def set_appsrc(self, *args, **kwargs):
        pass  # Nothing to configure in the dummy.

    def emit_data(self, buffer_):
        pass  # No stream to receive data.

    def emit_end_of_stream(self):
        pass  # No stream to end.

    def get_position(self):
        return self._position

    def set_position(self, position):
        self._position = position
        AudioListener.send('position_changed', position=position)
        return True

    def start_playback(self):
        return self._change_state(PlaybackState.PLAYING)

    def pause_playback(self):
        return self._change_state(PlaybackState.PAUSED)

    def prepare_change(self):
        self._uri = None
        return True

    def stop_playback(self):
        return self._change_state(PlaybackState.STOPPED)

    def get_volume(self):
        return self._volume

    def set_volume(self, volume):
        self._volume = volume
        return True

    def set_metadata(self, track):
        pass  # Metadata is ignored by the dummy.

    def set_about_to_finish_callback(self, callback):
        self._callback = callback

    def enable_sync_handler(self):
        pass  # No bus to attach a sync handler to.

    def wait_for_state_change(self):
        pass  # Transitions are immediate here.

    def _change_state(self, requested):
        """Switch to *requested* state and broadcast listener events."""
        if not self._uri:
            return False
        if self.state == PlaybackState.STOPPED:
            # Coming out of STOPPED: announce the new stream.
            AudioListener.send('position_changed', position=0)
            AudioListener.send('stream_changed', uri=self._uri)
        if requested == PlaybackState.STOPPED:
            # Stopping drops the loaded stream.
            self._uri = None
            AudioListener.send('stream_changed', uri=self._uri)
        was = self.state
        self.state = requested
        AudioListener.send(
            'state_changed', old_state=was, new_state=requested,
            target_state=None)
        return self._state_change_result

    def trigger_fake_playback_failure(self):
        # Force later state changes to report failure.
        self._state_change_result = False

    def get_about_to_finish_callback(self):
        # Must be invoked from outside the actor thread to avoid deadlock.
        def invoke():
            pending = self._callback
            if pending:
                self.prepare_change()
                pending()
            # The callback may have swapped the URI/callback; check again.
            if self._callback and self._uri:
                AudioListener.send('position_changed', position=0)
                AudioListener.send('stream_changed', uri=self._uri)
            else:
                AudioListener.send('reached_end_of_stream')
        return invoke
return wrapper
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.